/*
 *
 * Copyright 2015, Google Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *     * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following disclaimer
 * in the documentation and/or other materials provided with the
 * distribution.
 *     * Neither the name of Google Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#include "src/core/lib/channel/channel_stack.h"
#include <grpc/support/log.h>

#include <stdlib.h>
#include <string.h>

int grpc_trace_channel = 0;

/* Memory layouts.

   Channel stack is laid out as: {
     grpc_channel_stack stk;
     padding to GPR_MAX_ALIGNMENT
     grpc_channel_element[stk.count];
     per-filter memory, aligned to GPR_MAX_ALIGNMENT
   }

   Call stack is laid out as: {
     grpc_call_stack stk;
     padding to GPR_MAX_ALIGNMENT
     grpc_call_element[stk.count];
     per-filter memory, aligned to GPR_MAX_ALIGNMENT
   } */
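
/* Worked example (illustrative only; the struct sizes below are invented for
   the arithmetic, and GPR_MAX_ALIGNMENT is assumed to be 16): a channel stack
   built from two filters whose sizeof_channel_data are 20 and 4, with
   sizeof(grpc_channel_stack) == 48 and sizeof(grpc_channel_element) == 16,
   would be laid out as

     [  0,  48)  grpc_channel_stack header (48 is already 16-aligned)
     [ 48,  80)  grpc_channel_element[2]
     [ 80, 112)  channel_data for filter 0 (20 rounded up to 32)
     [112, 128)  channel_data for filter 1 ( 4 rounded up to 16)

   for a total of 128 bytes, which is what grpc_channel_stack_size() below
   computes. The call stack follows the same pattern with grpc_call_stack,
   grpc_call_element and sizeof_call_data. */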

/* Given a size, round up to the next multiple of GPR_MAX_ALIGNMENT */
#define ROUND_UP_TO_ALIGNMENT_SIZE(x) \
  (((x) + GPR_MAX_ALIGNMENT - 1u) & ~(GPR_MAX_ALIGNMENT - 1u))
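
/* For example, assuming GPR_MAX_ALIGNMENT is 16: ROUND_UP_TO_ALIGNMENT_SIZE(1)
   == 16, ROUND_UP_TO_ALIGNMENT_SIZE(16) == 16, and
   ROUND_UP_TO_ALIGNMENT_SIZE(17) == 32. The power-of-two requirement this
   relies on is asserted in grpc_channel_stack_size() below. */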

size_t grpc_channel_stack_size(const grpc_channel_filter **filters,
                               size_t filter_count) {
  /* always need the header, and size for the channel elements */
  size_t size =
      ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_channel_stack)) +
      ROUND_UP_TO_ALIGNMENT_SIZE(filter_count * sizeof(grpc_channel_element));
  size_t i;

  GPR_ASSERT((GPR_MAX_ALIGNMENT & (GPR_MAX_ALIGNMENT - 1)) == 0 &&
             "GPR_MAX_ALIGNMENT must be a power of two");

  /* add the size for each filter */
  for (i = 0; i < filter_count; i++) {
    size += ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_channel_data);
  }

  return size;
}
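
/* Minimal usage sketch (illustrative only, not part of this file): a channel
   owner is expected to size one allocation with grpc_channel_stack_size() and
   then hand that memory to grpc_channel_stack_init() below. gpr_malloc is
   assumed to come from <grpc/support/alloc.h>; destroy_cb and the other
   argument names are placeholders.

     size_t size = grpc_channel_stack_size(filters, filter_count);
     grpc_channel_stack *stack = gpr_malloc(size);
     grpc_channel_stack_init(exec_ctx, 1, destroy_cb, stack, filters,
                             filter_count, channel_args, NULL, "CHANNEL",
                             stack);
*/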

#define CHANNEL_ELEMS_FROM_STACK(stk)                                   \
  ((grpc_channel_element *)((char *)(stk) + ROUND_UP_TO_ALIGNMENT_SIZE( \
                                                sizeof(grpc_channel_stack))))

#define CALL_ELEMS_FROM_STACK(stk)       \
  ((grpc_call_element *)((char *)(stk) + \
                         ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call_stack))))
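
/* The two macros above recover the element arrays from a stack pointer by
   skipping the aligned stack header, mirroring the layout documented at the
   top of this file; the accessors that follow are thin wrappers around that
   pointer arithmetic. */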

grpc_channel_element *grpc_channel_stack_element(
    grpc_channel_stack *channel_stack, size_t index) {
  return CHANNEL_ELEMS_FROM_STACK(channel_stack) + index;
}

grpc_channel_element *grpc_channel_stack_last_element(
    grpc_channel_stack *channel_stack) {
  return grpc_channel_stack_element(channel_stack, channel_stack->count - 1);
}

grpc_call_element *grpc_call_stack_element(grpc_call_stack *call_stack,
                                           size_t index) {
  return CALL_ELEMS_FROM_STACK(call_stack) + index;
}

void grpc_channel_stack_init(grpc_exec_ctx *exec_ctx, int initial_refs,
                             grpc_iomgr_cb_func destroy, void *destroy_arg,
                             const grpc_channel_filter **filters,
                             size_t filter_count,
                             const grpc_channel_args *channel_args,
                             grpc_transport *optional_transport,
                             const char *name, grpc_channel_stack *stack) {
  size_t call_size =
      ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call_stack)) +
      ROUND_UP_TO_ALIGNMENT_SIZE(filter_count * sizeof(grpc_call_element));
  grpc_channel_element *elems;
  grpc_channel_element_args args;
  char *user_data;
  size_t i;

  stack->count = filter_count;
  GRPC_STREAM_REF_INIT(&stack->refcount, initial_refs, destroy, destroy_arg,
                       name);
  elems = CHANNEL_ELEMS_FROM_STACK(stack);
  user_data =
      ((char *)elems) +
      ROUND_UP_TO_ALIGNMENT_SIZE(filter_count * sizeof(grpc_channel_element));

  /* init per-filter data */
  for (i = 0; i < filter_count; i++) {
    args.channel_stack = stack;
    args.channel_args = channel_args;
    args.optional_transport = optional_transport;
    args.is_first = i == 0;
    args.is_last = i == (filter_count - 1);
    elems[i].filter = filters[i];
    elems[i].channel_data = user_data;
    elems[i].filter->init_channel_elem(exec_ctx, &elems[i], &args);
    user_data += ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_channel_data);
    call_size += ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_call_data);
  }

  GPR_ASSERT(user_data > (char *)stack);
  GPR_ASSERT((uintptr_t)(user_data - (char *)stack) ==
             grpc_channel_stack_size(filters, filter_count));

  stack->call_stack_size = call_size;
}
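
/* grpc_channel_stack_init also tallies call_stack_size: the channel owner is
   expected to allocate that many bytes for each call and pass them to
   grpc_call_stack_init below. A rough sketch of that pattern (illustrative
   only; the allocator and the destroy_call_cb / context /
   server_transport_data names are placeholders):

     grpc_call_stack *call_stack = gpr_malloc(channel_stack->call_stack_size);
     grpc_call_stack_init(exec_ctx, channel_stack, 1, destroy_call_cb,
                          call_stack, context, server_transport_data,
                          call_stack);
*/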

void grpc_channel_stack_destroy(grpc_exec_ctx *exec_ctx,
                                grpc_channel_stack *stack) {
  grpc_channel_element *channel_elems = CHANNEL_ELEMS_FROM_STACK(stack);
  size_t count = stack->count;
  size_t i;

  /* destroy per-filter data */
  for (i = 0; i < count; i++) {
    channel_elems[i].filter->destroy_channel_elem(exec_ctx, &channel_elems[i]);
  }
}

void grpc_call_stack_init(grpc_exec_ctx *exec_ctx,
                          grpc_channel_stack *channel_stack, int initial_refs,
                          grpc_iomgr_cb_func destroy, void *destroy_arg,
                          grpc_call_context_element *context,
                          const void *transport_server_data,
                          grpc_call_stack *call_stack) {
  grpc_channel_element *channel_elems = CHANNEL_ELEMS_FROM_STACK(channel_stack);
  grpc_call_element_args args;
  size_t count = channel_stack->count;
  grpc_call_element *call_elems;
  char *user_data;
  size_t i;

  call_stack->count = count;
  GRPC_STREAM_REF_INIT(&call_stack->refcount, initial_refs, destroy,
                       destroy_arg, "CALL_STACK");
  call_elems = CALL_ELEMS_FROM_STACK(call_stack);
  user_data = ((char *)call_elems) +
              ROUND_UP_TO_ALIGNMENT_SIZE(count * sizeof(grpc_call_element));

  /* init per-filter data */
  for (i = 0; i < count; i++) {
    args.call_stack = call_stack;
    args.server_transport_data = transport_server_data;
    args.context = context;
    call_elems[i].filter = channel_elems[i].filter;
    call_elems[i].channel_data = channel_elems[i].channel_data;
    call_elems[i].call_data = user_data;
    call_elems[i].filter->init_call_elem(exec_ctx, &call_elems[i], &args);
    user_data +=
        ROUND_UP_TO_ALIGNMENT_SIZE(call_elems[i].filter->sizeof_call_data);
  }
}
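
/* Each filter's init_call_elem runs in stack order (top-most element first),
   and every element's call_data points into the one contiguous allocation
   that follows the element array, per the layout comment at the top of this
   file. */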

void grpc_call_stack_set_pollset_or_pollset_set(grpc_exec_ctx *exec_ctx,
                                                grpc_call_stack *call_stack,
                                                grpc_polling_entity *pollent) {
  size_t count = call_stack->count;
  grpc_call_element *call_elems;
  char *user_data;
  size_t i;

  call_elems = CALL_ELEMS_FROM_STACK(call_stack);
  user_data = ((char *)call_elems) +
              ROUND_UP_TO_ALIGNMENT_SIZE(count * sizeof(grpc_call_element));

  /* set the polling entity on each per-filter element */
  for (i = 0; i < count; i++) {
    call_elems[i].filter->set_pollset_or_pollset_set(exec_ctx, &call_elems[i],
                                                     pollent);
    user_data +=
        ROUND_UP_TO_ALIGNMENT_SIZE(call_elems[i].filter->sizeof_call_data);
  }
}

void grpc_call_stack_ignore_set_pollset_or_pollset_set(
    grpc_exec_ctx *exec_ctx, grpc_call_element *elem,
    grpc_polling_entity *pollent) {}

void grpc_call_stack_destroy(grpc_exec_ctx *exec_ctx, grpc_call_stack *stack,
                             const grpc_call_final_info *final_info,
                             void *and_free_memory) {
  grpc_call_element *elems = CALL_ELEMS_FROM_STACK(stack);
  size_t count = stack->count;
  size_t i;

  /* destroy per-filter data */
  for (i = 0; i < count; i++) {
    elems[i].filter->destroy_call_elem(exec_ctx, &elems[i], final_info,
                                       i == count - 1 ? and_free_memory : NULL);
  }
}
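
/* The "next" helpers below rely on the elements of a stack being contiguous:
   elem + 1 is the element immediately beneath elem, so these helpers only
   ever forward work down the stack, towards the transport. */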
void grpc_call_next_op(grpc_exec_ctx *exec_ctx, grpc_call_element *elem,
                       grpc_transport_stream_op *op) {
  grpc_call_element *next_elem = elem + 1;
  next_elem->filter->start_transport_stream_op(exec_ctx, next_elem, op);
}

char *grpc_call_next_get_peer(grpc_exec_ctx *exec_ctx,
                              grpc_call_element *elem) {
  grpc_call_element *next_elem = elem + 1;
  return next_elem->filter->get_peer(exec_ctx, next_elem);
}

void grpc_channel_next_op(grpc_exec_ctx *exec_ctx, grpc_channel_element *elem,
                          grpc_transport_op *op) {
  grpc_channel_element *next_elem = elem + 1;
  next_elem->filter->start_transport_op(exec_ctx, next_elem, op);
}
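
/* The *_from_top_element helpers invert the *_ELEMS_FROM_STACK macros: given
   the first (top-most) element of a stack, they step back over the aligned
   stack header. They are only valid for the top element. */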
grpc_channel_stack *grpc_channel_stack_from_top_element(
    grpc_channel_element *elem) {
  return (grpc_channel_stack *)((char *)(elem)-ROUND_UP_TO_ALIGNMENT_SIZE(
      sizeof(grpc_channel_stack)));
}

grpc_call_stack *grpc_call_stack_from_top_element(grpc_call_element *elem) {
  return (grpc_call_stack *)((char *)(elem)-ROUND_UP_TO_ALIGNMENT_SIZE(
      sizeof(grpc_call_stack)));
}
void grpc_call_element_send_cancel(grpc_exec_ctx *exec_ctx,
                                   grpc_call_element *cur_elem) {
  grpc_transport_stream_op op;
  memset(&op, 0, sizeof(op));
  op.cancel_error = GRPC_ERROR_CANCELLED;
  grpc_call_next_op(exec_ctx, cur_elem, &op);
}

void grpc_call_element_send_cancel_with_message(grpc_exec_ctx *exec_ctx,
                                                grpc_call_element *cur_elem,
                                                grpc_status_code status,
                                                gpr_slice *optional_message) {
  grpc_transport_stream_op op;
  memset(&op, 0, sizeof(op));
  grpc_transport_stream_op_add_cancellation_with_message(&op, status,
                                                         optional_message);
  grpc_call_next_op(exec_ctx, cur_elem, &op);
}