blob: 056fcd93de68da91983bed1107f1e86f80af5be0 [file] [log] [blame]
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -08001/*
2 *
Jan Tattermusch7897ae92017-06-07 22:57:36 +02003 * Copyright 2015 gRPC authors.
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -08004 *
Jan Tattermusch7897ae92017-06-07 22:57:36 +02005 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -08008 *
Jan Tattermusch7897ae92017-06-07 22:57:36 +02009 * http://www.apache.org/licenses/LICENSE-2.0
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080010 *
Jan Tattermusch7897ae92017-06-07 22:57:36 +020011 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080016 *
17 */
18
Alexander Polcyndb3e8982018-02-21 16:59:24 -080019#include <grpc/support/port_platform.h>
20
Craig Tiller57726ca2016-09-12 11:59:45 -070021#include <grpc/support/alloc.h>
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080022#include <grpc/support/log.h>
Alexander Polcyndb3e8982018-02-21 16:59:24 -080023#include "src/core/lib/channel/channel_stack.h"
Mark D. Roth2223e602018-06-27 07:23:33 -070024#include "src/core/lib/gpr/alloc.h"
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080025
26#include <stdlib.h>
Craig Tiller83f88d92015-04-21 16:02:05 -070027#include <string.h>
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080028
Craig Tiller694580f2017-10-18 14:48:14 -070029grpc_core::TraceFlag grpc_trace_channel(false, "channel");
Craig Tillerfaa84802015-03-01 21:56:38 -080030
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080031/* Memory layouts.
32
33 Channel stack is laid out as: {
34 grpc_channel_stack stk;
35 padding to GPR_MAX_ALIGNMENT
36 grpc_channel_element[stk.count];
37 per-filter memory, aligned to GPR_MAX_ALIGNMENT
38 }
39
40 Call stack is laid out as: {
41 grpc_call_stack stk;
42 padding to GPR_MAX_ALIGNMENT
43 grpc_call_element[stk.count];
44 per-filter memory, aligned to GPR_MAX_ALIGNMENT
45 } */
46
Craig Tillerbaa14a92017-11-03 09:09:36 -070047size_t grpc_channel_stack_size(const grpc_channel_filter** filters,
Craig Tillera82950e2015-09-22 12:33:20 -070048 size_t filter_count) {
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080049 /* always need the header, and size for the channel elements */
Mark D. Roth2223e602018-06-27 07:23:33 -070050 size_t size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_channel_stack)) +
51 GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filter_count *
52 sizeof(grpc_channel_element));
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080053 size_t i;
54
Craig Tillera82950e2015-09-22 12:33:20 -070055 GPR_ASSERT((GPR_MAX_ALIGNMENT & (GPR_MAX_ALIGNMENT - 1)) == 0 &&
56 "GPR_MAX_ALIGNMENT must be a power of two");
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080057
58 /* add the size for each filter */
Craig Tillera82950e2015-09-22 12:33:20 -070059 for (i = 0; i < filter_count; i++) {
Mark D. Roth2223e602018-06-27 07:23:33 -070060 size += GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_channel_data);
Craig Tillera82950e2015-09-22 12:33:20 -070061 }
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080062
63 return size;
64}
65
/* Address of the first grpc_channel_element, which is stored immediately
   after the (alignment-padded) grpc_channel_stack header. */
#define CHANNEL_ELEMS_FROM_STACK(stk)                                   \
  ((grpc_channel_element*)((char*)(stk) + GPR_ROUND_UP_TO_ALIGNMENT_SIZE( \
                                              sizeof(grpc_channel_stack))))

/* Address of the first grpc_call_element, which is stored immediately
   after the (alignment-padded) grpc_call_stack header. */
#define CALL_ELEMS_FROM_STACK(stk)                                   \
  ((grpc_call_element*)((char*)(stk) + GPR_ROUND_UP_TO_ALIGNMENT_SIZE( \
                                           sizeof(grpc_call_stack))))
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080073
Craig Tillerbaa14a92017-11-03 09:09:36 -070074grpc_channel_element* grpc_channel_stack_element(
75 grpc_channel_stack* channel_stack, size_t index) {
Craig Tillera82950e2015-09-22 12:33:20 -070076 return CHANNEL_ELEMS_FROM_STACK(channel_stack) + index;
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080077}
78
Craig Tillerbaa14a92017-11-03 09:09:36 -070079grpc_channel_element* grpc_channel_stack_last_element(
80 grpc_channel_stack* channel_stack) {
Craig Tillera82950e2015-09-22 12:33:20 -070081 return grpc_channel_stack_element(channel_stack, channel_stack->count - 1);
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080082}
83
Craig Tillerbaa14a92017-11-03 09:09:36 -070084grpc_call_element* grpc_call_stack_element(grpc_call_stack* call_stack,
Craig Tillera82950e2015-09-22 12:33:20 -070085 size_t index) {
86 return CALL_ELEMS_FROM_STACK(call_stack) + index;
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -080087}
88
Craig Tillerbaa14a92017-11-03 09:09:36 -070089grpc_error* grpc_channel_stack_init(
Yash Tibrewal8cf14702017-12-06 09:47:54 -080090 int initial_refs, grpc_iomgr_cb_func destroy, void* destroy_arg,
91 const grpc_channel_filter** filters, size_t filter_count,
Craig Tillerbaa14a92017-11-03 09:09:36 -070092 const grpc_channel_args* channel_args, grpc_transport* optional_transport,
93 const char* name, grpc_channel_stack* stack) {
Craig Tillera82950e2015-09-22 12:33:20 -070094 size_t call_size =
Mark D. Roth2223e602018-06-27 07:23:33 -070095 GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call_stack)) +
96 GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filter_count * sizeof(grpc_call_element));
Craig Tillerbaa14a92017-11-03 09:09:36 -070097 grpc_channel_element* elems;
Craig Tiller577c9b22015-11-02 14:11:15 -080098 grpc_channel_element_args args;
Craig Tillerbaa14a92017-11-03 09:09:36 -070099 char* user_data;
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800100 size_t i;
101
102 stack->count = filter_count;
Craig Tiller27e5aa42015-11-24 16:28:54 -0800103 GRPC_STREAM_REF_INIT(&stack->refcount, initial_refs, destroy, destroy_arg,
Craig Tiller50ec2672015-11-27 21:45:11 -0800104 name);
Craig Tillera82950e2015-09-22 12:33:20 -0700105 elems = CHANNEL_ELEMS_FROM_STACK(stack);
Mark D. Roth2223e602018-06-27 07:23:33 -0700106 user_data = (reinterpret_cast<char*>(elems)) +
107 GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filter_count *
108 sizeof(grpc_channel_element));
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800109
110 /* init per-filter data */
Craig Tillerbaa14a92017-11-03 09:09:36 -0700111 grpc_error* first_error = GRPC_ERROR_NONE;
Craig Tillera82950e2015-09-22 12:33:20 -0700112 for (i = 0; i < filter_count; i++) {
Craig Tiller906e3bc2015-11-24 07:31:31 -0800113 args.channel_stack = stack;
Craig Tiller577c9b22015-11-02 14:11:15 -0800114 args.channel_args = channel_args;
Craig Tiller9d69e802016-06-06 11:37:50 -0700115 args.optional_transport = optional_transport;
Craig Tiller577c9b22015-11-02 14:11:15 -0800116 args.is_first = i == 0;
117 args.is_last = i == (filter_count - 1);
Craig Tillera82950e2015-09-22 12:33:20 -0700118 elems[i].filter = filters[i];
119 elems[i].channel_data = user_data;
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800120 grpc_error* error = elems[i].filter->init_channel_elem(&elems[i], &args);
Mark D. Roth5e2566e2016-11-18 10:53:13 -0800121 if (error != GRPC_ERROR_NONE) {
122 if (first_error == GRPC_ERROR_NONE) {
123 first_error = error;
124 } else {
125 GRPC_ERROR_UNREF(error);
126 }
127 }
Mark D. Roth2223e602018-06-27 07:23:33 -0700128 user_data +=
129 GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_channel_data);
130 call_size += GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_call_data);
Craig Tillera82950e2015-09-22 12:33:20 -0700131 }
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800132
Craig Tillerbaa14a92017-11-03 09:09:36 -0700133 GPR_ASSERT(user_data > (char*)stack);
134 GPR_ASSERT((uintptr_t)(user_data - (char*)stack) ==
Craig Tillera82950e2015-09-22 12:33:20 -0700135 grpc_channel_stack_size(filters, filter_count));
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800136
137 stack->call_stack_size = call_size;
Mark D. Roth5e2566e2016-11-18 10:53:13 -0800138 return first_error;
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800139}
140
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800141void grpc_channel_stack_destroy(grpc_channel_stack* stack) {
Craig Tillerbaa14a92017-11-03 09:09:36 -0700142 grpc_channel_element* channel_elems = CHANNEL_ELEMS_FROM_STACK(stack);
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800143 size_t count = stack->count;
144 size_t i;
145
146 /* destroy per-filter data */
Craig Tillera82950e2015-09-22 12:33:20 -0700147 for (i = 0; i < count; i++) {
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800148 channel_elems[i].filter->destroy_channel_elem(&channel_elems[i]);
Craig Tillera82950e2015-09-22 12:33:20 -0700149 }
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800150}
151
/* Initializes the call stack inside elem_args->call_stack, mirroring the
   filter list of `channel_stack`. Every filter's init_call_elem is invoked
   (even after an earlier one fails); the first error is returned and later
   errors are unref'd. The call stack memory must be at least
   channel_stack->call_stack_size bytes (computed in grpc_channel_stack_init).
 */
grpc_error* grpc_call_stack_init(grpc_channel_stack* channel_stack,
                                 int initial_refs, grpc_iomgr_cb_func destroy,
                                 void* destroy_arg,
                                 const grpc_call_element_args* elem_args) {
  grpc_channel_element* channel_elems = CHANNEL_ELEMS_FROM_STACK(channel_stack);
  size_t count = channel_stack->count;
  grpc_call_element* call_elems;
  char* user_data;
  size_t i;

  elem_args->call_stack->count = count;
  GRPC_STREAM_REF_INIT(&elem_args->call_stack->refcount, initial_refs, destroy,
                       destroy_arg, "CALL_STACK");
  call_elems = CALL_ELEMS_FROM_STACK(elem_args->call_stack);
  /* Per-filter call data starts right after the call element array. */
  user_data = (reinterpret_cast<char*>(call_elems)) +
              GPR_ROUND_UP_TO_ALIGNMENT_SIZE(count * sizeof(grpc_call_element));

  /* init per-filter data */
  grpc_error* first_error = GRPC_ERROR_NONE;
  for (i = 0; i < count; i++) {
    /* Each call element borrows its filter vtable and channel data from the
       corresponding channel element. */
    call_elems[i].filter = channel_elems[i].filter;
    call_elems[i].channel_data = channel_elems[i].channel_data;
    call_elems[i].call_data = user_data;
    grpc_error* error =
        call_elems[i].filter->init_call_elem(&call_elems[i], elem_args);
    if (error != GRPC_ERROR_NONE) {
      /* Report only the first failure; drop (unref) subsequent ones. */
      if (first_error == GRPC_ERROR_NONE) {
        first_error = error;
      } else {
        GRPC_ERROR_UNREF(error);
      }
    }
    /* Advance past this filter's (aligned) call data slot. */
    user_data +=
        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(call_elems[i].filter->sizeof_call_data);
  }
  return first_error;
}
189
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800190void grpc_call_stack_set_pollset_or_pollset_set(grpc_call_stack* call_stack,
Craig Tillerbaa14a92017-11-03 09:09:36 -0700191 grpc_polling_entity* pollent) {
Craig Tiller577c9b22015-11-02 14:11:15 -0800192 size_t count = call_stack->count;
Craig Tillerbaa14a92017-11-03 09:09:36 -0700193 grpc_call_element* call_elems;
Craig Tiller577c9b22015-11-02 14:11:15 -0800194 size_t i;
195
196 call_elems = CALL_ELEMS_FROM_STACK(call_stack);
Craig Tiller577c9b22015-11-02 14:11:15 -0800197
198 /* init per-filter data */
199 for (i = 0; i < count; i++) {
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800200 call_elems[i].filter->set_pollset_or_pollset_set(&call_elems[i], pollent);
Craig Tiller577c9b22015-11-02 14:11:15 -0800201 }
202}
203
/* Deliberate no-op set_pollset_or_pollset_set implementation, for filters
   that do not care which polling entity drives the call. */
void grpc_call_stack_ignore_set_pollset_or_pollset_set(
    grpc_call_element* elem, grpc_polling_entity* pollent) {}
Craig Tiller577c9b22015-11-02 14:11:15 -0800206
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800207void grpc_call_stack_destroy(grpc_call_stack* stack,
Craig Tillerbaa14a92017-11-03 09:09:36 -0700208 const grpc_call_final_info* final_info,
209 grpc_closure* then_schedule_closure) {
210 grpc_call_element* elems = CALL_ELEMS_FROM_STACK(stack);
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800211 size_t count = stack->count;
212 size_t i;
213
214 /* destroy per-filter data */
Craig Tillera82950e2015-09-22 12:33:20 -0700215 for (i = 0; i < count; i++) {
Craig Tillere7a17022017-03-13 10:20:38 -0700216 elems[i].filter->destroy_call_elem(
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800217 &elems[i], final_info,
Craig Tiller4782d922017-11-10 09:53:21 -0800218 i == count - 1 ? then_schedule_closure : nullptr);
Craig Tillera82950e2015-09-22 12:33:20 -0700219 }
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800220}
221
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800222void grpc_call_next_op(grpc_call_element* elem,
Craig Tillerbaa14a92017-11-03 09:09:36 -0700223 grpc_transport_stream_op_batch* op) {
224 grpc_call_element* next_elem = elem + 1;
Mark D. Roth764cf042017-09-01 09:00:06 -0700225 GRPC_CALL_LOG_OP(GPR_INFO, next_elem, op);
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800226 next_elem->filter->start_transport_stream_op_batch(next_elem, op);
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800227}
228
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800229void grpc_channel_next_get_info(grpc_channel_element* elem,
Craig Tillerbaa14a92017-11-03 09:09:36 -0700230 const grpc_channel_info* channel_info) {
231 grpc_channel_element* next_elem = elem + 1;
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800232 next_elem->filter->get_channel_info(next_elem, channel_info);
Mark D. Rothb2d24882016-10-27 15:44:07 -0700233}
234
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800235void grpc_channel_next_op(grpc_channel_element* elem, grpc_transport_op* op) {
Craig Tillerbaa14a92017-11-03 09:09:36 -0700236 grpc_channel_element* next_elem = elem + 1;
Yash Tibrewal8cf14702017-12-06 09:47:54 -0800237 next_elem->filter->start_transport_op(next_elem, op);
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800238}
239
Craig Tillerbaa14a92017-11-03 09:09:36 -0700240grpc_channel_stack* grpc_channel_stack_from_top_element(
241 grpc_channel_element* elem) {
Noah Eisen4d20a662018-02-09 09:34:04 -0800242 return reinterpret_cast<grpc_channel_stack*>(
243 reinterpret_cast<char*>(elem) -
Mark D. Roth2223e602018-06-27 07:23:33 -0700244 GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_channel_stack)));
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800245}
246
Craig Tillerbaa14a92017-11-03 09:09:36 -0700247grpc_call_stack* grpc_call_stack_from_top_element(grpc_call_element* elem) {
Noah Eisen4d20a662018-02-09 09:34:04 -0800248 return reinterpret_cast<grpc_call_stack*>(
249 reinterpret_cast<char*>(elem) -
Mark D. Roth2223e602018-06-27 07:23:33 -0700250 GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call_stack)));
Nicolas Nobleb7ebd3b2014-11-26 16:33:03 -0800251}