#!/usr/bin/env python
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This tool uses a collection of BUILD.gn files and build targets to generate
# an "amalgamated" C++ header and source file pair which compiles to an
# equivalent program. The tool also outputs the necessary compiler and linker
# flags needed to compile the resulting source code.
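#
# Example invocation (the output path is illustrative; the target is the
# default one below):
#
#   gen_amalgamated --output /tmp/perfetto //:libperfetto
#
# This writes /tmp/perfetto.h and /tmp/perfetto.cc and prints the suggested
# cflags, ldflags, libs and defines for building them.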

from __future__ import print_function
import argparse
import os
import re
import shutil
import subprocess
import sys
import tempfile

import gn_utils

# Default targets to include in the result.
default_targets = [
    '//:libperfetto',
]

# Arguments for the GN output directory (unless overridden from the command
# line).
gn_args = 'is_debug=false'

# Compiler flags which aren't filtered out.
cflag_whitelist = r'^-(W.*|fno-exceptions|fPIC|std.*|fvisibility.*)$'

# Linker flags which aren't filtered out.
ldflag_whitelist = r'^-()$'

# Libraries which are filtered out.
lib_blacklist = r'^(c|gcc_eh)$'

# Macros which aren't filtered out.
define_whitelist = r'^(PERFETTO.*|GOOGLE_PROTOBUF.*)$'

# Includes which will be removed from the generated source.
includes_to_remove = r'^(gtest).*$'

# Build flags to satisfy a protobuf (lite or full) dependency.
protobuf_cflags = [
    # Note that these point to the local copy of protobuf in buildtools. In
    # reality the user of the amalgamated result will have to provide a path to
    # an installed copy of the exact same version of protobuf which was used to
    # generate the amalgamated build.
    '-isystembuildtools/protobuf/src',
    '-Lbuildtools/protobuf/src/.libs',
    # We also need to disable some warnings for protobuf.
    '-Wno-missing-prototypes',
    '-Wno-missing-variable-declarations',
    '-Wno-sign-conversion',
    '-Wno-unknown-pragmas',
    '-Wno-unused-macros',
]

# A mapping of dependencies to system libraries. Libraries in this map will not
# be built statically but instead added as dependencies of the amalgamated
# project.
system_library_map = {
    '//buildtools:protobuf_full': {
        'libs': ['protobuf'],
        'cflags': protobuf_cflags,
    },
    '//buildtools:protobuf_lite': {
        'libs': ['protobuf-lite'],
        'cflags': protobuf_cflags,
    },
    '//buildtools:protoc_lib': {'libs': ['protoc']},
    # This prevents us from expanding the prod-only gtest header into a full
    # gtest dependency. This also requires some patching in headers -- see
    # AmalgamatedProject._patch_header() below.
    '//gn:gtest_prod_config': {},
}
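# Entries may also carry a 'defines' list; see
# AmalgamatedProject._add_target_dependencies() below for how each key is
# consumed.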

# ----------------------------------------------------------------------------
# End of configuration.
# ----------------------------------------------------------------------------

tool_name = os.path.basename(__file__)
preamble = """// Copyright (C) 2019 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// This file is automatically generated by %s. Do not edit.
""" % tool_name


def apply_blacklist(blacklist, items):
    return [item for item in items if not re.match(blacklist, item)]


def apply_whitelist(whitelist, items):
    return [item for item in items if re.match(whitelist, item)]
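# For example (illustrative values):
#   apply_whitelist(cflag_whitelist, ['-Wall', '-g']) == ['-Wall']
#   apply_blacklist(lib_blacklist, ['c', 'pthread']) == ['pthread']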


class Error(Exception):
    pass


class DependencyNode(object):
    """A target in a GN build description along with its dependencies."""

    def __init__(self, target_name):
        self.target_name = target_name
        self.dependencies = set()

    def add_dependency(self, target_node):
        if target_node in self.dependencies:
            return
        self.dependencies.add(target_node)

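    # Post-order traversal: dependencies are yielded before the node itself,
    # so amalgamated sources end up in dependency order.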
    def iterate_depth_first(self):
        for node in sorted(self.dependencies, key=lambda n: n.target_name):
            for node in node.iterate_depth_first():
                yield node
        if self.target_name:
            yield self


class DependencyTree(object):
    """A tree of GN build target dependencies."""

    def __init__(self):
        self.target_to_node_map = {}
        self.root = self._get_or_create_node(None)

    def _get_or_create_node(self, target_name):
        if target_name in self.target_to_node_map:
            return self.target_to_node_map[target_name]
        node = DependencyNode(target_name)
        self.target_to_node_map[target_name] = node
        return node

    def add_dependency(self, from_target, to_target):
        from_node = self._get_or_create_node(from_target)
        to_node = self._get_or_create_node(to_target)
        assert from_node is not to_node
        from_node.add_dependency(to_node)

    def iterate_depth_first(self):
        for node in self.root.iterate_depth_first():
            yield node


class AmalgamatedProject(object):
    """In-memory representation of an amalgamated source/header pair."""

    def __init__(self, desc, source_deps):
175 """Constructor.
176
177 Args:
178 desc: JSON build description.
179 source_deps: A map of (source file, [dependency header]) which is
180 to detect which header files are included by each source file.
181 """
182 self.desc = desc
183 self.source_deps = source_deps
184 self.header = []
185 self.source = []
186 self.cflags = set() # Note that we don't support multi-arg flags.
187 self.ldflags = set()
188 self.defines = set()
189 self.libs = set()
190 self._dependency_tree = DependencyTree()
191 self._included_sources = set()
192 self._included_headers = set()
193 self._include_re = re.compile(r'#include "(.*)"')

    def add_target(self, target_name):
        """Include |target_name| in the amalgamated result."""
        self._dependency_tree.add_dependency(None, target_name)
        self._add_target_dependencies(target_name)
        self._add_target_flags(target_name)

    def _iterate_dep_edges(self, target_name):
        target = self.desc[target_name]
        for dep in target.get('deps', []):
            # Ignore system libraries since they will be added as build-time
            # dependencies.
            if dep in system_library_map:
                continue
            # Don't descend into build action dependencies.
            if self.desc[dep]['type'] == 'action':
                continue
            for sub_target, sub_dep in self._iterate_dep_edges(dep):
                yield sub_target, sub_dep
            yield target_name, dep

    def _iterate_target_and_deps(self, target_name):
        yield target_name
        for _, dep in self._iterate_dep_edges(target_name):
            yield dep

    def _add_target_dependencies(self, target_name):
        for target, dep in self._iterate_dep_edges(target_name):
            self._dependency_tree.add_dependency(target, dep)

        def process_dep(dep):
            if dep in system_library_map:
                self.libs.update(system_library_map[dep].get('libs', []))
                self.cflags.update(system_library_map[dep].get('cflags', []))
                self.defines.update(system_library_map[dep].get('defines', []))
                return True

        def walk_all_deps(target_name):
            target = self.desc[target_name]
            for dep in target.get('deps', []):
                if process_dep(dep):
                    return
                walk_all_deps(dep)
        walk_all_deps(target_name)

    def _filter_cflags(self, cflags):
        # Since we want to deduplicate flags, combine two-part switches (e.g.,
        # "-foo bar") into one value ("-foobar") so we can store the result as
        # a set.
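        # For example, ['-isystem', 'buildtools/protobuf/src'] is folded into
        # the single value '-isystembuildtools/protobuf/src'.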
        result = []
        for flag in cflags:
            if flag.startswith('-'):
                result.append(flag)
            else:
                result[-1] += flag
        return apply_whitelist(cflag_whitelist, result)

    def _add_target_flags(self, target_name):
        for target_name in self._iterate_target_and_deps(target_name):
            target = self.desc[target_name]
            self.cflags.update(self._filter_cflags(target.get('cflags', [])))
            self.cflags.update(self._filter_cflags(target.get('cflags_cc', [])))
            self.ldflags.update(
                apply_whitelist(ldflag_whitelist, target.get('ldflags', [])))
            self.libs.update(
                apply_blacklist(lib_blacklist, target.get('libs', [])))
            self.defines.update(
                apply_whitelist(define_whitelist, target.get('defines', [])))

    def _get_include_dirs(self, target_name):
        include_dirs = set()
        for target_name in self._iterate_target_and_deps(target_name):
            target = self.desc[target_name]
            if 'include_dirs' in target:
                include_dirs.update(
                    [gn_utils.label_to_path(d) for d in target['include_dirs']])
        return include_dirs

    def _add_header(self, include_dirs, allowed_files, header_name):
        if header_name in self._included_headers:
            return
        self._included_headers.add(header_name)
        for include_dir in include_dirs:
            full_path = os.path.join(include_dir, header_name)
            if os.path.exists(full_path):
                if not full_path in allowed_files:
                    return
                with open(full_path) as f:
                    self.header.append(
                        '// %s begin header: %s' % (tool_name, full_path))
                    self.header.extend(self._patch_header(
                        self._process_includes(include_dirs, allowed_files, f)))
                return
        msg = 'Looked in %s' % ', '.join('"%s"' % d for d in include_dirs)
        raise Error('Header file %s not found. %s' % (header_name, msg))

    def _add_source(self, target_name, source_name):
        if source_name in self._included_sources:
            return
        self._included_sources.add(source_name)
        include_dirs = self._get_include_dirs(target_name)
        deps = self.source_deps[source_name]
        if not os.path.exists(source_name):
            raise Error('Source file %s not found' % source_name)
        with open(source_name) as f:
            self.source.append(
                '// %s begin source: %s' % (tool_name, source_name))
            try:
                self.source.extend(self._patch_source(source_name,
                    self._process_includes(include_dirs, deps, f)))
            except Error as e:
                raise Error(
                    'Failed adding source %s: %s' % (source_name, e.message))

    def _patch_header(self, lines):
        result = []
        for line in lines:
            # We don't want to propagate any gtest dependencies into the
            # result, so remove any macros used from gtest_prod_util.h.
            if 'FRIEND_TEST' in line:
                continue
            result.append(line)
        return result

    def _patch_source(self, source_name, lines):
        result = []
        namespace = re.sub(r'[^a-z]', '_',
            os.path.splitext(os.path.basename(source_name))[0])
        for line in lines:
            # Protobuf generates an identical anonymous function into each
            # message description. Rename all but the first occurrence to avoid
            # duplicate symbol definitions.
            line = line.replace('MergeFromFail', '%s_MergeFromFail' % namespace)
            result.append(line)
        return result

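    # Expands local #include "..." directives recursively: includes matching
    # |includes_to_remove| (gtest) are commented out, anything else found in
    # |include_dirs| is inlined into the output, and only files listed in
    # |allowed_files| are eligible for inlining.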
    def _process_includes(self, include_dirs, allowed_files, file):
        result = []
        for line in file:
            line = line.rstrip('\n')
            m = self._include_re.match(line)
            if not m:
                result.append(line)
                continue
            elif re.match(includes_to_remove, m.group(1)):
                result.append('// %s removed: %s' % (tool_name, line))
            else:
                result.append('// %s expanded: %s' % (tool_name, line))
                self._add_header(include_dirs, allowed_files, m.group(1))
        return result

    def generate(self):
        """Prepares the output for this amalgamated project.

        Call save() to persist the result.
        """

        source_files = []
        for node in self._dependency_tree.iterate_depth_first():
            target = self.desc[node.target_name]
            if not 'sources' in target:
                continue
            sources = [(node.target_name, gn_utils.label_to_path(s))
                       for s in target['sources'] if s.endswith('.cc')]
            source_files.extend(sources)
        for target_name, source_name in source_files:
            self._add_source(target_name, source_name)

    def _get_nice_path(self, prefix, format):
        basename = os.path.basename(prefix)
        return os.path.join(
            os.path.relpath(os.path.dirname(prefix)), format % basename)

    def save(self, output_prefix):
        """Save the generated header and source file pair.

        Returns a message describing the output with build instructions.
        """
        header_file = self._get_nice_path(output_prefix, '%s.h')
        source_file = self._get_nice_path(output_prefix, '%s.cc')
        with open(header_file, 'w') as f:
            f.write('\n'.join([preamble] + self.header + ['\n']))
        with open(source_file, 'w') as f:
            include_stmt = '#include "%s"' % os.path.basename(header_file)
            f.write('\n'.join([preamble, include_stmt] + self.source + ['\n']))
        build_cmd = self.get_build_command(output_prefix)

        return """Amalgamated project written to %s and %s.

Build settings:
 - cflags: %s
 - ldflags: %s
 - libs: %s
 - defines: %s

Example build command:

%s
""" % (header_file, source_file, ' '.join(self.cflags), ' '.join(self.ldflags),
       ' '.join(self.libs), ' '.join(self.defines), ' '.join(build_cmd))

    def get_build_command(self, output_prefix):
        """Returns an example command line for building the output source."""
        source = self._get_nice_path(output_prefix, '%s.cc')
        library = self._get_nice_path(output_prefix, 'lib%s.so')
        build_cmd = ['clang++', source, '-o', library, '-shared'] + \
            sorted(self.cflags) + sorted(self.ldflags)
        for lib in sorted(self.libs):
            build_cmd.append('-l%s' % lib)
        for define in sorted(self.defines):
            build_cmd.append('-D%s' % define)
        return build_cmd
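    # The command returned above looks roughly like this (the exact flag set
    # depends on the GN configuration; the values below are illustrative only):
    #
    #   clang++ perfetto.cc -o libperfetto.so -shared -fPIC ... -lprotobuf \
    #       -DPERFETTO_...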


def create_amalgamated_project_for_targets(desc, targets, source_deps):
    """Generate an amalgamated project for a list of GN targets."""
    project = AmalgamatedProject(desc, source_deps)
    for target in targets:
        project.add_target(target)
    project.generate()
    return project


def main():
    parser = argparse.ArgumentParser(
        description='Generate an amalgamated header/source pair from a GN '
                    'build description.')
    parser.add_argument(
        '--output',
        help='Base name of files to create. A .cc/.h extension will be added',
        default=os.path.join(gn_utils.repo_root(), 'perfetto'))
    parser.add_argument(
        '--gn_args', help='GN arguments used to prepare the output directory',
        default=gn_args)
    parser.add_argument(
        '--keep', help='Don\'t delete the GN output directory at exit',
        action='store_true')
    parser.add_argument(
        '--build', help='Also compile the generated files',
        action='store_true')
    parser.add_argument(
        '--check', help='Don\'t keep the generated files',
        action='store_true')
    parser.add_argument('--quiet', help='Only report errors',
                        action='store_true')
    parser.add_argument(
        'targets',
        nargs=argparse.REMAINDER,
        help='Targets to include in the output (e.g., "//:libperfetto")')
    args = parser.parse_args()
    targets = args.targets or default_targets

    output = args.output
    if args.check:
        output = os.path.join(tempfile.mkdtemp(), 'perfetto_amalgamated')

    try:
        if not args.quiet:
            print('Building project...')
        out = gn_utils.prepare_out_directory(
            args.gn_args, 'tmp.gen_amalgamated')
        desc = gn_utils.load_build_description(out)
        # We need to build everything first so that the necessary header
        # dependencies get generated.
        gn_utils.build_targets(out, targets)
        source_deps = gn_utils.compute_source_dependencies(out)
        project = create_amalgamated_project_for_targets(
            desc, targets, source_deps)
        result = project.save(output)
        if not args.quiet:
            print(result)
        if args.build:
            if not args.quiet:
                sys.stdout.write('Building amalgamated project...')
                sys.stdout.flush()
            subprocess.check_call(project.get_build_command(output))
            if not args.quiet:
                print('done')
    finally:
        if not args.keep:
            shutil.rmtree(out)
        if args.check:
            shutil.rmtree(os.path.dirname(output))

if __name__ == '__main__':
    sys.exit(main())