Merge from Chromium at DEPS revision 275586
This commit was generated by merge_to_master.py.
Change-Id: Ief3a0ffd810858bfddbe0ec5931e3ee90d53f78c
diff --git a/build/all.gyp b/build/all.gyp
index 5aba3f7..60ee4b2 100644
--- a/build/all.gyp
+++ b/build/all.gyp
@@ -31,6 +31,7 @@
'../third_party/zlib/zlib.gyp:*',
'../ui/accessibility/accessibility.gyp:*',
'../ui/base/ui_base.gyp:*',
+ '../ui/display/display.gyp:display_unittests',
'../ui/snapshot/snapshot.gyp:*',
'../url/url.gyp:*',
],
@@ -258,6 +259,7 @@
'../net/net.gyp:net_unittests',
'../sql/sql.gyp:sql_unittests',
'../sync/sync.gyp:sync_unit_tests',
+ '../ui/display/display.gyp:display_unittests',
'../ui/gfx/gfx_tests.gyp:gfx_unittests',
'../ui/ui_unittests.gyp:ui_unittests',
'../url/url.gyp:url_unittests',
@@ -291,6 +293,7 @@
'../remoting/remoting.gyp:remoting_unittests',
'../third_party/WebKit/public/all.gyp:all_blink',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
'../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
'../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
'../tools/telemetry/telemetry.gyp:*',
@@ -301,9 +304,7 @@
'dependencies': [
'../chrome/chrome.gyp:crash_service',
'../chrome/chrome.gyp:installer_util_unittests',
- '../chrome/chrome.gyp:mini_installer_test',
- # mini_installer_tests depends on mini_installer. This should be
- # defined in installer.gyp.
+ # ../chrome/test/mini_installer requires mini_installer.
'../chrome/installer/mini_installer.gyp:mini_installer',
'../chrome_elf/chrome_elf.gyp:chrome_elf_unittests',
'../content/content_shell_and_tests.gyp:copy_test_netscape_plugin',
@@ -745,6 +746,7 @@
'../sandbox/sandbox.gyp:sandbox_linux_unittests_stripped',
'../sql/sql.gyp:sql_unittests',
'../sync/sync.gyp:sync_unit_tests',
+ '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
'../third_party/WebKit/public/all.gyp:*',
'../tools/android/android_tools.gyp:android_tools',
'../tools/android/android_tools.gyp:memconsumer',
@@ -850,6 +852,7 @@
'../sql/sql.gyp:sql_unittests',
'../sync/sync.gyp:sync_unit_tests',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
'../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
'../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
'../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
@@ -885,6 +888,7 @@
'../sql/sql.gyp:sql_unittests',
'../sync/sync.gyp:sync_unit_tests',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
'../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
'../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
'../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
@@ -935,6 +939,7 @@
'../sql/sql.gyp:sql_unittests',
'../sync/sync.gyp:sync_unit_tests',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
'../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
'../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
'../ui/gfx/gfx_tests.gyp:gfx_unittests',
@@ -958,7 +963,6 @@
'../chrome/chrome.gyp:gcapi_test',
'../chrome/chrome.gyp:installer_util_unittests',
'../chrome/chrome.gyp:interactive_ui_tests',
- '../chrome/chrome.gyp:mini_installer_test',
'../chrome/chrome.gyp:performance_browser_tests',
'../chrome/chrome.gyp:sync_integration_tests',
'../chrome/chrome.gyp:unit_tests',
@@ -967,8 +971,7 @@
'../content/content_shell_and_tests.gyp:content_browsertests',
'../content/content_shell_and_tests.gyp:content_unittests',
'../content/content_shell_and_tests.gyp:copy_test_netscape_plugin',
- # mini_installer_tests depends on mini_installer. This should be
- # defined in installer.gyp.
+ # ../chrome/test/mini_installer requires mini_installer.
'../chrome/installer/mini_installer.gyp:mini_installer',
'../courgette/courgette.gyp:courgette_unittests',
'../device/device_tests.gyp:device_unittests',
@@ -983,6 +986,7 @@
'../sql/sql.gyp:sql_unittests',
'../sync/sync.gyp:sync_unit_tests',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
'../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
'../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
'../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
@@ -1056,6 +1060,7 @@
'../sql/sql.gyp:sql_unittests',
'../sync/sync.gyp:sync_unit_tests',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+ '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
'../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
'../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
'../url/url.gyp:url_unittests',
@@ -1136,6 +1141,7 @@
'../ui/app_list/app_list.gyp:*',
'../ui/aura/aura.gyp:*',
'../ui/compositor/compositor.gyp:*',
+ '../ui/display/display.gyp:display_unittests',
'../ui/events/events.gyp:*',
'../ui/gfx/gfx_tests.gyp:gfx_unittests',
'../ui/keyboard/keyboard.gyp:*',
@@ -1182,6 +1188,9 @@
],
}],
['use_ozone==1', {
+ 'dependencies': [
+ '../ui/ozone/ozone.gyp:*',
+ ],
'dependencies!': [
'../chrome/chrome.gyp:interactive_ui_tests', # crbug.com/362166
],
@@ -1222,5 +1231,18 @@
}, # target_name: chromium_swarm_tests
],
}],
- ], # conditions
+ ['OS=="mac" and toolkit_views==1', {
+ 'targets': [
+ {
+ 'target_name': 'macviews_builder',
+ 'type': 'none',
+ 'dependencies': [
+ '../ui/views/examples/examples.gyp:views_examples_with_content_exe',
+ '../ui/views/views.gyp:views',
+ '../ui/views/views.gyp:views_unittests',
+ ],
+ }, # target_name: macviews_builder
+ ], # targets
+ }], # os=='mac' and toolkit_views==1
+ ], # conditions
}
diff --git a/build/android/adb_gdb b/build/android/adb_gdb
index 93bb317..1998963 100755
--- a/build/android/adb_gdb
+++ b/build/android/adb_gdb
@@ -320,6 +320,7 @@
--su-prefix=<prefix> Prepend <prefix> to 'adb shell' commands that are
run by this script. This can be useful to use
the 'su' program on rooted production devices.
+ e.g. --su-prefix="su -c"
--pull-libs Force system libraries extraction.
--no-pull-libs Do not extract any system library.
@@ -852,7 +853,7 @@
cat $SU_PREFIX_TEST_LOG
exit 1
fi
- COMMAND_PREFIX=$SU_PREFIX
+ COMMAND_PREFIX="$SU_PREFIX"
else
SHELL_UID=$(adb shell cat /proc/self/status | \
awk '$1 == "Uid:" { print $2; }')
diff --git a/build/android/buildbot/bb_device_status_check.py b/build/android/buildbot/bb_device_status_check.py
index 3ed0fba..73aa1de 100755
--- a/build/android/buildbot/bb_device_status_check.py
+++ b/build/android/buildbot/bb_device_status_check.py
@@ -31,6 +31,7 @@
from pylib.cmd_helper import GetCmdOutput
from pylib.device import device_blacklist
from pylib.device import device_errors
+from pylib.device import device_list
from pylib.device import device_utils
def DeviceInfo(serial, options):
@@ -108,26 +109,6 @@
return device_type, device_build, battery_level, full_report, errors, True
-def GetLastDevices(out_dir):
- """Returns a list of devices that have been seen on the bot.
-
- Args:
- options: out_dir parameter of options argument is used as the base
- directory to load and update the cache file.
-
- Returns: List of device serial numbers that were on the bot.
- """
- devices_path = os.path.join(out_dir, '.last_devices')
- devices = []
- try:
- with open(devices_path) as f:
- devices = f.read().splitlines()
- except IOError:
- # Ignore error, file might not exist
- pass
- return devices
-
-
def CheckForMissingDevices(options, adb_online_devs):
"""Uses file of previous online devices to detect broken phones.
@@ -144,21 +125,19 @@
out_dir = os.path.abspath(options.out_dir)
- def WriteDeviceList(file_name, device_list):
- path = os.path.join(out_dir, file_name)
- if not os.path.exists(out_dir):
- os.makedirs(out_dir)
- with open(path, 'w') as f:
- # Write devices currently visible plus devices previously seen.
- f.write('\n'.join(set(device_list)))
-
- last_devices_path = os.path.join(out_dir, '.last_devices')
- last_devices = GetLastDevices(out_dir)
+ last_devices_path = os.path.join(out_dir, device_list.LAST_DEVICES_FILENAME)
+ try:
+ last_devices = device_list.GetPersistentDeviceList(last_devices_path)
+ except IOError:
+ # Ignore error, file might not exist
+ last_devices = []
missing_devs = list(set(last_devices) - set(adb_online_devs))
all_known_devices = list(set(adb_online_devs) | set(last_devices))
- WriteDeviceList('.last_devices', all_known_devices)
- WriteDeviceList('.last_missing', missing_devs)
+ device_list.WritePersistentDeviceList(last_devices_path, all_known_devices)
+ device_list.WritePersistentDeviceList(
+ os.path.join(out_dir, device_list.LAST_MISSING_DEVICES_FILENAME),
+ missing_devs)
if not all_known_devices:
# This can happen if for some reason the .last_devices file is not
@@ -289,7 +268,11 @@
device_blacklist.ResetBlacklist()
if options.restart_usb:
- expected_devices = GetLastDevices(os.path.abspath(options.out_dir))
+ try:
+ expected_devices = device_list.GetPersistentDeviceList(
+ os.path.join(options.out_dir, device_list.LAST_DEVICES_FILENAME))
+ except IOError:
+ expected_devices = []
devices = android_commands.GetAttachedDevices()
# Only restart usb if devices are missing.
if set(expected_devices) != set(devices):
diff --git a/build/android/buildbot/bb_device_steps.py b/build/android/buildbot/bb_device_steps.py
index c418a57..3f9b96a 100755
--- a/build/android/buildbot/bb_device_steps.py
+++ b/build/android/buildbot/bb_device_steps.py
@@ -148,7 +148,7 @@
options: options object.
"""
InstallApk(options, INSTRUMENTATION_TESTS['ChromeShell'], False)
- args = ['--browser', 'android-chromium-testshell']
+ args = ['--browser', 'android-chrome-shell']
devices = android_commands.GetAttachedDevices()
if devices:
args = args + ['--device', devices[0]]
@@ -479,6 +479,16 @@
'--browser=android-content-shell', 'webgl_conformance',
'--webgl-conformance-version=1.0.1'])
+ bb_annotations.PrintNamedStep('gpu_rasterization_tests')
+ RunCmd(['content/test/gpu/run_gpu_test.py',
+ 'gpu_rasterization',
+ '--browser',
+ 'android-content-shell',
+ '--build-revision',
+ str(revision),
+ '--test-machine-name',
+ EscapeBuilderName(builder_name)])
+
def GetTestStepCmds():
return [
@@ -639,6 +649,13 @@
parser.add_option(
'--logcat-dump-output',
help='The logcat dump output will be "tee"-ed into this file')
+  # During perf bisect processing, a separate working directory is created,
+  # under which builds are produced. Therefore we should look for the relevant
+  # output files under this directory (/b/build/slave/<slave_name>/build/bisect/src/out).
+ parser.add_option(
+ '--chrome-output-dir',
+ help='Chrome output directory to be used while bisecting.')
+
parser.add_option('--disable-stack-tool', action='store_true',
help='Do not run stack tool.')
parser.add_option('--asan-symbolize', action='store_true',
@@ -658,6 +675,13 @@
return sys.exit('Unknown tests %s' % list(unknown_tests))
setattr(options, 'target', options.factory_properties.get('target', 'Debug'))
+
+ if options.chrome_output_dir:
+ global CHROME_OUT_DIR
+ global LOGCAT_DIR
+ CHROME_OUT_DIR = options.chrome_output_dir
+ LOGCAT_DIR = os.path.join(CHROME_OUT_DIR, 'logcat')
+
if options.coverage_bucket:
setattr(options, 'coverage_dir',
os.path.join(CHROME_OUT_DIR, options.target, 'coverage'))
diff --git a/build/android/buildbot/bb_run_bot.py b/build/android/buildbot/bb_run_bot.py
index 62a2a02..d2a6010 100755
--- a/build/android/buildbot/bb_run_bot.py
+++ b/build/android/buildbot/bb_run_bot.py
@@ -123,7 +123,9 @@
flakiness_server = (
'--flakiness-server=%s' % constants.UPSTREAM_FLAKINESS_SERVER)
experimental = ['--experimental']
-
+ bisect_chrome_output_dir = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+ os.pardir, 'bisect', 'src', 'out'))
B = BotConfig
H = (lambda steps, extra_args=None, extra_gyp=None, target_arch=None :
HostConfig('build/android/buildbot/bb_host_steps.py', steps, extra_args,
@@ -168,7 +170,9 @@
H(compile_step),
T(['gpu'], ['--install=ContentShell'])),
# Pass empty T([]) so that logcat monitor and device status check are run.
- B('perf-bisect-builder-tests-dbg', H(['bisect_perf_regression']), T([])),
+ B('perf-bisect-builder-tests-dbg',
+ H(['bisect_perf_regression']),
+ T([], ['--chrome-output-dir', bisect_chrome_output_dir])),
B('perf-tests-rel', H(std_test_steps),
T([], ['--install=ChromeShell'])),
B('webkit-latest-webkit-tests', H(std_test_steps),
diff --git a/build/android/chrome_profiler/main.py b/build/android/chrome_profiler/main.py
index 401115f..729163f 100755
--- a/build/android/chrome_profiler/main.py
+++ b/build/android/chrome_profiler/main.py
@@ -11,6 +11,7 @@
import webbrowser
from chrome_profiler import chrome_controller
+from chrome_profiler import perf_controller
from chrome_profiler import profiler
from chrome_profiler import systrace_controller
from chrome_profiler import ui
@@ -45,6 +46,21 @@
return options.systrace_categories.split(',')
+def _ComputePerfCategories(options):
+ if not options.perf_categories:
+ return []
+ return options.perf_categories.split(',')
+
+
+def _OptionalValueCallback(default_value):
+ def callback(option, _, __, parser):
+ value = default_value
+ if parser.rargs and not parser.rargs[0].startswith('-'):
+ value = parser.rargs.pop(0)
+ setattr(parser.values, option.dest, value)
+ return callback
+
+
def _CreateOptionParser():
parser = optparse.OptionParser(description='Record about://tracing profiles '
'from Android browsers. See http://dev.'
@@ -104,6 +120,16 @@
dest='systrace_categories', default='')
parser.add_option_group(systrace_opts)
+ if perf_controller.PerfProfilerController.IsSupported():
+ perf_opts = optparse.OptionGroup(parser, 'Perf profiling options')
+ perf_opts.add_option('-p', '--perf', help='Capture a perf profile with '
+ 'the chosen comma-delimited event categories. '
+ 'Samples CPU cycles by default. Use "list" to see '
+ 'the available sample types.', action='callback',
+ default='', callback=_OptionalValueCallback('cycles'),
+ metavar='PERF_CATEGORIES', dest='perf_categories')
+ parser.add_option_group(perf_opts)
+
output_options = optparse.OptionGroup(parser, 'Output options')
output_options.add_option('-o', '--output', help='Save trace output to file.')
output_options.add_option('--json', help='Save trace as raw JSON instead of '
@@ -169,12 +195,18 @@
systrace_controller.SystraceController.GetCategories(device)))
return 0
+ if options.perf_categories in ['list', 'help']:
+ ui.PrintMessage('\n'.join(
+ perf_controller.PerfProfilerController.GetCategories(device)))
+ return 0
+
if not options.time and not options.continuous:
ui.PrintMessage('Time interval or continuous tracing should be specified.')
return 1
chrome_categories = _ComputeChromeCategories(options)
systrace_categories = _ComputeSystraceCategories(options)
+ perf_categories = _ComputePerfCategories(options)
if chrome_categories and 'webview' in systrace_categories:
logging.warning('Using the "webview" category in systrace together with '
@@ -194,6 +226,11 @@
systrace_categories,
options.ring_buffer))
+ if perf_categories:
+ enabled_controllers.append(
+ perf_controller.PerfProfilerController(device,
+ perf_categories))
+
if not enabled_controllers:
ui.PrintMessage('No trace categories enabled.')
return 1
@@ -211,7 +248,3 @@
os.system('/usr/bin/open %s' % os.path.abspath(result))
else:
webbrowser.open(result)
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/build/android/chrome_profiler/perf_controller.py b/build/android/chrome_profiler/perf_controller.py
new file mode 100644
index 0000000..d5f3b80
--- /dev/null
+++ b/build/android/chrome_profiler/perf_controller.py
@@ -0,0 +1,160 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+import sys
+import tempfile
+
+from chrome_profiler import controllers
+
+from pylib import android_commands
+from pylib import constants
+
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT,
+ 'tools',
+ 'telemetry'))
+try:
+ # pylint: disable=F0401
+ from telemetry.core.platform.profiler import android_profiling_helper
+ from telemetry.util import support_binaries
+except ImportError:
+ android_profiling_helper = None
+ support_binaries = None
+
+
+_PERF_OPTIONS = [
+  # Sample across all processes and CPUs so that the current CPU gets
+  # recorded with each sample.
+ '--all-cpus',
+ # In perf 3.13 --call-graph requires an argument, so use the -g short-hand
+ # which does not.
+ '-g',
+ # Increase priority to avoid dropping samples. Requires root.
+ '--realtime', '80',
+ # Record raw samples to get CPU information.
+ '--raw-samples',
+ # Increase sampling frequency for better coverage.
+ '--freq', '2000',
+]
+
+
+class _PerfProfiler(object):
+ def __init__(self, device, perf_binary, categories):
+ self._device = device
+ self._output_file = android_commands.DeviceTempFile(
+ self._device.old_interface, prefix='perf_output')
+ self._log_file = tempfile.TemporaryFile()
+
+ device_param = (['-s', self._device.old_interface.GetDevice()]
+ if self._device.old_interface.GetDevice() else [])
+ cmd = ['adb'] + device_param + \
+ ['shell', perf_binary, 'record',
+ '--output', self._output_file.name] + _PERF_OPTIONS
+ if categories:
+ cmd += ['--event', ','.join(categories)]
+ self._perf_process = subprocess.Popen(cmd,
+ stdout=self._log_file,
+ stderr=subprocess.STDOUT)
+
+ def SignalAndWait(self):
+ perf_pids = self._device.old_interface.ExtractPid('perf')
+ self._device.old_interface.RunShellCommand(
+ 'kill -SIGINT ' + ' '.join(perf_pids))
+ self._perf_process.wait()
+
+ def _FailWithLog(self, msg):
+ self._log_file.seek(0)
+ log = self._log_file.read()
+ raise RuntimeError('%s. Log output:\n%s' % (msg, log))
+
+ def PullResult(self, output_path):
+ if not self._device.old_interface.FileExistsOnDevice(
+ self._output_file.name):
+ self._FailWithLog('Perf recorded no data')
+
+ perf_profile = os.path.join(output_path,
+ os.path.basename(self._output_file.name))
+ self._device.old_interface.PullFileFromDevice(self._output_file.name,
+ perf_profile)
+ if not os.stat(perf_profile).st_size:
+ os.remove(perf_profile)
+ self._FailWithLog('Perf recorded a zero-sized file')
+
+ self._log_file.close()
+ self._output_file.close()
+ return perf_profile
+
+
+class PerfProfilerController(controllers.BaseController):
+ def __init__(self, device, categories):
+ controllers.BaseController.__init__(self)
+ self._device = device
+ self._categories = categories
+ self._perf_binary = self._PrepareDevice(device)
+ self._perf_instance = None
+
+ def __repr__(self):
+ return 'perf profile'
+
+ @staticmethod
+ def IsSupported():
+ return bool(android_profiling_helper)
+
+ @staticmethod
+ def _PrepareDevice(device):
+ if not 'BUILDTYPE' in os.environ:
+ os.environ['BUILDTYPE'] = 'Release'
+ return android_profiling_helper.PrepareDeviceForPerf(device)
+
+ @classmethod
+ def GetCategories(cls, device):
+ perf_binary = cls._PrepareDevice(device)
+ return device.old_interface.RunShellCommand('%s list' % perf_binary)
+
+ def StartTracing(self, _):
+ self._perf_instance = _PerfProfiler(self._device,
+ self._perf_binary,
+ self._categories)
+
+ def StopTracing(self):
+ if not self._perf_instance:
+ return
+ self._perf_instance.SignalAndWait()
+
+ def PullTrace(self):
+ symfs_dir = os.path.join(tempfile.gettempdir(),
+ os.path.expandvars('$USER-perf-symfs'))
+ if not os.path.exists(symfs_dir):
+ os.makedirs(symfs_dir)
+ required_libs = set()
+
+ # Download the recorded perf profile.
+ perf_profile = self._perf_instance.PullResult(symfs_dir)
+ required_libs = \
+ android_profiling_helper.GetRequiredLibrariesForPerfProfile(
+ perf_profile)
+ if not required_libs:
+ logging.warning('No libraries required by perf trace. Most likely there '
+ 'are no samples in the trace.')
+
+ # Build a symfs with all the necessary libraries.
+ kallsyms = android_profiling_helper.CreateSymFs(self._device,
+ symfs_dir,
+ required_libs,
+ use_symlinks=False)
+ # Convert the perf profile into JSON.
+ perfhost_path = os.path.abspath(support_binaries.FindPath(
+ 'perfhost', 'linux'))
+ perf_script_path = os.path.join(constants.DIR_SOURCE_ROOT,
+ 'tools', 'telemetry', 'telemetry', 'core', 'platform', 'profiler',
+ 'perf_vis', 'perf_to_tracing.py')
+ json_file_name = os.path.basename(perf_profile)
+ with open(os.devnull, 'w') as dev_null, \
+ open(json_file_name, 'w') as json_file:
+ cmd = [perfhost_path, 'script', '-s', perf_script_path, '-i',
+ perf_profile, '--symfs', symfs_dir, '--kallsyms', kallsyms]
+ subprocess.call(cmd, stdout=json_file, stderr=dev_null)
+ return json_file_name
diff --git a/build/android/chrome_profiler/perf_controller_unittest.py b/build/android/chrome_profiler/perf_controller_unittest.py
new file mode 100644
index 0000000..a621a2e
--- /dev/null
+++ b/build/android/chrome_profiler/perf_controller_unittest.py
@@ -0,0 +1,38 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import json
+
+from chrome_profiler import controllers_unittest
+from chrome_profiler import perf_controller
+
+
+class PerfProfilerControllerTest(controllers_unittest.BaseControllerTest):
+ def testGetCategories(self):
+ if not perf_controller.PerfProfilerController.IsSupported():
+ return
+ categories = \
+ perf_controller.PerfProfilerController.GetCategories(self.device)
+ assert 'cycles' in ' '.join(categories)
+
+ def testTracing(self):
+ if not perf_controller.PerfProfilerController.IsSupported():
+ return
+ categories = ['cycles']
+ controller = perf_controller.PerfProfilerController(self.device,
+ categories)
+
+ interval = 1
+ try:
+ controller.StartTracing(interval)
+ finally:
+ controller.StopTracing()
+
+ result = controller.PullTrace()
+ try:
+ with open(result) as f:
+ json.loads(f.read())
+ finally:
+ os.remove(result)
diff --git a/build/android/chrome_profiler/profiler.py b/build/android/chrome_profiler/profiler.py
index 019d3c0..5393ed3 100644
--- a/build/android/chrome_profiler/profiler.py
+++ b/build/android/chrome_profiler/profiler.py
@@ -2,44 +2,13 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import gzip
import os
-import shutil
-import sys
-import zipfile
+from chrome_profiler import trace_packager
from chrome_profiler import ui
-from chrome_profiler import util
from pylib import constants
-sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT,
- 'third_party',
- 'trace-viewer'))
-# pylint: disable=F0401
-from trace_viewer.build import trace2html
-
-
-def _CompressFile(host_file, output):
- with gzip.open(output, 'wb') as out:
- with open(host_file, 'rb') as input_file:
- out.write(input_file.read())
- os.unlink(host_file)
-
-
-def _ArchiveFiles(host_files, output):
- with zipfile.ZipFile(output, 'w', zipfile.ZIP_DEFLATED) as z:
- for host_file in host_files:
- z.write(host_file)
- os.unlink(host_file)
-
-
-def _PackageTracesAsHtml(trace_files, html_file):
- with open(html_file, 'w') as f:
- trace2html.WriteHTMLForTracesToFile(trace_files, f)
- for trace_file in trace_files:
- os.unlink(trace_file)
-
def _StartTracing(controllers, interval):
for controller in controllers:
@@ -53,27 +22,11 @@
def _PullTraces(controllers, output, compress, write_json):
ui.PrintMessage('Downloading...', eol='')
- trace_files = []
- for controller in controllers:
- trace_files.append(controller.PullTrace())
-
- if not write_json:
- html_file = os.path.splitext(trace_files[0])[0] + '.html'
- _PackageTracesAsHtml(trace_files, html_file)
- trace_files = [html_file]
-
- if compress and len(trace_files) == 1:
- result = output or trace_files[0] + '.gz'
- _CompressFile(trace_files[0], result)
- elif len(trace_files) > 1:
- result = output or 'chrome-combined-trace-%s.zip' % util.GetTraceTimestamp()
- _ArchiveFiles(trace_files, result)
- elif output:
- result = output
- shutil.move(trace_files[0], result)
- else:
- result = trace_files[0]
-
+ trace_files = [controller.PullTrace() for controller in controllers]
+ result = trace_packager.PackageTraces(trace_files,
+ output=output,
+ compress=compress,
+ write_json=write_json)
ui.PrintMessage('done')
ui.PrintMessage('Trace written to file://%s' % os.path.abspath(result))
return result
diff --git a/build/android/chrome_profiler/trace_packager.py b/build/android/chrome_profiler/trace_packager.py
new file mode 100644
index 0000000..e56a7de
--- /dev/null
+++ b/build/android/chrome_profiler/trace_packager.py
@@ -0,0 +1,94 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import gzip
+import json
+import os
+import shutil
+import sys
+import zipfile
+
+from chrome_profiler import util
+
+from pylib import constants
+
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT,
+ 'third_party',
+ 'trace-viewer'))
+# pylint: disable=F0401
+from trace_viewer.build import trace2html
+
+
+def _PackageTracesAsHtml(trace_files, html_file):
+ with open(html_file, 'w') as f:
+ trace2html.WriteHTMLForTracesToFile(trace_files, f)
+ for trace_file in trace_files:
+ os.unlink(trace_file)
+
+
+def _CompressFile(host_file, output):
+ with gzip.open(output, 'wb') as out, \
+ open(host_file, 'rb') as input_file:
+ out.write(input_file.read())
+ os.unlink(host_file)
+
+
+def _ArchiveFiles(host_files, output):
+ with zipfile.ZipFile(output, 'w', zipfile.ZIP_DEFLATED) as z:
+ for host_file in host_files:
+ z.write(host_file)
+ os.unlink(host_file)
+
+
+def _MergeTracesIfNeeded(trace_files):
+ if len(trace_files) <= 1:
+ return trace_files
+ merge_candidates = []
+ for trace_file in trace_files:
+ with open(trace_file) as f:
+ # Try to detect a JSON file cheaply since that's all we can merge.
+ if f.read(1) != '{':
+ continue
+ f.seek(0)
+ try:
+ json_data = json.load(f)
+ except ValueError:
+ continue
+ merge_candidates.append((trace_file, json_data))
+ if len(merge_candidates) <= 1:
+ return trace_files
+
+ other_files = [f for f in trace_files
+ if not f in [c[0] for c in merge_candidates]]
+ merged_file, merged_data = merge_candidates[0]
+ for trace_file, json_data in merge_candidates[1:]:
+ for key, value in json_data.items():
+ if not merged_data.get(key) or json_data[key]:
+ merged_data[key] = value
+ os.unlink(trace_file)
+
+ with open(merged_file, 'w') as f:
+ json.dump(merged_data, f)
+ return [merged_file] + other_files
+
+
+def PackageTraces(trace_files, output=None, compress=False, write_json=False):
+ trace_files = _MergeTracesIfNeeded(trace_files)
+ if not write_json:
+ html_file = os.path.splitext(trace_files[0])[0] + '.html'
+ _PackageTracesAsHtml(trace_files, html_file)
+ trace_files = [html_file]
+
+ if compress and len(trace_files) == 1:
+ result = output or trace_files[0] + '.gz'
+ _CompressFile(trace_files[0], result)
+ elif len(trace_files) > 1:
+ result = output or 'chrome-combined-trace-%s.zip' % util.GetTraceTimestamp()
+ _ArchiveFiles(trace_files, result)
+ elif output:
+ result = output
+ shutil.move(trace_files[0], result)
+ else:
+ result = trace_files[0]
+ return result
diff --git a/build/android/chrome_profiler/trace_packager_unittest.py b/build/android/chrome_profiler/trace_packager_unittest.py
new file mode 100644
index 0000000..8c50d3a
--- /dev/null
+++ b/build/android/chrome_profiler/trace_packager_unittest.py
@@ -0,0 +1,35 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from chrome_profiler import trace_packager
+
+import json
+import tempfile
+import unittest
+
+
+class TracePackagerTest(unittest.TestCase):
+ def testJsonTraceMerging(self):
+ t1 = {'traceEvents': [{'ts': 123, 'ph': 'b'}]}
+ t2 = {'traceEvents': [], 'stackFrames': ['blah']}
+
+ # Both trace files will be merged to a third file and will get deleted in
+ # the process, so there's no need for NamedTemporaryFile to do the
+ # deletion.
+ with tempfile.NamedTemporaryFile(delete=False) as f1, \
+ tempfile.NamedTemporaryFile(delete=False) as f2:
+ f1.write(json.dumps(t1))
+ f2.write(json.dumps(t2))
+ f1.flush()
+ f2.flush()
+
+ with tempfile.NamedTemporaryFile() as output:
+ trace_packager.PackageTraces([f1.name, f2.name],
+ output.name,
+ compress=False,
+ write_json=True)
+ with open(output.name) as output:
+ output = json.load(output)
+ self.assertEquals(output['traceEvents'], t1['traceEvents'])
+ self.assertEquals(output['stackFrames'], t2['stackFrames'])
diff --git a/build/android/pylib/android_commands.py b/build/android/pylib/android_commands.py
index ee12cd4..960f2ae 100644
--- a/build/android/pylib/android_commands.py
+++ b/build/android/pylib/android_commands.py
@@ -544,19 +544,16 @@
raise errors.MsgException('Remount failed: %s' % out)
def RestartAdbdOnDevice(self):
- logging.info('Killing adbd on the device...')
- adb_pids = self.ExtractPid('adbd')
- if not adb_pids:
- raise errors.MsgException('Unable to obtain adbd pid')
- try:
- self.KillAll('adbd', signum=signal.SIGTERM, with_su=True)
- logging.info('Waiting for device to settle...')
+ logging.info('Restarting adbd on the device...')
+ with DeviceTempFile(self, suffix=".sh") as temp_script_file:
+ host_script_path = os.path.join(constants.DIR_SOURCE_ROOT,
+ 'build',
+ 'android',
+ 'pylib',
+ 'restart_adbd.sh')
+ self._adb.Push(host_script_path, temp_script_file.name)
+ self.RunShellCommand('. %s' % temp_script_file.name)
self._adb.SendCommand('wait-for-device')
- new_adb_pids = self.ExtractPid('adbd')
- if new_adb_pids == adb_pids:
- logging.warning('adbd on the device may not have been restarted.')
- except Exception as e:
- logging.error('Exception when trying to kill adbd on the device [%s]', e)
def RestartAdbServer(self):
"""Restart the adb server."""
@@ -985,8 +982,12 @@
md5sum_dist_path = os.path.join(constants.GetOutDirectory(),
'md5sum_dist')
assert os.path.exists(md5sum_dist_path), 'Please build md5sum.'
- command = 'push %s %s' % (md5sum_dist_path, MD5SUM_DEVICE_FOLDER)
- assert _HasAdbPushSucceeded(self._adb.SendCommand(command))
+ md5sum_dist_mtime = os.stat(md5sum_dist_path).st_mtime
+ if (md5sum_dist_path not in self._push_if_needed_cache or
+ self._push_if_needed_cache[md5sum_dist_path] != md5sum_dist_mtime):
+ command = 'push %s %s' % (md5sum_dist_path, MD5SUM_DEVICE_FOLDER)
+ assert _HasAdbPushSucceeded(self._adb.SendCommand(command))
+ self._push_if_needed_cache[md5sum_dist_path] = md5sum_dist_mtime
(_, md5_device_output) = self.GetAndroidToolStatusAndOutput(
self._util_wrapper + ' ' + MD5SUM_DEVICE_PATH + ' ' + device_path,
diff --git a/build/android/pylib/device/device_list.py b/build/android/pylib/device/device_list.py
new file mode 100644
index 0000000..0eb6acb
--- /dev/null
+++ b/build/android/pylib/device/device_list.py
@@ -0,0 +1,30 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module to keep track of devices across builds."""
+
+import os
+
+LAST_DEVICES_FILENAME = '.last_devices'
+LAST_MISSING_DEVICES_FILENAME = '.last_missing'
+
+
+def GetPersistentDeviceList(file_name):
+ """Returns a list of devices.
+
+ Args:
+ file_name: the file name containing a list of devices.
+
+ Returns: List of device serial numbers that were on the bot.
+ """
+ with open(file_name) as f:
+ return f.read().splitlines()
+
+
+def WritePersistentDeviceList(file_name, device_list):
+ path = os.path.dirname(file_name)
+ if not os.path.exists(path):
+ os.makedirs(path)
+ with open(file_name, 'w') as f:
+ f.write('\n'.join(set(device_list)))
diff --git a/build/android/pylib/gtest/filter/gfx_unittests_disabled b/build/android/pylib/gtest/filter/gfx_unittests_disabled
new file mode 100644
index 0000000..aadc4de
--- /dev/null
+++ b/build/android/pylib/gtest/filter/gfx_unittests_disabled
@@ -0,0 +1,25 @@
+FontListTest.FontDescString_Derive
+FontListTest.FontDescString_FromFont
+FontListTest.FontDescString_FromFontNamesStyleAndSize
+FontListTest.FontDescString_FromFontVector
+FontListTest.FontDescString_FromFontWithNonNormalStyle
+FontListTest.Fonts_Derive
+FontListTest.Fonts_DeriveWithSizeDelta
+FontListTest.Fonts_DescStringWithStyleInFlexibleFormat_RoundTrip
+FontListTest.Fonts_FontVector_RoundTrip
+FontListTest.Fonts_FromDescString
+FontListTest.Fonts_FromDescStringInFlexibleFormat
+FontListTest.Fonts_FromDescStringWithStyleInFlexibleFormat
+FontListTest.Fonts_FromFont
+FontListTest.Fonts_FromFontVector
+FontListTest.Fonts_FromFontWithNonNormalStyle
+FontListTest.Fonts_GetHeight_GetBaseline
+FontListTest.Fonts_GetStyle
+FontTest.Ascent
+FontTest.AvgWidths
+FontTest.CapHeight
+FontTest.GetActualFontNameForTesting
+FontTest.Height
+FontTest.LoadArial
+FontTest.LoadArialBold
+TextUtilsTest.GetStringWidth
diff --git a/build/android/pylib/gtest/filter/net_unittests_disabled b/build/android/pylib/gtest/filter/net_unittests_disabled
index da727af..28e1db1 100644
--- a/build/android/pylib/gtest/filter/net_unittests_disabled
+++ b/build/android/pylib/gtest/filter/net_unittests_disabled
@@ -3,6 +3,9 @@
# Bug: 171812
MultiThreadedCertVerifierTest.CancelRequest
+# Bug: 380340
+SSLServerSocketTest.Handshake
+
PythonUtils.PythonRunTime
URLRequestTestHTTP.HTTPSToHTTPRedirectNoRefererTest
VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/0
diff --git a/build/android/pylib/gtest/filter/ui_unittests_disabled b/build/android/pylib/gtest/filter/ui_unittests_disabled
index de04dba..2f6e9af 100644
--- a/build/android/pylib/gtest/filter/ui_unittests_disabled
+++ b/build/android/pylib/gtest/filter/ui_unittests_disabled
@@ -1,31 +1,3 @@
-# List of suppressions
-# This file was automatically generated by build/android/run_tests.py
CanvasTest.StringSizeEmptyString
CanvasTest.StringWidth
-ClipboardTest.RTFTest
-FontListTest.FontDescString_Derive
-FontListTest.FontDescString_FromFont
-FontListTest.FontDescString_FromFontNamesStyleAndSize
-FontListTest.FontDescString_FromFontVector
-FontListTest.FontDescString_FromFontWithNonNormalStyle
-FontListTest.Fonts_Derive
-FontListTest.Fonts_DeriveWithSizeDelta
-FontListTest.Fonts_DescStringWithStyleInFlexibleFormat_RoundTrip
-FontListTest.Fonts_FontVector_RoundTrip
-FontListTest.Fonts_FromDescString
-FontListTest.Fonts_FromDescStringInFlexibleFormat
-FontListTest.Fonts_FromDescStringWithStyleInFlexibleFormat
-FontListTest.Fonts_FromFont
-FontListTest.Fonts_FromFontVector
-FontListTest.Fonts_FromFontWithNonNormalStyle
-FontListTest.Fonts_GetHeight_GetBaseline
-FontListTest.Fonts_GetStyle
-FontTest.Ascent
-FontTest.AvgWidths
-FontTest.CapHeight
-FontTest.GetActualFontNameForTesting
-FontTest.Height
-FontTest.LoadArial
-FontTest.LoadArialBold
ResourceBundleTest.DelegateGetFontList
-TextUtilsTest.GetStringWidth
diff --git a/build/android/pylib/perf/setup.py b/build/android/pylib/perf/setup.py
index cf84456..5314d36 100644
--- a/build/android/pylib/perf/setup.py
+++ b/build/android/pylib/perf/setup.py
@@ -6,15 +6,73 @@
import json
import fnmatch
+import logging
import os
import shutil
+from pylib import android_commands
from pylib import constants
from pylib import forwarder
+from pylib.device import device_list
from pylib.perf import test_runner
from pylib.utils import test_environment
+def _GetAllDevices():
+ devices_path = os.path.join(os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+ device_list.LAST_DEVICES_FILENAME)
+ try:
+ devices = device_list.GetPersistentDeviceList(devices_path)
+ except IOError as e:
+ logging.error('Unable to find %s [%s]', devices_path, e)
+ devices = android_commands.GetAttachedDevices()
+ return sorted(devices)
+
+
+def _GetStepsDictFromSingleStep(test_options):
+ # Running a single command, build the tests structure.
+ steps_dict = {
+ 'version': 1,
+ 'steps': {
+ 'single_step': {
+ 'device_affinity': 0,
+ 'cmd': test_options.single_step
+ },
+ }
+ }
+ return steps_dict
+
+# TODO(bulach): remove once it rolls downstream, crbug.com/378862.
+def _GetStepsDictFromV0(steps_v0):
+ steps_dict = {
+ 'version': 1,
+ 'steps': {},
+ }
+ affinity = 0
+ for step in steps_v0:
+ steps_dict['steps'][step[0]] = {
+ 'device_affinity': affinity,
+ 'cmd': step[1],
+ }
+ affinity += 1
+ return steps_dict
+
+
+def _GetStepsDict(test_options):
+ if test_options.single_step:
+ return _GetStepsDictFromSingleStep(test_options)
+ if test_options.steps:
+ with file(test_options.steps, 'r') as f:
+ steps = json.load(f)
+ # TODO(bulach): remove once it rolls downstream, crbug.com/378862.
+ if isinstance(steps, list):
+ return _GetStepsDictFromV0(steps)
+
+ # Already using the new format.
+ assert steps['version'] == 1
+ return steps
+
+
def Setup(test_options):
"""Create and return the test runner factory and tests.
@@ -22,7 +80,7 @@
test_options: A PerformanceOptions object.
Returns:
- A tuple of (TestRunnerFactory, tests).
+ A tuple of (TestRunnerFactory, tests, devices).
"""
# TODO(bulach): remove this once the bot side lands. BUG=318369
constants.SetBuildType('Release')
@@ -34,33 +92,24 @@
test_environment.CleanupLeftoverProcesses()
forwarder.Forwarder.UseMultiprocessing()
- if test_options.single_step:
- # Running a single command, build the tests structure.
- tests = [['single_step', test_options.single_step]]
+ # We want to keep device affinity, so return all devices ever seen.
+ all_devices = _GetAllDevices()
- if test_options.steps:
- with file(test_options.steps, 'r') as f:
- tests = json.load(f)
-
- # The list is necessary to keep the steps order, but internally
- # the format is squashed from a list of lists into a single dict:
- # [["A", "cmd"], ["B", "cmd"]] into {"A": "cmd", "B": "cmd"}
- sorted_test_names = [i[0] for i in tests]
- tests_dict = dict(tests)
+ steps_dict = _GetStepsDict(test_options)
+ sorted_step_names = sorted(steps_dict['steps'].keys())
if test_options.test_filter:
- sorted_test_names = fnmatch.filter(sorted_test_names,
+ sorted_step_names = fnmatch.filter(sorted_step_names,
test_options.test_filter)
- tests_dict = dict((k, v) for k, v in tests_dict.iteritems()
- if k in sorted_test_names)
flaky_steps = []
if test_options.flaky_steps:
with file(test_options.flaky_steps, 'r') as f:
flaky_steps = json.load(f)
- def TestRunnerFactory(device, _shard_index):
+ def TestRunnerFactory(device, shard_index):
return test_runner.TestRunner(
- test_options, device, tests_dict, flaky_steps)
+ test_options, device, shard_index, len(all_devices),
+ steps_dict, flaky_steps)
- return (TestRunnerFactory, sorted_test_names)
+ return (TestRunnerFactory, sorted_step_names, all_devices)
diff --git a/build/android/pylib/perf/test_runner.py b/build/android/pylib/perf/test_runner.py
index 71a613e..1d0a25c 100644
--- a/build/android/pylib/perf/test_runner.py
+++ b/build/android/pylib/perf/test_runner.py
@@ -20,8 +20,22 @@
with the step results previously saved. The buildbot will then process the graph
data accordingly.
-
The JSON steps file contains a dictionary in the format:
+{ "version": int,
+ "steps": {
+ "foo": {
+ "device_affinity": int,
+ "cmd": "script_to_execute foo"
+ },
+ "bar": {
+ "device_affinity": int,
+ "cmd": "script_to_execute bar"
+ }
+ }
+}
+
+# TODO(bulach): remove once it rolls downstream, crbug.com/378862.
+The OLD JSON steps file contains a dictionary in the format:
[
["step_name_foo", "script_to_execute foo"],
["step_name_bar", "script_to_execute bar"]
@@ -41,6 +55,7 @@
--device: the serial number to be passed to all adb commands.
"""
+import collections
import datetime
import logging
import os
@@ -84,7 +99,7 @@
def PrintSummary(test_names):
logging.info('*' * 80)
logging.info('Sharding summary')
- total_time = 0
+ device_total_time = collections.defaultdict(int)
for test_name in test_names:
file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name)
if not os.path.exists(file_name):
@@ -95,8 +110,10 @@
logging.info('%s : exit_code=%d in %d secs at %s',
result['name'], result['exit_code'], result['total_time'],
result['device'])
- total_time += result['total_time']
- logging.info('Total steps time: %d secs', total_time)
+ device_total_time[result['device']] += result['total_time']
+ for device, device_time in device_total_time.iteritems():
+ logging.info('Total for device %s : %d secs', device, device_time)
+ logging.info('Total steps time: %d secs', sum(device_total_time.values()))
class _HeartBeatLogger(object):
@@ -131,17 +148,22 @@
class TestRunner(base_test_runner.BaseTestRunner):
- def __init__(self, test_options, device, tests, flaky_tests):
+ def __init__(self, test_options, device, shard_index, max_shard, tests,
+ flaky_tests):
"""A TestRunner instance runs a perf test on a single device.
Args:
test_options: A PerfOptions object.
device: Device to run the tests.
+ shard_index: the index of this device.
+ max_shard: the total number of shards (i.e. number of devices).
tests: a dict mapping test_name to command.
flaky_tests: a list of flaky test_name.
"""
super(TestRunner, self).__init__(device, None, 'Release')
self._options = test_options
+ self._shard_index = shard_index
+ self._max_shard = max_shard
self._tests = tests
self._flaky_tests = flaky_tests
@@ -164,6 +186,16 @@
result['name']), 'w') as f:
f.write(pickle.dumps(result))
+ def _CheckDeviceAffinity(self, test_name):
+ """Returns True if test_name has affinity for this shard."""
+ affinity = (self._tests['steps'][test_name]['device_affinity'] %
+ self._max_shard)
+ if self._shard_index == affinity:
+ return True
+ logging.info('Skipping %s on %s (affinity is %s, device is %s)',
+ test_name, self.device_serial, affinity, self._shard_index)
+ return False
+
def _LaunchPerfTest(self, test_name):
"""Runs a perf test.
@@ -173,6 +205,9 @@
Returns:
A tuple containing (Output, base_test_result.ResultType)
"""
+ if not self._CheckDeviceAffinity(test_name):
+ return '', base_test_result.ResultType.PASS
+
try:
logging.warning('Unmapping device ports')
forwarder.Forwarder.UnmapAllDevicePorts(self.device)
@@ -181,7 +216,8 @@
logging.error('Exception when tearing down device %s', e)
cmd = ('%s --device %s' %
- (self._tests[test_name], self.device.old_interface.GetDevice()))
+ (self._tests['steps'][test_name]['cmd'],
+ self.device_serial))
logging.info('%s : %s', test_name, cmd)
start_time = datetime.datetime.now()
@@ -211,7 +247,7 @@
exit_code = -1
logging.info('%s : exit_code=%d in %d secs at %s',
test_name, exit_code, (end_time - start_time).seconds,
- self.device.old_interface.GetDevice())
+ self.device_serial)
result_type = base_test_result.ResultType.FAIL
if exit_code == 0:
result_type = base_test_result.ResultType.PASS
@@ -230,7 +266,7 @@
'actual_exit_code': actual_exit_code,
'result_type': result_type,
'total_time': (end_time - start_time).seconds,
- 'device': self.device.old_interface.GetDevice(),
+ 'device': self.device_serial,
'cmd': cmd,
}
self._SaveResult(persisted_result)
diff --git a/build/android/pylib/restart_adbd.sh b/build/android/pylib/restart_adbd.sh
new file mode 100755
index 0000000..393b2eb
--- /dev/null
+++ b/build/android/pylib/restart_adbd.sh
@@ -0,0 +1,20 @@
+#!/system/bin/sh
+
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android shell script to restart adbd on the device. This has to be run
+# atomically as a shell script because stopping adbd prevents further commands
+# from running (even if called in the same adb shell).
+
+trap '' HUP
+trap '' TERM
+trap '' PIPE
+
+function restart() {
+ stop adbd
+ start adbd
+}
+
+restart &
diff --git a/build/android/pylib/symbols/elf_symbolizer.py b/build/android/pylib/symbols/elf_symbolizer.py
index 90e99e5..b294654 100644
--- a/build/android/pylib/symbols/elf_symbolizer.py
+++ b/build/android/pylib/symbols/elf_symbolizer.py
@@ -15,6 +15,13 @@
import threading
+# addr2line builds a possibly infinite memory cache that can exhaust
+# the computer's memory if allowed to grow for too long. This constant
+# controls how many lookups we do before restarting the process. 4000
+# gives near peak performance without extreme memory usage.
+ADDR2LINE_RECYCLE_LIMIT = 4000
+
+
class ELFSymbolizer(object):
"""An uber-fast (multiprocessing, pipelined and asynchronous) ELF symbolizer.
@@ -117,6 +124,7 @@
# Essentially, this drains all the addr2line(s) out queues.
for a2l_to_purge in self._a2l_instances:
a2l_to_purge.ProcessAllResolvedSymbolsInQueue()
+ a2l_to_purge.RecycleIfNecessary()
# Find the best instance according to this logic:
# 1. Find an existing instance with the shortest queue.
@@ -184,6 +192,10 @@
# separate field because turned out to be a perf hot-spot.
self.queue_size = 0
+ # Keep track of the number of symbols a process has processed to
+ # avoid a single process growing too big and using all the memory.
+ self._processed_symbols_count = 0
+
# Objects required to handle the addr2line subprocess.
self._proc = None # Subprocess.Popen(...) instance.
self._thread = None # Threading.thread instance.
@@ -251,6 +263,15 @@
break
self._ProcessSymbolOutput(lines)
+ def RecycleIfNecessary(self):
+ """Restarts the process if it has been used for too long.
+
+ A long running addr2line process will consume excessive amounts
+ of memory without any gain in performance."""
+ if self._processed_symbols_count >= ADDR2LINE_RECYCLE_LIMIT:
+ self._RestartAddr2LineProcess()
+
+
def Terminate(self):
"""Kills the underlying addr2line process.
@@ -297,6 +318,7 @@
if not innermost_sym_info:
innermost_sym_info = sym_info
+ self._processed_symbols_count += 1
self._symbolizer.callback(innermost_sym_info, callback_arg)
def _RestartAddr2LineProcess(self):
@@ -325,6 +347,8 @@
self._thread.daemon = True # Don't prevent early process exit.
self._thread.start()
+ self._processed_symbols_count = 0
+
# Replay the pending requests on the new process (only for the case
# of a hung addr2line timing out during the game).
for (addr, _, _) in self._request_queue:
diff --git a/build/android/test_runner.py b/build/android/test_runner.py
index a0c3b72..897a2bd 100755
--- a/build/android/test_runner.py
+++ b/build/android/test_runner.py
@@ -633,17 +633,21 @@
return exit_code
-def _RunPerfTests(options, args, error_func, devices):
+def _RunPerfTests(options, args, error_func):
"""Subcommand of RunTestsCommands which runs perf tests."""
perf_options = ProcessPerfTestOptions(options, args, error_func)
# Just print the results from a single previously executed step.
if perf_options.print_step:
return perf_test_runner.PrintTestOutput(perf_options.print_step)
- runner_factory, tests = perf_setup.Setup(perf_options)
+ runner_factory, tests, devices = perf_setup.Setup(perf_options)
+ # shard=False means that each device will get the full list of tests
+ # and then each one will decide their own affinity.
+ # shard=True means each device will pop the next test available from a queue,
+ # which increases throughput but has no affinity.
results, _ = test_dispatcher.RunTests(
- tests, runner_factory, devices, shard=True, test_timeout=None,
+ tests, runner_factory, devices, shard=False, test_timeout=None,
num_retries=options.num_retries)
report_results.LogFull(
@@ -731,7 +735,7 @@
elif command == 'monkey':
return _RunMonkeyTests(options, option_parser.error, devices)
elif command == 'perf':
- return _RunPerfTests(options, args, option_parser.error, devices)
+ return _RunPerfTests(options, args, option_parser.error)
else:
raise Exception('Unknown test type.')
diff --git a/build/build_config.h b/build/build_config.h
index c384347..6e31a73 100644
--- a/build/build_config.h
+++ b/build/build_config.h
@@ -4,7 +4,7 @@
// This file adds defines about the platform we're currently building on.
// Operating System:
-// OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX)
+// OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX) / OS_NACL
// Compiler:
// COMPILER_MSVC / COMPILER_GCC
// Processor:
@@ -19,15 +19,16 @@
#endif
// A set of macros to use for platform detection.
-#if defined(ANDROID)
+#if defined(__native_client__)
+// __native_client__ must be first, so that other OS_ defines are not set.
+#define OS_NACL 1
+#elif defined(ANDROID)
#define OS_ANDROID 1
#elif defined(__APPLE__)
#define OS_MACOSX 1
#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
#define OS_IOS 1
#endif // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
-#elif defined(__native_client__)
-#define OS_NACL 1
#elif defined(__linux__)
#define OS_LINUX 1
// include a system header to pull in features.h for glibc/uclibc macros.
diff --git a/build/common.gypi b/build/common.gypi
index 0e9cdea..73c2a44 100644
--- a/build/common.gypi
+++ b/build/common.gypi
@@ -41,7 +41,7 @@
['OS=="win" or OS=="mac" or OS=="ios"', {
'host_arch%': 'ia32',
}, {
- 'host_arch%': '<!(python <(DEPTH)/build/linux/detect_host_arch.py)',
+ 'host_arch%': '<!pymod_do_main(detect_host_arch)',
}],
],
},
@@ -166,6 +166,9 @@
'sysroot%': '',
'chroot_cmd%': '',
+ # The system libdir used for this ABI.
+ 'system_libdir%': 'lib',
+
'conditions': [
# Ash needs Aura.
['use_aura==0', {
@@ -261,6 +264,7 @@
'arm_version%': '<(arm_version)',
'sysroot%': '<(sysroot)',
'chroot_cmd%': '<(chroot_cmd)',
+ 'system_libdir%': '<(system_libdir)',
# Set to 1 to enable fast builds. Set to 2 for even faster builds
# (it disables debug info for fastest compilation - only for use
@@ -288,8 +292,6 @@
# Detect NEON support at run-time.
'arm_neon_optional%': 0,
- # The system libdir used for this ABI.
- 'system_libdir%': 'lib',
# Use libjpeg-turbo as the JPEG codec used by Chromium.
'use_libjpeg_turbo%': 1,
@@ -362,18 +364,23 @@
# See https://sites.google.com/a/chromium.org/dev/developers/testing/leaksanitizer
'lsan%': 0,
- # Enable building with TSAN (Clang's -fsanitize=thread option).
+ # Enable building with TSan (Clang's -fsanitize=thread option).
# -fsanitize=thread only works with clang, but tsan=1 implies clang=1
# See http://clang.llvm.org/docs/ThreadSanitizer.html
'tsan%': 0,
'tsan_blacklist%': '<(PRODUCT_DIR)/../../tools/valgrind/tsan_v2/ignores.txt',
- # Enable building with MSAN (Clang's -fsanitize=memory option).
+ # Enable building with MSan (Clang's -fsanitize=memory option).
# MemorySanitizer only works with clang, but msan=1 implies clang=1
# See http://clang.llvm.org/docs/MemorySanitizer.html
'msan%': 0,
'msan_blacklist%': '<(PRODUCT_DIR)/../../tools/msan/blacklist.txt',
+ # Enable building with UBSan (Clang's -fsanitize=undefined option).
+ # -fsanitize=undefined only works with clang, but ubsan=1 implies clang=1
+ # See http://clang.llvm.org/docs/UsersManual.html
+ 'ubsan%': 0,
+
# Use the dynamic libraries instrumented by one of the sanitizers
# instead of the standard system libraries.
'use_instrumented_libraries%': 0,
@@ -550,6 +557,9 @@
'spdy_proxy_auth_property%' : '',
'spdy_proxy_auth_value%' : '',
'data_reduction_proxy_probe_url%' : '',
+ 'data_reduction_proxy_ssl_origin%' : '',
+ 'data_reduction_proxy_alt_origin%' : '',
+ 'data_reduction_proxy_alt_fallback_origin%' : '',
'enable_mdns%' : 0,
'enable_service_discovery%': 0,
'enable_wifi_bootstrapping%': 0,
@@ -745,7 +755,7 @@
'chromium_win_pch%': 1
}],
- ['chromeos==1 or OS=="android" or OS=="ios"', {
+ ['chromeos==1 or OS=="android" or OS=="ios" or desktop_linux==1', {
'enable_plugin_installation%': 0,
}, {
'enable_plugin_installation%': 1,
@@ -972,6 +982,8 @@
'google_api_key%': '',
'google_default_client_id%': '',
'google_default_client_secret%': '',
+ # Native Client is enabled by default.
+ 'disable_nacl%': '0',
},
# Copy conditionally-set variables out one scope.
@@ -1042,6 +1054,7 @@
'msan_blacklist%': '<(msan_blacklist)',
'tsan%': '<(tsan)',
'tsan_blacklist%': '<(tsan_blacklist)',
+ 'ubsan%': '<(ubsan)',
'use_instrumented_libraries%': '<(use_instrumented_libraries)',
'use_custom_libcxx%': '<(use_custom_libcxx)',
'clang_type_profiler%': '<(clang_type_profiler)',
@@ -1099,6 +1112,9 @@
'spdy_proxy_auth_property%': '<(spdy_proxy_auth_property)',
'spdy_proxy_auth_value%': '<(spdy_proxy_auth_value)',
'data_reduction_proxy_probe_url%': '<(data_reduction_proxy_probe_url)',
+ 'data_reduction_proxy_ssl_origin%' : '<(data_reduction_proxy_ssl_origin)',
+ 'data_reduction_proxy_alt_origin%' : '<(data_reduction_proxy_alt_origin)',
+ 'data_reduction_proxy_alt_fallback_origin%' : '<(data_reduction_proxy_alt_fallback_origin)',
'enable_mdns%' : '<(enable_mdns)',
'enable_service_discovery%' : '<(enable_service_discovery)',
'enable_wifi_bootstrapping%': '<(enable_wifi_bootstrapping)',
@@ -1333,8 +1349,8 @@
# Disable Dart by default.
'enable_dart%': 0,
- # Native Client is enabled by default.
- 'disable_nacl%': 0,
+ # Copy out the setting of disable_nacl.
+ 'disable_nacl%': '<(disable_nacl)',
# Portable Native Client is enabled by default.
'disable_pnacl%': 0,
@@ -1429,7 +1445,7 @@
# compiler_version works with clang.
# TODO(glider): set clang to 1 earlier for ASan and TSan builds so
# that it takes effect here.
- ['clang==0 and asan==0 and lsan==0 and tsan==0 and msan==0', {
+ ['clang==0 and asan==0 and lsan==0 and tsan==0 and msan==0 and ubsan==0', {
'binutils_version%': '<!pymod_do_main(compiler_version target assembler)',
}],
# On Android we know the binutils version in the toolchain.
@@ -2018,6 +2034,9 @@
'clang%': 1,
'use_allocator%': 'none',
}],
+ ['ubsan==1', {
+ 'clang%': 1,
+ }],
['asan==1 and OS=="mac"', {
# TODO(glider): we do not strip ASan binaries until the dynamic ASan
# runtime is fully adopted. See http://crbug.com/242503.
@@ -2289,11 +2308,6 @@
# Targets are by default not nacl untrusted code.
'nacl_untrusted_build%': 0,
- # Enable a new Gamepad interface.
- # TODO(cdumez): This is temporary and should go away once the chromium
- # and blink interfaces are in sync, http://crbug.com/344556.
- 'enable_new_gamepad_api%': 1,
-
'pnacl_compile_flags': [
# pnacl uses the clang compiler so we need to suppress all the
# same warnings as we do for clang.
@@ -2463,9 +2477,6 @@
['enable_hidpi==1', {
'defines': ['ENABLE_HIDPI=1'],
}],
- ['enable_new_gamepad_api==1', {
- 'defines': ['ENABLE_NEW_GAMEPAD_API=1'],
- }],
['native_discardable_memory==1', {
'defines': ['DISCARDABLE_MEMORY_ALWAYS_SUPPORTED_NATIVELY'],
}],
@@ -2718,6 +2729,18 @@
'defines': [
'DATA_REDUCTION_PROXY_PROBE_URL="<(data_reduction_proxy_probe_url)"'],
}],
+ ['data_reduction_proxy_ssl_origin != ""', {
+ 'defines': [
+ 'DATA_REDUCTION_PROXY_SSL_ORIGIN="<(data_reduction_proxy_ssl_origin)"'],
+ }],
+ ['data_reduction_proxy_alt_origin != ""', {
+ 'defines': [
+ 'DATA_REDUCTION_PROXY_ALT_ORIGIN="<(data_reduction_proxy_alt_origin)"'],
+ }],
+ ['data_reduction_proxy_alt_fallback_origin != ""', {
+ 'defines': [
+ 'DATA_REDUCTION_PROXY_ALT_FALLBACK_ORIGIN="<(data_reduction_proxy_alt_fallback_origin)"'],
+ }],
['enable_mdns==1', {
'defines': ['ENABLE_MDNS=1'],
}],
@@ -2950,6 +2973,9 @@
'msvs_configuration_platform': 'x64',
'msvs_settings': {
'VCLinkerTool': {
+ # Make sure to understand http://crbug.com/361720 if you want to
+ # increase this.
+ 'MinimumRequiredVersion': '5.02', # Server 2003.
'TargetMachine': '17', # x86 - 64
'AdditionalLibraryDirectories!':
['<(windows_sdk_path)/Lib/win8/um/x86'],
@@ -3781,7 +3807,7 @@
}],
# Common options for AddressSanitizer, LeakSanitizer,
# ThreadSanitizer and MemorySanitizer.
- ['asan==1 or lsan==1 or tsan==1 or msan==1', {
+ ['asan==1 or lsan==1 or tsan==1 or msan==1 or ubsan==1', {
'target_conditions': [
['_toolset=="target"', {
'cflags': [
@@ -3791,6 +3817,12 @@
'cflags!': [
'-fomit-frame-pointer',
],
+ }],
+ ],
+ }],
+ ['asan==1 or lsan==1 or tsan==1 or msan==1', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
'ldflags!': [
# Functions interposed by the sanitizers can make ld think
# that some libraries aren't needed when they actually are,
@@ -3831,6 +3863,23 @@
}],
],
}],
+ ['ubsan==1', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': [
+ '-fsanitize=undefined',
+ # -fsanitize=vptr is incompatible with -fno-rtti.
+ '-fno-sanitize=vptr',
+ '-w', # http://crbug.com/162783
+ ],
+ 'ldflags': [
+ '-fsanitize=undefined',
+ # -fsanitize=vptr is incompatible with -fno-rtti.
+ '-fno-sanitize=vptr',
+ ],
+ }],
+ ],
+ }],
['asan_coverage!=0', {
'target_conditions': [
['_toolset=="target"', {
diff --git a/build/config/BUILD.gn b/build/config/BUILD.gn
index 21c54f9..3fb9c9b 100644
--- a/build/config/BUILD.gn
+++ b/build/config/BUILD.gn
@@ -47,11 +47,6 @@
"V8_DEPRECATION_WARNINGS", # Don't use deprecated V8 APIs anywhere.
# Temporary suppression until Blink code can be removed.
"BLINK_SCALE_FILTERS_AT_RECORD_TIME",
-
- # Enable a new Gamepad interface.
- # TODO(cdumez): This is temporary and should go away once the chromium
- # and blink interfaces are in sync, http://crbug.com/344556.
- "ENABLE_NEW_GAMEPAD_API=1",
]
if (cld_version > 0) {
@@ -131,6 +126,9 @@
if (enable_webrtc) {
defines += [ "ENABLE_WEBRTC=1" ]
}
+ if (disable_ftp_support) {
+ defines += [ "DISABLE_FTP_SUPPORT=1" ]
+ }
}
# Debug/release ----------------------------------------------------------------
diff --git a/build/config/BUILDCONFIG.gn b/build/config/BUILDCONFIG.gn
index f7c81c7..3df338d 100644
--- a/build/config/BUILDCONFIG.gn
+++ b/build/config/BUILDCONFIG.gn
@@ -157,6 +157,8 @@
is_win = false
}
+is_desktop_linux = is_linux && !is_chromeos
+
# =============================================================================
# CPU ARCHITECTURE
# =============================================================================
@@ -291,7 +293,7 @@
if (is_component_build) {
component_mode = "shared_library"
} else {
- component_mode = "static_library"
+ component_mode = "source_set"
}
# These Sanitizers all imply using the Clang compiler. On Windows they either
@@ -327,7 +329,15 @@
"//build/config/win:sdk",
"//build/config/win:unicode",
]
-} else if (is_linux) {
+}
+if (is_posix) {
+ _native_compiler_configs += [
+ "//build/config/gcc:no_exceptions",
+ "//build/config/gcc:symbol_visibility_hidden",
+ ]
+}
+
+if (is_linux) {
_native_compiler_configs += [ "//build/config/linux:sdk", ]
} else if (is_mac) {
_native_compiler_configs += [ "//build/config/mac:sdk", ]
@@ -336,9 +346,7 @@
} else if (is_android) {
_native_compiler_configs += [ "//build/config/android:sdk", ]
}
-if (!is_win) {
- _native_compiler_configs += [ "//build/config/gcc:symbol_visibility_hidden" ]
-}
+
if (is_clang) {
_native_compiler_configs += [
"//build/config/clang:find_bad_constructs",
diff --git a/build/config/allocator.gni b/build/config/allocator.gni
index ef10c3d..3c4fe90 100644
--- a/build/config/allocator.gni
+++ b/build/config/allocator.gni
@@ -2,7 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-if (is_android || cpu_arch == "mipsel") {
+if (is_android || cpu_arch == "mipsel" || is_mac) {
_default_allocator = "none"
} else {
_default_allocator = "tcmalloc"
diff --git a/build/config/android/config.gni b/build/config/android/config.gni
index f28165b..1e3601d 100644
--- a/build/config/android/config.gni
+++ b/build/config/android/config.gni
@@ -46,8 +46,12 @@
# than just the current one) since these are needed by the Android toolchain
# file to define toolchains for all possible targets in one pass.
- # Path to the Android NDK.
+ # Path to the Android NDK and SDK.
android_ndk_root = "//third_party/android_tools/ndk"
+ android_sdk_root = "//third_party/android_tools/sdk"
+
+ # Path to the SDK's android.jar
+ android_sdk_jar = "$android_sdk_root/platforms/android-19/android.jar"
# Subdirectories inside android_ndk_root that contain the sysroot for the
# associated platform.
diff --git a/build/config/android/rules.gni b/build/config/android/rules.gni
index abf64a1..d63c7bf 100644
--- a/build/config/android/rules.gni
+++ b/build/config/android/rules.gni
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("config.gni")
import("internal_rules.gni")
# Declare a jni target
@@ -30,17 +31,13 @@
base_output_dir = "${root_gen_dir}/${target_name}/${jni_package}"
jni_output_dir = "${base_output_dir}/jni"
- jni_generator_includes =
- rebase_path("//base/android/jni_generator/jni_generator_helper.h")
+ jni_generator_include = "//base/android/jni_generator/jni_generator_helper.h"
- config("jni_includes_${target_name}") {
- include_dirs = [ base_output_dir ]
- }
-
- action_foreach(target_name) {
+ foreach_target_name = "${target_name}__jni_gen"
+ action_foreach(foreach_target_name) {
script = "//base/android/jni_generator/jni_generator.py"
- direct_dependent_configs = [ ":jni_includes_${target_name}" ]
sources = invoker.sources
+ source_prereqs = [ jni_generator_include ]
outputs = [
"${jni_output_dir}/{{source_name_part}}_jni.h"
]
@@ -49,15 +46,117 @@
"--input_file={{source}}",
"--optimize_generation=1",
"--ptr_type=long",
- "--output_dir", rebase_path(jni_output_dir),
- "--includes=${jni_generator_includes}",
+ "--output_dir", rebase_path(jni_output_dir, root_build_dir),
+ "--includes", rebase_path(jni_generator_include, "//"),
]
if (defined(invoker.jni_generator_jarjar_file)) {
args += [
- "--jarjar", rebase_path(jni_generator_jarjar_file)
+ "--jarjar", rebase_path(jni_generator_jarjar_file, root_build_dir),
]
}
}
+
+ config("jni_includes_${target_name}") {
+ include_dirs = [ base_output_dir ]
+ }
+
+ group(target_name) {
+ deps = [ ":$foreach_target_name" ]
+ direct_dependent_configs = [ ":jni_includes_${target_name}" ]
+
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ if (defined(invoker.forward_dependent_configs_from)) {
+ forward_dependent_configs_from = invoker.forward_dependent_configs_from
+ }
+ }
+}
+
+
+# Declare a jni target for a prebuilt jar
+#
+# This target generates the native jni bindings for a set of classes in a .jar.
+#
+# See base/android/jni_generator/jni_generator.py for more info about the
+# format of generating JNI bindings.
+#
+# Variables
+# classes: list of .class files in the jar to generate jni for. These should
+# include the full path to the .class file.
+# jni_package: subdirectory path for generated bindings
+# jar_file: the path to the .jar. If not provided, will default to the sdk's
+# android.jar
+#
+# Example
+# generate_jar_jni("foo_jni") {
+# classes = [
+# "android/view/Foo.class",
+# ]
+# jni_package = "foo"
+# }
+template("generate_jar_jni") {
+ assert(defined(invoker.classes))
+ assert(defined(invoker.jni_package))
+
+ if (defined(invoker.jar_file)) {
+ jar_file = invoker.jar_file
+ } else {
+ jar_file = android_sdk_jar
+ }
+
+ jni_package = invoker.jni_package
+ base_output_dir = "${root_gen_dir}/${target_name}/${jni_package}"
+ jni_output_dir = "${base_output_dir}/jni"
+
+ jni_generator_include =
+ rebase_path("//base/android/jni_generator/jni_generator_helper.h",
+ root_build_dir)
+
+ # TODO(cjhopman): make jni_generator.py support generating jni for multiple
+ # .class files from a .jar.
+ jni_actions = []
+ foreach(class, invoker.classes) {
+ classname_list = process_file_template(
+ [class], "{{source_name_part}}")
+ classname = classname_list[0]
+ jni_target_name = "${target_name}__jni_${classname}"
+ jni_actions += [ ":$jni_target_name" ]
+ action(jni_target_name) {
+ script = "//base/android/jni_generator/jni_generator.py"
+ sources = [
+ jni_generator_include,
+ jar_file,
+ ]
+ outputs = [
+ "${jni_output_dir}/${classname}_jni.h"
+ ]
+
+ args = [
+ "--jar_file", rebase_path(jar_file, root_build_dir),
+ "--input_file", class,
+ "--optimize_generation=1",
+ "--ptr_type=long",
+ "--output_dir", rebase_path(jni_output_dir, root_build_dir),
+ "--includes", rebase_path(jni_generator_include, "//"),
+ ]
+ }
+ }
+
+ config("jni_includes_${target_name}") {
+ include_dirs = [ base_output_dir ]
+ }
+
+ group(target_name) {
+ deps = jni_actions
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ if (defined(invoker.forward_dependent_configs_from)) {
+ forward_dependent_configs_from = invoker.forward_dependent_configs_from
+ }
+ direct_dependent_configs = [ ":jni_includes_${target_name}" ]
+ }
}
# Declare a target for c-preprocessor-generated java files
diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn
index eb33be9..de6de2b 100644
--- a/build/config/compiler/BUILD.gn
+++ b/build/config/compiler/BUILD.gn
@@ -43,10 +43,14 @@
cflags += [
"/Gy", # Enable function-level linking.
"/GS", # Enable buffer security checking.
- "/EHsc", # Assume C functions can't throw exceptions and don't catch
- # structured exceptions (only C++ ones).
"/FS", # Preserve previous PDB behavior.
]
+ if (is_component_build) {
+ cflags += [
+ "/EHsc", # Assume C functions can't throw exceptions and don't catch
+ # structured exceptions (only C++ ones).
+ ]
+ }
} else {
# Common GCC compiler flags setup.
# --------------------------------
@@ -54,7 +58,6 @@
"-fno-strict-aliasing", # See http://crbug.com/32204
]
cflags_cc += [
- "-fno-exceptions",
"-fno-threadsafe-statics",
# Not exporting C++ inline functions can generally be applied anywhere
# so we do so here. Normal function visibility is controlled by
diff --git a/build/config/features.gni b/build/config/features.gni
index f76f42c..2f66e02 100644
--- a/build/config/features.gni
+++ b/build/config/features.gni
@@ -60,3 +60,6 @@
# Enable notifications everywhere except Android.
# Android is http://crbug.com/115320
enable_notifications = !is_android
+
+# TODO(brettw) this should be moved to net and only dependents get this define.
+disable_ftp_support = is_ios
diff --git a/build/config/gcc/BUILD.gn b/build/config/gcc/BUILD.gn
index 8272d8a..d5b9ddb 100644
--- a/build/config/gcc/BUILD.gn
+++ b/build/config/gcc/BUILD.gn
@@ -35,3 +35,7 @@
]
}
}
+
+config("no_exceptions") {
+ cflags_cc = [ "-fno-exceptions" ]
+}
diff --git a/build/config/linux/BUILD.gn b/build/config/linux/BUILD.gn
index 6d814fa..46cea9e 100644
--- a/build/config/linux/BUILD.gn
+++ b/build/config/linux/BUILD.gn
@@ -48,6 +48,10 @@
packages = [ "libudev" ]
}
+pkg_config("dbus") {
+ packages = [ "dbus-1" ]
+}
+
config("x11") {
# Don't bother running pkg-config for these X related libraries since it just
# returns the same libs, and forking pkg-config is slow.
diff --git a/build/linux/detect_host_arch.py b/build/detect_host_arch.py
similarity index 87%
rename from build/linux/detect_host_arch.py
rename to build/detect_host_arch.py
index 2686461..638dd68 100755
--- a/build/linux/detect_host_arch.py
+++ b/build/detect_host_arch.py
@@ -11,6 +11,12 @@
def main():
+ print DoMain([])
+ return 0
+
+def DoMain(_):
+ """Hook to be called from gyp without starting a separate python
+ interpreter."""
host_arch = platform.machine()
# Convert machine type to format recognized by gyp.
@@ -28,8 +34,7 @@
if host_arch == 'x64' and platform.architecture()[0] == '32bit':
host_arch = 'ia32'
- print host_arch
- return 0
+ return host_arch
if __name__ == '__main__':
sys.exit(main())
diff --git a/build/filename_rules.gypi b/build/filename_rules.gypi
index 8b49202..e225699 100644
--- a/build/filename_rules.gypi
+++ b/build/filename_rules.gypi
@@ -35,7 +35,7 @@
['exclude', '(^|/)linux/'],
],
}],
- ['OS!="android" or _toolset=="host"', {
+ ['OS!="android" or _toolset=="host" or >(nacl_untrusted_build)==1', {
'sources/': [
['exclude', '_android(_unittest)?\\.cc$'],
['exclude', '(^|/)android/'],
diff --git a/build/get_landmines.py b/build/get_landmines.py
index daa9172..8cfd795 100755
--- a/build/get_landmines.py
+++ b/build/get_landmines.py
@@ -50,8 +50,10 @@
print "Switched win from VS2010 to VS2013."
print "Update to VS2013 Update 2."
print 'Need to clobber everything due to an IDL change in r154579 (blink)'
+ print 'Need to clobber everything due to gen file moves in r175513 (Blink)'
if (platform() != 'ios'):
print 'Clobber to get rid of obselete test plugin after r248358'
+ print 'Clobber to rebuild GN files for V8'
def main():
diff --git a/build/gyp_chromium b/build/gyp_chromium
index 26028dc..48c7389 100755
--- a/build/gyp_chromium
+++ b/build/gyp_chromium
@@ -190,6 +190,10 @@
args = sys.argv[1:]
if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
+ # Check for landmines (reasons to clobber the build) in any case.
+ print 'Running build/landmines.py...'
+ subprocess.check_call(
+ [sys.executable, os.path.join(script_dir, 'landmines.py')])
print 'Skipping gyp_chromium due to GYP_CHROMIUM_NO_ACTION env var.'
sys.exit(0)
diff --git a/build/gypi_to_gn.py b/build/gypi_to_gn.py
index 3d5b899..d52e3a0 100644
--- a/build/gypi_to_gn.py
+++ b/build/gypi_to_gn.py
@@ -85,25 +85,17 @@
assert isinstance(file_data, dict), "%s does not eval to a dictionary" % path
+ # Flatten any variables to the top level.
+ if 'variables' in file_data:
+ file_data.update(file_data['variables'])
+ del file_data['variables']
+
# Strip any conditions.
if 'conditions' in file_data:
del file_data['conditions']
if 'target_conditions' in file_data:
del file_data['target_conditions']
- # Flatten any varaiables to the top level.
- if 'variables' in file_data:
- file_data.update(file_data['variables'])
- del file_data['variables']
-
- # If the contents of the root is a dictionary with exactly one kee
- # "variables", promote the contents of that to the top level. Some .gypi
- # files contain this and some don't depending on how they expect to be
- # embedded in a .gyp file. We don't actually care either way so collapse it
- # away.
- if len(file_data) == 1 and 'variables' in file_data:
- return file_data['variables']
-
return file_data
@@ -151,6 +143,15 @@
assert len(split) == 2, "Replacement must be of the form 'key=value'."
data = ReplaceSubstrings(data, split[0], split[1])
+ # Sometimes .gypi files use the GYP syntax with percents at the end of the
+ # variable name (to indicate not to overwrite a previously-defined value):
+ # 'foo%': 'bar',
+ # Convert these to regular variables.
+ for key in data:
+ if len(key) > 1 and key[len(key) - 1] == '%':
+ data[key[:-1]] = data[key]
+ del data[key]
+
print gn_helpers.ToGNString(data)
if __name__ == '__main__':
diff --git a/build/install-build-deps.sh b/build/install-build-deps.sh
index 0832cc4..31a9b9c 100755
--- a/build/install-build-deps.sh
+++ b/build/install-build-deps.sh
@@ -142,8 +142,17 @@
linux-libc-dev-armhf-cross
g++-arm-linux-gnueabihf"
-# Packages to build standalone NaCl and all its toolchains.
-nacl_list="g++-mingw-w64-i686 libtinfo-dev:i386"
+# Packages to build NaCl, its toolchains, and its ports.
+nacl_list="bison cmake xvfb gawk texinfo autoconf libtool
+ libssl0.9.8:i386 lib32z1-dev
+ libgpm2:i386 libncurses5:i386
+ g++-mingw-w64-i686 libtinfo-dev libtinfo-dev:i386
+ libglib2.0-0:i386 libnss3:i386
+ libgconf-2-4:i386 libfontconfig:i386
+ libpango1.0-0:i386 libxi6:i386 libxcursor1:i386 libxcomposite1:i386
+ libasound2:i386 libxdamage1:i386 libxtst6:i386 libxrandr2:i386
+ libcap2:i386 libgtk2.0-0:i386 libxss1:i386
+ libexif12:i386 libgl1-mesa-glx:i386"
# Some package names have changed over time
if package_exists ttf-mscorefonts-installer; then
@@ -165,8 +174,10 @@
fi
if package_exists libudev1; then
dev_list="${dev_list} libudev1"
+ nacl_list="${nacl_list} libudev1:i386"
else
dev_list="${dev_list} libudev0"
+ nacl_list="${nacl_list} libudev0:i386"
fi
if package_exists libbrlapi0.6; then
dev_list="${dev_list} libbrlapi0.6"
@@ -260,9 +271,9 @@
fi
if test "$do_inst_nacl" = "1"; then
- echo "Including standalone NaCl dependencies."
+ echo "Including NaCl, NaCl toolchain, NaCl ports dependencies."
else
- echo "Skipping standalone NaCl dependencies."
+ echo "Skipping NaCl, NaCl toolchain, NaCl ports dependencies."
nacl_list=
fi
@@ -371,6 +382,18 @@
echo "Skipping installation of Chrome OS fonts."
fi
+if test "$do_inst_nacl" = "1"; then
+ echo "Installing symbolic links for NaCl."
+ if [ ! -r /usr/lib/i386-linux-gnu/libcrypto.so ]; then
+ sudo ln -fs libcrypto.so.0.9.8 /usr/lib/i386-linux-gnu/libcrypto.so
+ fi
+ if [ ! -r /usr/lib/i386-linux-gnu/libssl.so ]; then
+ sudo ln -fs libssl.so.0.9.8 /usr/lib/i386-linux-gnu/libssl.so
+ fi
+else
+ echo "Skipping symbolic links for NaCl."
+fi
+
# Install 32bit backwards compatibility support for 64bit systems
if file /sbin/init | grep -q 'ELF 64-bit'; then
if test "$do_inst_lib32" != "1"
diff --git a/build/ios/grit_whitelist.txt b/build/ios/grit_whitelist.txt
index 6174648..ad9f0a8 100644
--- a/build/ios/grit_whitelist.txt
+++ b/build/ios/grit_whitelist.txt
@@ -55,6 +55,7 @@
IDR_SYNC_INTERNALS_SYNC_LOG_JS
IDR_SYNC_INTERNALS_SYNC_NODE_BROWSER_JS
IDR_SYNC_INTERNALS_SYNC_SEARCH_JS
+IDR_SYNC_INTERNALS_TYPES_JS
IDR_THROBBER
IDR_TRANSLATE_JS
IDR_WEBUI_I18N_PROCESS_JS
@@ -126,6 +127,8 @@
IDS_BOOKMARK_ADD_EDITOR_TITLE
IDS_BOOKMARK_ALL_TABS_DIALOG_TITLE
IDS_BOOKMARK_BAR_FOLDER_NAME
+IDS_BOOKMARK_BAR_MANAGED_FOLDER_DEFAULT_NAME
+IDS_BOOKMARK_BAR_MANAGED_FOLDER_DOMAIN_NAME
IDS_BOOKMARK_BAR_MOBILE_FOLDER_NAME
IDS_BOOKMARK_BAR_OTHER_FOLDER_NAME
IDS_BOOKMARK_BAR_REDO
@@ -685,8 +688,6 @@
IDS_POLICY_INVALID_PROXY_MODE_ERROR
IDS_POLICY_INVALID_SEARCH_URL_ERROR
IDS_POLICY_LIST_ENTRY_ERROR
-IDS_POLICY_MANAGED_BOOKMARKS
-IDS_POLICY_MANAGED_BOOKMARKS_DEFAULT_NAME
IDS_POLICY_NOT_SPECIFIED_ERROR
IDS_POLICY_OUT_OF_RANGE_ERROR
IDS_POLICY_OVERRIDDEN
@@ -984,8 +985,23 @@
IDS_TOUCH_EVENTS_DESCRIPTION
IDS_TOUCH_EVENTS_NAME
IDS_TRANSLATE_INFOBAR_ACCEPT
+IDS_TRANSLATE_INFOBAR_AFTER_MESSAGE
+IDS_TRANSLATE_INFOBAR_AFTER_MESSAGE_AUTODETERMINED_SOURCE_LANGUAGE
IDS_TRANSLATE_INFOBAR_BEFORE_MESSAGE
IDS_TRANSLATE_INFOBAR_DENY
+IDS_TRANSLATE_INFOBAR_ERROR_CANT_CONNECT
+IDS_TRANSLATE_INFOBAR_ERROR_CANT_TRANSLATE
+IDS_TRANSLATE_INFOBAR_ERROR_SAME_LANGUAGE
+IDS_TRANSLATE_INFOBAR_OPTIONS_ABOUT
+IDS_TRANSLATE_INFOBAR_OPTIONS_ALWAYS
+IDS_TRANSLATE_INFOBAR_OPTIONS_NEVER_TRANSLATE_LANG
+IDS_TRANSLATE_INFOBAR_OPTIONS_NEVER_TRANSLATE_SITE
+IDS_TRANSLATE_INFOBAR_OPTIONS_REPORT_ERROR
+IDS_TRANSLATE_INFOBAR_RETRY
+IDS_TRANSLATE_INFOBAR_REVERT
+IDS_TRANSLATE_INFOBAR_TRANSLATING_TO
+IDS_TRANSLATE_INFOBAR_UNKNOWN_PAGE_LANGUAGE
+IDS_TRANSLATE_INFOBAR_UNSUPPORTED_PAGE_LANGUAGE
IDS_UPGRADE_AVAILABLE
IDS_UPGRADE_AVAILABLE_BUTTON
IDS_WEB_FONT_FAMILY
diff --git a/build/linux/libgcrypt-config-wrapper b/build/linux/libgcrypt-config-wrapper
deleted file mode 100755
index 48a2a8e..0000000
--- a/build/linux/libgcrypt-config-wrapper
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-
-# This program wraps around libgcrypt-config to generate the correct include and
-# library paths when cross-compiling using a sysroot.
-
-sysroot="$1"
-shift
-
-if [ -z "$sysroot" ]
-then
- echo "usage: $0 /path/to/sysroot [libgcrypt-config-arguments]" >&2
- exit 1
-fi
-
-config_path=$sysroot/usr/bin/libgcrypt-config
-set -e
-echo `$config_path "$@" | sed -e 's|/|'$sysroot'/|'`
diff --git a/build/linux/system.gyp b/build/linux/system.gyp
index 9ad356a..7ccd973 100644
--- a/build/linux/system.gyp
+++ b/build/linux/system.gyp
@@ -7,12 +7,8 @@
'conditions': [
['sysroot!=""', {
'pkg-config': '<(chroot_cmd) ./pkg-config-wrapper "<(sysroot)" "<(target_arch)" "<(system_libdir)"',
- # libgcrypt-config-wrapper invokes libgcrypt-config directly from the
- # sysroot, so there's no need to prefix it with <(chroot_cmd).
- 'libgcrypt-config': './libgcrypt-config-wrapper "<(sysroot)"',
}, {
'pkg-config': 'pkg-config',
- 'libgcrypt-config': 'libgcrypt-config',
}],
],
@@ -793,27 +789,6 @@
},
},
{
- 'target_name': 'libgcrypt',
- 'type': 'none',
- 'conditions': [
- ['_toolset=="target" and use_cups==1', {
- 'direct_dependent_settings': {
- 'cflags': [
- '<!@(<(libgcrypt-config) --cflags)',
- ],
- },
- 'link_settings': {
- 'libraries': [
- # libgcrypt-config does not support --libs-only-l options,
- # and the result contains -L options, which shouldn't be in
- # the entries of 'libraries'. So filter them out.
- '<!@(<(libgcrypt-config) --libs | sed -e \'s/-L[^ ]*//g\')',
- ],
- },
- }],
- ],
- },
- {
'target_name': 'libpci',
'type': 'static_library',
'cflags': [
diff --git a/build/linux/unbundle/harfbuzz.gyp b/build/linux/unbundle/harfbuzz.gyp
index 93b2a69..3bc1744 100644
--- a/build/linux/unbundle/harfbuzz.gyp
+++ b/build/linux/unbundle/harfbuzz.gyp
@@ -33,6 +33,15 @@
'<!@(pkg-config --libs-only-l <(harfbuzz_libraries))',
],
},
+ 'variables': {
+ 'headers_root_path': 'src',
+ 'header_filenames': [
+ 'hb.h',
+ ],
+ },
+ 'includes': [
+ '../../build/shim_headers.gypi',
+ ],
},
],
}
diff --git a/build/mac/tweak_info_plist.py b/build/mac/tweak_info_plist.py
index 3e87b10..4a6c475 100755
--- a/build/mac/tweak_info_plist.py
+++ b/build/mac/tweak_info_plist.py
@@ -132,54 +132,6 @@
return True
-def _DoPDFKeys(plist, add_keys):
- """Adds PDF support to the document types list. If add_keys is True, it will
- add the type information dictionary. If it is False, it will remove it if
- present."""
-
- PDF_FILE_EXTENSION = 'pdf'
-
- def __AddPDFKeys(sub_plist):
- """Writes the keys into a sub-dictionary of the plist."""
- sub_plist['CFBundleTypeExtensions'] = [PDF_FILE_EXTENSION]
- sub_plist['CFBundleTypeIconFile'] = 'document.icns'
- sub_plist['CFBundleTypeMIMETypes'] = 'application/pdf'
- sub_plist['CFBundleTypeName'] = 'PDF Document'
- sub_plist['CFBundleTypeRole'] = 'Viewer'
-
- DOCUMENT_TYPES_KEY = 'CFBundleDocumentTypes'
-
- # First get the list of document types, creating it if necessary.
- try:
- extensions = plist[DOCUMENT_TYPES_KEY]
- except KeyError:
- # If this plist doesn't have a type dictionary, create one if set to add the
- # keys. If not, bail.
- if not add_keys:
- return
- extensions = plist[DOCUMENT_TYPES_KEY] = []
-
- # Loop over each entry in the list, looking for one that handles PDF types.
- for i, ext in enumerate(extensions):
- # If an entry for .pdf files is found...
- if 'CFBundleTypeExtensions' not in ext:
- continue
- if PDF_FILE_EXTENSION in ext['CFBundleTypeExtensions']:
- if add_keys:
- # Overwrite the existing keys with new ones.
- __AddPDFKeys(ext)
- else:
- # Otherwise, delete the entry entirely.
- del extensions[i]
- return
-
- # No PDF entry exists. If one needs to be added, do so now.
- if add_keys:
- pdf_entry = {}
- __AddPDFKeys(pdf_entry)
- extensions.append(pdf_entry)
-
-
def _AddBreakpadKeys(plist, branding):
"""Adds the Breakpad keys. This must be called AFTER _AddVersionKeys() and
also requires the |branding| argument."""
@@ -259,8 +211,6 @@
type='int', default=False, help='Enable Keystone [1 or 0]')
parser.add_option('--scm', dest='add_scm_info', action='store', type='int',
default=True, help='Add SCM metadata [1 or 0]')
- parser.add_option('--pdf', dest='add_pdf_support', action='store', type='int',
- default=False, help='Add PDF file handler support [1 or 0]')
parser.add_option('--branding', dest='branding', action='store',
type='string', default=None, help='The branding of the binary')
parser.add_option('--bundle_id', dest='bundle_identifier',
@@ -314,9 +264,6 @@
if not _DoSCMKeys(plist, options.add_scm_info):
return 3
- # Adds or removes the PDF file handler entry.
- _DoPDFKeys(plist, options.add_pdf_support)
-
# Now that all keys have been mutated, rewrite the file.
temp_info_plist = tempfile.NamedTemporaryFile()
plistlib.writePlist(plist, temp_info_plist.name)
diff --git a/build/toolchain/mac/BUILD.gn b/build/toolchain/mac/BUILD.gn
index d70774d..acf3d84 100644
--- a/build/toolchain/mac/BUILD.gn
+++ b/build/toolchain/mac/BUILD.gn
@@ -80,10 +80,8 @@
deps = "gcc"
}
tool("alink") {
- command = "rm -f \$out && ./gyp-mac-tool filter-libtool libtool \$libtool_flags -static -o \$out @\$rspfile \$postbuilds"
+ command = "rm -f \$out && ./gyp-mac-tool filter-libtool libtool \$libtool_flags -static -o \$out \$in \$postbuilds"
description = "LIBTOOL-STATIC \$out"
- rspfile = "\$out.rsp"
- rspfile_content = "\$in"
}
tool("solink") {
command = "if [ ! -e \$lib -o ! -e \${lib}.TOC ] || otool -l \$lib | grep -q LC_REEXPORT_DYLIB ; then $ld -shared \$ldflags -o \$lib @\$rspfile \$solibs \$libs \$postbuilds && { otool -l \$lib | grep LC_ID_DYLIB -A 5; nm -gP \$lib | cut -f1-2 -d' ' | grep -v U\$\$; true; } > \${lib}.TOC; else $ld -shared \$ldflags -o \$lib \$in \$solibs \$libs \$postbuilds && { otool -l \$lib | grep LC_ID_DYLIB -A 5; nm -gP \$lib | cut -f1-2 -d' ' | grep -v U\$\$; true; } > \${lib}.tmp && if ! cmp -s \${lib}.tmp \${lib}.TOC; then mv \${lib}.tmp \${lib}.TOC ; fi; fi"
diff --git a/build/util/LASTCHANGE b/build/util/LASTCHANGE
index 110d637..09254a0 100644
--- a/build/util/LASTCHANGE
+++ b/build/util/LASTCHANGE
@@ -1 +1 @@
-LASTCHANGE=273901
+LASTCHANGE=275586
diff --git a/build/util/LASTCHANGE.blink b/build/util/LASTCHANGE.blink
index 2da2c90..1082d2f 100644
--- a/build/util/LASTCHANGE.blink
+++ b/build/util/LASTCHANGE.blink
@@ -1 +1 @@
-LASTCHANGE=174973
+LASTCHANGE=175677
diff --git a/build/win/ftol3.obj b/build/win/ftol3.obj
deleted file mode 100644
index dc9d9ed..0000000
--- a/build/win/ftol3.obj
+++ /dev/null
Binary files differ