Update V8 to version 4.1.0.21
This is a cherry-pick of all commits up to and including the
4.1.0.21 cherry-pick in Chromium.
Original commit message:
Version 4.1.0.21 (cherry-pick)
Merged 206e9136bde0f2b5ae8cb77afbb1e7833e5bd412
Unlink pages from the space page list after evacuation.
BUG=430201
LOG=N
R=jkummerow@chromium.org
Review URL: https://codereview.chromium.org/953813002
Cr-Commit-Position: refs/branch-heads/4.1@{#22}
Cr-Branched-From: 2e08d2a7aa9d65d269d8c57aba82eb38a8cb0a18-refs/heads/candidates@{#25353}
---
FPIIM-449
Change-Id: I8c23c7bbb70772b4858fe8a47b64fa97ee0d1f8c
diff --git a/tools/android-sync.sh b/tools/android-sync.sh
index 460e92d..4acb1cc 100755
--- a/tools/android-sync.sh
+++ b/tools/android-sync.sh
@@ -88,6 +88,7 @@
echo -n "sync to $ANDROID_V8/$OUTDIR/$ARCH_MODE"
sync_file "$OUTDIR/$ARCH_MODE/cctest"
sync_file "$OUTDIR/$ARCH_MODE/d8"
+sync_file "$OUTDIR/$ARCH_MODE/unittests"
echo ""
echo -n "sync to $ANDROID_V8/tools"
sync_file tools/consarray.js
diff --git a/tools/check-name-clashes.py b/tools/check-name-clashes.py
index e448930..89a7dee 100755
--- a/tools/check-name-clashes.py
+++ b/tools/check-name-clashes.py
@@ -8,135 +8,53 @@
import re
import sys
-FILENAME = "src/runtime.cc"
-FUNCTION = re.compile("^RUNTIME_FUNCTION\(Runtime_(\w+)")
-FUNCTIONEND = "}\n"
-MACRO = re.compile(r"^#define ([^ ]+)\(([^)]*)\) *([^\\]*)\\?\n$")
-FIRST_WORD = re.compile("^\s*(.*?)[\s({\[]")
-
-# Expand these macros, they define further runtime functions.
-EXPAND_MACROS = [
- "BUFFER_VIEW_GETTER",
- "DATA_VIEW_GETTER",
- "DATA_VIEW_SETTER",
- "ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION",
- "FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION",
- "RUNTIME_UNARY_MATH",
- "TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION",
-]
+FILENAME = "src/runtime/runtime.h"
+LISTHEAD = re.compile(r"#define\s+(\w+LIST\w*)\((\w+)\)")
+LISTBODY = re.compile(r".*\\$")
+BLACKLIST = ['INLINE_FUNCTION_LIST']
class Function(object):
def __init__(self, match):
- self.name = match.group(1)
+ self.name = match.group(1).strip()
+
+def ListMacroRe(list):
+ macro = LISTHEAD.match(list[0]).group(2)
+ re_string = "\s*%s\((\w+)" % macro
+ return re.compile(re_string)
-class Macro(object):
- def __init__(self, match):
- self.name = match.group(1)
- self.args = [s.strip() for s in match.group(2).split(",")]
- self.lines = []
- self.indentation = 0
- self.AddLine(match.group(3))
-
- def AddLine(self, line):
- if not line: return
- if not self.lines:
- # This is the first line, detect indentation.
- self.indentation = len(line) - len(line.lstrip())
- line = line.rstrip("\\\n ")
- if not line: return
- assert len(line[:self.indentation].strip()) == 0, \
- ("expected whitespace: '%s', full line: '%s'" %
- (line[:self.indentation], line))
- line = line[self.indentation:]
- if not line: return
- self.lines.append(line + "\n")
-
- def Finalize(self):
- for arg in self.args:
- pattern = re.compile(r"(##|\b)%s(##|\b)" % arg)
- for i in range(len(self.lines)):
- self.lines[i] = re.sub(pattern, "%%(%s)s" % arg, self.lines[i])
-
- def FillIn(self, arg_values):
- filler = {}
- assert len(arg_values) == len(self.args)
- for i in range(len(self.args)):
- filler[self.args[i]] = arg_values[i]
- result = []
- for line in self.lines:
- result.append(line % filler)
- return result
-
-
-def ReadFileAndExpandMacros(filename):
- found_macros = {}
- expanded_lines = []
+def FindLists(filename):
+ lists = []
+ current_list = []
+ mode = "SEARCHING"
with open(filename, "r") as f:
- found_macro = None
for line in f:
- if found_macro is not None:
- found_macro.AddLine(line)
- if not line.endswith("\\\n"):
- found_macro.Finalize()
- found_macro = None
- continue
-
- match = MACRO.match(line)
- if match:
- found_macro = Macro(match)
- if found_macro.name in EXPAND_MACROS:
- found_macros[found_macro.name] = found_macro
- else:
- found_macro = None
- continue
-
- match = FIRST_WORD.match(line)
- if match:
- first_word = match.group(1)
- if first_word in found_macros:
- MACRO_CALL = re.compile("%s\(([^)]*)\)" % first_word)
- match = MACRO_CALL.match(line)
- assert match
- args = [s.strip() for s in match.group(1).split(",")]
- expanded_lines += found_macros[first_word].FillIn(args)
- continue
-
- expanded_lines.append(line)
- return expanded_lines
+ if mode == "SEARCHING":
+ match = LISTHEAD.match(line)
+ if match and match.group(1) not in BLACKLIST:
+ mode = "APPENDING"
+ current_list.append(line)
+ else:
+ current_list.append(line)
+ match = LISTBODY.match(line)
+ if not match:
+ mode = "SEARCHING"
+ lists.append(current_list)
+ current_list = []
+ return lists
# Detects runtime functions by parsing FILENAME.
def FindRuntimeFunctions():
functions = []
- expanded_lines = ReadFileAndExpandMacros(FILENAME)
- function = None
- partial_line = ""
- for line in expanded_lines:
- # Multi-line definition support, ignoring macros.
- if line.startswith("RUNTIME_FUNCTION") and not line.endswith("{\n"):
- if line.endswith("\\\n"): continue
- partial_line = line.rstrip()
- continue
- if partial_line:
- partial_line += " " + line.strip()
- if partial_line.endswith("{"):
- line = partial_line
- partial_line = ""
- else:
- continue
-
- match = FUNCTION.match(line)
- if match:
- function = Function(match)
- continue
- if function is None: continue
-
- if line == FUNCTIONEND:
- if function is not None:
- functions.append(function)
- function = None
+ lists = FindLists(FILENAME)
+ for list in lists:
+ function_re = ListMacroRe(list)
+ for line in list:
+ match = function_re.match(line)
+ if match:
+ functions.append(Function(match))
return functions
diff --git a/tools/codemap.js b/tools/codemap.js
index 129179e..fa6c36b 100644
--- a/tools/codemap.js
+++ b/tools/codemap.js
@@ -258,11 +258,13 @@
*
* @param {number} size Code entry size in bytes.
* @param {string} opt_name Code entry name.
+ * @param {string} opt_type Code entry type, e.g. SHARED_LIB, CPP.
* @constructor
*/
-CodeMap.CodeEntry = function(size, opt_name) {
+CodeMap.CodeEntry = function(size, opt_name, opt_type) {
this.size = size;
this.name = opt_name || '';
+ this.type = opt_type || '';
this.nameUpdated_ = false;
};
diff --git a/tools/find-commit-for-patch.py b/tools/find-commit-for-patch.py
new file mode 100755
index 0000000..657826c
--- /dev/null
+++ b/tools/find-commit-for-patch.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+# Copyright 2014 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import subprocess
+import sys
+
+
+def GetArgs():
+ parser = argparse.ArgumentParser(
+ description="Finds a commit that a given patch can be applied to. "
+ "Does not actually apply the patch or modify your checkout "
+ "in any way.")
+ parser.add_argument("patch_file", help="Patch file to match")
+ parser.add_argument(
+ "--branch", "-b", default="origin/master", type=str,
+ help="Git tree-ish where to start searching for commits, "
+ "default: %(default)s")
+ parser.add_argument(
+ "--limit", "-l", default=500, type=int,
+ help="Maximum number of commits to search, default: %(default)s")
+ parser.add_argument(
+ "--verbose", "-v", default=False, action="store_true",
+ help="Print verbose output for your entertainment")
+ return parser.parse_args()
+
+
+def FindFilesInPatch(patch_file):
+ files = {}
+ next_file = ""
+ with open(patch_file) as patch:
+ for line in patch:
+ if line.startswith("diff --git "):
+ # diff --git a/src/objects.cc b/src/objects.cc
+ words = line.split()
+ assert words[2].startswith("a/") and len(words[2]) > 2
+ next_file = words[2][2:]
+ elif line.startswith("index "):
+ # index add3e61..d1bbf6a 100644
+ hashes = line.split()[1]
+ old_hash = hashes.split("..")[0]
+ if old_hash.startswith("0000000"): continue # Ignore new files.
+ files[next_file] = old_hash
+ return files
+
+
+def GetGitCommitHash(treeish):
+ cmd = ["git", "log", "-1", "--format=%H", treeish]
+ return subprocess.check_output(cmd).strip()
+
+
+def CountMatchingFiles(commit, files):
+ matched_files = 0
+ # Calling out to git once and parsing the result Python-side is faster
+ # than calling 'git ls-tree' for every file.
+ cmd = ["git", "ls-tree", "-r", commit] + [f for f in files]
+ output = subprocess.check_output(cmd)
+ for line in output.splitlines():
+ # 100644 blob c6d5daaa7d42e49a653f9861224aad0a0244b944 src/objects.cc
+ _, _, actual_hash, filename = line.split()
+ expected_hash = files[filename]
+ if actual_hash.startswith(expected_hash): matched_files += 1
+ return matched_files
+
+
+def FindFirstMatchingCommit(start, files, limit, verbose):
+ commit = GetGitCommitHash(start)
+ num_files = len(files)
+ if verbose: print(">>> Found %d files modified by patch." % num_files)
+ for _ in range(limit):
+ matched_files = CountMatchingFiles(commit, files)
+ if verbose: print("Commit %s matched %d files" % (commit, matched_files))
+ if matched_files == num_files:
+ return commit
+ commit = GetGitCommitHash("%s^" % commit)
+ print("Sorry, no matching commit found. "
+ "Try running 'git fetch', specifying the correct --branch, "
+ "and/or setting a higher --limit.")
+ sys.exit(1)
+
+
+if __name__ == "__main__":
+ args = GetArgs()
+ files = FindFilesInPatch(args.patch_file)
+ commit = FindFirstMatchingCommit(args.branch, files, args.limit, args.verbose)
+ if args.verbose:
+ print(">>> Matching commit: %s" % commit)
+ print(subprocess.check_output(["git", "log", "-1", commit]))
+ print(">>> Kthxbai.")
+ else:
+ print(commit)
diff --git a/tools/find_depot_tools.py b/tools/find_depot_tools.py
new file mode 100644
index 0000000..95ae9e8
--- /dev/null
+++ b/tools/find_depot_tools.py
@@ -0,0 +1,40 @@
+# Copyright 2014 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Small utility function to find depot_tools and add it to the python path.
+"""
+
+import os
+import sys
+
+
+def directory_really_is_depot_tools(directory):
+ return os.path.isfile(os.path.join(directory, 'gclient.py'))
+
+
+def add_depot_tools_to_path():
+ """Search for depot_tools and add it to sys.path."""
+ # First look if depot_tools is already in PYTHONPATH.
+ for i in sys.path:
+ if i.rstrip(os.sep).endswith('depot_tools'):
+ if directory_really_is_depot_tools(i):
+ return i
+
+ # Then look if depot_tools is in PATH, common case.
+ for i in os.environ['PATH'].split(os.pathsep):
+ if i.rstrip(os.sep).endswith('depot_tools'):
+ if directory_really_is_depot_tools(i):
+ sys.path.insert(0, i.rstrip(os.sep))
+ return i
+ # Rare case, it's not even in PATH, look upward up to root.
+ root_dir = os.path.dirname(os.path.abspath(__file__))
+ previous_dir = os.path.abspath(__file__)
+ while root_dir and root_dir != previous_dir:
+ if directory_really_is_depot_tools(os.path.join(root_dir, 'depot_tools')):
+ i = os.path.join(root_dir, 'depot_tools')
+ sys.path.insert(0, i)
+ return i
+ previous_dir = root_dir
+ root_dir = os.path.dirname(root_dir)
+ print >> sys.stderr, 'Failed to find depot_tools'
+ return None
diff --git a/tools/gdbinit b/tools/gdbinit
index 20cdff6..8d0345a 100644
--- a/tools/gdbinit
+++ b/tools/gdbinit
@@ -20,9 +20,27 @@
Usage: jco pc
end
+# Print DescriptorArray.
+define jda
+print ((v8::internal::DescriptorArray*)($arg0))->Print()
+end
+document jda
+Print a v8 DescriptorArray object
+Usage: jda tagged_ptr
+end
+
+# Print TransitionArray.
+define jta
+print ((v8::internal::TransitionArray*)($arg0))->Print()
+end
+document jta
+Print a v8 TransitionArray object
+Usage: jta tagged_ptr
+end
+
# Print JavaScript stack trace.
define jst
-print v8::internal::Isolate::Current()->PrintStack(stdout)
+print v8::internal::Isolate::Current()->PrintStack((FILE*) stdout)
end
document jst
Print the current JavaScript stack trace
diff --git a/tools/gyp/v8.gyp b/tools/gyp/v8.gyp
index c63cd94..696434d 100644
--- a/tools/gyp/v8.gyp
+++ b/tools/gyp/v8.gyp
@@ -164,7 +164,6 @@
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'<(INTERMEDIATE_DIR)/snapshot.cc',
- '../../src/snapshot-common.cc',
],
'actions': [
{
@@ -206,7 +205,6 @@
'sources': [
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
- '../../src/snapshot-common.cc',
'../../src/snapshot-empty.cc',
],
'conditions': [
@@ -229,89 +227,105 @@
'target_name': 'v8_external_snapshot',
'type': 'static_library',
'conditions': [
- ['want_separate_host_toolset==1', {
- 'toolsets': ['host', 'target'],
- 'dependencies': [
- 'mksnapshot#host',
- 'js2c#host',
- 'natives_blob',
- ]}, {
- 'toolsets': ['target'],
- 'dependencies': [
- 'mksnapshot',
- 'js2c',
- 'natives_blob',
- ],
- }],
- ['component=="shared_library"', {
- 'defines': [
- 'V8_SHARED',
- 'BUILDING_V8_SHARED',
- ],
- 'direct_dependent_settings': {
- 'defines': [
- 'V8_SHARED',
- 'USING_V8_SHARED',
- ],
- },
- }],
- ],
- 'dependencies': [
- 'v8_base',
- ],
- 'include_dirs+': [
- '../..',
- ],
- 'sources': [
- '../../src/natives-external.cc',
- '../../src/snapshot-external.cc',
- ],
- 'actions': [
- {
- 'action_name': 'run_mksnapshot (external)',
- 'inputs': [
- '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)mksnapshot<(EXECUTABLE_SUFFIX)',
- ],
+ [ 'v8_use_external_startup_data==1', {
'conditions': [
['want_separate_host_toolset==1', {
- 'target_conditions': [
- ['_toolset=="host"', {
- 'outputs': [
- '<(INTERMEDIATE_DIR)/snapshot.cc',
- '<(PRODUCT_DIR)/snapshot_blob_host.bin',
+ 'toolsets': ['host', 'target'],
+ 'dependencies': [
+ 'mksnapshot#host',
+ 'js2c#host',
+ 'natives_blob',
+ ]}, {
+ 'toolsets': ['target'],
+ 'dependencies': [
+ 'mksnapshot',
+ 'js2c',
+ 'natives_blob',
+ ],
+ }],
+ ['component=="shared_library"', {
+ 'defines': [
+ 'V8_SHARED',
+ 'BUILDING_V8_SHARED',
+ ],
+ 'direct_dependent_settings': {
+ 'defines': [
+ 'V8_SHARED',
+ 'USING_V8_SHARED',
+ ],
+ },
+ }],
+ ],
+ 'dependencies': [
+ 'v8_base',
+ ],
+ 'include_dirs+': [
+ '../..',
+ ],
+ 'sources': [
+ '../../src/natives-external.cc',
+ '../../src/snapshot-external.cc',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'run_mksnapshot (external)',
+ 'inputs': [
+ '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)mksnapshot<(EXECUTABLE_SUFFIX)',
+ ],
+ 'variables': {
+ 'mksnapshot_flags': [
+ '--log-snapshot-positions',
+ '--logfile', '<(INTERMEDIATE_DIR)/snapshot.log',
+ ],
+ 'conditions': [
+ ['v8_random_seed!=0', {
+ 'mksnapshot_flags': ['--random-seed', '<(v8_random_seed)'],
+ }],
+ ],
+ },
+ 'conditions': [
+ ['want_separate_host_toolset==1', {
+ 'target_conditions': [
+ ['_toolset=="host"', {
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/snapshot.cc',
+ '<(PRODUCT_DIR)/snapshot_blob_host.bin',
+ ],
+ 'action': [
+ '<@(_inputs)',
+ '<@(mksnapshot_flags)',
+ '<@(INTERMEDIATE_DIR)/snapshot.cc',
+ '--startup_blob', '<(PRODUCT_DIR)/snapshot_blob_host.bin',
+ ],
+ }, {
+ 'outputs': [
+ '<(INTERMEDIATE_DIR)/snapshot.cc',
+ '<(PRODUCT_DIR)/snapshot_blob.bin',
+ ],
+ 'action': [
+ '<@(_inputs)',
+ '<@(mksnapshot_flags)',
+ '<@(INTERMEDIATE_DIR)/snapshot.cc',
+ '--startup_blob', '<(PRODUCT_DIR)/snapshot_blob.bin',
+ ],
+ }],
],
}, {
'outputs': [
'<(INTERMEDIATE_DIR)/snapshot.cc',
'<(PRODUCT_DIR)/snapshot_blob.bin',
],
+ 'action': [
+ '<@(_inputs)',
+ '<@(mksnapshot_flags)',
+ '<@(INTERMEDIATE_DIR)/snapshot.cc',
+ '--startup_blob', '<(PRODUCT_DIR)/snapshot_blob.bin',
+ ],
}],
],
- }, {
- 'outputs': [
- '<(INTERMEDIATE_DIR)/snapshot.cc',
- '<(PRODUCT_DIR)/snapshot_blob.bin',
- ],
- }],
+ },
],
- 'variables': {
- 'mksnapshot_flags': [
- '--log-snapshot-positions',
- '--logfile', '<(INTERMEDIATE_DIR)/snapshot.log',
- ],
- 'conditions': [
- ['v8_random_seed!=0', {
- 'mksnapshot_flags': ['--random-seed', '<(v8_random_seed)'],
- }],
- ],
- },
- 'action': [
- '<@(_inputs)',
- '<@(mksnapshot_flags)',
- '<@(INTERMEDIATE_DIR)/snapshot.cc',
- '--startup_blob', '<(PRODUCT_DIR)/snapshot_blob.bin',
- ],
- },
+ }],
],
},
{
@@ -343,18 +357,26 @@
'../../src/assembler.h',
'../../src/assert-scope.h',
'../../src/assert-scope.cc',
+ '../../src/ast-this-access-visitor.cc',
+ '../../src/ast-this-access-visitor.h',
'../../src/ast-value-factory.cc',
'../../src/ast-value-factory.h',
+ '../../src/ast-numbering.cc',
+ '../../src/ast-numbering.h',
'../../src/ast.cc',
'../../src/ast.h',
'../../src/background-parsing-task.cc',
'../../src/background-parsing-task.h',
'../../src/bailout-reason.cc',
'../../src/bailout-reason.h',
+ '../../src/basic-block-profiler.cc',
+ '../../src/basic-block-profiler.h',
'../../src/bignum-dtoa.cc',
'../../src/bignum-dtoa.h',
'../../src/bignum.cc',
'../../src/bignum.h',
+ '../../src/bit-vector.cc',
+ '../../src/bit-vector.h',
'../../src/bootstrapper.cc',
'../../src/bootstrapper.h',
'../../src/builtins.cc',
@@ -362,6 +384,7 @@
'../../src/bytecodes-irregexp.h',
'../../src/cached-powers.cc',
'../../src/cached-powers.h',
+ '../../src/char-predicates.cc',
'../../src/char-predicates-inl.h',
'../../src/char-predicates.h',
'../../src/checks.cc',
@@ -378,28 +401,37 @@
'../../src/codegen.h',
'../../src/compilation-cache.cc',
'../../src/compilation-cache.h',
+ '../../src/compilation-statistics.cc',
+ '../../src/compilation-statistics.h',
'../../src/compiler/access-builder.cc',
'../../src/compiler/access-builder.h',
'../../src/compiler/ast-graph-builder.cc',
'../../src/compiler/ast-graph-builder.h',
+ '../../src/compiler/ast-loop-assignment-analyzer.cc',
+ '../../src/compiler/ast-loop-assignment-analyzer.h',
+ '../../src/compiler/basic-block-instrumentor.cc',
+ '../../src/compiler/basic-block-instrumentor.h',
'../../src/compiler/change-lowering.cc',
'../../src/compiler/change-lowering.h',
'../../src/compiler/code-generator-impl.h',
'../../src/compiler/code-generator.cc',
'../../src/compiler/code-generator.h',
+ '../../src/compiler/common-node-cache.cc',
'../../src/compiler/common-node-cache.h',
+ '../../src/compiler/common-operator-reducer.cc',
+ '../../src/compiler/common-operator-reducer.h',
'../../src/compiler/common-operator.cc',
'../../src/compiler/common-operator.h',
'../../src/compiler/control-builders.cc',
'../../src/compiler/control-builders.h',
+ '../../src/compiler/control-equivalence.h',
+ '../../src/compiler/control-reducer.cc',
+ '../../src/compiler/control-reducer.h',
+ '../../src/compiler/diamond.h',
'../../src/compiler/frame.h',
'../../src/compiler/gap-resolver.cc',
'../../src/compiler/gap-resolver.h',
- '../../src/compiler/generic-algorithm-inl.h',
'../../src/compiler/generic-algorithm.h',
- '../../src/compiler/generic-graph.h',
- '../../src/compiler/generic-node-inl.h',
- '../../src/compiler/generic-node.h',
'../../src/compiler/graph-builder.cc',
'../../src/compiler/graph-builder.h',
'../../src/compiler/graph-inl.h',
@@ -427,18 +459,29 @@
'../../src/compiler/js-graph.h',
'../../src/compiler/js-inlining.cc',
'../../src/compiler/js-inlining.h',
+ '../../src/compiler/js-intrinsic-builder.cc',
+ '../../src/compiler/js-intrinsic-builder.h',
+ '../../src/compiler/js-operator.cc',
'../../src/compiler/js-operator.h',
'../../src/compiler/js-typed-lowering.cc',
'../../src/compiler/js-typed-lowering.h',
+ '../../src/compiler/jump-threading.cc',
+ '../../src/compiler/jump-threading.h',
'../../src/compiler/linkage-impl.h',
'../../src/compiler/linkage.cc',
'../../src/compiler/linkage.h',
+ '../../src/compiler/load-elimination.cc',
+ '../../src/compiler/load-elimination.h',
+ '../../src/compiler/loop-analysis.cc',
+ '../../src/compiler/loop-analysis.h',
'../../src/compiler/machine-operator-reducer.cc',
'../../src/compiler/machine-operator-reducer.h',
'../../src/compiler/machine-operator.cc',
'../../src/compiler/machine-operator.h',
'../../src/compiler/machine-type.cc',
'../../src/compiler/machine-type.h',
+ '../../src/compiler/move-optimizer.cc',
+ '../../src/compiler/move-optimizer.h',
'../../src/compiler/node-aux-data-inl.h',
'../../src/compiler/node-aux-data.h',
'../../src/compiler/node-cache.cc',
@@ -448,23 +491,31 @@
'../../src/compiler/node-properties.h',
'../../src/compiler/node.cc',
'../../src/compiler/node.h',
+ '../../src/compiler/opcodes.cc',
'../../src/compiler/opcodes.h',
- '../../src/compiler/operator-properties-inl.h',
+ '../../src/compiler/operator-properties.cc',
'../../src/compiler/operator-properties.h',
'../../src/compiler/operator.cc',
'../../src/compiler/operator.h',
- '../../src/compiler/phi-reducer.h',
'../../src/compiler/pipeline.cc',
'../../src/compiler/pipeline.h',
+ '../../src/compiler/pipeline-statistics.cc',
+ '../../src/compiler/pipeline-statistics.h',
'../../src/compiler/raw-machine-assembler.cc',
'../../src/compiler/raw-machine-assembler.h',
'../../src/compiler/register-allocator.cc',
'../../src/compiler/register-allocator.h',
+ '../../src/compiler/register-allocator-verifier.cc',
+ '../../src/compiler/register-allocator-verifier.h',
+ '../../src/compiler/register-configuration.cc',
+ '../../src/compiler/register-configuration.h',
'../../src/compiler/representation-change.h',
'../../src/compiler/schedule.cc',
'../../src/compiler/schedule.h',
'../../src/compiler/scheduler.cc',
'../../src/compiler/scheduler.h',
+ '../../src/compiler/select-lowering.cc',
+ '../../src/compiler/select-lowering.h',
'../../src/compiler/simplified-lowering.cc',
'../../src/compiler/simplified-lowering.h',
'../../src/compiler/simplified-operator-reducer.cc',
@@ -479,6 +530,8 @@
'../../src/compiler/value-numbering-reducer.h',
'../../src/compiler/verifier.cc',
'../../src/compiler/verifier.h',
+ '../../src/compiler/zone-pool.cc',
+ '../../src/compiler/zone-pool.h',
'../../src/compiler.cc',
'../../src/compiler.h',
'../../src/contexts.cc',
@@ -491,8 +544,6 @@
'../../src/cpu-profiler-inl.h',
'../../src/cpu-profiler.cc',
'../../src/cpu-profiler.h',
- '../../src/data-flow.cc',
- '../../src/data-flow.h',
'../../src/date.cc',
'../../src/date.h',
'../../src/dateparser-inl.h',
@@ -531,7 +582,6 @@
'../../src/factory.h',
'../../src/fast-dtoa.cc',
'../../src/fast-dtoa.h',
- '../../src/feedback-slots.h',
'../../src/field-index.h',
'../../src/field-index-inl.h',
'../../src/fixed-dtoa.cc',
@@ -582,8 +632,6 @@
'../../src/heap/store-buffer-inl.h',
'../../src/heap/store-buffer.cc',
'../../src/heap/store-buffer.h',
- '../../src/heap/sweeper-thread.h',
- '../../src/heap/sweeper-thread.cc',
'../../src/hydrogen-alias-analysis.h',
'../../src/hydrogen-bce.cc',
'../../src/hydrogen-bce.h',
@@ -666,6 +714,9 @@
'../../src/jsregexp-inl.h',
'../../src/jsregexp.cc',
'../../src/jsregexp.h',
+ '../../src/layout-descriptor-inl.h',
+ '../../src/layout-descriptor.cc',
+ '../../src/layout-descriptor.h',
'../../src/list-inl.h',
'../../src/list.h',
'../../src/lithium-allocator-inl.h',
@@ -731,8 +782,35 @@
'../../src/rewriter.h',
'../../src/runtime-profiler.cc',
'../../src/runtime-profiler.h',
- '../../src/runtime.cc',
- '../../src/runtime.h',
+ '../../src/runtime/runtime-api.cc',
+ '../../src/runtime/runtime-array.cc',
+ '../../src/runtime/runtime-classes.cc',
+ '../../src/runtime/runtime-collections.cc',
+ '../../src/runtime/runtime-compiler.cc',
+ '../../src/runtime/runtime-date.cc',
+ '../../src/runtime/runtime-debug.cc',
+ '../../src/runtime/runtime-function.cc',
+ '../../src/runtime/runtime-generator.cc',
+ '../../src/runtime/runtime-i18n.cc',
+ '../../src/runtime/runtime-internal.cc',
+ '../../src/runtime/runtime-json.cc',
+ '../../src/runtime/runtime-literals.cc',
+ '../../src/runtime/runtime-liveedit.cc',
+ '../../src/runtime/runtime-maths.cc',
+ '../../src/runtime/runtime-numbers.cc',
+ '../../src/runtime/runtime-object.cc',
+ '../../src/runtime/runtime-observe.cc',
+ '../../src/runtime/runtime-proxy.cc',
+ '../../src/runtime/runtime-regexp.cc',
+ '../../src/runtime/runtime-scopes.cc',
+ '../../src/runtime/runtime-strings.cc',
+ '../../src/runtime/runtime-symbol.cc',
+ '../../src/runtime/runtime-test.cc',
+ '../../src/runtime/runtime-typedarray.cc',
+ '../../src/runtime/runtime-uri.cc',
+ '../../src/runtime/runtime-utils.h',
+ '../../src/runtime/runtime.cc',
+ '../../src/runtime/runtime.h',
'../../src/safepoint-table.cc',
'../../src/safepoint-table.h',
'../../src/sampler.cc',
@@ -750,8 +828,11 @@
'../../src/small-pointer-list.h',
'../../src/smart-pointers.h',
'../../src/snapshot.h',
+ '../../src/snapshot-common.cc',
'../../src/snapshot-source-sink.cc',
'../../src/snapshot-source-sink.h',
+ '../../src/string-builder.cc',
+ '../../src/string-builder.h',
'../../src/string-search.cc',
'../../src/string-search.h',
'../../src/string-stream.cc',
@@ -780,8 +861,9 @@
'../../src/unicode-inl.h',
'../../src/unicode.cc',
'../../src/unicode.h',
+ '../../src/unicode-decoder.cc',
+ '../../src/unicode-decoder.h',
'../../src/unique.h',
- '../../src/uri.h',
'../../src/utils-inl.h',
'../../src/utils.cc',
'../../src/utils.h',
@@ -800,8 +882,8 @@
'../../src/zone-inl.h',
'../../src/zone.cc',
'../../src/zone.h',
- '../../third_party/fdlibm/fdlibm.cc',
- '../../third_party/fdlibm/fdlibm.h',
+ '../../src/third_party/fdlibm/fdlibm.cc',
+ '../../src/third_party/fdlibm/fdlibm.h',
],
'conditions': [
['want_separate_host_toolset==1', {
@@ -1014,6 +1096,10 @@
'../../src/mips/regexp-macro-assembler-mips.cc',
'../../src/mips/regexp-macro-assembler-mips.h',
'../../src/mips/simulator-mips.cc',
+ '../../src/compiler/mips/code-generator-mips.cc',
+ '../../src/compiler/mips/instruction-codes-mips.h',
+ '../../src/compiler/mips/instruction-selector-mips.cc',
+ '../../src/compiler/mips/linkage-mips.cc',
'../../src/ic/mips/access-compiler-mips.cc',
'../../src/ic/mips/handler-compiler-mips.cc',
'../../src/ic/mips/ic-mips.cc',
@@ -1052,6 +1138,10 @@
'../../src/mips64/regexp-macro-assembler-mips64.cc',
'../../src/mips64/regexp-macro-assembler-mips64.h',
'../../src/mips64/simulator-mips64.cc',
+ '../../src/compiler/mips64/code-generator-mips64.cc',
+ '../../src/compiler/mips64/instruction-codes-mips64.h',
+ '../../src/compiler/mips64/instruction-selector-mips64.cc',
+ '../../src/compiler/mips64/linkage-mips64.cc',
'../../src/ic/mips64/access-compiler-mips64.cc',
'../../src/ic/mips64/handler-compiler-mips64.cc',
'../../src/ic/mips64/ic-mips64.cc',
@@ -1087,10 +1177,6 @@
'../../src/x64/macro-assembler-x64.h',
'../../src/x64/regexp-macro-assembler-x64.cc',
'../../src/x64/regexp-macro-assembler-x64.h',
- '../../src/compiler/x64/code-generator-x64.cc',
- '../../src/compiler/x64/instruction-codes-x64.h',
- '../../src/compiler/x64/instruction-selector-x64.cc',
- '../../src/compiler/x64/linkage-x64.cc',
'../../src/ic/x64/access-compiler-x64.cc',
'../../src/ic/x64/handler-compiler-x64.cc',
'../../src/ic/x64/ic-x64.cc',
@@ -1098,18 +1184,14 @@
'../../src/ic/x64/stub-cache-x64.cc',
],
}],
- ['OS=="linux"', {
- 'link_settings': {
- 'conditions': [
- ['v8_compress_startup_data=="bz2"', {
- 'libraries': [
- '-lbz2',
- ]
- }],
- ],
- },
- }
- ],
+ ['v8_target_arch=="x64"', {
+ 'sources': [
+ '../../src/compiler/x64/code-generator-x64.cc',
+ '../../src/compiler/x64/instruction-codes-x64.h',
+ '../../src/compiler/x64/instruction-selector-x64.cc',
+ '../../src/compiler/x64/linkage-x64.cc',
+ ],
+ }],
['OS=="win"', {
'variables': {
'gyp_generators': '<!(echo $GYP_GENERATORS)',
@@ -1185,6 +1267,9 @@
'../../src/base/division-by-constant.cc',
'../../src/base/division-by-constant.h',
'../../src/base/flags.h',
+ '../../src/base/functional.cc',
+ '../../src/base/functional.h',
+ '../../src/base/iterator.h',
'../../src/base/lazy-instance.h',
'../../src/base/logging.cc',
'../../src/base/logging.h',
@@ -1217,11 +1302,19 @@
'toolsets': ['target'],
}],
['OS=="linux"', {
- 'link_settings': {
- 'libraries': [
- '-lrt'
- ]
- },
+ 'conditions': [
+ ['nacl_target_arch=="none"', {
+ 'link_settings': {
+ 'libraries': [
+ '-lrt'
+ ],
+ },
+ }, {
+ 'defines': [
+ 'V8_LIBRT_NOT_AVAILABLE=1',
+ ],
+ }],
+ ],
'sources': [
'../../src/base/platform/platform-linux.cc',
'../../src/base/platform/platform-posix.cc'
@@ -1349,7 +1442,7 @@
['OS=="solaris"', {
'link_settings': {
'libraries': [
- '-lnsl',
+ '-lnsl -lrt',
]},
'sources': [
'../../src/base/platform/platform-solaris.cc',
@@ -1385,8 +1478,6 @@
'sources': [
'../../src/base/platform/platform-win32.cc',
'../../src/base/win32-headers.h',
- '../../src/base/win32-math.cc',
- '../../src/base/win32-math.h'
],
}],
],
@@ -1397,8 +1488,6 @@
'sources': [
'../../src/base/platform/platform-win32.cc',
'../../src/base/win32-headers.h',
- '../../src/base/win32-math.cc',
- '../../src/base/win32-math.h'
],
'msvs_disabled_warnings': [4351, 4355, 4800],
'link_settings': {
@@ -1523,7 +1612,7 @@
'../../src/array.js',
'../../src/string.js',
'../../src/uri.js',
- '../../third_party/fdlibm/fdlibm.js',
+ '../../src/third_party/fdlibm/fdlibm.js',
'../../src/math.js',
'../../src/apinatives.js',
'../../src/date.js',
@@ -1551,7 +1640,12 @@
'../../src/generator.js',
'../../src/harmony-string.js',
'../../src/harmony-array.js',
+ '../../src/harmony-array-includes.js',
+ '../../src/harmony-tostring.js',
+ '../../src/harmony-typedarray.js',
'../../src/harmony-classes.js',
+ '../../src/harmony-templates.js',
+ '../../src/harmony-regexp.js'
],
'libraries_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries.bin',
'libraries_experimental_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries-experimental.bin',
@@ -1572,7 +1666,6 @@
'../../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
'CORE',
- '<(v8_compress_startup_data)',
'<@(library_files)',
'<@(i18n_library_files)',
],
@@ -1599,7 +1692,6 @@
'../../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'EXPERIMENTAL',
- '<(v8_compress_startup_data)',
'<@(experimental_library_files)'
],
'conditions': [
@@ -1652,16 +1744,17 @@
'../../src/mksnapshot.cc',
],
'conditions': [
+ ['v8_enable_i18n_support==1', {
+ 'dependencies': [
+ '<(icu_gyp_path):icui18n',
+ '<(icu_gyp_path):icuuc',
+ ]
+ }],
['want_separate_host_toolset==1', {
'toolsets': ['host'],
}, {
'toolsets': ['target'],
}],
- ['v8_compress_startup_data=="bz2"', {
- 'libraries': [
- '-lbz2',
- ]
- }],
],
},
],
diff --git a/tools/js2c.py b/tools/js2c.py
index 77485f6..621ed5a 100755
--- a/tools/js2c.py
+++ b/tools/js2c.py
@@ -255,8 +255,6 @@
%(sources_declaration)s\
-%(raw_sources_declaration)s\
-
template <>
int NativesCollection<%(type)s>::GetBuiltinsCount() {
return %(builtin_count)i;
@@ -274,13 +272,8 @@
}
template <>
- int NativesCollection<%(type)s>::GetRawScriptsSize() {
- return %(raw_total_length)i;
- }
-
- template <>
- Vector<const char> NativesCollection<%(type)s>::GetRawScriptSource(int index) {
-%(get_raw_script_source_cases)s\
+ Vector<const char> NativesCollection<%(type)s>::GetScriptSource(int index) {
+%(get_script_source_cases)s\
return Vector<const char>("", 0);
}
@@ -291,32 +284,15 @@
}
template <>
- Vector<const byte> NativesCollection<%(type)s>::GetScriptsSource() {
- return Vector<const byte>(sources, %(total_length)i);
+ Vector<const char> NativesCollection<%(type)s>::GetScriptsSource() {
+ return Vector<const char>(sources, %(total_length)i);
}
-
- template <>
- void NativesCollection<%(type)s>::SetRawScriptsSource(Vector<const char> raw_source) {
- DCHECK(%(raw_total_length)i == raw_source.length());
- raw_sources = raw_source.start();
- }
-
} // internal
} // v8
"""
SOURCES_DECLARATION = """\
- static const byte sources[] = { %s };
-"""
-
-
-RAW_SOURCES_COMPRESSION_DECLARATION = """\
- static const char* raw_sources = NULL;
-"""
-
-
-RAW_SOURCES_DECLARATION = """\
- static const char* raw_sources = reinterpret_cast<const char*>(sources);
+ static const char sources[] = { %s };
"""
@@ -325,8 +301,8 @@
"""
-GET_RAW_SCRIPT_SOURCE_CASE = """\
- if (index == %(i)i) return Vector<const char>(raw_sources + %(offset)i, %(raw_length)i);
+GET_SCRIPT_SOURCE_CASE = """\
+ if (index == %(i)i) return Vector<const char>(sources + %(offset)i, %(source_length)i);
"""
@@ -440,7 +416,7 @@
# Loop over modules and build up indices into the source blob:
get_index_cases = []
get_script_name_cases = []
- get_raw_script_source_cases = []
+ get_script_source_cases = []
offset = 0
for i in xrange(len(sources.modules)):
native_name = "native %s.js" % sources.names[i]
@@ -450,57 +426,38 @@
"name": native_name,
"length": len(native_name),
"offset": offset,
- "raw_length": len(sources.modules[i]),
+ "source_length": len(sources.modules[i]),
}
get_index_cases.append(GET_INDEX_CASE % d)
get_script_name_cases.append(GET_SCRIPT_NAME_CASE % d)
- get_raw_script_source_cases.append(GET_RAW_SCRIPT_SOURCE_CASE % d)
+ get_script_source_cases.append(GET_SCRIPT_SOURCE_CASE % d)
offset += len(sources.modules[i])
assert offset == len(raw_sources)
- # If we have the raw sources we can declare them accordingly.
- have_raw_sources = source_bytes == raw_sources
- raw_sources_declaration = (RAW_SOURCES_DECLARATION
- if have_raw_sources else RAW_SOURCES_COMPRESSION_DECLARATION)
-
metadata = {
"builtin_count": len(sources.modules),
"debugger_count": sum(sources.is_debugger_id),
"sources_declaration": SOURCES_DECLARATION % ToCArray(source_bytes),
- "raw_sources_declaration": raw_sources_declaration,
- "raw_total_length": sum(map(len, sources.modules)),
"total_length": total_length,
"get_index_cases": "".join(get_index_cases),
- "get_raw_script_source_cases": "".join(get_raw_script_source_cases),
+ "get_script_source_cases": "".join(get_script_source_cases),
"get_script_name_cases": "".join(get_script_name_cases),
"type": native_type,
}
return metadata
-def CompressMaybe(sources, compression_type):
- """Take the prepared sources and generate a sequence of bytes.
-
- Args:
- sources: A Sources instance with the prepared sourced.
- compression_type: string, describing the desired compression.
-
- Returns:
- A sequence of bytes.
- """
- sources_bytes = "".join(sources.modules)
- if compression_type == "off":
- return sources_bytes
- elif compression_type == "bz2":
- return bz2.compress(sources_bytes)
- else:
- raise Error("Unknown compression type %s." % compression_type)
-
-
def PutInt(blob_file, value):
- assert(value >= 0 and value < (1 << 20))
- size = 1 if (value < 1 << 6) else (2 if (value < 1 << 14) else 3)
- value_with_length = (value << 2) | size
+ assert(value >= 0 and value < (1 << 28))
+ if (value < 1 << 6):
+ size = 1
+ elif (value < 1 << 14):
+ size = 2
+ elif (value < 1 << 22):
+ size = 3
+ else:
+ size = 4
+ value_with_length = (value << 2) | (size - 1)
byte_sequence = bytearray()
for i in xrange(size):
@@ -538,9 +495,9 @@
output.close()
-def JS2C(source, target, native_type, compression_type, raw_file, startup_blob):
+def JS2C(source, target, native_type, raw_file, startup_blob):
sources = PrepareSources(source)
- sources_bytes = CompressMaybe(sources, compression_type)
+ sources_bytes = "".join(sources.modules)
metadata = BuildMetadata(sources, sources_bytes, native_type)
# Optionally emit raw file.
@@ -564,14 +521,13 @@
help="file to write the processed sources array to.")
parser.add_option("--startup_blob", action="store",
help="file to write the startup blob to.")
- parser.set_usage("""js2c out.cc type compression sources.js ...
+ parser.set_usage("""js2c out.cc type sources.js ...
out.cc: C code to be generated.
type: type parameter for NativesCollection template.
- compression: type of compression used. [off|bz2]
sources.js: JS internal sources or macros.py.""")
(options, args) = parser.parse_args()
- JS2C(args[3:], args[0], args[1], args[2], options.raw, options.startup_blob)
+ JS2C(args[2:], args[0], args[1], options.raw, options.startup_blob)
if __name__ == "__main__":
diff --git a/tools/lexer-shell.cc b/tools/lexer-shell.cc
deleted file mode 100644
index f8ddc02..0000000
--- a/tools/lexer-shell.cc
+++ /dev/null
@@ -1,236 +0,0 @@
-// Copyright 2013 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include <assert.h>
-#include <string.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string>
-#include <vector>
-#include "src/v8.h"
-
-#include "include/libplatform/libplatform.h"
-#include "src/api.h"
-#include "src/base/platform/platform.h"
-#include "src/messages.h"
-#include "src/runtime.h"
-#include "src/scanner-character-streams.h"
-#include "src/scopeinfo.h"
-#include "tools/shell-utils.h"
-#include "src/string-stream.h"
-#include "src/scanner.h"
-
-
-using namespace v8::internal;
-
-
-class BaselineScanner {
- public:
- BaselineScanner(const char* fname,
- Isolate* isolate,
- Encoding encoding,
- v8::base::ElapsedTimer* timer,
- int repeat)
- : stream_(NULL) {
- int length = 0;
- source_ = ReadFileAndRepeat(fname, &length, repeat);
- unicode_cache_ = new UnicodeCache();
- scanner_ = new Scanner(unicode_cache_);
- switch (encoding) {
- case UTF8:
- stream_ = new Utf8ToUtf16CharacterStream(source_, length);
- break;
- case UTF16: {
- Handle<String> result = isolate->factory()->NewStringFromTwoByte(
- Vector<const uint16_t>(
- reinterpret_cast<const uint16_t*>(source_),
- length / 2)).ToHandleChecked();
- stream_ =
- new GenericStringUtf16CharacterStream(result, 0, result->length());
- break;
- }
- case LATIN1: {
- Handle<String> result = isolate->factory()->NewStringFromOneByte(
- Vector<const uint8_t>(source_, length)).ToHandleChecked();
- stream_ =
- new GenericStringUtf16CharacterStream(result, 0, result->length());
- break;
- }
- }
- timer->Start();
- scanner_->Initialize(stream_);
- }
-
- ~BaselineScanner() {
- delete scanner_;
- delete stream_;
- delete unicode_cache_;
- delete[] source_;
- }
-
- Token::Value Next(int* beg_pos, int* end_pos) {
- Token::Value res = scanner_->Next();
- *beg_pos = scanner_->location().beg_pos;
- *end_pos = scanner_->location().end_pos;
- return res;
- }
-
- private:
- UnicodeCache* unicode_cache_;
- Scanner* scanner_;
- const byte* source_;
- BufferedUtf16CharacterStream* stream_;
-};
-
-
-struct TokenWithLocation {
- Token::Value value;
- size_t beg;
- size_t end;
- TokenWithLocation() : value(Token::ILLEGAL), beg(0), end(0) { }
- TokenWithLocation(Token::Value value, size_t beg, size_t end) :
- value(value), beg(beg), end(end) { }
- bool operator==(const TokenWithLocation& other) {
- return value == other.value && beg == other.beg && end == other.end;
- }
- bool operator!=(const TokenWithLocation& other) {
- return !(*this == other);
- }
- void Print(const char* prefix) const {
- printf("%s %11s at (%d, %d)\n",
- prefix, Token::Name(value),
- static_cast<int>(beg), static_cast<int>(end));
- }
-};
-
-
-v8::base::TimeDelta RunBaselineScanner(const char* fname, Isolate* isolate,
- Encoding encoding, bool dump_tokens,
- std::vector<TokenWithLocation>* tokens,
- int repeat) {
- v8::base::ElapsedTimer timer;
- BaselineScanner scanner(fname, isolate, encoding, &timer, repeat);
- Token::Value token;
- int beg, end;
- do {
- token = scanner.Next(&beg, &end);
- if (dump_tokens) {
- tokens->push_back(TokenWithLocation(token, beg, end));
- }
- } while (token != Token::EOS);
- return timer.Elapsed();
-}
-
-
-void PrintTokens(const char* name,
- const std::vector<TokenWithLocation>& tokens) {
- printf("No of tokens: %d\n",
- static_cast<int>(tokens.size()));
- printf("%s:\n", name);
- for (size_t i = 0; i < tokens.size(); ++i) {
- tokens[i].Print("=>");
- }
-}
-
-
-v8::base::TimeDelta ProcessFile(
- const char* fname,
- Encoding encoding,
- Isolate* isolate,
- bool print_tokens,
- int repeat) {
- if (print_tokens) {
- printf("Processing file %s\n", fname);
- }
- HandleScope handle_scope(isolate);
- std::vector<TokenWithLocation> baseline_tokens;
- v8::base::TimeDelta baseline_time;
- baseline_time = RunBaselineScanner(
- fname, isolate, encoding, print_tokens,
- &baseline_tokens, repeat);
- if (print_tokens) {
- PrintTokens("Baseline", baseline_tokens);
- }
- return baseline_time;
-}
-
-
-int main(int argc, char* argv[]) {
- v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
- v8::V8::InitializeICU();
- v8::Platform* platform = v8::platform::CreateDefaultPlatform();
- v8::V8::InitializePlatform(platform);
- v8::V8::Initialize();
- Encoding encoding = LATIN1;
- bool print_tokens = false;
- std::vector<std::string> fnames;
- std::string benchmark;
- int repeat = 1;
- for (int i = 0; i < argc; ++i) {
- if (strcmp(argv[i], "--latin1") == 0) {
- encoding = LATIN1;
- } else if (strcmp(argv[i], "--utf8") == 0) {
- encoding = UTF8;
- } else if (strcmp(argv[i], "--utf16") == 0) {
- encoding = UTF16;
- } else if (strcmp(argv[i], "--print-tokens") == 0) {
- print_tokens = true;
- } else if (strncmp(argv[i], "--benchmark=", 12) == 0) {
- benchmark = std::string(argv[i]).substr(12);
- } else if (strncmp(argv[i], "--repeat=", 9) == 0) {
- std::string repeat_str = std::string(argv[i]).substr(9);
- repeat = atoi(repeat_str.c_str());
- } else if (i > 0 && argv[i][0] != '-') {
- fnames.push_back(std::string(argv[i]));
- }
- }
- v8::Isolate* isolate = v8::Isolate::New();
- {
- v8::Isolate::Scope isolate_scope(isolate);
- v8::HandleScope handle_scope(isolate);
- v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
- v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
- DCHECK(!context.IsEmpty());
- {
- v8::Context::Scope scope(context);
- double baseline_total = 0;
- for (size_t i = 0; i < fnames.size(); i++) {
- v8::base::TimeDelta time;
- time = ProcessFile(fnames[i].c_str(), encoding,
- reinterpret_cast<Isolate*>(isolate), print_tokens,
- repeat);
- baseline_total += time.InMillisecondsF();
- }
- if (benchmark.empty()) benchmark = "Baseline";
- printf("%s(RunTime): %.f ms\n", benchmark.c_str(), baseline_total);
- }
- }
- v8::V8::Dispose();
- v8::V8::ShutdownPlatform();
- delete platform;
- return 0;
-}
diff --git a/tools/logreader.js b/tools/logreader.js
index a8141da..5f0ec7f 100644
--- a/tools/logreader.js
+++ b/tools/logreader.js
@@ -108,6 +108,8 @@
// Filter out possible 'overflow' string.
} else if (firstChar != 'o') {
fullStack.push(parseInt(frame, 16));
+ } else {
+ print("dropping: " + frame);
}
}
return fullStack;
diff --git a/tools/nacl-run.py b/tools/nacl-run.py
index 135172c..32055fe 100755
--- a/tools/nacl-run.py
+++ b/tools/nacl-run.py
@@ -32,6 +32,7 @@
import os
from os.path import join, dirname, abspath
+import re
import subprocess
import sys
import tempfile
@@ -82,7 +83,7 @@
try:
p = subprocess.Popen(['file', nexe], stdout=subprocess.PIPE)
out, err = p.communicate()
- lines = out.split('\n')
+ lines = [re.sub("\s+", " " , line) for line in out.split('\n')]
if lines[0].find(": ELF 32-bit LSB executable, Intel 80386") > 0:
return "x86_32"
if lines[0].find(": ELF 64-bit LSB executable, x86-64") > 0:
@@ -116,17 +117,13 @@
print("NaCl V8 ARM support is not ready yet.")
sys.exit(1)
else:
- print("Invalid nexe %s" % nexe)
+ print("Invalid nexe %s with NaCl arch %s" % (nexe, nacl_arch))
sys.exit(1)
nacl_sel_ldr = os.path.join(nacl_sdk_dir, "tools", sel_ldr)
nacl_irt = os.path.join(nacl_sdk_dir, "tools", irt)
- nacl_ld_so = os.path.join(nacl_sdk_dir, "toolchain", toolchain,
- "x86_64-nacl", libdir, "runnable-ld.so")
- nacl_lib_path = os.path.join(nacl_sdk_dir, "toolchain", toolchain,
- "x86_64-nacl", libdir)
- return (nacl_sdk_dir, nacl_sel_ldr, nacl_irt, nacl_ld_so, nacl_lib_path)
+ return (nacl_sdk_dir, nacl_sel_ldr, nacl_irt)
def Main():
if (len(sys.argv) == 1):
@@ -135,15 +132,14 @@
args = [Escape(arg) for arg in sys.argv[1:]]
- (nacl_sdk_dir, nacl_sel_ldr, nacl_irt, nacl_ld_so,
- nacl_lib_path) = GetNaClResources(sys.argv[1])
+ (nacl_sdk_dir, nacl_sel_ldr, nacl_irt) = GetNaClResources(sys.argv[1])
# sel_ldr Options:
# -c -c: disable validation (for performance)
# -a: allow file access
# -B <irt>: load the IRT
- command = ' '.join([nacl_sel_ldr, '-c', '-c', '-a', '-B', nacl_irt, '--',
- nacl_ld_so, '--library-path', nacl_lib_path] + args)
+ command = ' '.join([nacl_sel_ldr, '-c', '-c', '-a', '-B', nacl_irt, '--'] +
+ args)
error_code = Execute(command)
return error_code
diff --git a/tools/lexer-shell.gyp b/tools/parser-shell.gyp
similarity index 81%
rename from tools/lexer-shell.gyp
rename to tools/parser-shell.gyp
index 836ea97..f0f0b8b 100644
--- a/tools/lexer-shell.gyp
+++ b/tools/parser-shell.gyp
@@ -33,29 +33,6 @@
'includes': ['../build/toolchain.gypi', '../build/features.gypi'],
'targets': [
{
- 'target_name': 'lexer-shell',
- 'type': 'executable',
- 'dependencies': [
- '../tools/gyp/v8.gyp:v8',
- '../tools/gyp/v8.gyp:v8_libplatform',
- ],
- 'conditions': [
- ['v8_enable_i18n_support==1', {
- 'dependencies': [
- '<(icu_gyp_path):icui18n',
- '<(icu_gyp_path):icuuc',
- ],
- }],
- ],
- 'include_dirs+': [
- '..',
- ],
- 'sources': [
- 'lexer-shell.cc',
- 'shell-utils.h',
- ],
- },
- {
'target_name': 'parser-shell',
'type': 'executable',
'dependencies': [
diff --git a/tools/presubmit.py b/tools/presubmit.py
index 8a6ff2a..321d291 100755
--- a/tools/presubmit.py
+++ b/tools/presubmit.py
@@ -236,7 +236,8 @@
or (name in CppLintProcessor.IGNORE_LINT))
def GetPathsToSearch(self):
- return ['src', 'include', 'samples', join('test', 'cctest')]
+ return ['src', 'include', 'samples', join('test', 'cctest'),
+ join('test', 'unittests')]
def GetCpplintScript(self, prio_path):
for path in [prio_path] + os.environ["PATH"].split(os.pathsep):
@@ -326,16 +327,25 @@
return (super(SourceProcessor, self).IgnoreDir(name) or
name in ('third_party', 'gyp', 'out', 'obj', 'DerivedSources'))
- IGNORE_COPYRIGHTS = ['cpplint.py',
+ IGNORE_COPYRIGHTS = ['box2d.js',
+ 'cpplint.py',
+ 'copy.js',
+ 'corrections.js',
+ 'crypto.js',
'daemon.py',
'earley-boyer.js',
- 'raytrace.js',
- 'crypto.js',
+ 'fannkuch.js',
+ 'fasta.js',
+ 'jsmin.py',
'libraries.cc',
'libraries-empty.cc',
- 'jsmin.py',
+ 'lua_binarytrees.js',
+ 'memops.js',
+ 'primes.js',
+ 'raytrace.js',
'regexp-pcre.js',
- 'gnuplot-4.6.3-emscripten.js']
+ 'gnuplot-4.6.3-emscripten.js',
+ 'zlib.js']
IGNORE_TABS = IGNORE_COPYRIGHTS + ['unicode-test.js', 'html-comments.js']
def EndOfDeclaration(self, line):
diff --git a/tools/profile.js b/tools/profile.js
index 10a07f8..a06cd3a 100644
--- a/tools/profile.js
+++ b/tools/profile.js
@@ -36,6 +36,7 @@
this.codeMap_ = new CodeMap();
this.topDownTree_ = new CallTree();
this.bottomUpTree_ = new CallTree();
+ this.c_entries_ = {};
};
@@ -102,7 +103,7 @@
Profile.prototype.addLibrary = function(
name, startAddr, endAddr) {
var entry = new CodeMap.CodeEntry(
- endAddr - startAddr, name);
+ endAddr - startAddr, name, 'SHARED_LIB');
this.codeMap_.addLibrary(startAddr, entry);
return entry;
};
@@ -118,7 +119,7 @@
Profile.prototype.addStaticCode = function(
name, startAddr, endAddr) {
var entry = new CodeMap.CodeEntry(
- endAddr - startAddr, name);
+ endAddr - startAddr, name, 'CPP');
this.codeMap_.addStaticCode(startAddr, entry);
return entry;
};
@@ -250,10 +251,26 @@
*/
Profile.prototype.resolveAndFilterFuncs_ = function(stack) {
var result = [];
+ var last_seen_c_function = '';
+ var look_for_first_c_function = false;
for (var i = 0; i < stack.length; ++i) {
var entry = this.codeMap_.findEntry(stack[i]);
if (entry) {
var name = entry.getName();
+ if (i == 0 && (entry.type == 'CPP' || entry.type == 'SHARED_LIB')) {
+ look_for_first_c_function = true;
+ }
+ if (look_for_first_c_function) {
+ if (entry.type == 'CPP') {
+ last_seen_c_function = name;
+ } else if (i > 0 && last_seen_c_function != '') {
+ if (this.c_entries_[last_seen_c_function] === undefined) {
+ this.c_entries_[last_seen_c_function] = 0;
+ }
+ this.c_entries_[last_seen_c_function]++;
+ look_for_first_c_function = false; // Found it, we're done.
+ }
+ }
if (!this.skipThisFunction(name)) {
result.push(name);
}
@@ -381,6 +398,28 @@
};
+Profile.CEntryNode = function(name, ticks) {
+ this.name = name;
+ this.ticks = ticks;
+}
+
+
+Profile.prototype.getCEntryProfile = function() {
+ var result = [new Profile.CEntryNode("TOTAL", 0)];
+ var total_ticks = 0;
+ for (var f in this.c_entries_) {
+ var ticks = this.c_entries_[f];
+ total_ticks += ticks;
+ result.push(new Profile.CEntryNode(f, ticks));
+ }
+ result[0].ticks = total_ticks; // Sorting will keep this at index 0.
+ result.sort(function(n1, n2) {
+ return n2.ticks - n1.ticks || (n2.name < n1.name ? -1 : 1)
+ });
+ return result;
+}
+
+
/**
* Cleans up function entries that are not referenced by code entries.
*/
@@ -415,8 +454,7 @@
* @constructor
*/
Profile.DynamicCodeEntry = function(size, type, name) {
- CodeMap.CodeEntry.call(this, size, name);
- this.type = type;
+ CodeMap.CodeEntry.call(this, size, name, type);
};
@@ -456,8 +494,7 @@
* @constructor
*/
Profile.DynamicFuncCodeEntry = function(size, type, func, state) {
- CodeMap.CodeEntry.call(this, size);
- this.type = type;
+ CodeMap.CodeEntry.call(this, size, '', type);
this.func = func;
this.state = state;
};
diff --git a/tools/push-to-trunk/auto_push.py b/tools/push-to-trunk/auto_push.py
index fef3b53..34afa4a 100755
--- a/tools/push-to-trunk/auto_push.py
+++ b/tools/push-to-trunk/auto_push.py
@@ -36,7 +36,7 @@
from common_includes import *
import push_to_trunk
-PUSH_MESSAGE_RE = re.compile(r".* \(based on bleeding_edge revision r(\d+)\)$")
+PUSH_MESSAGE_RE = re.compile(r".* \(based on ([a-fA-F0-9]+)\)$")
class Preparation(Step):
MESSAGE = "Preparation."
@@ -70,13 +70,12 @@
% self["tree_message"])
-class FetchLKGR(Step):
- MESSAGE = "Fetching V8 LKGR."
+class FetchCandidate(Step):
+ MESSAGE = "Fetching V8 roll candidate ref."
def RunStep(self):
- lkgr_url = "https://v8-status.appspot.com/lkgr"
- # Retry several times since app engine might have issues.
- self["lkgr"] = self.ReadURL(lkgr_url, wait_plan=[5, 20, 300, 300])
+ self.Git("fetch origin +refs/heads/candidate:refs/heads/candidate")
+ self["candidate"] = self.Git("show-ref -s refs/heads/candidate").strip()
class CheckLastPush(Step):
@@ -94,28 +93,30 @@
self.Die("Could not retrieve bleeding edge revision for trunk push %s"
% last_push)
- # TODO(machenbach): This metric counts all revisions. It could be
- # improved by counting only the revisions on bleeding_edge.
- if int(self["lkgr"]) - int(last_push_be) < 10: # pragma: no cover
- # This makes sure the script doesn't push twice in a row when the cron
- # job retries several times.
- self.Die("Last push too recently: %s" % last_push_be)
+ if self["candidate"] == last_push_be:
+ print "Already pushed current candidate %s" % last_push_be
+ return True
-class PushToTrunk(Step):
- MESSAGE = "Pushing to trunk if specified."
+class PushToCandidates(Step):
+ MESSAGE = "Pushing to candidates if specified."
def RunStep(self):
- print "Pushing lkgr %s to trunk." % self["lkgr"]
+ print "Pushing candidate %s to candidates." % self["candidate"]
+
+ args = [
+ "--author", self._options.author,
+ "--reviewer", self._options.reviewer,
+ "--revision", self["candidate"],
+ "--force",
+ ]
+
+ if self._options.work_dir:
+ args.extend(["--work-dir", self._options.work_dir])
# TODO(machenbach): Update the script before calling it.
if self._options.push:
- self._side_effect_handler.Call(
- push_to_trunk.PushToTrunk().Run,
- ["--author", self._options.author,
- "--reviewer", self._options.reviewer,
- "--revision", self["lkgr"],
- "--force"])
+ self._side_effect_handler.Call(push_to_trunk.PushToTrunk().Run, args)
class AutoPush(ScriptsBase):
@@ -142,9 +143,9 @@
Preparation,
CheckAutoPushSettings,
CheckTreeStatus,
- FetchLKGR,
+ FetchCandidate,
CheckLastPush,
- PushToTrunk,
+ PushToCandidates,
]
diff --git a/tools/push-to-trunk/auto_roll.py b/tools/push-to-trunk/auto_roll.py
index 120e633..1b57097 100755
--- a/tools/push-to-trunk/auto_roll.py
+++ b/tools/push-to-trunk/auto_roll.py
@@ -42,8 +42,9 @@
MESSAGE = "Detect commit ID of the last push to trunk."
def RunStep(self):
+ self.vc.Fetch()
push_hash = self.FindLastTrunkPush(
- branch="origin/master", include_patches=True)
+ branch="origin/candidates", include_patches=True)
self["last_push"] = self.GetCommitPositionNumber(push_hash)
@@ -99,6 +100,8 @@
"--sheriff", "--googlers-mapping", self._options.googlers_mapping])
if self._options.dry_run:
args.extend(["--dry-run"])
+ if self._options.work_dir:
+ args.extend(["--work-dir", self._options.work_dir])
self._side_effect_handler.Call(chromium_roll.ChromiumRoll().Run, args)
diff --git a/tools/push-to-trunk/auto_tag.py b/tools/push-to-trunk/auto_tag.py
index 175e10e..a52a028 100755
--- a/tools/push-to-trunk/auto_tag.py
+++ b/tools/push-to-trunk/auto_tag.py
@@ -13,10 +13,15 @@
MESSAGE = "Preparation."
def RunStep(self):
+ # TODO(machenbach): Remove after the git switch.
+ if self.Config("PERSISTFILE_BASENAME") == "/tmp/v8-auto-tag-tempfile":
+ print "This script is disabled until after the v8 git migration."
+ return True
+
self.CommonPrepare()
self.PrepareBranch()
self.GitCheckout("master")
- self.GitSVNRebase()
+ self.vc.Pull()
class GetTags(Step):
@@ -24,13 +29,7 @@
def RunStep(self):
self.GitCreateBranch(self._config["BRANCHNAME"])
-
- # Get remote tags.
- tags = filter(lambda s: re.match(r"^svn/tags/[\d+\.]+$", s),
- self.GitRemotes())
-
- # Remove 'svn/tags/' prefix.
- self["tags"] = map(lambda s: s[9:], tags)
+ self["tags"] = self.vc.GetTags()
class GetOldestUntaggedVersion(Step):
@@ -114,9 +113,9 @@
def RunStep(self):
# Get the lkgr after the tag candidate and before the next tag candidate.
- candidate_svn = self.GitSVNFindSVNRev(self["candidate"])
+ candidate_svn = self.vc.GitSvn(self["candidate"])
if self["next"]:
- next_svn = self.GitSVNFindSVNRev(self["next"])
+ next_svn = self.vc.GitSvn(self["next"])
else:
# Don't include the version change commit itself if there is no upper
# limit yet.
@@ -130,7 +129,7 @@
return True
# Let's check if the lkgr is at least three hours old.
- self["lkgr"] = self.GitSVNFindGitHash(lkgr_svn)
+ self["lkgr"] = self.vc.SvnGit(lkgr_svn)
if not self["lkgr"]:
print "Couldn't find git hash for lkgr %s" % lkgr_svn
self.CommonCleanup()
@@ -153,7 +152,10 @@
def RunStep(self):
if not self._options.dry_run:
self.GitReset(self["lkgr"])
- self.GitSVNTag(self["candidate_version"])
+ # FIXME(machenbach): Make this work with the git repo.
+ self.vc.Tag(self["candidate_version"],
+ "svn/bleeding_edge",
+ "This won't work!")
class CleanUp(Step):
diff --git a/tools/push-to-trunk/bump_up_version.py b/tools/push-to-trunk/bump_up_version.py
index c9f052b..647708c 100755
--- a/tools/push-to-trunk/bump_up_version.py
+++ b/tools/push-to-trunk/bump_up_version.py
@@ -28,17 +28,23 @@
VERSION_BRANCH = "auto-bump-up-version"
+# TODO(machenbach): Add vc interface that works on git mirror.
class Preparation(Step):
MESSAGE = "Preparation."
def RunStep(self):
+ # TODO(machenbach): Remove after the git switch.
+ if(self.Config("PERSISTFILE_BASENAME") ==
+ "/tmp/v8-bump-up-version-tempfile"):
+ print "This script is disabled until after the v8 git migration."
+ return True
+
# Check for a clean workdir.
if not self.GitIsWorkdirClean(): # pragma: no cover
# This is in case a developer runs this script on a dirty tree.
self.GitStash()
- # TODO(machenbach): This should be called master after the git switch.
- self.GitCheckout("bleeding_edge")
+ self.GitCheckout("master")
self.GitPull()
@@ -50,8 +56,7 @@
MESSAGE = "Get latest bleeding edge version."
def RunStep(self):
- # TODO(machenbach): This should be called master after the git switch.
- self.GitCheckout("bleeding_edge")
+ self.GitCheckout("master")
# Store latest version and revision.
self.ReadAndPersistVersion()
@@ -88,7 +93,7 @@
MESSAGE = "Get bleeding edge lkgr version."
def RunStep(self):
- self.GitCheckout("bleeding_edge")
+ self.GitCheckout("master")
# If the commit was made from svn, there is a mapping entry in the commit
# message.
self["lkgr"] = self.GitLog(
@@ -106,7 +111,7 @@
print "LKGR version: %s" % self["lkgr_version"]
# Ensure a clean version branch.
- self.GitCheckout("bleeding_edge")
+ self.GitCheckout("master")
self.DeleteBranch(VERSION_BRANCH)
@@ -131,8 +136,7 @@
MESSAGE = "Get latest trunk version."
def RunStep(self):
- # TODO(machenbach): This should be called trunk after the git switch.
- self.GitCheckout("master")
+ self.GitCheckout("candidates")
self.GitPull()
self.ReadAndPersistVersion("trunk_")
self["trunk_version"] = self.ArrayToVersion("trunk_")
@@ -186,7 +190,7 @@
MESSAGE = "Bump up the version."
def RunStep(self):
- self.GitCreateBranch(VERSION_BRANCH, "bleeding_edge")
+ self.GitCreateBranch(VERSION_BRANCH, "master")
self.SetVersion(os.path.join(self.default_cwd, VERSION_FILE), "new_")
@@ -194,17 +198,14 @@
msg = "[Auto-roll] Bump up version to %s" % self["new_version"]
self.GitCommit("%s\n\nTBR=%s" % (msg, self._options.author),
author=self._options.author)
- if self._options.svn:
- self.SVNCommit("branches/bleeding_edge", msg)
- else:
- self.GitUpload(author=self._options.author,
- force=self._options.force_upload,
- bypass_hooks=True)
- self.GitDCommit()
+ self.GitUpload(author=self._options.author,
+ force=self._options.force_upload,
+ bypass_hooks=True)
+ self.GitCLLand()
print "Successfully changed the version."
finally:
# Clean up.
- self.GitCheckout("bleeding_edge")
+ self.GitCheckout("master")
self.DeleteBranch(VERSION_BRANCH)
@@ -225,6 +226,7 @@
def _Config(self):
return {
"PERSISTFILE_BASENAME": "/tmp/v8-bump-up-version-tempfile",
+ "PATCH_FILE": "/tmp/v8-bump-up-version-tempfile-patch-file",
}
def _Steps(self):
diff --git a/tools/push-to-trunk/check_clusterfuzz.py b/tools/push-to-trunk/check_clusterfuzz.py
new file mode 100755
index 0000000..d4ba90b
--- /dev/null
+++ b/tools/push-to-trunk/check_clusterfuzz.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python
+# Copyright 2014 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Script to check for new clusterfuzz issues since the last rolled v8 revision.
+
+Returns a json list with test case IDs if any.
+
+Security considerations: The security key and request data must never be
+written to public logs. Public automated callers of this script should
+suppress stdout and stderr and only process contents of the results_file.
+"""
+
+
+import argparse
+import httplib
+import json
+import os
+import re
+import sys
+import urllib
+import urllib2
+
+
+# Constants to git repos.
+BASE_URL = "https://chromium.googlesource.com"
+DEPS_LOG = BASE_URL + "/chromium/src/+log/master/DEPS?format=JSON"
+
+# Constants for retrieving v8 rolls.
+CRREV = "https://cr-rev.appspot.com/_ah/api/crrev/v1/commit/%s"
+V8_COMMIT_RE = re.compile(
+ r"^Update V8 to version \d+\.\d+\.\d+ \(based on ([a-fA-F0-9]+)\)\..*")
+
+# Constants for the clusterfuzz backend.
+HOSTNAME = "backend-dot-cluster-fuzz.appspot.com"
+
+# Crash patterns.
+V8_INTERNAL_RE = re.compile(r"^v8::internal.*")
+ANY_RE = re.compile(r".*")
+
+# List of all api requests.
+BUG_SPECS = [
+ {
+ "args": {
+ "job_type": "linux_asan_chrome_v8",
+ "reproducible": "True",
+ "open": "True",
+ "bug_information": "",
+ },
+ "crash_state": V8_INTERNAL_RE,
+ },
+ {
+ "args": {
+ "job_type": "linux_asan_d8_dbg",
+ "reproducible": "True",
+ "open": "True",
+ "bug_information": "",
+ },
+ "crash_state": ANY_RE,
+ },
+]
+
+
+def GetRequest(url):
+ url_fh = urllib2.urlopen(url, None, 60)
+ try:
+ return url_fh.read()
+ finally:
+ url_fh.close()
+
+
+def GetLatestV8InChromium():
+ """Returns the commit position number of the latest v8 roll in chromium."""
+
+ # Check currently rolled v8 revision.
+ result = GetRequest(DEPS_LOG)
+ if not result:
+ return None
+
+ # Strip security header and load json.
+ commits = json.loads(result[5:])
+
+ git_revision = None
+ for commit in commits["log"]:
+ # Get latest commit that matches the v8 roll pattern. Ignore cherry-picks.
+ match = re.match(V8_COMMIT_RE, commit["message"])
+ if match:
+ git_revision = match.group(1)
+ break
+ else:
+ return None
+
+ # Get commit position number for v8 revision.
+ result = GetRequest(CRREV % git_revision)
+ if not result:
+ return None
+
+ commit = json.loads(result)
+ assert commit["repo"] == "v8/v8"
+ return commit["number"]
+
+
+def APIRequest(key, **params):
+ """Send a request to the clusterfuzz api.
+
+ Returns a json dict of the response.
+ """
+
+ params["api_key"] = key
+ params = urllib.urlencode(params)
+
+ headers = {"Content-type": "application/x-www-form-urlencoded"}
+
+ try:
+ conn = httplib.HTTPSConnection(HOSTNAME)
+ conn.request("POST", "/_api/", params, headers)
+
+ response = conn.getresponse()
+
+ # Never leak "data" into public logs.
+ data = response.read()
+ except:
+ raise Exception("ERROR: Connection problem.")
+
+ try:
+ return json.loads(data)
+ except:
+ raise Exception("ERROR: Could not read response. Is your key valid?")
+
+ return None
+
+
+def Main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-k", "--key-file", required=True,
+ help="A file with the clusterfuzz api key.")
+ parser.add_argument("-r", "--results-file",
+ help="A file to write the results to.")
+ options = parser.parse_args()
+
+ # Get api key. The key's content must never be logged.
+ assert options.key_file
+ with open(options.key_file) as f:
+ key = f.read().strip()
+ assert key
+
+ revision_number = GetLatestV8InChromium()
+
+ results = []
+ for spec in BUG_SPECS:
+ args = dict(spec["args"])
+ # Use incremented revision as we're interested in all revision greater than
+ # what's currently rolled into chromium.
+ if revision_number:
+ args["revision_greater_or_equal"] = str(int(revision_number) + 1)
+
+ # Never print issue details in public logs.
+ issues = APIRequest(key, **args)
+ assert issues is not None
+ for issue in issues:
+ if re.match(spec["crash_state"], issue["crash_state"]):
+ results.append(issue["id"])
+
+ if options.results_file:
+ with open(options.results_file, "w") as f:
+ f.write(json.dumps(results))
+ else:
+ print results
+
+
+if __name__ == "__main__":
+ sys.exit(Main())
diff --git a/tools/push-to-trunk/chromium_roll.py b/tools/push-to-trunk/chromium_roll.py
index dc5e6eb..5c9a38e 100755
--- a/tools/push-to-trunk/chromium_roll.py
+++ b/tools/push-to-trunk/chromium_roll.py
@@ -23,8 +23,7 @@
def RunStep(self):
self["last_push"] = self._options.last_push or self.FindLastTrunkPush(
- branch="origin/master", include_patches=True)
- self["trunk_revision"] = self.GetCommitPositionNumber(self["last_push"])
+ branch="origin/candidates", include_patches=True)
self["push_title"] = self.GitLog(n=1, format="%s",
git_hash=self["last_push"])
@@ -56,7 +55,7 @@
# Update v8 remotes.
self.GitFetchOrigin()
- self.GitCreateBranch("v8-roll-%s" % self["trunk_revision"],
+ self.GitCreateBranch("v8-roll-%s" % self["last_push"],
cwd=self._options.chromium)
@@ -66,9 +65,9 @@
def RunStep(self):
# Patch DEPS file.
if self.Command(
- "roll-dep", "v8 %s" % self["trunk_revision"],
+ "roll-dep", "v8 %s" % self["last_push"],
cwd=self._options.chromium) is None:
- self.Die("Failed to create deps for %s" % self["trunk_revision"])
+ self.Die("Failed to create deps for %s" % self["last_push"])
commit_title = "Update V8 to %s." % self["push_title"].lower()
sheriff = ""
@@ -87,7 +86,7 @@
print "CL uploaded."
else:
self.GitCheckout("master", cwd=self._options.chromium)
- self.GitDeleteBranch("v8-roll-%s" % self["trunk_revision"],
+ self.GitDeleteBranch("v8-roll-%s" % self["last_push"],
cwd=self._options.chromium)
print "Dry run - don't upload."
@@ -105,9 +104,9 @@
MESSAGE = "Done!"
def RunStep(self):
- print("Congratulations, you have successfully rolled the push r%s it into "
+ print("Congratulations, you have successfully rolled %s into "
"Chromium. Please don't forget to update the v8rel spreadsheet."
- % self["trunk_revision"])
+ % self["last_push"])
# Clean up all temporary files.
Command("rm", "-f %s*" % self._config["PERSISTFILE_BASENAME"])
diff --git a/tools/push-to-trunk/common_includes.py b/tools/push-to-trunk/common_includes.py
index 00fb097..ac78ef8 100644
--- a/tools/push-to-trunk/common_includes.py
+++ b/tools/push-to-trunk/common_includes.py
@@ -45,10 +45,11 @@
from git_recipes import GitRecipesMixin
from git_recipes import GitFailedException
+CHANGELOG_FILE = "ChangeLog"
VERSION_FILE = os.path.join("src", "version.cc")
# V8 base directory.
-DEFAULT_CWD = os.path.dirname(
+V8_BASE = os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -255,6 +256,107 @@
pass
+class VCInterface(object):
+ def InjectStep(self, step):
+ self.step=step
+
+ def Pull(self):
+ raise NotImplementedError()
+
+ def Fetch(self):
+ raise NotImplementedError()
+
+ def GetTags(self):
+ raise NotImplementedError()
+
+ def GetBranches(self):
+ raise NotImplementedError()
+
+ def MasterBranch(self):
+ raise NotImplementedError()
+
+ def CandidateBranch(self):
+ raise NotImplementedError()
+
+ def RemoteMasterBranch(self):
+ raise NotImplementedError()
+
+ def RemoteCandidateBranch(self):
+ raise NotImplementedError()
+
+ def RemoteBranch(self, name):
+ raise NotImplementedError()
+
+ def CLLand(self):
+ raise NotImplementedError()
+
+ def Tag(self, tag, remote, message):
+ """Sets a tag for the current commit.
+
+ Assumptions: The commit already landed and the commit message is unique.
+ """
+ raise NotImplementedError()
+
+
+class GitInterface(VCInterface):
+ def Pull(self):
+ self.step.GitPull()
+
+ def Fetch(self):
+ self.step.Git("fetch")
+
+ def GetTags(self):
+ return self.step.Git("tag").strip().splitlines()
+
+ def GetBranches(self):
+ # Get relevant remote branches, e.g. "branch-heads/3.25".
+ branches = filter(
+ lambda s: re.match(r"^branch\-heads/\d+\.\d+$", s),
+ self.step.GitRemotes())
+ # Remove 'branch-heads/' prefix.
+ return map(lambda s: s[13:], branches)
+
+ def MasterBranch(self):
+ return "master"
+
+ def CandidateBranch(self):
+ return "candidates"
+
+ def RemoteMasterBranch(self):
+ return "origin/master"
+
+ def RemoteCandidateBranch(self):
+ return "origin/candidates"
+
+ def RemoteBranch(self, name):
+ if name in ["candidates", "master"]:
+ return "origin/%s" % name
+ return "branch-heads/%s" % name
+
+ def Tag(self, tag, remote, message):
+ # Wait for the commit to appear. Assumes unique commit message titles (this
+ # is the case for all automated merge and push commits - also no title is
+ # the prefix of another title).
+ commit = None
+ for wait_interval in [3, 7, 15, 35, 45, 60]:
+ self.step.Git("fetch")
+ commit = self.step.GitLog(n=1, format="%H", grep=message, branch=remote)
+ if commit:
+ break
+ print("The commit has not replicated to git. Waiting for %s seconds." %
+ wait_interval)
+ self.step._side_effect_handler.Sleep(wait_interval)
+ else:
+ self.step.Die("Couldn't determine commit for setting the tag. Maybe the "
+ "git updater is lagging behind?")
+
+ self.step.Git("tag %s %s" % (tag, commit))
+ self.step.Git("push origin %s" % tag)
+
+ def CLLand(self):
+ self.step.GitCLLand()
+
+
class Step(GitRecipesMixin):
def __init__(self, text, number, config, state, options, handler):
self._text = text
@@ -263,9 +365,12 @@
self._state = state
self._options = options
self._side_effect_handler = handler
+ self.vc = GitInterface()
+ self.vc.InjectStep(self)
# The testing configuration might set a different default cwd.
- self.default_cwd = self._config.get("DEFAULT_CWD") or DEFAULT_CWD
+ self.default_cwd = (self._config.get("DEFAULT_CWD") or
+ os.path.join(self._options.work_dir, "v8"))
assert self._number >= 0
assert self._config is not None
@@ -354,11 +459,6 @@
raise GitFailedException("'git %s' failed." % args)
return result
- def SVN(self, args="", prefix="", pipe=True, retry_on=None, cwd=None):
- cmd = lambda: self._side_effect_handler.Command(
- "svn", args, prefix, pipe, cwd=cwd or self.default_cwd)
- return self.Retry(cmd, retry_on, [5, 30])
-
def Editor(self, args):
if self._options.requires_editor:
return self._side_effect_handler.Command(
@@ -422,14 +522,17 @@
self["current_branch"] = self.GitCurrentBranch()
# Fetch unfetched revisions.
- self.GitSVNFetch()
+ self.vc.Fetch()
def PrepareBranch(self):
# Delete the branch that will be created later if it exists already.
self.DeleteBranch(self._config["BRANCHNAME"])
def CommonCleanup(self):
- self.GitCheckout(self["current_branch"])
+ if ' ' in self["current_branch"]:
+ self.GitCheckout('master')
+ else:
+ self.GitCheckout(self["current_branch"])
if self._config["BRANCHNAME"] != self["current_branch"]:
self.GitDeleteBranch(self._config["BRANCHNAME"])
@@ -493,7 +596,7 @@
# Non-patched versions only have three numbers followed by the "(based
# on...) comment."
push_pattern += " (based"
- branch = "" if parent_hash else branch or "svn/trunk"
+ branch = "" if parent_hash else branch or self.vc.RemoteCandidateBranch()
return self.GitLog(n=1, format="%H", grep=push_pattern,
parent_hash=parent_hash, branch=branch)
@@ -517,21 +620,18 @@
output += "%s\n" % line
TextToFile(output, version_file)
- def SVNCommit(self, root, commit_message):
- patch = self.GitDiff("HEAD^", "HEAD")
- TextToFile(patch, self._config["PATCH_FILE"])
- self.Command("svn", "update", cwd=self._options.svn)
- if self.Command("svn", "status", cwd=self._options.svn) != "":
- self.Die("SVN checkout not clean.")
- if not self.Command("patch", "-d %s -p1 -i %s" %
- (root, self._config["PATCH_FILE"]),
- cwd=self._options.svn):
- self.Die("Could not apply patch.")
- self.Command(
- "svn",
- "commit --non-interactive --username=%s --config-dir=%s -m \"%s\"" %
- (self._options.author, self._options.svn_config, commit_message),
- cwd=self._options.svn)
+
+class BootstrapStep(Step):
+ MESSAGE = "Bootstapping v8 checkout."
+
+ def RunStep(self):
+ if os.path.realpath(self.default_cwd) == os.path.realpath(V8_BASE):
+ self.Die("Can't use v8 checkout with calling script as work checkout.")
+ # Directory containing the working v8 checkout.
+ if not os.path.exists(self._options.work_dir):
+ os.makedirs(self._options.work_dir)
+ if not os.path.exists(self.default_cwd):
+ self.Command("fetch", "v8", cwd=self._options.work_dir)
class UploadStep(Step):
@@ -546,7 +646,8 @@
self.DieNoManualMode("A reviewer must be specified in forced mode.")
reviewer = self.ReadLine()
self.GitUpload(reviewer, self._options.author, self._options.force_upload,
- bypass_hooks=self._options.bypass_upload_hooks)
+ bypass_hooks=self._options.bypass_upload_hooks,
+ cc=self._options.cc)
class DetermineV8Sheriff(Step):
@@ -600,7 +701,6 @@
class ScriptsBase(object):
- # TODO(machenbach): Move static config here.
def __init__(self,
config=None,
side_effect_handler=DEFAULT_SIDE_EFFECT_HANDLER,
@@ -638,14 +738,12 @@
help=("Determine current sheriff to review CLs. On "
"success, this will overwrite the reviewer "
"option."))
- parser.add_argument("--svn",
- help=("Optional full svn checkout for the commit."
- "The folder needs to be the svn root."))
- parser.add_argument("--svn-config",
- help=("Optional folder used as svn --config-dir."))
parser.add_argument("-s", "--step",
help="Specify the step where to start work. Default: 0.",
default=0, type=int)
+ parser.add_argument("--work-dir",
+ help=("Location where to bootstrap a working v8 "
+ "checkout."))
self._PrepareOptions(parser)
if args is None: # pragma: no cover
@@ -662,10 +760,6 @@
print "To determine the current sheriff, requires the googler mapping"
parser.print_help()
return None
- if options.svn and not options.svn_config:
- print "Using pure svn for committing requires also --svn-config"
- parser.print_help()
- return None
# Defaults for options, common to all scripts.
options.manual = getattr(options, "manual", True)
@@ -682,6 +776,9 @@
if not self._ProcessOptions(options):
parser.print_help()
return None
+
+ if not options.work_dir:
+ options.work_dir = "/tmp/v8-release-scripts-work-dir"
return options
def RunSteps(self, step_classes, args=None):
@@ -694,7 +791,7 @@
os.remove(state_file)
steps = []
- for (number, step_class) in enumerate(step_classes):
+ for (number, step_class) in enumerate([BootstrapStep] + step_classes):
steps.append(MakeStep(step_class, number, self._state, self._config,
options, self._side_effect_handler))
for step in steps[options.step:]:
diff --git a/tools/push-to-trunk/generate_version.py b/tools/push-to-trunk/generate_version.py
new file mode 100755
index 0000000..b4a0221
--- /dev/null
+++ b/tools/push-to-trunk/generate_version.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# Copyright 2014 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Script to set v8's version file to the version given by the latest tag.
+"""
+
+
+import os
+import re
+import subprocess
+import sys
+
+
+CWD = os.path.abspath(
+ os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
+VERSION_CC = os.path.join(CWD, "src", "version.cc")
+
+def main():
+ tag = subprocess.check_output(
+ "git describe --tags",
+ shell=True,
+ cwd=CWD,
+ ).strip()
+ assert tag
+
+ # Check for commits not exactly matching a tag. Those are candidate builds
+ # for the next version. The output has the form
+ # <tag name>-<n commits>-<hash>.
+ if "-" in tag:
+ version = tag.split("-")[0]
+ candidate = "1"
+ else:
+ version = tag
+ candidate = "0"
+ version_levels = version.split(".")
+
+ # Set default patch level if none is given.
+ if len(version_levels) == 3:
+ version_levels.append("0")
+ assert len(version_levels) == 4
+
+ major, minor, build, patch = version_levels
+
+ # Increment build level for candidate builds.
+ if candidate == "1":
+ build = str(int(build) + 1)
+ patch = "0"
+
+ # Modify version.cc with the new values.
+ with open(VERSION_CC, "r") as f:
+ text = f.read()
+ output = []
+ for line in text.split("\n"):
+ for definition, substitute in (
+ ("MAJOR_VERSION", major),
+ ("MINOR_VERSION", minor),
+ ("BUILD_NUMBER", build),
+ ("PATCH_LEVEL", patch),
+ ("IS_CANDIDATE_VERSION", candidate)):
+ if line.startswith("#define %s" % definition):
+ line = re.sub("\d+$", substitute, line)
+ output.append(line)
+ with open(VERSION_CC, "w") as f:
+ f.write("\n".join(output))
+
+ # Log what was done.
+ candidate_txt = " (candidate)" if candidate == "1" else ""
+ patch_txt = ".%s" % patch if patch != "0" else ""
+ version_txt = ("%s.%s.%s%s%s" %
+ (major, minor, build, patch_txt, candidate_txt))
+ print "Modified version.cc. Set V8 version to %s" % version_txt
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/tools/push-to-trunk/git_recipes.py b/tools/push-to-trunk/git_recipes.py
index 0f8fcef..3d2a9ef 100644
--- a/tools/push-to-trunk/git_recipes.py
+++ b/tools/push-to-trunk/git_recipes.py
@@ -45,7 +45,7 @@
# e.g., git-svn-id: https://v8.googlecode.com/svn/trunk@23117
# ce2b1a6d-e550-0410-aec6-3dcde31c8c00
-GIT_SVN_ID_RE = re.compile(r'((?:\w+)://[^@]+)@(\d+)\s+(?:[a-zA-Z0-9\-]+)')
+GIT_SVN_ID_RE = re.compile(r'[^@]+@(\d+)\s+(?:[a-zA-Z0-9\-]+)')
# Copied from bot_update.py.
@@ -80,7 +80,11 @@
def Strip(f):
def new_f(*args, **kwargs):
- return f(*args, **kwargs).strip()
+ result = f(*args, **kwargs)
+ if result is None:
+ return result
+ else:
+ return result.strip()
return new_f
@@ -101,9 +105,10 @@
def GitBranch(self, **kwargs):
return self.Git("branch", **kwargs)
- def GitCreateBranch(self, name, branch="", **kwargs):
+ def GitCreateBranch(self, name, remote="", **kwargs):
assert name
- self.Git(MakeArgs(["checkout -b", name, branch]), **kwargs)
+ remote_args = ["--upstream", remote] if remote else []
+ self.Git(MakeArgs(["new-branch", name] + remote_args), **kwargs)
def GitDeleteBranch(self, name, **kwargs):
assert name
@@ -194,7 +199,7 @@
self.Git(MakeArgs(args), **kwargs)
def GitUpload(self, reviewer="", author="", force=False, cq=False,
- bypass_hooks=False, **kwargs):
+ bypass_hooks=False, cc="", **kwargs):
args = ["cl upload --send-mail"]
if author:
args += ["--email", Quoted(author)]
@@ -206,6 +211,8 @@
args.append("--use-commit-queue")
if bypass_hooks:
args.append("--bypass-hooks")
+ if cc:
+ args += ["--cc", Quoted(cc)]
# TODO(machenbach): Check output in forced mode. Verify that all required
# base files were uploaded, if not retry.
self.Git(MakeArgs(args), pipe=False, **kwargs)
@@ -224,9 +231,9 @@
def GitPresubmit(self, **kwargs):
self.Git("cl presubmit", "PRESUBMIT_TREE_CHECK=\"skip\"", **kwargs)
- def GitDCommit(self, **kwargs):
+ def GitCLLand(self, **kwargs):
self.Git(
- "cl dcommit -f --bypass-hooks", retry_on=lambda x: x is None, **kwargs)
+ "cl land -f --bypass-hooks", retry_on=lambda x: x is None, **kwargs)
def GitDiff(self, loc1, loc2, **kwargs):
return self.Git(MakeArgs(["diff", loc1, loc2]), **kwargs)
@@ -237,17 +244,6 @@
def GitFetchOrigin(self, **kwargs):
self.Git("fetch origin", **kwargs)
- def GitConvertToSVNRevision(self, git_hash, **kwargs):
- result = self.Git(MakeArgs(["rev-list", "-n", "1", git_hash]), **kwargs)
- if not result or not SHA1_RE.match(result):
- raise GitFailedException("Git hash %s is unknown." % git_hash)
- log = self.GitLog(n=1, format="%B", git_hash=git_hash, **kwargs)
- for line in reversed(log.splitlines()):
- match = ROLL_DEPS_GIT_SVN_ID_RE.match(line.strip())
- if match:
- return match.group(1)
- raise GitFailedException("Couldn't convert %s to SVN." % git_hash)
-
@Strip
# Copied from bot_update.py and modified for svn-like numbers only.
def GetCommitPositionNumber(self, git_hash, **kwargs):
@@ -274,36 +270,6 @@
if value:
match = GIT_SVN_ID_RE.match(value)
if match:
- return match.group(2)
- return None
-
- ### Git svn stuff
-
- def GitSVNFetch(self, **kwargs):
- self.Git("svn fetch", **kwargs)
-
- def GitSVNRebase(self, **kwargs):
- self.Git("svn rebase", **kwargs)
-
- # TODO(machenbach): Unused? Remove.
- @Strip
- def GitSVNLog(self, **kwargs):
- return self.Git("svn log -1 --oneline", **kwargs)
-
- @Strip
- def GitSVNFindGitHash(self, revision, branch="", **kwargs):
- assert revision
- return self.Git(
- MakeArgs(["svn find-rev", "r%s" % revision, branch]), **kwargs)
-
- @Strip
- def GitSVNFindSVNRev(self, git_hash, branch="", **kwargs):
- return self.Git(MakeArgs(["svn find-rev", git_hash, branch]), **kwargs)
-
- def GitSVNDCommit(self, **kwargs):
- return self.Git("svn dcommit 2>&1", retry_on=lambda x: x is None, **kwargs)
-
- def GitSVNTag(self, version, **kwargs):
- self.Git(("svn tag %s -m \"Tagging version %s\"" % (version, version)),
- retry_on=lambda x: x is None,
- **kwargs)
+ return match.group(1)
+ raise GitFailedException("Couldn't determine commit position for %s" %
+ git_hash)
diff --git a/tools/push-to-trunk/merge_to_branch.py b/tools/push-to-trunk/merge_to_branch.py
index 3fd3450..9e7f1fb 100755
--- a/tools/push-to-trunk/merge_to_branch.py
+++ b/tools/push-to-trunk/merge_to_branch.py
@@ -32,6 +32,9 @@
from common_includes import *
+def IsSvnNumber(rev):
+ return rev.isdigit() and len(rev) < 8
+
class Preparation(Step):
MESSAGE = "Preparation."
@@ -45,6 +48,7 @@
self.InitialEnvironmentChecks(self.default_cwd)
if self._options.revert_bleeding_edge:
+ # FIXME(machenbach): Make revert bleeding_edge obsolete?
self["merge_to_branch"] = "bleeding_edge"
elif self._options.branch:
self["merge_to_branch"] = self._options.branch
@@ -60,7 +64,7 @@
def RunStep(self):
self.GitCreateBranch(self.Config("BRANCHNAME"),
- "svn/%s" % self["merge_to_branch"])
+ self.vc.RemoteBranch(self["merge_to_branch"]))
class SearchArchitecturePorts(Step):
@@ -71,24 +75,21 @@
self._options.revisions))
port_revision_list = []
for revision in self["full_revision_list"]:
- # Search for commits which matches the "Port rXXX" pattern.
+      # Search for commits which match the "Port XXX" pattern.
git_hashes = self.GitLog(reverse=True, format="%H",
- grep="Port r%d" % int(revision),
- branch="svn/bleeding_edge")
+ grep="Port %s" % revision,
+ branch=self.vc.RemoteMasterBranch())
for git_hash in git_hashes.splitlines():
- svn_revision = self.GitSVNFindSVNRev(git_hash, "svn/bleeding_edge")
- if not svn_revision: # pragma: no cover
- self.Die("Cannot determine svn revision for %s" % git_hash)
revision_title = self.GitLog(n=1, format="%s", git_hash=git_hash)
# Is this revision included in the original revision list?
- if svn_revision in self["full_revision_list"]:
- print("Found port of r%s -> r%s (already included): %s"
- % (revision, svn_revision, revision_title))
+ if git_hash in self["full_revision_list"]:
+ print("Found port of %s -> %s (already included): %s"
+ % (revision, git_hash, revision_title))
else:
- print("Found port of r%s -> r%s: %s"
- % (revision, svn_revision, revision_title))
- port_revision_list.append(svn_revision)
+ print("Found port of %s -> %s: %s"
+ % (revision, git_hash, revision_title))
+ port_revision_list.append(git_hash)
# Do we find any port?
if len(port_revision_list) > 0:
@@ -98,16 +99,10 @@
self["full_revision_list"].extend(port_revision_list)
-class FindGitRevisions(Step):
- MESSAGE = "Find the git revisions associated with the patches."
+class CreateCommitMessage(Step):
+ MESSAGE = "Create commit message."
def RunStep(self):
- self["patch_commit_hashes"] = []
- for revision in self["full_revision_list"]:
- next_hash = self.GitSVNFindGitHash(revision, "svn/bleeding_edge")
- if not next_hash: # pragma: no cover
- self.Die("Cannot determine git hash for r%s" % revision)
- self["patch_commit_hashes"].append(next_hash)
# Stringify: [123, 234] -> "r123, r234"
self["revision_list"] = ", ".join(map(lambda s: "r%s" % s,
@@ -116,29 +111,38 @@
if not self["revision_list"]: # pragma: no cover
self.Die("Revision list is empty.")
- # The commit message title is added below after the version is specified.
- self["new_commit_msg"] = ""
+ if self._options.revert and not self._options.revert_bleeding_edge:
+ action_text = "Rollback of %s"
+ else:
+ action_text = "Merged %s"
- for commit_hash in self["patch_commit_hashes"]:
+ # The commit message title is added below after the version is specified.
+ msg_pieces = [
+ "\n".join(action_text % s for s in self["full_revision_list"]),
+ ]
+ msg_pieces.append("\n\n")
+
+ for commit_hash in self["full_revision_list"]:
patch_merge_desc = self.GitLog(n=1, format="%s", git_hash=commit_hash)
- self["new_commit_msg"] += "%s\n\n" % patch_merge_desc
+ msg_pieces.append("%s\n\n" % patch_merge_desc)
bugs = []
- for commit_hash in self["patch_commit_hashes"]:
+ for commit_hash in self["full_revision_list"]:
msg = self.GitLog(n=1, git_hash=commit_hash)
- for bug in re.findall(r"^[ \t]*BUG[ \t]*=[ \t]*(.*?)[ \t]*$", msg,
- re.M):
- bugs.extend(map(lambda s: s.strip(), bug.split(",")))
+ for bug in re.findall(r"^[ \t]*BUG[ \t]*=[ \t]*(.*?)[ \t]*$", msg, re.M):
+ bugs.extend(s.strip() for s in bug.split(","))
bug_aggregate = ",".join(sorted(filter(lambda s: s and s != "none", bugs)))
if bug_aggregate:
- self["new_commit_msg"] += "BUG=%s\nLOG=N\n" % bug_aggregate
+ msg_pieces.append("BUG=%s\nLOG=N\n" % bug_aggregate)
+
+ self["new_commit_msg"] = "".join(msg_pieces)
class ApplyPatches(Step):
MESSAGE = "Apply patches for selected revisions."
def RunStep(self):
- for commit_hash in self["patch_commit_hashes"]:
+ for commit_hash in self["full_revision_list"]:
print("Applying patch for %s to %s..."
% (commit_hash, self["merge_to_branch"]))
patch = self.GitGetPatch(commit_hash)
@@ -188,16 +192,14 @@
def RunStep(self):
# Add a commit message title.
- if self._options.revert:
- if not self._options.revert_bleeding_edge:
- title = ("Version %s (rollback of %s)"
- % (self["version"], self["revision_list"]))
- else:
- title = "Revert %s." % self["revision_list"]
+ if self._options.revert and self._options.revert_bleeding_edge:
+ # TODO(machenbach): Find a better convention if multiple patches are
+ # reverted in one CL.
+ self["commit_title"] = "Revert on master"
else:
- title = ("Version %s (merged %s)"
- % (self["version"], self["revision_list"]))
- self["new_commit_msg"] = "%s\n\n%s" % (title, self["new_commit_msg"])
+ self["commit_title"] = "Version %s (cherry-pick)" % self["version"]
+ self["new_commit_msg"] = "%s\n\n%s" % (self["commit_title"],
+ self["new_commit_msg"])
TextToFile(self["new_commit_msg"], self.Config("COMMITMSG_FILE"))
self.GitCommit(file_name=self.Config("COMMITMSG_FILE"))
@@ -209,22 +211,7 @@
self.GitCheckout(self.Config("BRANCHNAME"))
self.WaitForLGTM()
self.GitPresubmit()
- self.GitDCommit()
-
-
-class PrepareSVN(Step):
- MESSAGE = "Determine svn commit revision."
-
- def RunStep(self):
- if self._options.revert_bleeding_edge:
- return
- self.GitSVNFetch()
- commit_hash = self.GitLog(n=1, format="%H", grep=self["new_commit_msg"],
- branch="svn/%s" % self["merge_to_branch"])
- if not commit_hash: # pragma: no cover
- self.Die("Unable to map git commit to svn revision.")
- self["svn_revision"] = self.GitSVNFindSVNRev(commit_hash)
- print "subversion revision number is r%s" % self["svn_revision"]
+ self.vc.CLLand()
class TagRevision(Step):
@@ -233,16 +220,10 @@
def RunStep(self):
if self._options.revert_bleeding_edge:
return
- print "Creating tag svn/tags/%s" % self["version"]
- if self["merge_to_branch"] == "trunk":
- self["to_url"] = "trunk"
- else:
- self["to_url"] = "branches/%s" % self["merge_to_branch"]
- self.SVN("copy -r %s https://v8.googlecode.com/svn/%s "
- "https://v8.googlecode.com/svn/tags/%s -m "
- "\"Tagging version %s\""
- % (self["svn_revision"], self["to_url"],
- self["version"], self["version"]))
+ print "Creating tag %s" % self["version"]
+ self.vc.Tag(self["version"],
+ self.vc.RemoteBranch(self["merge_to_branch"]),
+ self["commit_title"])
class CleanUp(Step):
@@ -253,8 +234,7 @@
if not self._options.revert_bleeding_edge:
print "*** SUMMARY ***"
print "version: %s" % self["version"]
- print "branch: %s" % self["to_url"]
- print "svn revision: %s" % self["svn_revision"]
+ print "branch: %s" % self["merge_to_branch"]
if self["revision_list"]:
print "patches: %s" % self["revision_list"]
@@ -293,6 +273,16 @@
print "You must specify a merge comment if no patches are specified"
return False
options.bypass_upload_hooks = True
+ # CC ulan to make sure that fixes are merged to Google3.
+ options.cc = "ulan@chromium.org"
+
+ # Make sure to use git hashes in the new workflows.
+ for revision in options.revisions:
+ if (IsSvnNumber(revision) or
+ (revision[0:1] == "r" and IsSvnNumber(revision[1:]))):
+ print "Please provide full git hashes of the patches to merge."
+ print "Got: %s" % revision
+ return False
return True
def _Config(self):
@@ -310,14 +300,13 @@
Preparation,
CreateBranch,
SearchArchitecturePorts,
- FindGitRevisions,
+ CreateCommitMessage,
ApplyPatches,
PrepareVersion,
IncrementVersion,
CommitLocal,
UploadStep,
CommitRepository,
- PrepareSVN,
TagRevision,
CleanUp,
]
diff --git a/tools/push-to-trunk/push_to_trunk.py b/tools/push-to-trunk/push_to_trunk.py
index 8a9629e..6e821f2 100755
--- a/tools/push-to-trunk/push_to_trunk.py
+++ b/tools/push-to-trunk/push_to_trunk.py
@@ -34,8 +34,8 @@
from common_includes import *
-PUSH_MESSAGE_SUFFIX = " (based on bleeding_edge revision r%d)"
-PUSH_MESSAGE_RE = re.compile(r".* \(based on bleeding_edge revision r(\d+)\)$")
+PUSH_MSG_GIT_SUFFIX = " (based on %s)"
+PUSH_MSG_GIT_RE = re.compile(r".* \(based on (?P<git_rev>[a-fA-F0-9]+)\)$")
class Preparation(Step):
MESSAGE = "Preparation."
@@ -56,7 +56,8 @@
MESSAGE = "Create a fresh branch."
def RunStep(self):
- self.GitCreateBranch(self.Config("BRANCHNAME"), "svn/bleeding_edge")
+ self.GitCreateBranch(self.Config("BRANCHNAME"),
+ self.vc.RemoteMasterBranch())
class PreparePushRevision(Step):
@@ -64,7 +65,7 @@
def RunStep(self):
if self._options.revision:
- self["push_hash"] = self.GitSVNFindGitHash(self._options.revision)
+ self["push_hash"] = self._options.revision
else:
self["push_hash"] = self.GitLog(n=1, format="%H", git_hash="HEAD")
if not self["push_hash"]: # pragma: no cover
@@ -91,16 +92,14 @@
# Retrieve the bleeding edge revision of the last push from the text in
# the push commit message.
last_push_title = self.GitLog(n=1, format="%s", git_hash=last_push)
- last_push_be_svn = PUSH_MESSAGE_RE.match(last_push_title).group(1)
- if not last_push_be_svn: # pragma: no cover
- self.Die("Could not retrieve bleeding edge revision for trunk push %s"
- % last_push)
- last_push_bleeding_edge = self.GitSVNFindGitHash(last_push_be_svn)
+ last_push_bleeding_edge = PUSH_MSG_GIT_RE.match(
+ last_push_title).group("git_rev")
+
if not last_push_bleeding_edge: # pragma: no cover
self.Die("Could not retrieve bleeding edge git hash for trunk push %s"
% last_push)
- # This points to the svn revision of the last push on trunk.
+ # This points to the git hash of the last push on trunk.
self["last_push_trunk"] = last_push
# This points to the last bleeding_edge revision that went into the last
# push.
@@ -116,7 +115,7 @@
MESSAGE = "Get latest bleeding edge version."
def RunStep(self):
- self.GitCheckoutFile(VERSION_FILE, "svn/bleeding_edge")
+ self.GitCheckoutFile(VERSION_FILE, self.vc.RemoteMasterBranch())
# Store latest version.
self.ReadAndPersistVersion("latest_")
@@ -140,7 +139,7 @@
if SortingKey(self["trunk_version"]) < SortingKey(self["latest_version"]):
# If the version on bleeding_edge is newer than on trunk, use it.
- self.GitCheckoutFile(VERSION_FILE, "svn/bleeding_edge")
+ self.GitCheckoutFile(VERSION_FILE, self.vc.RemoteMasterBranch())
self.ReadAndPersistVersion()
if self.Confirm(("Automatically increment BUILD_NUMBER? (Saying 'n' will "
@@ -249,8 +248,8 @@
"started.")
def RunStep(self):
- self.GitSVNFetch()
- self.GitCheckout("svn/bleeding_edge")
+ self.vc.Fetch()
+ self.GitCheckout(self.vc.RemoteMasterBranch())
class SquashCommits(Step):
@@ -259,7 +258,8 @@
def RunStep(self):
# Instead of relying on "git rebase -i", we'll just create a diff, because
# that's easier to automate.
- TextToFile(self.GitDiff("svn/trunk", self["push_hash"]),
+ TextToFile(self.GitDiff(self.vc.RemoteCandidateBranch(),
+ self["push_hash"]),
self.Config("PATCH_FILE"))
# Convert the ChangeLog entry to commit message format.
@@ -268,10 +268,8 @@
# Remove date and trailing white space.
text = re.sub(r"^%s: " % self["date"], "", text.rstrip())
- # Retrieve svn revision for showing the used bleeding edge revision in the
- # commit message.
- self["svn_revision"] = self.GitSVNFindSVNRev(self["push_hash"])
- suffix = PUSH_MESSAGE_SUFFIX % int(self["svn_revision"])
+ # Show the used master hash in the commit message.
+ suffix = PUSH_MSG_GIT_SUFFIX % self["push_hash"]
text = MSub(r"^(Version \d+\.\d+\.\d+)$", "\\1%s" % suffix, text)
# Remove indentation and merge paragraphs into single long lines, keeping
@@ -283,6 +281,7 @@
if not text: # pragma: no cover
self.Die("Commit message editing failed.")
+ self["commit_title"] = text.splitlines()[0]
TextToFile(text, self.Config("COMMITMSG_FILE"))
@@ -290,7 +289,8 @@
MESSAGE = "Create a new branch from trunk."
def RunStep(self):
- self.GitCreateBranch(self.Config("TRUNKBRANCH"), "svn/trunk")
+ self.GitCreateBranch(self.Config("TRUNKBRANCH"),
+ self.vc.RemoteCandidateBranch())
class ApplyChanges(Step):
@@ -308,11 +308,11 @@
# The change log has been modified by the patch. Reset it to the version
# on trunk and apply the exact changes determined by this PrepareChangeLog
# step above.
- self.GitCheckoutFile(self.Config("CHANGELOG_FILE"), "svn/trunk")
+ self.GitCheckoutFile(CHANGELOG_FILE, self.vc.RemoteCandidateBranch())
changelog_entry = FileToText(self.Config("CHANGELOG_ENTRY_FILE"))
- old_change_log = FileToText(self.Config("CHANGELOG_FILE"))
+ old_change_log = FileToText(os.path.join(self.default_cwd, CHANGELOG_FILE))
new_change_log = "%s\n\n\n%s" % (changelog_entry, old_change_log)
- TextToFile(new_change_log, self.Config("CHANGELOG_FILE"))
+ TextToFile(new_change_log, os.path.join(self.default_cwd, CHANGELOG_FILE))
os.remove(self.Config("CHANGELOG_ENTRY_FILE"))
@@ -322,7 +322,7 @@
def RunStep(self):
# The version file has been modified by the patch. Reset it to the version
# on trunk and apply the correct version.
- self.GitCheckoutFile(VERSION_FILE, "svn/trunk")
+ self.GitCheckoutFile(VERSION_FILE, self.vc.RemoteCandidateBranch())
self.SetVersion(os.path.join(self.default_cwd, VERSION_FILE), "new_")
@@ -346,35 +346,19 @@
self.Die("Execution canceled.") # pragma: no cover
-class CommitSVN(Step):
- MESSAGE = "Commit to SVN."
+class Land(Step):
+ MESSAGE = "Land the patch."
def RunStep(self):
- result = self.GitSVNDCommit()
- if not result: # pragma: no cover
- self.Die("'git svn dcommit' failed.")
- result = filter(lambda x: re.search(r"^Committed r[0-9]+", x),
- result.splitlines())
- if len(result) > 0:
- self["trunk_revision"] = re.sub(r"^Committed r([0-9]+)", r"\1",result[0])
-
- # Sometimes grepping for the revision fails. No idea why. If you figure
- # out why it is flaky, please do fix it properly.
- if not self["trunk_revision"]:
- print("Sorry, grepping for the SVN revision failed. Please look for it "
- "in the last command's output above and provide it manually (just "
- "the number, without the leading \"r\").")
- self.DieNoManualMode("Can't prompt in forced mode.")
- while not self["trunk_revision"]:
- print "> ",
- self["trunk_revision"] = self.ReadLine()
+ self.vc.CLLand()
class TagRevision(Step):
MESSAGE = "Tag the new revision."
def RunStep(self):
- self.GitSVNTag(self["version"])
+ self.vc.Tag(
+ self["version"], self.vc.RemoteCandidateBranch(), self["commit_title"])
class CleanUp(Step):
@@ -382,10 +366,8 @@
def RunStep(self):
print("Congratulations, you have successfully created the trunk "
- "revision %s. Please don't forget to roll this new version into "
- "Chromium, and to update the v8rel spreadsheet:"
+ "revision %s."
% self["version"])
- print "%s\ttrunk\t%s" % (self["version"], self["trunk_revision"])
self.CommonCleanup()
if self.Config("TRUNKBRANCH") != self["current_branch"]:
@@ -408,7 +390,7 @@
parser.add_argument("-l", "--last-push",
help="The git commit ID of the last push to trunk.")
parser.add_argument("-R", "--revision",
- help="The svn revision to push (defaults to HEAD).")
+ help="The git commit ID to push (defaults to HEAD).")
def _ProcessOptions(self, options): # pragma: no cover
if not options.manual and not options.reviewer:
@@ -417,10 +399,6 @@
if not options.manual and not options.author:
print "Specify your chromium.org email with -a in (semi-)automatic mode."
return False
- if options.revision and not int(options.revision) > 0:
- print("The --revision flag must be a positiv integer pointing to a "
- "valid svn revision.")
- return False
options.tbr_commit = not options.manual
return True
@@ -430,7 +408,6 @@
"BRANCHNAME": "prepare-push",
"TRUNKBRANCH": "trunk-push",
"PERSISTFILE_BASENAME": "/tmp/v8-push-to-trunk-tempfile",
- "CHANGELOG_FILE": "ChangeLog",
"CHANGELOG_ENTRY_FILE": "/tmp/v8-push-to-trunk-tempfile-changelog-entry",
"PATCH_FILE": "/tmp/v8-push-to-trunk-tempfile-patch-file",
"COMMITMSG_FILE": "/tmp/v8-push-to-trunk-tempfile-commitmsg",
@@ -454,7 +431,7 @@
SetVersion,
CommitTrunk,
SanityCheck,
- CommitSVN,
+ Land,
TagRevision,
CleanUp,
]
diff --git a/tools/push-to-trunk/releases.py b/tools/push-to-trunk/releases.py
index 1d26198..1a5b15c 100755
--- a/tools/push-to-trunk/releases.py
+++ b/tools/push-to-trunk/releases.py
@@ -26,16 +26,25 @@
}
# Expression for retrieving the bleeding edge revision from a commit message.
-PUSH_MESSAGE_RE = re.compile(r".* \(based on bleeding_edge revision r(\d+)\)$")
+PUSH_MSG_SVN_RE = re.compile(r".* \(based on bleeding_edge revision r(\d+)\)$")
+PUSH_MSG_GIT_RE = re.compile(r".* \(based on ([a-fA-F0-9]+)\)$")
# Expression for retrieving the merged patches from a merge commit message
# (old and new format).
MERGE_MESSAGE_RE = re.compile(r"^.*[M|m]erged (.+)(\)| into).*$", re.M)
+CHERRY_PICK_TITLE_GIT_RE = re.compile(r"^.* \(cherry\-pick\)\.?$")
+
+# New git message for cherry-picked CLs. One message per line.
+MERGE_MESSAGE_GIT_RE = re.compile(r"^Merged ([a-fA-F0-9]+)\.?$")
+
# Expression for retrieving reverted patches from a commit message (old and
# new format).
ROLLBACK_MESSAGE_RE = re.compile(r"^.*[R|r]ollback of (.+)(\)| in).*$", re.M)
+# New git message for reverted CLs. One message per line.
+ROLLBACK_MESSAGE_GIT_RE = re.compile(r"^Rollback of ([a-fA-F0-9]+)\.?$")
+
# Expression for retrieving the code review link.
REVIEW_LINK_RE = re.compile(r"^Review URL: (.+)$", re.M)
@@ -127,8 +136,8 @@
return (self._options.max_releases > 0
and len(releases) > self._options.max_releases)
- def GetBleedingEdgeFromPush(self, title):
- return MatchSafe(PUSH_MESSAGE_RE.match(title))
+ def GetBleedingEdgeGitFromPush(self, title):
+ return MatchSafe(PUSH_MSG_GIT_RE.match(title))
def GetMergedPatches(self, body):
patches = MatchSafe(MERGE_MESSAGE_RE.search(body))
@@ -139,14 +148,31 @@
patches = "-%s" % patches
return patches
+ def GetMergedPatchesGit(self, body):
+ patches = []
+ for line in body.splitlines():
+ patch = MatchSafe(MERGE_MESSAGE_GIT_RE.match(line))
+ if patch:
+ patches.append(patch)
+ patch = MatchSafe(ROLLBACK_MESSAGE_GIT_RE.match(line))
+ if patch:
+ patches.append("-%s" % patch)
+ return ", ".join(patches)
+
+
def GetReleaseDict(
- self, git_hash, bleeding_edge_rev, branch, version, patches, cl_body):
- revision = self.GitSVNFindSVNRev(git_hash)
+ self, git_hash, bleeding_edge_rev, bleeding_edge_git, branch, version,
+ patches, cl_body):
+ revision = self.GetCommitPositionNumber(git_hash)
return {
- # The SVN revision on the branch.
+ # The cr commit position number on the branch.
"revision": revision,
- # The SVN revision on bleeding edge (only for newer trunk pushes).
+ # The git revision on the branch.
+ "revision_git": git_hash,
+ # The cr commit position number on master.
"bleeding_edge": bleeding_edge_rev,
+ # The same for git.
+ "bleeding_edge_git": bleeding_edge_git,
# The branch name.
"branch": branch,
# The version for displaying in the form 3.26.3 or 3.26.3.12.
@@ -176,29 +202,40 @@
patches = ""
if self["patch"] != "0":
version += ".%s" % self["patch"]
- patches = self.GetMergedPatches(body)
+ if CHERRY_PICK_TITLE_GIT_RE.match(body.splitlines()[0]):
+ patches = self.GetMergedPatchesGit(body)
+ else:
+ patches = self.GetMergedPatches(body)
title = self.GitLog(n=1, format="%s", git_hash=git_hash)
+ bleeding_edge_git = self.GetBleedingEdgeGitFromPush(title)
+ bleeding_edge_position = ""
+ if bleeding_edge_git:
+ bleeding_edge_position = self.GetCommitPositionNumber(bleeding_edge_git)
+ # TODO(machenbach): Add the commit position number.
return self.GetReleaseDict(
- git_hash, self.GetBleedingEdgeFromPush(title), branch, version,
+ git_hash, bleeding_edge_position, bleeding_edge_git, branch, version,
patches, body), self["patch"]
- def GetReleasesFromBleedingEdge(self):
- tag_text = self.SVN("log https://v8.googlecode.com/svn/tags -v --limit 20")
- releases = []
- for (tag, revision) in re.findall(BLEEDING_EDGE_TAGS_RE, tag_text):
- git_hash = self.GitSVNFindGitHash(revision)
+ def GetReleasesFromMaster(self):
+ # TODO(machenbach): Implement this in git as soon as we tag again on
+ # master.
+ # tag_text = self.SVN("log https://v8.googlecode.com/svn/tags -v
+ # --limit 20")
+ # releases = []
+ # for (tag, revision) in re.findall(BLEEDING_EDGE_TAGS_RE, tag_text):
+ # git_hash = self.vc.SvnGit(revision)
# Add bleeding edge release. It does not contain patches or a code
# review link, as tags are not uploaded.
- releases.append(self.GetReleaseDict(
- git_hash, revision, "bleeding_edge", tag, "", ""))
- return releases
+ # releases.append(self.GetReleaseDict(
+ # git_hash, revision, git_hash, self.vc.MasterBranch(), tag, "", ""))
+ return []
def GetReleasesFromBranch(self, branch):
- self.GitReset("svn/%s" % branch)
- if branch == 'bleeding_edge':
- return self.GetReleasesFromBleedingEdge()
+ self.GitReset(self.vc.RemoteBranch(branch))
+ if branch == self.vc.MasterBranch():
+ return self.GetReleasesFromMaster()
releases = []
try:
@@ -217,7 +254,7 @@
# TODO(machenbach): This omits patches if the version file wasn't
# manipulated correctly. Find a better way to detect the point where
# the parent of the branch head leads to the trunk branch.
- if branch != "trunk" and patch_level == "0":
+ if branch != self.vc.CandidateBranch() and patch_level == "0":
break
# Allow Ctrl-C interrupt.
@@ -230,12 +267,7 @@
def RunStep(self):
self.GitCreateBranch(self._config["BRANCHNAME"])
- # Get relevant remote branches, e.g. "svn/3.25".
- branches = filter(lambda s: re.match(r"^svn/\d+\.\d+$", s),
- self.GitRemotes())
- # Remove 'svn/' prefix.
- branches = map(lambda s: s[4:], branches)
-
+ branches = self.vc.GetBranches()
releases = []
if self._options.branch == 'recent':
# Get only recent development on trunk, beta and stable.
@@ -244,17 +276,18 @@
beta, stable = SortBranches(branches)[0:2]
releases += self.GetReleasesFromBranch(stable)
releases += self.GetReleasesFromBranch(beta)
- releases += self.GetReleasesFromBranch("trunk")
- releases += self.GetReleasesFromBranch("bleeding_edge")
+ releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
+ releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
elif self._options.branch == 'all': # pragma: no cover
# Retrieve the full release history.
for branch in branches:
releases += self.GetReleasesFromBranch(branch)
- releases += self.GetReleasesFromBranch("trunk")
- releases += self.GetReleasesFromBranch("bleeding_edge")
+ releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
+ releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
else: # pragma: no cover
# Retrieve history for a specified branch.
- assert self._options.branch in branches + ["trunk", "bleeding_edge"]
+ assert self._options.branch in (branches +
+ [self.vc.CandidateBranch(), self.vc.MasterBranch()])
releases += self.GetReleasesFromBranch(self._options.branch)
self["releases"] = sorted(releases,
@@ -289,7 +322,7 @@
# Simple check for git hashes.
if revision.isdigit() and len(revision) < 8:
return revision
- return step.GitConvertToSVNRevision(
+ return step.GetCommitPositionNumber(
revision, cwd=os.path.join(step._options.chromium, "v8"))
@@ -299,7 +332,9 @@
def RunStep(self):
cwd = self._options.chromium
releases = filter(
- lambda r: r["branch"] in ["trunk", "bleeding_edge"], self["releases"])
+ lambda r: r["branch"] in [self.vc.CandidateBranch(),
+ self.vc.MasterBranch()],
+ self["releases"])
if not releases: # pragma: no cover
print "No releases detected. Skipping chromium history."
return True
@@ -351,7 +386,8 @@
def RunStep(self):
cwd = self._options.chromium
- trunk_releases = filter(lambda r: r["branch"] == "trunk", self["releases"])
+ trunk_releases = filter(lambda r: r["branch"] == self.vc.CandidateBranch(),
+ self["releases"])
if not trunk_releases: # pragma: no cover
print "No trunk releases detected. Skipping chromium history."
return True
diff --git a/tools/push-to-trunk/test_scripts.py b/tools/push-to-trunk/test_scripts.py
index b0d1c58..db702a3 100644
--- a/tools/push-to-trunk/test_scripts.py
+++ b/tools/push-to-trunk/test_scripts.py
@@ -57,7 +57,6 @@
"BRANCHNAME": "test-prepare-push",
"TRUNKBRANCH": "test-trunk-push",
"PERSISTFILE_BASENAME": "/tmp/test-v8-push-to-trunk-tempfile",
- "CHANGELOG_FILE": None,
"CHANGELOG_ENTRY_FILE": "/tmp/test-v8-push-to-trunk-tempfile-changelog-entry",
"PATCH_FILE": "/tmp/test-v8-push-to-trunk-tempfile-patch",
"COMMITMSG_FILE": "/tmp/test-v8-push-to-trunk-tempfile-commitmsg",
@@ -446,7 +445,7 @@
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git status -s -b -uno", "## some_branch"),
- Cmd("git svn fetch", ""),
+ Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* %s" % TEST_CONFIG["BRANCHNAME"]),
RL("Y"),
Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
@@ -459,7 +458,7 @@
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git status -s -b -uno", "## some_branch"),
- Cmd("git svn fetch", ""),
+ Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* %s" % TEST_CONFIG["BRANCHNAME"]),
RL("n"),
])
@@ -471,7 +470,7 @@
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git status -s -b -uno", "## some_branch"),
- Cmd("git svn fetch", ""),
+ Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* %s" % TEST_CONFIG["BRANCHNAME"]),
RL("Y"),
Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], None),
@@ -488,6 +487,23 @@
])
self.MakeStep().InitialEnvironmentChecks(TEST_CONFIG["DEFAULT_CWD"])
+ def testTagTimeout(self):
+ self.Expect([
+ Cmd("git fetch", ""),
+ Cmd("git log -1 --format=%H --grep=\"Title\" origin/candidates", ""),
+ Cmd("git fetch", ""),
+ Cmd("git log -1 --format=%H --grep=\"Title\" origin/candidates", ""),
+ Cmd("git fetch", ""),
+ Cmd("git log -1 --format=%H --grep=\"Title\" origin/candidates", ""),
+ Cmd("git fetch", ""),
+ Cmd("git log -1 --format=%H --grep=\"Title\" origin/candidates", ""),
+ ])
+ args = ["--branch", "candidates", "ab12345"]
+ self._state["version"] = "tag_name"
+ self._state["commit_title"] = "Title"
+ self.assertRaises(Exception,
+ lambda: self.RunStep(MergeToBranch, TagRevision, args))
+
def testReadAndPersistVersion(self):
self.WriteFakeVersionFile(build=5)
step = self.MakeStep()
@@ -610,7 +626,7 @@
self.Expect([
Cmd("git checkout -f hash1 -- src/version.cc", ""),
- Cmd("git checkout -f svn/bleeding_edge -- src/version.cc",
+ Cmd("git checkout -f origin/master -- src/version.cc",
"", cb=lambda: self.WriteFakeVersionFile(22, 6)),
RL("Y"), # Increment build number.
])
@@ -628,8 +644,7 @@
f.write(change_log)
self.Expect([
- Cmd("git diff svn/trunk hash1", "patch content"),
- Cmd("git svn find-rev hash1", "123455\n"),
+ Cmd("git diff origin/candidates hash1", "patch content"),
])
self._state["push_hash"] = "hash1"
@@ -648,7 +663,7 @@
Chromium issue 12345
Performance and stability improvements on all platforms.\n"""
- commit_msg = """Version 3.22.5 (based on bleeding_edge revision r123455)
+ commit_msg = """Version 3.22.5 (based on hash1)
Log text 1. Chromium issue 12345
@@ -662,7 +677,7 @@
12345).
Performance and stability improvements on all platforms.\n"""
- commit_msg = """Version 3.22.5 (based on bleeding_edge revision r123455)
+ commit_msg = """Version 3.22.5 (based on hash1)
Long commit message that fills more than 80 characters (Chromium issue 12345).
@@ -674,6 +689,21 @@
commit_msg = """Line with "quotation marks"."""
self._TestSquashCommits(change_log, commit_msg)
+ def testBootstrapper(self):
+ work_dir = self.MakeEmptyTempDirectory()
+ class FakeScript(ScriptsBase):
+ def _Steps(self):
+ return []
+
+ # Use the test configuration without the fake testing default work dir.
+ fake_config = dict(TEST_CONFIG)
+ del(fake_config["DEFAULT_CWD"])
+
+ self.Expect([
+ Cmd("fetch v8", "", cwd=work_dir),
+ ])
+ FakeScript(fake_config, self).Run(["--work-dir", work_dir])
+
def _PushToTrunk(self, force=False, manual=False):
TextToFile("", os.path.join(TEST_CONFIG["DEFAULT_CWD"], ".git"))
@@ -682,9 +712,9 @@
self.WriteFakeVersionFile(build=5)
TEST_CONFIG["CHANGELOG_ENTRY_FILE"] = self.MakeEmptyTempFile()
- TEST_CONFIG["CHANGELOG_FILE"] = self.MakeEmptyTempFile()
bleeding_edge_change_log = "2014-03-17: Sentinel\n"
- TextToFile(bleeding_edge_change_log, TEST_CONFIG["CHANGELOG_FILE"])
+ TextToFile(bleeding_edge_change_log,
+ os.path.join(TEST_CONFIG["DEFAULT_CWD"], CHANGELOG_FILE))
os.environ["EDITOR"] = "vi"
def ResetChangeLog():
@@ -693,7 +723,8 @@
trunk_change_log = """1999-04-05: Version 3.22.4
Performance and stability improvements on all platforms.\n"""
- TextToFile(trunk_change_log, TEST_CONFIG["CHANGELOG_FILE"])
+ TextToFile(trunk_change_log,
+ os.path.join(TEST_CONFIG["DEFAULT_CWD"], CHANGELOG_FILE))
def ResetToTrunk():
ResetChangeLog()
@@ -702,7 +733,7 @@
def CheckSVNCommit():
commit = FileToText(TEST_CONFIG["COMMITMSG_FILE"])
self.assertEquals(
-"""Version 3.22.5 (based on bleeding_edge revision r123455)
+"""Version 3.22.5 (based on push_hash)
Log text 1 (issue 321).
@@ -716,7 +747,8 @@
self.assertTrue(re.search(r"#define IS_CANDIDATE_VERSION\s+0", version))
# Check that the change log on the trunk branch got correctly modified.
- change_log = FileToText(TEST_CONFIG["CHANGELOG_FILE"])
+ change_log = FileToText(
+ os.path.join(TEST_CONFIG["DEFAULT_CWD"], CHANGELOG_FILE))
self.assertEquals(
"""1999-07-31: Version 3.22.5
@@ -737,24 +769,23 @@
expectations += [
Cmd("git status -s -uno", ""),
Cmd("git status -s -b -uno", "## some_branch\n"),
- Cmd("git svn fetch", ""),
+ Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* branch2\n"),
Cmd("git branch", " branch1\n* branch2\n"),
- Cmd("git checkout -b %s svn/bleeding_edge" % TEST_CONFIG["BRANCHNAME"],
+ Cmd(("git new-branch %s --upstream origin/master" %
+ TEST_CONFIG["BRANCHNAME"]),
""),
- Cmd("git svn find-rev r123455", "push_hash\n"),
Cmd(("git log -1 --format=%H --grep="
"\"^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]* (based\" "
- "svn/trunk"), "hash2\n"),
+ "origin/candidates"), "hash2\n"),
Cmd("git log -1 hash2", "Log message\n"),
]
if manual:
expectations.append(RL("Y")) # Confirm last push.
expectations += [
Cmd("git log -1 --format=%s hash2",
- "Version 3.4.5 (based on bleeding_edge revision r1234)\n"),
- Cmd("git svn find-rev r1234", "hash3\n"),
- Cmd("git checkout -f svn/bleeding_edge -- src/version.cc",
+ "Version 3.4.5 (based on abc3)\n"),
+ Cmd("git checkout -f origin/master -- src/version.cc",
"", cb=self.WriteFakeVersionFile),
Cmd("git checkout -f hash2 -- src/version.cc", "",
cb=self.WriteFakeVersionFile),
@@ -762,7 +793,7 @@
if manual:
expectations.append(RL("")) # Increment build number.
expectations += [
- Cmd("git log --format=%H hash3..push_hash", "rev1\n"),
+ Cmd("git log --format=%H abc3..push_hash", "rev1\n"),
Cmd("git log -1 --format=%s rev1", "Log text 1.\n"),
Cmd("git log -1 --format=%B rev1", "Text\nLOG=YES\nBUG=v8:321\nText\n"),
Cmd("git log -1 --format=%an rev1", "author1@chromium.org\n"),
@@ -773,16 +804,15 @@
expectations.append(
Cmd("vi %s" % TEST_CONFIG["CHANGELOG_ENTRY_FILE"], ""))
expectations += [
- Cmd("git svn fetch", "fetch result\n"),
- Cmd("git checkout -f svn/bleeding_edge", ""),
- Cmd("git diff svn/trunk push_hash", "patch content\n"),
- Cmd("git svn find-rev push_hash", "123455\n"),
- Cmd("git checkout -b %s svn/trunk" % TEST_CONFIG["TRUNKBRANCH"], "",
- cb=ResetToTrunk),
+ Cmd("git fetch", ""),
+ Cmd("git checkout -f origin/master", ""),
+ Cmd("git diff origin/candidates push_hash", "patch content\n"),
+ Cmd(("git new-branch %s --upstream origin/candidates" %
+ TEST_CONFIG["TRUNKBRANCH"]), "", cb=ResetToTrunk),
Cmd("git apply --index --reject \"%s\"" % TEST_CONFIG["PATCH_FILE"], ""),
- Cmd("git checkout -f svn/trunk -- %s" % TEST_CONFIG["CHANGELOG_FILE"], "",
+ Cmd("git checkout -f origin/candidates -- ChangeLog", "",
cb=ResetChangeLog),
- Cmd("git checkout -f svn/trunk -- src/version.cc", "",
+ Cmd("git checkout -f origin/candidates -- src/version.cc", "",
cb=self.WriteFakeVersionFile),
Cmd("git commit -aF \"%s\"" % TEST_CONFIG["COMMITMSG_FILE"], "",
cb=CheckSVNCommit),
@@ -790,22 +820,26 @@
if manual:
expectations.append(RL("Y")) # Sanity check.
expectations += [
- Cmd("git svn dcommit 2>&1",
- "Some output\nCommitted r123456\nSome output\n"),
- Cmd("git svn tag 3.22.5 -m \"Tagging version 3.22.5\"", ""),
+ Cmd("git cl land -f --bypass-hooks", ""),
+ Cmd("git fetch", ""),
+ Cmd("git log -1 --format=%H --grep="
+ "\"Version 3.22.5 (based on push_hash)\""
+ " origin/candidates", "hsh_to_tag"),
+ Cmd("git tag 3.22.5 hsh_to_tag", ""),
+ Cmd("git push origin 3.22.5", ""),
Cmd("git checkout -f some_branch", ""),
Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
Cmd("git branch -D %s" % TEST_CONFIG["TRUNKBRANCH"], ""),
]
self.Expect(expectations)
- args = ["-a", "author@chromium.org", "--revision", "123455"]
+ args = ["-a", "author@chromium.org", "--revision", "push_hash"]
if force: args.append("-f")
if manual: args.append("-m")
else: args += ["-r", "reviewer@chromium.org"]
PushToTrunk(TEST_CONFIG, self).Run(args)
- cl = FileToText(TEST_CONFIG["CHANGELOG_FILE"])
+ cl = FileToText(os.path.join(TEST_CONFIG["DEFAULT_CWD"], CHANGELOG_FILE))
self.assertTrue(re.search(r"^\d\d\d\d\-\d+\-\d+: Version 3\.22\.5", cl))
self.assertTrue(re.search(r" Log text 1 \(issue 321\).", cl))
self.assertTrue(re.search(r"1999\-04\-05: Version 3\.22\.4", cl))
@@ -867,8 +901,7 @@
Cmd("git fetch origin", ""),
Cmd(("git log -1 --format=%H --grep="
"\"^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]*\" "
- "origin/master"), "push_hash\n"),
- Cmd("git log -1 --format=%B push_hash", self.C_V8_22624_LOG),
+ "origin/candidates"), "push_hash\n"),
Cmd("git log -1 --format=%s push_hash",
"Version 3.22.5 (based on bleeding_edge revision r22622)\n"),
URL("https://chromium-build.appspot.com/p/chromium/sheriff_v8.js",
@@ -878,8 +911,8 @@
Cmd("gclient sync --nohooks", "syncing...", cwd=chrome_dir),
Cmd("git pull", "", cwd=chrome_dir),
Cmd("git fetch origin", ""),
- Cmd("git checkout -b v8-roll-22624", "", cwd=chrome_dir),
- Cmd("roll-dep v8 22624", "rolled", cb=WriteDeps, cwd=chrome_dir),
+ Cmd("git new-branch v8-roll-push_hash", "", cwd=chrome_dir),
+ Cmd("roll-dep v8 push_hash", "rolled", cb=WriteDeps, cwd=chrome_dir),
Cmd(("git commit -am \"Update V8 to version 3.22.5 "
"(based on bleeding_edge revision r22622).\n\n"
"Please reply to the V8 sheriff c_name@chromium.org in "
@@ -903,16 +936,14 @@
self.Expect([
Cmd(("git log -1 --format=%H --grep="
"\"^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]* (based\" "
- "svn/trunk"), "hash2\n"),
+ "origin/candidates"), "hash2\n"),
Cmd("git log -1 --format=%s hash2",
- "Version 3.4.5 (based on bleeding_edge revision r99)\n"),
+ "Version 3.4.5 (based on abc123)\n"),
])
- self._state["lkgr"] = "101"
-
- self.assertRaises(Exception, lambda: self.RunStep(auto_push.AutoPush,
- CheckLastPush,
- AUTO_PUSH_ARGS))
+ self._state["candidate"] = "abc123"
+ self.assertEquals(0, self.RunStep(
+ auto_push.AutoPush, CheckLastPush, AUTO_PUSH_ARGS))
def testAutoPush(self):
TextToFile("", os.path.join(TEST_CONFIG["DEFAULT_CWD"], ".git"))
@@ -921,16 +952,16 @@
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git status -s -b -uno", "## some_branch\n"),
- Cmd("git svn fetch", ""),
+ Cmd("git fetch", ""),
URL("https://v8-status.appspot.com/current?format=json",
"{\"message\": \"Tree is throttled\"}"),
- URL("https://v8-status.appspot.com/lkgr", Exception("Network problem")),
- URL("https://v8-status.appspot.com/lkgr", "100"),
+ Cmd("git fetch origin +refs/heads/candidate:refs/heads/candidate", ""),
+ Cmd("git show-ref -s refs/heads/candidate", "abc123\n"),
Cmd(("git log -1 --format=%H --grep=\""
"^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]* (based\""
- " svn/trunk"), "push_hash\n"),
+ " origin/candidates"), "push_hash\n"),
Cmd("git log -1 --format=%s push_hash",
- "Version 3.4.5 (based on bleeding_edge revision r79)\n"),
+ "Version 3.4.5 (based on abc101)\n"),
])
auto_push.AutoPush(TEST_CONFIG, self).Run(AUTO_PUSH_ARGS + ["--push"])
@@ -938,7 +969,7 @@
state = json.loads(FileToText("%s-state.json"
% TEST_CONFIG["PERSISTFILE_BASENAME"]))
- self.assertEquals("100", state["lkgr"])
+ self.assertEquals("abc123", state["candidate"])
def testAutoPushStoppedBySettings(self):
TextToFile("", os.path.join(TEST_CONFIG["DEFAULT_CWD"], ".git"))
@@ -949,7 +980,7 @@
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git status -s -b -uno", "## some_branch\n"),
- Cmd("git svn fetch", ""),
+ Cmd("git fetch", ""),
])
def RunAutoPush():
@@ -963,7 +994,7 @@
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git status -s -b -uno", "## some_branch\n"),
- Cmd("git svn fetch", ""),
+ Cmd("git fetch", ""),
URL("https://v8-status.appspot.com/current?format=json",
"{\"message\": \"Tree is throttled (no push)\"}"),
])
@@ -1003,9 +1034,10 @@
URL("https://codereview.chromium.org/search",
"owner=author%40chromium.org&limit=30&closed=3&format=json",
("{\"results\": [{\"subject\": \"different\"}]}")),
+ Cmd("git fetch", ""),
Cmd(("git log -1 --format=%H --grep="
"\"^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]*\" "
- "origin/master"), "push_hash\n"),
+ "origin/candidates"), "push_hash\n"),
Cmd("git log -1 --format=%B push_hash", self.C_V8_22624_LOG),
Cmd("git log -1 --format=%B abcd123455", self.C_V8_123455_LOG),
])
@@ -1024,9 +1056,10 @@
URL("https://codereview.chromium.org/search",
"owner=author%40chromium.org&limit=30&closed=3&format=json",
("{\"results\": [{\"subject\": \"different\"}]}")),
+ Cmd("git fetch", ""),
Cmd(("git log -1 --format=%H --grep="
"\"^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]*\" "
- "origin/master"), "push_hash\n"),
+ "origin/candidates"), "push_hash\n"),
Cmd("git log -1 --format=%B push_hash", self.C_V8_123456_LOG),
Cmd("git log -1 --format=%B abcd123455", self.C_V8_123455_LOG),
])
@@ -1046,7 +1079,13 @@
return lambda: self.assertEquals(patch,
FileToText(TEST_CONFIG["TEMPORARY_PATCH_FILE"]))
- msg = """Version 3.22.5.1 (merged r12345, r23456, r34567, r45678, r56789)
+ msg = """Version 3.22.5.1 (cherry-pick)
+
+Merged ab12345
+Merged ab23456
+Merged ab34567
+Merged ab45678
+Merged ab56789
Title4
@@ -1075,61 +1114,53 @@
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git status -s -b -uno", "## some_branch\n"),
- Cmd("git svn fetch", ""),
+ Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* branch2\n"),
- Cmd("git checkout -b %s svn/trunk" % TEST_CONFIG["BRANCHNAME"], ""),
- Cmd(("git log --format=%H --grep=\"Port r12345\" "
- "--reverse svn/bleeding_edge"),
- "hash1\nhash2"),
- Cmd("git svn find-rev hash1 svn/bleeding_edge", "45678"),
- Cmd("git log -1 --format=%s hash1", "Title1"),
- Cmd("git svn find-rev hash2 svn/bleeding_edge", "23456"),
- Cmd("git log -1 --format=%s hash2", "Title2"),
- Cmd(("git log --format=%H --grep=\"Port r23456\" "
- "--reverse svn/bleeding_edge"),
+ Cmd("git new-branch %s --upstream origin/candidates" %
+ TEST_CONFIG["BRANCHNAME"], ""),
+ Cmd(("git log --format=%H --grep=\"Port ab12345\" "
+ "--reverse origin/master"),
+ "ab45678\nab23456"),
+ Cmd("git log -1 --format=%s ab45678", "Title1"),
+ Cmd("git log -1 --format=%s ab23456", "Title2"),
+ Cmd(("git log --format=%H --grep=\"Port ab23456\" "
+ "--reverse origin/master"),
""),
- Cmd(("git log --format=%H --grep=\"Port r34567\" "
- "--reverse svn/bleeding_edge"),
- "hash3"),
- Cmd("git svn find-rev hash3 svn/bleeding_edge", "56789"),
- Cmd("git log -1 --format=%s hash3", "Title3"),
- RL("Y"), # Automatically add corresponding ports (34567, 56789)?
- Cmd("git svn find-rev r12345 svn/bleeding_edge", "hash4"),
- # Simulate svn being down which stops the script.
- Cmd("git svn find-rev r23456 svn/bleeding_edge", None),
+ Cmd(("git log --format=%H --grep=\"Port ab34567\" "
+ "--reverse origin/master"),
+ "ab56789"),
+ Cmd("git log -1 --format=%s ab56789", "Title3"),
+ RL("Y"), # Automatically add corresponding ports (ab34567, ab56789)?
+ # Simulate git being down which stops the script.
+ Cmd("git log -1 --format=%s ab12345", None),
# Restart script in the failing step.
- Cmd("git svn find-rev r12345 svn/bleeding_edge", "hash4"),
- Cmd("git svn find-rev r23456 svn/bleeding_edge", "hash2"),
- Cmd("git svn find-rev r34567 svn/bleeding_edge", "hash3"),
- Cmd("git svn find-rev r45678 svn/bleeding_edge", "hash1"),
- Cmd("git svn find-rev r56789 svn/bleeding_edge", "hash5"),
- Cmd("git log -1 --format=%s hash4", "Title4"),
- Cmd("git log -1 --format=%s hash2", "Title2"),
- Cmd("git log -1 --format=%s hash3", "Title3"),
- Cmd("git log -1 --format=%s hash1", "Title1"),
- Cmd("git log -1 --format=%s hash5", "Revert \"Something\""),
- Cmd("git log -1 hash4", "Title4\nBUG=123\nBUG=234"),
- Cmd("git log -1 hash2", "Title2\n BUG = v8:123,345"),
- Cmd("git log -1 hash3", "Title3\nLOG=n\nBUG=567, 456"),
- Cmd("git log -1 hash1", "Title1\nBUG="),
- Cmd("git log -1 hash5", "Revert \"Something\"\nBUG=none"),
- Cmd("git log -1 -p hash4", "patch4"),
+ Cmd("git log -1 --format=%s ab12345", "Title4"),
+ Cmd("git log -1 --format=%s ab23456", "Title2"),
+ Cmd("git log -1 --format=%s ab34567", "Title3"),
+ Cmd("git log -1 --format=%s ab45678", "Title1"),
+ Cmd("git log -1 --format=%s ab56789", "Revert \"Something\""),
+ Cmd("git log -1 ab12345", "Title4\nBUG=123\nBUG=234"),
+ Cmd("git log -1 ab23456", "Title2\n BUG = v8:123,345"),
+ Cmd("git log -1 ab34567", "Title3\nLOG=n\nBUG=567, 456"),
+ Cmd("git log -1 ab45678", "Title1\nBUG="),
+ Cmd("git log -1 ab56789", "Revert \"Something\"\nBUG=none"),
+ Cmd("git log -1 -p ab12345", "patch4"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch4")),
- Cmd("git log -1 -p hash2", "patch2"),
+ Cmd("git log -1 -p ab23456", "patch2"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch2")),
- Cmd("git log -1 -p hash3", "patch3"),
+ Cmd("git log -1 -p ab34567", "patch3"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch3")),
- Cmd("git log -1 -p hash1", "patch1"),
+ Cmd("git log -1 -p ab45678", "patch1"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch1")),
- Cmd("git log -1 -p hash5", "patch5\n"),
+ Cmd("git log -1 -p ab56789", "patch5\n"),
Cmd(("git apply --index --reject \"%s\"" %
TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
"", cb=VerifyPatch("patch5\n")),
@@ -1138,64 +1169,42 @@
Cmd("git commit -aF \"%s\"" % TEST_CONFIG["COMMITMSG_FILE"], ""),
RL("reviewer@chromium.org"), # V8 reviewer.
Cmd("git cl upload --send-mail -r \"reviewer@chromium.org\" "
- "--bypass-hooks", ""),
+ "--bypass-hooks --cc \"ulan@chromium.org\"", ""),
Cmd("git checkout -f %s" % TEST_CONFIG["BRANCHNAME"], ""),
RL("LGTM"), # Enter LGTM for V8 CL.
Cmd("git cl presubmit", "Presubmit successfull\n"),
- Cmd("git cl dcommit -f --bypass-hooks", "Closing issue\n",
+ Cmd("git cl land -f --bypass-hooks", "Closing issue\n",
cb=VerifySVNCommit),
- Cmd("git svn fetch", ""),
- Cmd(("git log -1 --format=%%H --grep=\"%s\" svn/trunk"
- % msg.replace("\"", "\\\"")), "hash6"),
- Cmd("git svn find-rev hash6", "1324"),
- Cmd(("svn copy -r 1324 https://v8.googlecode.com/svn/trunk "
- "https://v8.googlecode.com/svn/tags/3.22.5.1 -m "
- "\"Tagging version 3.22.5.1\""), ""),
+ Cmd("git fetch", ""),
+ Cmd("git log -1 --format=%H --grep=\""
+ "Version 3.22.5.1 (cherry-pick)"
+ "\" origin/candidates",
+ ""),
+ Cmd("git fetch", ""),
+ Cmd("git log -1 --format=%H --grep=\""
+ "Version 3.22.5.1 (cherry-pick)"
+ "\" origin/candidates",
+ "hsh_to_tag"),
+ Cmd("git tag 3.22.5.1 hsh_to_tag", ""),
+ Cmd("git push origin 3.22.5.1", ""),
Cmd("git checkout -f some_branch", ""),
Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
])
- # r12345 and r34567 are patches. r23456 (included) and r45678 are the MIPS
- # ports of r12345. r56789 is the MIPS port of r34567.
- args = ["-f", "-p", extra_patch, "--branch", "trunk", "12345", "23456",
- "34567"]
+ # ab12345 and ab34567 are patches. ab23456 (included) and ab45678 are the
+ # MIPS ports of ab12345. ab56789 is the MIPS port of ab34567.
+ args = ["-f", "-p", extra_patch, "--branch", "candidates",
+ "ab12345", "ab23456", "ab34567"]
- # The first run of the script stops because of the svn being down.
+ # The first run of the script stops because of git being down.
self.assertRaises(GitFailedException,
lambda: MergeToBranch(TEST_CONFIG, self).Run(args))
# Test that state recovery after restarting the script works.
- args += ["-s", "3"]
+ args += ["-s", "4"]
MergeToBranch(TEST_CONFIG, self).Run(args)
def testReleases(self):
- tag_response_text = """
-------------------------------------------------------------------------
-r22631 | author1@chromium.org | 2014-07-28 02:05:29 +0200 (Mon, 28 Jul 2014)
-Changed paths:
- A /tags/3.28.43 (from /trunk:22630)
-
-Tagging version 3.28.43
-------------------------------------------------------------------------
-r22629 | author2@chromium.org | 2014-07-26 05:09:29 +0200 (Sat, 26 Jul 2014)
-Changed paths:
- A /tags/3.28.41 (from /branches/bleeding_edge:22626)
-
-Tagging version 3.28.41
-------------------------------------------------------------------------
-r22556 | author3@chromium.org | 2014-07-23 13:31:59 +0200 (Wed, 23 Jul 2014)
-Changed paths:
- A /tags/3.27.34.7 (from /branches/3.27:22555)
-
-Tagging version 3.27.34.7
-------------------------------------------------------------------------
-r22627 | author4@chromium.org | 2014-07-26 01:39:15 +0200 (Sat, 26 Jul 2014)
-Changed paths:
- A /tags/3.28.40 (from /branches/bleeding_edge:22624)
-
-Tagging version 3.28.40
-------------------------------------------------------------------------
-"""
c_hash2_commit_log = """Revert something.
BUG=12345
@@ -1216,6 +1225,23 @@
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@3456 0039-1c4b
"""
+ c_hash_234_commit_log = """Version 3.3.1.1 (cherry-pick).
+
+Merged abc12.
+
+Review URL: fake.com
+
+Cr-Commit-Position: refs/heads/candidates@{#234}
+"""
+ c_hash_123_commit_log = """Version 3.3.1.0
+
+git-svn-id: googlecode@123 0039-1c4b
+"""
+ c_hash_345_commit_log = """Version 3.4.0.
+
+Cr-Commit-Position: refs/heads/candidates@{#345}
+"""
+
json_output = self.MakeEmptyTempFile()
csv_output = self.MakeEmptyTempFile()
self.WriteFakeVersionFile()
@@ -1240,58 +1266,50 @@
self.Expect([
Cmd("git status -s -uno", ""),
Cmd("git status -s -b -uno", "## some_branch\n"),
- Cmd("git svn fetch", ""),
+ Cmd("git fetch", ""),
Cmd("git branch", " branch1\n* branch2\n"),
- Cmd("git checkout -b %s" % TEST_CONFIG["BRANCHNAME"], ""),
- Cmd("git branch -r", " svn/3.21\n svn/3.3\n"),
- Cmd("git reset --hard svn/3.3", ""),
- Cmd("git log --format=%H", "hash1\nhash2"),
+ Cmd("git new-branch %s" % TEST_CONFIG["BRANCHNAME"], ""),
+ Cmd("git branch -r", " branch-heads/3.21\n branch-heads/3.3\n"),
+ Cmd("git reset --hard branch-heads/3.3", ""),
+ Cmd("git log --format=%H", "hash1\nhash_234"),
Cmd("git diff --name-only hash1 hash1^", ""),
- Cmd("git diff --name-only hash2 hash2^", VERSION_FILE),
- Cmd("git checkout -f hash2 -- %s" % VERSION_FILE, "",
+ Cmd("git diff --name-only hash_234 hash_234^", VERSION_FILE),
+ Cmd("git checkout -f hash_234 -- %s" % VERSION_FILE, "",
cb=ResetVersion(3, 1, 1)),
- Cmd("git log -1 --format=%B hash2",
- "Version 3.3.1.1 (merged 12)\n\nReview URL: fake.com\n"),
- Cmd("git log -1 --format=%s hash2", ""),
- Cmd("git svn find-rev hash2", "234"),
- Cmd("git log -1 --format=%ci hash2", "18:15"),
+ Cmd("git log -1 --format=%B hash_234", c_hash_234_commit_log),
+ Cmd("git log -1 --format=%s hash_234", ""),
+ Cmd("git log -1 --format=%B hash_234", c_hash_234_commit_log),
+ Cmd("git log -1 --format=%ci hash_234", "18:15"),
Cmd("git checkout -f HEAD -- %s" % VERSION_FILE, "",
cb=ResetVersion(22, 5)),
- Cmd("git reset --hard svn/3.21", ""),
- Cmd("git log --format=%H", "hash3\nhash4\nhash5\n"),
- Cmd("git diff --name-only hash3 hash3^", VERSION_FILE),
- Cmd("git checkout -f hash3 -- %s" % VERSION_FILE, "",
+ Cmd("git reset --hard branch-heads/3.21", ""),
+ Cmd("git log --format=%H", "hash_123\nhash4\nhash5\n"),
+ Cmd("git diff --name-only hash_123 hash_123^", VERSION_FILE),
+ Cmd("git checkout -f hash_123 -- %s" % VERSION_FILE, "",
cb=ResetVersion(21, 2)),
- Cmd("git log -1 --format=%B hash3", ""),
- Cmd("git log -1 --format=%s hash3", ""),
- Cmd("git svn find-rev hash3", "123"),
- Cmd("git log -1 --format=%ci hash3", "03:15"),
+ Cmd("git log -1 --format=%B hash_123", c_hash_123_commit_log),
+ Cmd("git log -1 --format=%s hash_123", ""),
+ Cmd("git log -1 --format=%B hash_123", c_hash_123_commit_log),
+ Cmd("git log -1 --format=%ci hash_123", "03:15"),
Cmd("git checkout -f HEAD -- %s" % VERSION_FILE, "",
cb=ResetVersion(22, 5)),
- Cmd("git reset --hard svn/trunk", ""),
- Cmd("git log --format=%H", "hash6\n"),
- Cmd("git diff --name-only hash6 hash6^", VERSION_FILE),
- Cmd("git checkout -f hash6 -- %s" % VERSION_FILE, "",
+ Cmd("git reset --hard origin/candidates", ""),
+ Cmd("git log --format=%H", "hash_345\n"),
+ Cmd("git diff --name-only hash_345 hash_345^", VERSION_FILE),
+ Cmd("git checkout -f hash_345 -- %s" % VERSION_FILE, "",
cb=ResetVersion(22, 3)),
- Cmd("git log -1 --format=%B hash6", ""),
- Cmd("git log -1 --format=%s hash6", ""),
- Cmd("git svn find-rev hash6", "345"),
- Cmd("git log -1 --format=%ci hash6", ""),
+ Cmd("git log -1 --format=%B hash_345", c_hash_345_commit_log),
+ Cmd("git log -1 --format=%s hash_345", ""),
+ Cmd("git log -1 --format=%B hash_345", c_hash_345_commit_log),
+ Cmd("git log -1 --format=%ci hash_345", ""),
Cmd("git checkout -f HEAD -- %s" % VERSION_FILE, "",
cb=ResetVersion(22, 5)),
- Cmd("git reset --hard svn/bleeding_edge", ""),
- Cmd("svn log https://v8.googlecode.com/svn/tags -v --limit 20",
- tag_response_text),
- Cmd("git svn find-rev r22626", "hash_22626"),
- Cmd("git svn find-rev hash_22626", "22626"),
- Cmd("git log -1 --format=%ci hash_22626", "01:23"),
- Cmd("git svn find-rev r22624", "hash_22624"),
- Cmd("git svn find-rev hash_22624", "22624"),
- Cmd("git log -1 --format=%ci hash_22624", "02:34"),
+ Cmd("git reset --hard origin/master", ""),
Cmd("git status -s -uno", "", cwd=chrome_dir),
Cmd("git checkout -f master", "", cwd=chrome_dir),
Cmd("git pull", "", cwd=chrome_dir),
- Cmd("git checkout -b %s" % TEST_CONFIG["BRANCHNAME"], "", cwd=chrome_dir),
+ Cmd("git new-branch %s" % TEST_CONFIG["BRANCHNAME"], "",
+ cwd=chrome_dir),
Cmd("git fetch origin", "", cwd=chrome_v8_dir),
Cmd("git log --format=%H --grep=\"V8\"", "c_hash1\nc_hash2\nc_hash3\n",
cwd=chrome_dir),
@@ -1302,8 +1320,6 @@
cwd=chrome_dir),
Cmd("git log -1 --format=%B c_hash2", c_hash2_commit_log,
cwd=chrome_dir),
- Cmd("git rev-list -n 1 0123456789012345678901234567890123456789",
- "0123456789012345678901234567890123456789", cwd=chrome_v8_dir),
Cmd("git log -1 --format=%B 0123456789012345678901234567890123456789",
self.C_V8_22624_LOG, cwd=chrome_v8_dir),
Cmd("git diff --name-only c_hash3 c_hash3^", "DEPS", cwd=chrome_dir),
@@ -1331,35 +1347,54 @@
Releases(TEST_CONFIG, self).Run(args)
# Check expected output.
- csv = ("3.28.41,bleeding_edge,22626,,\r\n"
- "3.28.40,bleeding_edge,22624,4567,\r\n"
- "3.22.3,trunk,345,3456:4566,\r\n"
+ csv = ("3.22.3,candidates,345,3456:4566,\r\n"
"3.21.2,3.21,123,,\r\n"
- "3.3.1.1,3.3,234,,12\r\n")
+ "3.3.1.1,3.3,234,,abc12\r\n")
self.assertEquals(csv, FileToText(csv_output))
expected_json = [
- {"bleeding_edge": "22626", "patches_merged": "", "version": "3.28.41",
- "chromium_revision": "", "branch": "bleeding_edge", "revision": "22626",
- "review_link": "", "date": "01:23", "chromium_branch": "",
- "revision_link": "https://code.google.com/p/v8/source/detail?r=22626"},
- {"bleeding_edge": "22624", "patches_merged": "", "version": "3.28.40",
- "chromium_revision": "4567", "branch": "bleeding_edge",
- "revision": "22624", "review_link": "", "date": "02:34",
- "chromium_branch": "",
- "revision_link": "https://code.google.com/p/v8/source/detail?r=22624"},
- {"bleeding_edge": "", "patches_merged": "", "version": "3.22.3",
- "chromium_revision": "3456:4566", "branch": "trunk", "revision": "345",
- "review_link": "", "date": "", "chromium_branch": "7",
- "revision_link": "https://code.google.com/p/v8/source/detail?r=345"},
- {"patches_merged": "", "bleeding_edge": "", "version": "3.21.2",
- "chromium_revision": "", "branch": "3.21", "revision": "123",
- "review_link": "", "date": "03:15", "chromium_branch": "",
- "revision_link": "https://code.google.com/p/v8/source/detail?r=123"},
- {"patches_merged": "12", "bleeding_edge": "", "version": "3.3.1.1",
- "chromium_revision": "", "branch": "3.3", "revision": "234",
- "review_link": "fake.com", "date": "18:15", "chromium_branch": "",
- "revision_link": "https://code.google.com/p/v8/source/detail?r=234"},
+ {
+ "revision": "345",
+ "revision_git": "hash_345",
+ "bleeding_edge": "",
+ "bleeding_edge_git": "",
+ "patches_merged": "",
+ "version": "3.22.3",
+ "chromium_revision": "3456:4566",
+ "branch": "candidates",
+ "review_link": "",
+ "date": "",
+ "chromium_branch": "7",
+ "revision_link": "https://code.google.com/p/v8/source/detail?r=345",
+ },
+ {
+ "revision": "123",
+ "revision_git": "hash_123",
+ "patches_merged": "",
+ "bleeding_edge": "",
+ "bleeding_edge_git": "",
+ "version": "3.21.2",
+ "chromium_revision": "",
+ "branch": "3.21",
+ "review_link": "",
+ "date": "03:15",
+ "chromium_branch": "",
+ "revision_link": "https://code.google.com/p/v8/source/detail?r=123",
+ },
+ {
+ "revision": "234",
+ "revision_git": "hash_234",
+ "patches_merged": "abc12",
+ "bleeding_edge": "",
+ "bleeding_edge_git": "",
+ "version": "3.3.1.1",
+ "chromium_revision": "",
+ "branch": "3.3",
+ "review_link": "fake.com",
+ "date": "18:15",
+ "chromium_branch": "",
+ "revision_link": "https://code.google.com/p/v8/source/detail?r=234",
+ },
]
self.assertEquals(expected_json, json.loads(FileToText(json_output)))
@@ -1374,26 +1409,27 @@
return [
Cmd("git status -s -uno", ""),
- Cmd("git checkout -f bleeding_edge", "", cb=ResetVersion(11, 4)),
+ Cmd("git checkout -f master", "", cb=ResetVersion(11, 4)),
Cmd("git pull", ""),
Cmd("git branch", ""),
- Cmd("git checkout -f bleeding_edge", ""),
+ Cmd("git checkout -f master", ""),
Cmd("git log -1 --format=%H", "latest_hash"),
Cmd("git diff --name-only latest_hash latest_hash^", ""),
URL("https://v8-status.appspot.com/lkgr", "12345"),
- Cmd("git checkout -f bleeding_edge", ""),
+ Cmd("git checkout -f master", ""),
Cmd(("git log --format=%H --grep="
"\"^git-svn-id: [^@]*@12345 [A-Za-z0-9-]*$\""),
"lkgr_hash"),
- Cmd("git checkout -b auto-bump-up-version lkgr_hash", ""),
- Cmd("git checkout -f bleeding_edge", ""),
- Cmd("git branch", ""),
+ Cmd("git new-branch auto-bump-up-version --upstream lkgr_hash", ""),
+ Cmd("git checkout -f master", ""),
+ Cmd("git branch", "auto-bump-up-version\n* master"),
+ Cmd("git branch -D auto-bump-up-version", ""),
Cmd("git diff --name-only lkgr_hash lkgr_hash^", ""),
- Cmd("git checkout -f master", "", cb=ResetVersion(11, 5)),
+ Cmd("git checkout -f candidates", "", cb=ResetVersion(11, 5)),
Cmd("git pull", ""),
URL("https://v8-status.appspot.com/current?format=json",
"{\"message\": \"Tree is open\"}"),
- Cmd("git checkout -b auto-bump-up-version bleeding_edge", "",
+ Cmd("git new-branch auto-bump-up-version --upstream master", "",
cb=ResetVersion(11, 4)),
Cmd("git commit -am \"[Auto-roll] Bump up version to 3.11.6.0\n\n"
"TBR=author@chromium.org\" "
@@ -1405,88 +1441,15 @@
expectations += [
Cmd("git cl upload --send-mail --email \"author@chromium.org\" -f "
"--bypass-hooks", ""),
- Cmd("git cl dcommit -f --bypass-hooks", ""),
- Cmd("git checkout -f bleeding_edge", ""),
- Cmd("git branch", "auto-bump-up-version\n* bleeding_edge"),
+ Cmd("git cl land -f --bypass-hooks", ""),
+ Cmd("git checkout -f master", ""),
+ Cmd("git branch", "auto-bump-up-version\n* master"),
Cmd("git branch -D auto-bump-up-version", ""),
]
self.Expect(expectations)
BumpUpVersion(TEST_CONFIG, self).Run(["-a", "author@chromium.org"])
- def testBumpUpVersionSvn(self):
- svn_root = self.MakeEmptyTempDirectory()
- expectations = self._bumpUpVersion()
- expectations += [
- Cmd("git diff HEAD^ HEAD", "patch content"),
- Cmd("svn update", "", cwd=svn_root),
- Cmd("svn status", "", cwd=svn_root),
- Cmd("patch -d branches/bleeding_edge -p1 -i %s" %
- TEST_CONFIG["PATCH_FILE"], "Applied patch...", cwd=svn_root),
- Cmd("svn commit --non-interactive --username=author@chromium.org "
- "--config-dir=[CONFIG_DIR] "
- "-m \"[Auto-roll] Bump up version to 3.11.6.0\"",
- "", cwd=svn_root),
- Cmd("git checkout -f bleeding_edge", ""),
- Cmd("git branch", "auto-bump-up-version\n* bleeding_edge"),
- Cmd("git branch -D auto-bump-up-version", ""),
- ]
- self.Expect(expectations)
-
- BumpUpVersion(TEST_CONFIG, self).Run(
- ["-a", "author@chromium.org",
- "--svn", svn_root,
- "--svn-config", "[CONFIG_DIR]"])
-
- def testAutoTag(self):
- self.WriteFakeVersionFile()
-
- def ResetVersion(minor, build, patch=0):
- return lambda: self.WriteFakeVersionFile(minor=minor,
- build=build,
- patch=patch)
-
- self.Expect([
- Cmd("git status -s -uno", ""),
- Cmd("git status -s -b -uno", "## some_branch\n"),
- Cmd("git svn fetch", ""),
- Cmd("git branch", " branch1\n* branch2\n"),
- Cmd("git checkout -f master", ""),
- Cmd("git svn rebase", ""),
- Cmd("git checkout -b %s" % TEST_CONFIG["BRANCHNAME"], "",
- cb=ResetVersion(4, 5)),
- Cmd("git branch -r",
- "svn/tags/3.4.2\nsvn/tags/3.2.1.0\nsvn/branches/3.4"),
- Cmd(("git log --format=%H --grep="
- "\"\\[Auto\\-roll\\] Bump up version to\""),
- "hash125\nhash118\nhash111\nhash101"),
- Cmd("git checkout -f hash125 -- %s" % VERSION_FILE, "",
- cb=ResetVersion(4, 4)),
- Cmd("git checkout -f HEAD -- %s" % VERSION_FILE, "",
- cb=ResetVersion(4, 5)),
- Cmd("git checkout -f hash118 -- %s" % VERSION_FILE, "",
- cb=ResetVersion(4, 3)),
- Cmd("git checkout -f HEAD -- %s" % VERSION_FILE, "",
- cb=ResetVersion(4, 5)),
- Cmd("git checkout -f hash111 -- %s" % VERSION_FILE, "",
- cb=ResetVersion(4, 2)),
- Cmd("git checkout -f HEAD -- %s" % VERSION_FILE, "",
- cb=ResetVersion(4, 5)),
- URL("https://v8-status.appspot.com/revisions?format=json",
- "[{\"revision\": \"126\", \"status\": true},"
- "{\"revision\": \"123\", \"status\": true},"
- "{\"revision\": \"112\", \"status\": true}]"),
- Cmd("git svn find-rev hash118", "118"),
- Cmd("git svn find-rev hash125", "125"),
- Cmd("git svn find-rev r123", "hash123"),
- Cmd("git log -1 --format=%at hash123", "1"),
- Cmd("git reset --hard hash123", ""),
- Cmd("git svn tag 3.4.3 -m \"Tagging version 3.4.3\"", ""),
- Cmd("git checkout -f some_branch", ""),
- Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
- ])
-
- AutoTag(TEST_CONFIG, self).Run(["-a", "author@chromium.org"])
# Test that we bail out if the last change was a version change.
def testBumpUpVersionBailout1(self):
@@ -1527,7 +1490,10 @@
class SystemTest(unittest.TestCase):
def testReload(self):
+ options = ScriptsBase(
+ TEST_CONFIG, DEFAULT_SIDE_EFFECT_HANDLER, {}).MakeOptions([])
step = MakeStep(step_class=PrepareChangeLog, number=0, state={}, config={},
+ options=options,
side_effect_handler=DEFAULT_SIDE_EFFECT_HANDLER)
body = step.Reload(
"""------------------------------------------------------------------------
diff --git a/tools/run-deopt-fuzzer.py b/tools/run-deopt-fuzzer.py
index 57cb6b2..a6fdf31 100755
--- a/tools/run-deopt-fuzzer.py
+++ b/tools/run-deopt-fuzzer.py
@@ -160,6 +160,9 @@
result.add_option("--buildbot",
help="Adapt to path structure used on buildbots",
default=False, action="store_true")
+ result.add_option("--dcheck-always-on",
+ help="Indicates that V8 was compiled with DCHECKs enabled",
+ default=False, action="store_true")
result.add_option("--command-prefix",
help="Prepended to each shell command used to run a test",
default="")
@@ -389,6 +392,8 @@
"simulator": utils.UseSimulator(arch),
"system": utils.GuessOS(),
"tsan": False,
+ "msan": False,
+ "dcheck_always_on": options.dcheck_always_on,
}
all_tests = []
num_tests = 0
diff --git a/tools/run-llprof.sh b/tools/run-llprof.sh
index d526170..54a3881 100755
--- a/tools/run-llprof.sh
+++ b/tools/run-llprof.sh
@@ -66,4 +66,4 @@
fi
echo "Running benchmark..."
-perf record -R -e cycles -c $SAMPLE_EVERY_N_CYCLES -f -i $@ --ll-prof
+perf record -R -e cycles -c $SAMPLE_EVERY_N_CYCLES -i $@ --ll-prof
diff --git a/tools/run-tests.py b/tools/run-tests.py
index d48b70c..d68d1f8 100755
--- a/tools/run-tests.py
+++ b/tools/run-tests.py
@@ -44,6 +44,7 @@
from testrunner.local import execution
from testrunner.local import progress
from testrunner.local import testsuite
+from testrunner.local.testsuite import VARIANT_FLAGS
from testrunner.local import utils
from testrunner.local import verbose
from testrunner.network import network_execution
@@ -51,9 +52,13 @@
ARCH_GUESS = utils.DefaultArch()
-DEFAULT_TESTS = ["mjsunit", "fuzz-natives", "base-unittests",
- "cctest", "compiler-unittests", "heap-unittests",
- "libplatform-unittests", "message", "preparser"]
+DEFAULT_TESTS = [
+ "mjsunit",
+ "unittests",
+ "cctest",
+ "message",
+ "preparser",
+]
# Map of test name synonyms to lists of test suites. Should be ordered by
# expected runtimes (suites with slow test cases first). These groups are
@@ -61,7 +66,6 @@
TEST_MAP = {
"default": [
"mjsunit",
- "fuzz-natives",
"cctest",
"message",
"preparser",
@@ -72,10 +76,7 @@
"webkit",
],
"unittests": [
- "compiler-unittests",
- "heap-unittests",
- "base-unittests",
- "libplatform-unittests",
+ "unittests",
],
}
@@ -83,13 +84,6 @@
TIMEOUT_SCALEFACTOR = {"debug" : 4,
"release" : 1 }
-# Use this to run several variants of the tests.
-VARIANT_FLAGS = {
- "default": [],
- "stress": ["--stress-opt", "--always-opt"],
- "turbofan": ["--turbo-filter=*", "--always-opt"],
- "nocrankshaft": ["--nocrankshaft"]}
-
VARIANTS = ["default", "stress", "turbofan", "nocrankshaft"]
MODE_FLAGS = {
@@ -147,6 +141,9 @@
result.add_option("--buildbot",
help="Adapt to path structure used on buildbots",
default=False, action="store_true")
+ result.add_option("--dcheck-always-on",
+ help="Indicates that V8 was compiled with DCHECKs enabled",
+ default=False, action="store_true")
result.add_option("--cat", help="Print the source of the tests",
default=False, action="store_true")
result.add_option("--flaky-tests",
@@ -257,6 +254,9 @@
default="v8tests")
result.add_option("--random-seed", default=0, dest="random_seed",
help="Default seed for initializing random generator")
+ result.add_option("--msan",
+ help="Regard test expectations for MSAN",
+ default=False, action="store_true")
return result
@@ -309,6 +309,11 @@
if options.tsan:
VARIANTS = ["default"]
+ suppressions_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'sanitizers', 'tsan_suppressions.txt')
+ tsan_options = '%s suppressions=%s' % (
+ os.environ.get('TSAN_OPTIONS', ''), suppressions_file)
+ os.environ['TSAN_OPTIONS'] = tsan_options
if options.j == 0:
options.j = multiprocessing.cpu_count()
@@ -489,7 +494,8 @@
# TODO(all): Combine "simulator" and "simulator_run".
simulator_run = not options.dont_skip_simulator_slow_tests and \
- arch in ['arm64', 'arm', 'mips'] and ARCH_GUESS and arch != ARCH_GUESS
+ arch in ['arm64', 'arm', 'mipsel', 'mips', 'mips64el'] and \
+ ARCH_GUESS and arch != ARCH_GUESS
# Find available test suites and read test cases from them.
variables = {
"arch": arch,
@@ -504,6 +510,8 @@
"simulator": utils.UseSimulator(arch),
"system": utils.GuessOS(),
"tsan": options.tsan,
+ "msan": options.msan,
+ "dcheck_always_on": options.dcheck_always_on,
}
all_tests = []
num_tests = 0
diff --git a/tools/run_perf.py b/tools/run_perf.py
index 920c18d..63c9148 100755
--- a/tools/run_perf.py
+++ b/tools/run_perf.py
@@ -15,9 +15,10 @@
"archs": [<architecture name for which this suite is run>, ...],
"binary": <name of binary to run, default "d8">,
"flags": [<flag to d8>, ...],
+ "test_flags": [<flag to the test file>, ...],
"run_count": <how often will this suite run (optional)>,
"run_count_XXX": <how often will this suite run for arch XXX (optional)>,
- "resources": [<js file to be loaded before main>, ...]
+ "resources": [<js file to be moved to android device>, ...]
"main": <main js perf runner file>,
"results_regexp": <optional regexp>,
"results_processor": <optional python results processor script>,
@@ -54,6 +55,7 @@
{
"path": ["."],
"flags": ["--expose-gc"],
+ "test_flags": ["5"],
"archs": ["ia32", "x64"],
"run_count": 5,
"run_count_ia32": 3,
@@ -89,9 +91,13 @@
}
Path pieces are concatenated. D8 is always run with the suite's path as cwd.
+
+The test flags are passed to the js test file after '--'.
"""
+from collections import OrderedDict
import json
+import logging
import math
import optparse
import os
@@ -114,8 +120,25 @@
"x64",
"arm64"]
-GENERIC_RESULTS_RE = re.compile(
- r"^Trace\(([^\)]+)\), Result\(([^\)]+)\), StdDev\(([^\)]+)\)$")
+GENERIC_RESULTS_RE = re.compile(r"^RESULT ([^:]+): ([^=]+)= ([^ ]+) ([^ ]*)$")
+RESULT_STDDEV_RE = re.compile(r"^\{([^\}]+)\}$")
+RESULT_LIST_RE = re.compile(r"^\[([^\]]+)\]$")
+
+
+def LoadAndroidBuildTools(path): # pragma: no cover
+ assert os.path.exists(path)
+ sys.path.insert(0, path)
+
+ from pylib.device import device_utils # pylint: disable=F0401
+ from pylib.device import device_errors # pylint: disable=F0401
+ from pylib.perf import cache_control # pylint: disable=F0401
+ from pylib.perf import perf_control # pylint: disable=F0401
+ import pylib.android_commands # pylint: disable=F0401
+ global cache_control
+ global device_errors
+ global device_utils
+ global perf_control
+ global pylib
def GeometricMean(values):
@@ -168,6 +191,7 @@
self.path = []
self.graphs = []
self.flags = []
+ self.test_flags = []
self.resources = []
self.results_regexp = None
self.stddev_regexp = None
@@ -187,19 +211,24 @@
assert isinstance(suite.get("path", []), list)
assert isinstance(suite["name"], basestring)
assert isinstance(suite.get("flags", []), list)
+ assert isinstance(suite.get("test_flags", []), list)
assert isinstance(suite.get("resources", []), list)
# Accumulated values.
self.path = parent.path[:] + suite.get("path", [])
self.graphs = parent.graphs[:] + [suite["name"]]
self.flags = parent.flags[:] + suite.get("flags", [])
- self.resources = parent.resources[:] + suite.get("resources", [])
+ self.test_flags = parent.test_flags[:] + suite.get("test_flags", [])
+
+ # Values independent of parent node.
+ self.resources = suite.get("resources", [])
# Descrete values (with parent defaults).
self.binary = suite.get("binary", parent.binary)
self.run_count = suite.get("run_count", parent.run_count)
self.run_count = suite.get("run_count_%s" % arch, self.run_count)
self.timeout = suite.get("timeout", parent.timeout)
+ self.timeout = suite.get("timeout_%s" % arch, self.timeout)
self.units = suite.get("units", parent.units)
self.total = suite.get("total", parent.total)
@@ -236,8 +265,11 @@
def ConsumeOutput(self, stdout):
try:
- self.results.append(
- re.search(self.results_regexp, stdout, re.M).group(1))
+ result = re.search(self.results_regexp, stdout, re.M).group(1)
+ self.results.append(str(float(result)))
+ except ValueError:
+ self.errors.append("Regexp \"%s\" returned a non-numeric for test %s."
+ % (self.results_regexp, self.graphs[-1]))
except:
self.errors.append("Regexp \"%s\" didn't match for test %s."
% (self.results_regexp, self.graphs[-1]))
@@ -277,14 +309,13 @@
bench_dir = os.path.normpath(os.path.join(*self.path))
os.chdir(os.path.join(suite_dir, bench_dir))
+ def GetCommandFlags(self):
+ suffix = ["--"] + self.test_flags if self.test_flags else []
+ return self.flags + [self.main] + suffix
+
def GetCommand(self, shell_dir):
# TODO(machenbach): This requires +.exe if run on windows.
- return (
- [os.path.join(shell_dir, self.binary)] +
- self.flags +
- self.resources +
- [self.main]
- )
+ return [os.path.join(shell_dir, self.binary)] + self.GetCommandFlags()
def Run(self, runner):
"""Iterates over several runs and handles the output for all traces."""
@@ -334,21 +365,41 @@
def Run(self, runner):
"""Iterates over several runs and handles the output."""
- traces = {}
+ traces = OrderedDict()
for stdout in runner():
for line in stdout.strip().splitlines():
match = GENERIC_RESULTS_RE.match(line)
if match:
- trace = match.group(1)
- result = match.group(2)
- stddev = match.group(3)
+ stddev = ""
+ graph = match.group(1)
+ trace = match.group(2)
+ body = match.group(3)
+ units = match.group(4)
+ match_stddev = RESULT_STDDEV_RE.match(body)
+ match_list = RESULT_LIST_RE.match(body)
+ errors = []
+ if match_stddev:
+ result, stddev = map(str.strip, match_stddev.group(1).split(","))
+ results = [result]
+ elif match_list:
+ results = map(str.strip, match_list.group(1).split(","))
+ else:
+ results = [body.strip()]
+
+ try:
+ results = map(lambda r: str(float(r)), results)
+ except ValueError:
+ results = []
+ errors = ["Found non-numeric in %s" %
+ "/".join(self.graphs + [graph, trace])]
+
trace_result = traces.setdefault(trace, Results([{
- "graphs": self.graphs + [trace],
- "units": self.units,
+ "graphs": self.graphs + [graph, trace],
+ "units": (units or self.units).strip(),
"results": [],
"stddev": "",
- }], []))
- trace_result.traces[0]["results"].append(result)
+ }], errors))
+ trace_result.traces[0]["results"].extend(results)
trace_result.traces[0]["stddev"] = stddev
return reduce(lambda r, t: r + t, traces.itervalues(), Results())
@@ -385,7 +436,7 @@
parent = parent or DefaultSentinel()
# TODO(machenbach): Implement notion of cpu type?
- if arch not in suite.get("archs", ["ia32", "x64"]):
+ if arch not in suite.get("archs", SUPPORTED_ARCHS):
return None
graph = MakeGraph(suite, arch, parent)
@@ -395,23 +446,167 @@
return graph
-def FlattenRunnables(node):
+def FlattenRunnables(node, node_cb):
"""Generator that traverses the tree structure and iterates over all
runnables.
"""
+ node_cb(node)
if isinstance(node, Runnable):
yield node
elif isinstance(node, Node):
for child in node._children:
- for result in FlattenRunnables(child):
+ for result in FlattenRunnables(child, node_cb):
yield result
else: # pragma: no cover
raise Exception("Invalid suite configuration.")
+class Platform(object):
+ @staticmethod
+ def GetPlatform(options):
+ if options.arch.startswith("android"):
+ return AndroidPlatform(options)
+ else:
+ return DesktopPlatform(options)
+
+
+class DesktopPlatform(Platform):
+ def __init__(self, options):
+ self.shell_dir = options.shell_dir
+
+ def PreExecution(self):
+ pass
+
+ def PostExecution(self):
+ pass
+
+ def PreTests(self, node, path):
+ if isinstance(node, Runnable):
+ node.ChangeCWD(path)
+
+ def Run(self, runnable, count):
+ output = commands.Execute(runnable.GetCommand(self.shell_dir),
+ timeout=runnable.timeout)
+ print ">>> Stdout (#%d):" % (count + 1)
+ print output.stdout
+ if output.stderr: # pragma: no cover
+ # Print stderr for debugging.
+ print ">>> Stderr (#%d):" % (count + 1)
+ print output.stderr
+ if output.timed_out:
+ print ">>> Test timed out after %ss." % runnable.timeout
+ return output.stdout
+
+
+class AndroidPlatform(Platform): # pragma: no cover
+ DEVICE_DIR = "/data/local/tmp/v8/"
+
+ def __init__(self, options):
+ self.shell_dir = options.shell_dir
+ LoadAndroidBuildTools(options.android_build_tools)
+
+ if not options.device:
+ # Detect attached device if not specified.
+ devices = pylib.android_commands.GetAttachedDevices(
+ hardware=True, emulator=False, offline=False)
+ assert devices and len(devices) == 1, (
+ "None or multiple devices detected. Please specify the device on "
+ "the command-line with --device")
+ options.device = devices[0]
+ adb_wrapper = pylib.android_commands.AndroidCommands(options.device)
+ self.device = device_utils.DeviceUtils(adb_wrapper)
+ self.adb = adb_wrapper.Adb()
+
+ def PreExecution(self):
+ perf = perf_control.PerfControl(self.device)
+ perf.SetHighPerfMode()
+
+ # Remember what we have already pushed to the device.
+ self.pushed = set()
+
+ def PostExecution(self):
+ perf = perf_control.PerfControl(self.device)
+ perf.SetDefaultPerfMode()
+ self.device.RunShellCommand(["rm", "-rf", AndroidPlatform.DEVICE_DIR])
+
+ def _SendCommand(self, cmd):
+ logging.info("adb -s %s %s" % (str(self.device), cmd))
+ return self.adb.SendCommand(cmd, timeout_time=60)
+
+ def _PushFile(self, host_dir, file_name, target_rel="."):
+ file_on_host = os.path.join(host_dir, file_name)
+ file_on_device_tmp = os.path.join(
+ AndroidPlatform.DEVICE_DIR, "_tmp_", file_name)
+ file_on_device = os.path.join(
+ AndroidPlatform.DEVICE_DIR, target_rel, file_name)
+ folder_on_device = os.path.dirname(file_on_device)
+
+ # Only push files not yet pushed in one execution.
+ if file_on_host in self.pushed:
+ return
+ else:
+ self.pushed.add(file_on_host)
+
+ # Work-around for "text file busy" errors. Push the files to a temporary
+ # location and then copy them with a shell command.
+ output = self._SendCommand(
+ "push %s %s" % (file_on_host, file_on_device_tmp))
+ # Success looks like this: "3035 KB/s (12512056 bytes in 4.025s)".
+ # Errors look like this: "failed to copy ... ".
+ if output and not re.search('^[0-9]', output.splitlines()[-1]):
+ logging.critical('PUSH FAILED: ' + output)
+ self._SendCommand("shell mkdir -p %s" % folder_on_device)
+ self._SendCommand("shell cp %s %s" % (file_on_device_tmp, file_on_device))
+
+ def PreTests(self, node, path):
+ suite_dir = os.path.abspath(os.path.dirname(path))
+ if node.path:
+ bench_rel = os.path.normpath(os.path.join(*node.path))
+ bench_abs = os.path.join(suite_dir, bench_rel)
+ else:
+ bench_rel = "."
+ bench_abs = suite_dir
+
+ self._PushFile(self.shell_dir, node.binary)
+ if isinstance(node, Runnable):
+ self._PushFile(bench_abs, node.main, bench_rel)
+ for resource in node.resources:
+ self._PushFile(bench_abs, resource, bench_rel)
+
+ def Run(self, runnable, count):
+ cache = cache_control.CacheControl(self.device)
+ cache.DropRamCaches()
+ binary_on_device = AndroidPlatform.DEVICE_DIR + runnable.binary
+ cmd = [binary_on_device] + runnable.GetCommandFlags()
+
+ # Relative path to benchmark directory.
+ if runnable.path:
+ bench_rel = os.path.normpath(os.path.join(*runnable.path))
+ else:
+ bench_rel = "."
+
+ try:
+ output = self.device.RunShellCommand(
+ cmd,
+ cwd=os.path.join(AndroidPlatform.DEVICE_DIR, bench_rel),
+ timeout=runnable.timeout,
+ retries=0,
+ )
+ stdout = "\n".join(output)
+ print ">>> Stdout (#%d):" % (count + 1)
+ print stdout
+ except device_errors.CommandTimeoutError:
+ print ">>> Test timed out after %ss." % runnable.timeout
+ stdout = ""
+ return stdout
+
+
# TODO: Implement results_processor.
def Main(args):
+ logging.getLogger().setLevel(logging.INFO)
parser = optparse.OptionParser()
+ parser.add_option("--android-build-tools",
+ help="Path to chromium's build/android.")
parser.add_option("--arch",
help=("The architecture to run tests for, "
"'auto' or 'native' for auto-detect"),
@@ -419,6 +614,9 @@
parser.add_option("--buildbot",
help="Adapt to path structure used on buildbots",
default=False, action="store_true")
+ parser.add_option("--device",
+ help="The device ID to run Android tests on. If not given "
+ "it will be autodetected.")
parser.add_option("--json-test-results",
help="Path to a file for storing json results.")
parser.add_option("--outdir", help="Base directory with compile output",
@@ -436,13 +634,26 @@
print "Unknown architecture %s" % options.arch
return 1
+ if (bool(options.arch.startswith("android")) !=
+ bool(options.android_build_tools)): # pragma: no cover
+ print ("Android architectures imply setting --android-build-tools and the "
+ "other way around.")
+ return 1
+
+ if (options.device and not
+ options.arch.startswith("android")): # pragma: no cover
+ print "Specifying a device requires an Android architecture to be used."
+ return 1
+
workspace = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if options.buildbot:
- shell_dir = os.path.join(workspace, options.outdir, "Release")
+ options.shell_dir = os.path.join(workspace, options.outdir, "Release")
else:
- shell_dir = os.path.join(workspace, options.outdir,
- "%s.release" % options.arch)
+ options.shell_dir = os.path.join(workspace, options.outdir,
+ "%s.release" % options.arch)
+
+ platform = Platform.GetPlatform(options)
results = Results()
for path in args:
@@ -458,30 +669,32 @@
# If no name is given, default to the file name without .json.
suite.setdefault("name", os.path.splitext(os.path.basename(path))[0])
- for runnable in FlattenRunnables(BuildGraphs(suite, options.arch)):
+ # Setup things common to one test suite.
+ platform.PreExecution()
+
+ # Build the graph/trace tree structure.
+ root = BuildGraphs(suite, options.arch)
+
+ # Callback to be called on each node on traversal.
+ def NodeCB(node):
+ platform.PreTests(node, path)
+
+ # Traverse graph/trace tree and interate over all runnables.
+ for runnable in FlattenRunnables(root, NodeCB):
print ">>> Running suite: %s" % "/".join(runnable.graphs)
- runnable.ChangeCWD(path)
def Runner():
"""Output generator that reruns several times."""
for i in xrange(0, max(1, runnable.run_count)):
# TODO(machenbach): Allow timeout per arch like with run_count per
# arch.
- output = commands.Execute(runnable.GetCommand(shell_dir),
- timeout=runnable.timeout)
- print ">>> Stdout (#%d):" % (i + 1)
- print output.stdout
- if output.stderr: # pragma: no cover
- # Print stderr for debugging.
- print ">>> Stderr (#%d):" % (i + 1)
- print output.stderr
- if output.timed_out:
- print ">>> Test timed out after %ss." % runnable.timeout
- yield output.stdout
+ yield platform.Run(runnable, i)
# Let runnable iterate over all runs and handle output.
results += runnable.Run(Runner)
+ platform.PostExecution()
+
if options.json_test_results:
results.WriteToFile(options.json_test_results)
else: # pragma: no cover
diff --git a/tools/sanitizers/tsan_suppressions.txt b/tools/sanitizers/tsan_suppressions.txt
new file mode 100644
index 0000000..270340e
--- /dev/null
+++ b/tools/sanitizers/tsan_suppressions.txt
@@ -0,0 +1,6 @@
+# Suppressions for TSan v2
+# https://code.google.com/p/thread-sanitizer/wiki/Suppressions
+
+# Incorrectly detected lock cycles in test-lockers
+# https://code.google.com/p/thread-sanitizer/issues/detail?id=81
+deadlock:LockAndUnlockDifferentIsolatesThread::Run
diff --git a/tools/shell-utils.h b/tools/shell-utils.h
index ac61fb6..7b51d2f 100644
--- a/tools/shell-utils.h
+++ b/tools/shell-utils.h
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Utility functions used by parser-shell and lexer-shell.
+// Utility functions used by parser-shell.
#include <stdio.h>
diff --git a/tools/testrunner/local/execution.py b/tools/testrunner/local/execution.py
index 36ce7be..5c5fbac 100644
--- a/tools/testrunner/local/execution.py
+++ b/tools/testrunner/local/execution.py
@@ -33,6 +33,7 @@
from pool import Pool
from . import commands
from . import perfdata
+from . import statusfile
from . import utils
@@ -98,6 +99,10 @@
"--stress-opt" in self.context.mode_flags or
"--stress-opt" in self.context.extra_flags):
timeout *= 4
+ # FIXME(machenbach): Make this more OO. Don't expose default outcomes or
+ # the like.
+ if statusfile.IsSlow(test.outcomes or [statusfile.PASS]):
+ timeout *= 2
if test.dependency is not None:
dep_command = [ c.replace(test.path, test.dependency) for c in command ]
else:
diff --git a/tools/testrunner/local/progress.py b/tools/testrunner/local/progress.py
index 8caa58c..2616958 100644
--- a/tools/testrunner/local/progress.py
+++ b/tools/testrunner/local/progress.py
@@ -333,6 +333,7 @@
"stderr": test.output.stderr,
"exit_code": test.output.exit_code,
"result": test.suite.GetOutcome(test),
+ "expected": list(test.outcomes or ["PASS"]),
})
diff --git a/tools/testrunner/local/statusfile.py b/tools/testrunner/local/statusfile.py
index 7c3ca7f..a313f05 100644
--- a/tools/testrunner/local/statusfile.py
+++ b/tools/testrunner/local/statusfile.py
@@ -35,6 +35,7 @@
CRASH = "CRASH"
SLOW = "SLOW"
FLAKY = "FLAKY"
+FAST_VARIANTS = "FAST_VARIANTS"
NO_VARIANTS = "NO_VARIANTS"
# These are just for the status files and are mapped below in DEFS:
FAIL_OK = "FAIL_OK"
@@ -44,7 +45,7 @@
KEYWORDS = {}
for key in [SKIP, FAIL, PASS, OKAY, TIMEOUT, CRASH, SLOW, FLAKY, FAIL_OK,
- NO_VARIANTS, PASS_OR_FAIL, ALWAYS]:
+ FAST_VARIANTS, NO_VARIANTS, PASS_OR_FAIL, ALWAYS]:
KEYWORDS[key] = key
DEFS = {FAIL_OK: [FAIL, OKAY],
@@ -70,6 +71,10 @@
return NO_VARIANTS in outcomes
+def OnlyFastVariants(outcomes):
+ return FAST_VARIANTS in outcomes
+
+
def IsFlaky(outcomes):
return FLAKY in outcomes
diff --git a/tools/testrunner/local/testsuite.py b/tools/testrunner/local/testsuite.py
index 47bc08f..84f07fe 100644
--- a/tools/testrunner/local/testsuite.py
+++ b/tools/testrunner/local/testsuite.py
@@ -34,6 +34,17 @@
from . import utils
from ..objects import testcase
+# Use this to run several variants of the tests.
+VARIANT_FLAGS = {
+ "default": [],
+ "stress": ["--stress-opt", "--always-opt"],
+ "turbofan": ["--turbo-asm", "--turbo-filter=*", "--always-opt"],
+ "nocrankshaft": ["--nocrankshaft"]}
+
+FAST_VARIANT_FLAGS = [
+ f for v, f in VARIANT_FLAGS.iteritems() if v in ["default", "turbofan"]
+]
+
class TestSuite(object):
@staticmethod
@@ -81,6 +92,8 @@
def VariantFlags(self, testcase, default_flags):
if testcase.outcomes and statusfile.OnlyStandardVariant(testcase.outcomes):
return [[]]
+ if testcase.outcomes and statusfile.OnlyFastVariants(testcase.outcomes):
+ return filter(lambda flags: flags in FAST_VARIANT_FLAGS, default_flags)
return default_flags
def DownloadData(self):
@@ -123,6 +136,9 @@
t.outcomes = self.rules[testname]
if statusfile.DoSkip(t.outcomes):
continue # Don't add skipped tests to |filtered|.
+ for outcome in t.outcomes:
+ if outcome.startswith('Flags: '):
+ t.flags += outcome[7:].split()
flaky = statusfile.IsFlaky(t.outcomes)
slow = statusfile.IsSlow(t.outcomes)
pass_fail = statusfile.IsPassOrFail(t.outcomes)
@@ -234,7 +250,7 @@
if output.exit_code != 0:
print output.stdout
print output.stderr
- return []
+ raise Exception("Test executable failed to list the tests.")
tests = []
test_case = ''
for line in output.stdout.splitlines():
diff --git a/tools/testrunner/objects/testcase.py b/tools/testrunner/objects/testcase.py
index ca82606..6c55082 100644
--- a/tools/testrunner/objects/testcase.py
+++ b/tools/testrunner/objects/testcase.py
@@ -29,10 +29,10 @@
from . import output
class TestCase(object):
- def __init__(self, suite, path, flags=[], dependency=None):
- self.suite = suite # TestSuite object
- self.path = path # string, e.g. 'div-mod', 'test-api/foo'
- self.flags = flags # list of strings, flags specific to this test case
+ def __init__(self, suite, path, flags=None, dependency=None):
+ self.suite = suite # TestSuite object
+ self.path = path # string, e.g. 'div-mod', 'test-api/foo'
+ self.flags = flags or [] # list of strings, flags specific to this test
self.dependency = dependency # |path| for testcase that must be run first
self.outcomes = None
self.output = None
diff --git a/tools/tickprocessor.js b/tools/tickprocessor.js
index acd7a71..d544717 100644
--- a/tools/tickprocessor.js
+++ b/tools/tickprocessor.js
@@ -485,6 +485,15 @@
this.ticks_.total, null);
}
+ print('\n [C++ entry points]:');
+ print(' ticks cpp total name');
+ var c_entry_functions = this.profile_.getCEntryProfile();
+ var total_c_entry = c_entry_functions[0].ticks;
+ for (var i = 1; i < c_entry_functions.length; i++) {
+ c = c_entry_functions[i];
+ this.printLine(c.name, c.ticks, total_c_entry, totalTicks);
+ }
+
this.printHeavyProfHeader();
var heavyProfile = this.profile_.getBottomUpProfile();
var heavyView = this.viewBuilder_.buildView(heavyProfile);
diff --git a/tools/trace-maps-processor.py b/tools/trace-maps-processor.py
new file mode 100755
index 0000000..bf8c8a8
--- /dev/null
+++ b/tools/trace-maps-processor.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python
+# Copyright 2014 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+
+action = sys.argv[1]
+
+if action in ["help", "-h", "--help"] or len(sys.argv) != 3:
+ print("Usage: %s <action> <inputfile>, where action can be: \n"
+ "help Print this message\n"
+ "plain Print ASCII tree to stdout\n"
+ "dot Print dot file to stdout\n"
+ "count Count most frequent transition reasons\n" % sys.argv[0])
+ sys.exit(0)
+
+
+filename = sys.argv[2]
+maps = {}
+root_maps = []
+transitions = {}
+annotations = {}
+
+
+class Map(object):
+
+ def __init__(self, pointer, origin):
+ self.pointer = pointer
+ self.origin = origin
+
+ def __str__(self):
+ return "%s (%s)" % (self.pointer, self.origin)
+
+
+class Transition(object):
+
+ def __init__(self, from_map, to_map, reason):
+ self.from_map = from_map
+ self.to_map = to_map
+ self.reason = reason
+
+
+def RegisterNewMap(raw_map):
+ if raw_map in annotations:
+ annotations[raw_map] += 1
+ else:
+ annotations[raw_map] = 0
+ return AnnotateExistingMap(raw_map)
+
+
+def AnnotateExistingMap(raw_map):
+ return "%s_%d" % (raw_map, annotations[raw_map])
+
+
+def AddMap(pointer, origin):
+ pointer = RegisterNewMap(pointer)
+ maps[pointer] = Map(pointer, origin)
+ return pointer
+
+
+def AddTransition(from_map, to_map, reason):
+ from_map = AnnotateExistingMap(from_map)
+ to_map = AnnotateExistingMap(to_map)
+ if from_map not in transitions:
+ transitions[from_map] = {}
+ targets = transitions[from_map]
+ if to_map in targets:
+ # Some events get printed twice, that's OK. In some cases, ignore the
+ # second output...
+ old_reason = targets[to_map].reason
+ if old_reason.startswith("ReplaceDescriptors"):
+ return
+ # ...and in others use it for additional detail.
+ if reason in []:
+ targets[to_map].reason = reason
+ return
+ # Unexpected duplicate events? Warn.
+ print("// warning: already have a transition from %s to %s, reason: %s" %
+ (from_map, to_map, targets[to_map].reason))
+ return
+ targets[to_map] = Transition(from_map, to_map, reason)
+
+
+with open(filename, "r") as f:
+ last_to_map = ""
+ for line in f:
+ if not line.startswith("[TraceMaps: "): continue
+ words = line.split(" ")
+ event = words[1]
+ if event == "InitialMap":
+ assert words[2] == "map="
+ assert words[4] == "SFI="
+ new_map = AddMap(words[3], "SFI#%s" % words[5])
+ root_maps.append(new_map)
+ continue
+ if words[2] == "from=" and words[4] == "to=":
+ from_map = words[3]
+ to_map = words[5]
+ if from_map not in annotations:
+ print("// warning: unknown from_map %s" % from_map)
+ new_map = AddMap(from_map, "<unknown>")
+ root_maps.append(new_map)
+ if to_map != last_to_map:
+ AddMap(to_map, "<transition> (%s)" % event)
+ last_to_map = to_map
+ if event in ["Transition", "NoTransition"]:
+ assert words[6] == "name=", line
+ reason = "%s: %s" % (event, words[7])
+ elif event in ["Normalize", "ReplaceDescriptors", "SlowToFast"]:
+ assert words[6] == "reason=", line
+ reason = "%s: %s" % (event, words[7])
+ if words[8].strip() != "]":
+ reason = "%s_%s" % (reason, words[8])
+ else:
+ reason = event
+ AddTransition(from_map, to_map, reason)
+ continue
+
+
+def PlainPrint(m, indent, label):
+ print("%s%s (%s)" % (indent, m, label))
+ if m in transitions:
+ for t in transitions[m]:
+ PlainPrint(t, indent + " ", transitions[m][t].reason)
+
+
+def CountTransitions(m):
+ if m not in transitions: return 0
+ return len(transitions[m])
+
+
+def DotPrint(m, label):
+ print("m%s [label=\"%s\"]" % (m[2:], label))
+ if m in transitions:
+ for t in transitions[m]:
+ # GraphViz doesn't like node labels looking like numbers, so use
+ # "m..." instead of "0x...".
+ print("m%s -> m%s" % (m[2:], t[2:]))
+ reason = transitions[m][t].reason
+ reason = reason.replace("\\", "BACKSLASH")
+ reason = reason.replace("\"", "\\\"")
+ DotPrint(t, reason)
+
+
+if action == "plain":
+ root_maps = sorted(root_maps, key=CountTransitions, reverse=True)
+ for m in root_maps:
+ PlainPrint(m, "", maps[m].origin)
+
+elif action == "dot":
+ print("digraph g {")
+ for m in root_maps:
+ DotPrint(m, maps[m].origin)
+ print("}")
+
+elif action == "count":
+ reasons = {}
+ for s in transitions:
+ for t in transitions[s]:
+ reason = transitions[s][t].reason
+ if reason not in reasons:
+ reasons[reason] = 1
+ else:
+ reasons[reason] += 1
+ reasons_list = []
+ for r in reasons:
+ reasons_list.append("%8d %s" % (reasons[r], r))
+ reasons_list.sort(reverse=True)
+ for r in reasons_list[:20]:
+ print r
diff --git a/tools/try_perf.py b/tools/try_perf.py
new file mode 100755
index 0000000..fcd1ddc
--- /dev/null
+++ b/tools/try_perf.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+# Copyright 2014 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import find_depot_tools
+import sys
+
+find_depot_tools.add_depot_tools_to_path()
+
+from git_cl import Changelist
+
+BOTS = [
+ 'v8_linux32_perf_try',
+ 'v8_linux64_perf_try',
+]
+
+def main(tests):
+ cl = Changelist()
+ if not cl.GetIssue():
+ print 'Need to upload first'
+ return 1
+
+ props = cl.GetIssueProperties()
+ if props.get('closed'):
+ print 'Cannot send tryjobs for a closed CL'
+ return 1
+
+ if props.get('private'):
+ print 'Cannot use trybots with private issue'
+ return 1
+
+ if not tests:
+ print 'Please specify the benchmarks to run as arguments.'
+ return 1
+
+ masters = {'internal.client.v8': dict((b, tests) for b in BOTS)}
+ cl.RpcServer().trigger_distributed_try_jobs(
+ cl.GetIssue(), cl.GetMostRecentPatchset(), cl.GetBranch(),
+ False, None, masters)
+ return 0
+
+if __name__ == "__main__": # pragma: no cover
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/unittests/run_perf_test.py b/tools/unittests/run_perf_test.py
index 76e8d23..f9ea0c0 100644
--- a/tools/unittests/run_perf_test.py
+++ b/tools/unittests/run_perf_test.py
@@ -174,11 +174,25 @@
self.assertEquals(0, self._CallMain())
self._VerifyResults("test", "score", [
{"name": "Richards", "results": ["1.234"], "stddev": ""},
- {"name": "DeltaBlue", "results": ["10657567"], "stddev": ""},
+ {"name": "DeltaBlue", "results": ["10657567.0"], "stddev": ""},
])
self._VerifyErrors([])
self._VerifyMock(path.join("out", "x64.release", "d7"), "--flag", "run.js")
+ def testOneRunWithTestFlags(self):
+ test_input = dict(V8_JSON)
+ test_input["test_flags"] = ["2", "test_name"]
+ self._WriteTestInput(test_input)
+ self._MockCommand(["."], ["Richards: 1.234\nDeltaBlue: 10657567"])
+ self.assertEquals(0, self._CallMain())
+ self._VerifyResults("test", "score", [
+ {"name": "Richards", "results": ["1.234"], "stddev": ""},
+ {"name": "DeltaBlue", "results": ["10657567.0"], "stddev": ""},
+ ])
+ self._VerifyErrors([])
+ self._VerifyMock(path.join("out", "x64.release", "d7"), "--flag", "run.js",
+ "--", "2", "test_name")
+
def testTwoRuns_Units_SuiteName(self):
test_input = dict(V8_JSON)
test_input["run_count"] = 2
@@ -190,8 +204,8 @@
"Richards: 50\nDeltaBlue: 300\n"])
self.assertEquals(0, self._CallMain())
self._VerifyResults("v8", "ms", [
- {"name": "Richards", "results": ["50", "100"], "stddev": ""},
- {"name": "DeltaBlue", "results": ["300", "200"], "stddev": ""},
+ {"name": "Richards", "results": ["50.0", "100.0"], "stddev": ""},
+ {"name": "DeltaBlue", "results": ["300.0", "200.0"], "stddev": ""},
])
self._VerifyErrors([])
self._VerifyMock(path.join("out", "x64.release", "d7"), "--flag", "run.js")
@@ -208,8 +222,8 @@
"Richards: 50\nDeltaBlue: 300\n"])
self.assertEquals(0, self._CallMain())
self._VerifyResults("test", "score", [
- {"name": "Richards", "results": ["50", "100"], "stddev": ""},
- {"name": "DeltaBlue", "results": ["300", "200"], "stddev": ""},
+ {"name": "Richards", "results": ["50.0", "100.0"], "stddev": ""},
+ {"name": "DeltaBlue", "results": ["300.0", "200.0"], "stddev": ""},
])
self._VerifyErrors([])
self._VerifyMock(path.join("out", "x64.release", "d7"), "--flag", "run.js")
@@ -227,23 +241,21 @@
self.assertEquals([
{"units": "score",
"graphs": ["test", "Richards"],
- "results": ["50", "100"],
+ "results": ["50.0", "100.0"],
"stddev": ""},
{"units": "ms",
"graphs": ["test", "Sub", "Leaf"],
- "results": ["3", "2", "1"],
+ "results": ["3.0", "2.0", "1.0"],
"stddev": ""},
{"units": "score",
"graphs": ["test", "DeltaBlue"],
- "results": ["200"],
+ "results": ["200.0"],
"stddev": ""},
], self._LoadResults()["traces"])
self._VerifyErrors([])
self._VerifyMockMultiple(
- (path.join("out", "x64.release", "d7"), "--flag", "file1.js",
- "file2.js", "run.js"),
- (path.join("out", "x64.release", "d7"), "--flag", "file1.js",
- "file2.js", "run.js"),
+ (path.join("out", "x64.release", "d7"), "--flag", "run.js"),
+ (path.join("out", "x64.release", "d7"), "--flag", "run.js"),
(path.join("out", "x64.release", "d8"), "--flag", "run.js"),
(path.join("out", "x64.release", "d8"), "--flag", "run.js"),
(path.join("out", "x64.release", "d8"), "--flag", "run.js"),
@@ -258,7 +270,7 @@
self.assertEquals(0, self._CallMain())
self._VerifyResults("test", "score", [
{"name": "Richards", "results": ["1.234"], "stddev": "0.23"},
- {"name": "DeltaBlue", "results": ["10657567"], "stddev": "106"},
+ {"name": "DeltaBlue", "results": ["10657567.0"], "stddev": "106"},
])
self._VerifyErrors([])
self._VerifyMock(path.join("out", "x64.release", "d7"), "--flag", "run.js")
@@ -274,8 +286,8 @@
"DeltaBlue: 5\nDeltaBlue-stddev: 0.8\n"])
self.assertEquals(1, self._CallMain())
self._VerifyResults("test", "score", [
- {"name": "Richards", "results": ["2", "3"], "stddev": "0.7"},
- {"name": "DeltaBlue", "results": ["5", "6"], "stddev": "0.8"},
+ {"name": "Richards", "results": ["2.0", "3.0"], "stddev": "0.7"},
+ {"name": "DeltaBlue", "results": ["5.0", "6.0"], "stddev": "0.8"},
])
self._VerifyErrors(
["Test Richards should only run once since a stddev is provided "
@@ -292,7 +304,7 @@
self.assertEquals(0, self._CallMain("--buildbot"))
self._VerifyResults("test", "score", [
{"name": "Richards", "results": ["1.234"], "stddev": ""},
- {"name": "DeltaBlue", "results": ["10657567"], "stddev": ""},
+ {"name": "DeltaBlue", "results": ["10657567.0"], "stddev": ""},
])
self._VerifyErrors([])
self._VerifyMock(path.join("out", "Release", "d7"), "--flag", "run.js")
@@ -305,7 +317,7 @@
self.assertEquals(0, self._CallMain("--buildbot"))
self._VerifyResults("test", "score", [
{"name": "Richards", "results": ["1.234"], "stddev": ""},
- {"name": "DeltaBlue", "results": ["10657567"], "stddev": ""},
+ {"name": "DeltaBlue", "results": ["10657567.0"], "stddev": ""},
{"name": "Total", "results": ["3626.49109719"], "stddev": ""},
])
self._VerifyErrors([])
@@ -315,14 +327,15 @@
test_input = dict(V8_JSON)
test_input["total"] = True
self._WriteTestInput(test_input)
- self._MockCommand(["."], ["x\nRichaards: 1.234\nDeltaBlue: 10657567\ny\n"])
+ self._MockCommand(["."], ["x\nRichards: bla\nDeltaBlue: 10657567\ny\n"])
self.assertEquals(1, self._CallMain("--buildbot"))
self._VerifyResults("test", "score", [
{"name": "Richards", "results": [], "stddev": ""},
- {"name": "DeltaBlue", "results": ["10657567"], "stddev": ""},
+ {"name": "DeltaBlue", "results": ["10657567.0"], "stddev": ""},
])
self._VerifyErrors(
- ["Regexp \"^Richards: (.+)$\" didn't match for test Richards.",
+ ["Regexp \"^Richards: (.+)$\" "
+ "returned a non-numeric for test Richards.",
"Not all traces have the same number of results."])
self._VerifyMock(path.join("out", "Release", "d7"), "--flag", "run.js")
@@ -332,7 +345,7 @@
self.assertEquals(1, self._CallMain())
self._VerifyResults("test", "score", [
{"name": "Richards", "results": [], "stddev": ""},
- {"name": "DeltaBlue", "results": ["10657567"], "stddev": ""},
+ {"name": "DeltaBlue", "results": ["10657567.0"], "stddev": ""},
])
self._VerifyErrors(
["Regexp \"^Richards: (.+)$\" didn't match for test Richards."])
@@ -342,14 +355,30 @@
test_input = dict(V8_GENERIC_JSON)
self._WriteTestInput(test_input)
self._MockCommand(["."], [
- "Trace(Test1), Result(1.234), StdDev(0.23)\n"
- "Trace(Test2), Result(10657567), StdDev(106)\n"])
- self.assertEquals(0, self._CallMain())
- self._VerifyResults("test", "ms", [
- {"name": "Test1", "results": ["1.234"], "stddev": "0.23"},
- {"name": "Test2", "results": ["10657567"], "stddev": "106"},
- ])
- self._VerifyErrors([])
+ "RESULT Infra: Constant1= 11 count\n"
+ "RESULT Infra: Constant2= [10,5,10,15] count\n"
+ "RESULT Infra: Constant3= {12,1.2} count\n"
+ "RESULT Infra: Constant4= [10,5,error,15] count\n"])
+ self.assertEquals(1, self._CallMain())
+ self.assertEquals([
+ {"units": "count",
+ "graphs": ["test", "Infra", "Constant1"],
+ "results": ["11.0"],
+ "stddev": ""},
+ {"units": "count",
+ "graphs": ["test", "Infra", "Constant2"],
+ "results": ["10.0", "5.0", "10.0", "15.0"],
+ "stddev": ""},
+ {"units": "count",
+ "graphs": ["test", "Infra", "Constant3"],
+ "results": ["12.0"],
+ "stddev": "1.2"},
+ {"units": "count",
+ "graphs": ["test", "Infra", "Constant4"],
+ "results": [],
+ "stddev": ""},
+ ], self._LoadResults()["traces"])
+ self._VerifyErrors(["Found non-numeric in test/Infra/Constant4"])
self._VerifyMock(path.join("out", "x64.release", "cc"), "--flag", "")
def testOneRunTimingOut(self):
@@ -368,3 +397,22 @@
])
self._VerifyMock(
path.join("out", "x64.release", "d7"), "--flag", "run.js", timeout=70)
+
+ # Simple test that mocks out the android platform. Testing the platform would
+ # require lots of complicated mocks for the android tools.
+ def testAndroid(self):
+ self._WriteTestInput(V8_JSON)
+ platform = run_perf.Platform
+ platform.PreExecution = MagicMock(return_value=None)
+ platform.PostExecution = MagicMock(return_value=None)
+ platform.PreTests = MagicMock(return_value=None)
+ platform.Run = MagicMock(
+ return_value="Richards: 1.234\nDeltaBlue: 10657567\n")
+ run_perf.AndroidPlatform = MagicMock(return_value=platform)
+ self.assertEquals(
+ 0, self._CallMain("--android-build-tools", "/some/dir",
+ "--arch", "android_arm"))
+ self._VerifyResults("test", "score", [
+ {"name": "Richards", "results": ["1.234"], "stddev": ""},
+ {"name": "DeltaBlue", "results": ["10657567.0"], "stddev": ""},
+ ])
diff --git a/tools/whitespace.txt b/tools/whitespace.txt
index 305e8ed..657e68f 100644
--- a/tools/whitespace.txt
+++ b/tools/whitespace.txt
@@ -1,8 +1,8 @@
-You can modify this file to create no-op changelists..
+You can modify this file to create no-op changelists.
Try to write something funny. And please don't add trailing whitespace.
A Smi walks into a bar and says:
"I'm so deoptimized today!"
The doubles heard this and started to unbox.
-The Smi looked at them when a crazy v8-autoroll account showed up..............
+The Smi looked at them when a crazy v8-autoroll account showed up...........