Upgrade V8 to 7.0.276.40

Bug: 117554758
Bug: 117555811
Bug: 117556606
Bug: 117556220
Bug: 117607414
Bug: 117606285
Test: atest com.google.android.gts.devicepolicy.DeviceOwnerTest#testProxyPacProxyTest
Test: atest proxy_resolver_v8_unittest

Change-Id: I2e02d994f107e64e4f465b4d8a02d4159a95240e
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
index 4cacf81..ce456dd 100644
--- a/PRESUBMIT.py
+++ b/PRESUBMIT.py
@@ -31,6 +31,8 @@
 for more details about the presubmit API built into gcl.
 """
 
+import json
+import re
 import sys
 
 
@@ -42,6 +44,12 @@
 )
 
 
+# Regular expressions matching file paths that should not be run through cpplint.
+_NO_LINT_PATHS = (
+    r'src[\\\/]base[\\\/]export-template\.h',
+)
+
+
 # Regular expression that matches code only used for test binaries
 # (best effort).
 _TEST_CODE_EXCLUDED_PATHS = (
@@ -65,13 +73,19 @@
   import sys
   sys.path.append(input_api.os_path.join(
         input_api.PresubmitLocalPath(), 'tools'))
-  from presubmit import CppLintProcessor
-  from presubmit import SourceProcessor
-  from presubmit import StatusFilesProcessor
+  from v8_presubmit import CppLintProcessor
+  from v8_presubmit import SourceProcessor
+  from v8_presubmit import StatusFilesProcessor
+
+  def FilterFile(affected_file):
+    return input_api.FilterSourceFile(
+      affected_file,
+      white_list=None,
+      black_list=_NO_LINT_PATHS)
 
   results = []
   if not CppLintProcessor().RunOnFiles(
-      input_api.AffectedFiles(include_deletes=False)):
+      input_api.AffectedFiles(file_filter=FilterFile, include_deletes=False)):
     results.append(output_api.PresubmitError("C++ lint check failed"))
   if not SourceProcessor().RunOnFiles(
       input_api.AffectedFiles(include_deletes=False)):
@@ -82,7 +96,9 @@
       input_api.AffectedFiles(include_deletes=True)):
     results.append(output_api.PresubmitError("Status file check failed"))
   results.extend(input_api.canned_checks.CheckAuthorizedAuthor(
-      input_api, output_api))
+      input_api, output_api, bot_whitelist=[
+        'v8-ci-autoroll-builder@chops-service-accounts.iam.gserviceaccount.com'
+      ]))
   return results
 
 
@@ -139,16 +155,73 @@
   return results
 
 
+def _CheckHeadersHaveIncludeGuards(input_api, output_api):
+  """Ensures that all header files have include guards."""
+  file_inclusion_pattern = r'src/.+\.h'
+
+  def FilterFile(affected_file):
+    black_list = (_EXCLUDED_PATHS +
+                  input_api.DEFAULT_BLACK_LIST)
+    return input_api.FilterSourceFile(
+      affected_file,
+      white_list=(file_inclusion_pattern, ),
+      black_list=black_list)
+
+  leading_src_pattern = input_api.re.compile(r'^src/')
+  dash_dot_slash_pattern = input_api.re.compile(r'[-./]')
+  def PathToGuardMacro(path):
+    """Guards should be of the form V8_PATH_TO_FILE_WITHOUT_SRC_H_."""
+    x = input_api.re.sub(leading_src_pattern, 'v8_', path)
+    x = input_api.re.sub(dash_dot_slash_pattern, '_', x)
+    x = x.upper() + "_"
+    return x
+
+  problems = []
+  for f in input_api.AffectedSourceFiles(FilterFile):
+    local_path = f.LocalPath()
+    guard_macro = PathToGuardMacro(local_path)
+    guard_patterns = [
+            input_api.re.compile(r'^#ifndef ' + guard_macro + '$'),
+            input_api.re.compile(r'^#define ' + guard_macro + '$'),
+            input_api.re.compile(r'^#endif  // ' + guard_macro + '$')]
+    skip_check_pattern = input_api.re.compile(
+            r'^// PRESUBMIT_INTENTIONALLY_MISSING_INCLUDE_GUARD')
+    found_patterns = [ False, False, False ]
+    file_omitted = False
+
+    for line in f.NewContents():
+      for i in range(len(guard_patterns)):
+        if guard_patterns[i].match(line):
+            found_patterns[i] = True
+      if skip_check_pattern.match(line):
+        file_omitted = True
+        break
+
+    if not file_omitted and not all(found_patterns):
+      problems.append(
+        '%s: Missing include guard \'%s\'' % (local_path, guard_macro))
+
+  if problems:
+    return [output_api.PresubmitError(
+        'You added one or more header files without an appropriate\n'
+        'include guard. Add the include guard {#ifndef,#define,#endif}\n'
+        'triplet or omit the check entirely through the magic comment:\n'
+        '"// PRESUBMIT_INTENTIONALLY_MISSING_INCLUDE_GUARD".', problems)]
+  else:
+    return []
+
+
+# TODO(mstarzinger): Similar checking should be made available as part of
+# tools/presubmit.py (note that tools/check-inline-includes.sh exists).
 def _CheckNoInlineHeaderIncludesInNormalHeaders(input_api, output_api):
   """Attempts to prevent inclusion of inline headers into normal header
   files. This tries to establish a layering where inline headers can be
   included by other inline headers or compilation units only."""
   file_inclusion_pattern = r'(?!.+-inl\.h).+\.h'
   include_directive_pattern = input_api.re.compile(r'#include ".+-inl.h"')
-  include_warning = (
-    'You might be including an inline header (e.g. foo-inl.h) within a\n'
-    'normal header (e.g. bar.h) file.  Can you avoid introducing the\n'
-    '#include?  The commit queue will not block on this warning.')
+  include_error = (
+    'You are including an inline header (e.g. foo-inl.h) within a normal\n'
+    'header (e.g. bar.h) file.  This violates layering of dependencies.')
 
   def FilterFile(affected_file):
     black_list = (_EXCLUDED_PATHS +
@@ -167,7 +240,7 @@
           '%s:%d\n    %s' % (local_path, line_number, line.strip()))
 
   if problems:
-    return [output_api.PresubmitPromptOrNotify(include_warning, problems)]
+    return [output_api.PresubmitError(include_error, problems)]
   else:
     return []
 
@@ -215,43 +288,17 @@
     return []
 
 
-def _CheckMissingFiles(input_api, output_api):
-  """Runs verify_source_deps.py to ensure no files were added that are not in
-  GN.
-  """
-  # We need to wait until we have an input_api object and use this
-  # roundabout construct to import checkdeps because this file is
-  # eval-ed and thus doesn't have __file__.
-  original_sys_path = sys.path
-  try:
-    sys.path = sys.path + [input_api.os_path.join(
-        input_api.PresubmitLocalPath(), 'tools')]
-    from verify_source_deps import missing_gn_files, missing_gyp_files
-  finally:
-    # Restore sys.path to what it was before.
-    sys.path = original_sys_path
-
-  gn_files = missing_gn_files()
-  gyp_files = missing_gyp_files()
-  results = []
-  if gn_files:
-    results.append(output_api.PresubmitError(
-        "You added one or more source files but didn't update the\n"
-        "corresponding BUILD.gn files:\n",
-        gn_files))
-  if gyp_files:
-    results.append(output_api.PresubmitError(
-        "You added one or more source files but didn't update the\n"
-        "corresponding gyp files:\n",
-        gyp_files))
-  return results
-
-
 def _CommonChecks(input_api, output_api):
   """Checks common to both upload and commit."""
   results = []
+  # TODO(machenbach): Replace some of those checks, e.g. owners and copyright,
+  # with the canned PanProjectChecks. Need to make sure that the checks all
+  # pass on all existing files.
+  results.extend(input_api.canned_checks.CheckOwnersFormat(
+      input_api, output_api))
   results.extend(input_api.canned_checks.CheckOwners(
-      input_api, output_api, source_file_filter=None))
+      input_api, output_api))
+  results.extend(_CheckCommitMessageBugEntry(input_api, output_api))
   results.extend(input_api.canned_checks.CheckPatchFormatted(
       input_api, output_api))
   results.extend(input_api.canned_checks.CheckGenderNeutral(
@@ -260,9 +307,13 @@
   results.extend(_CheckUnwantedDependencies(input_api, output_api))
   results.extend(
       _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
+  results.extend(_CheckHeadersHaveIncludeGuards(input_api, output_api))
   results.extend(
       _CheckNoInlineHeaderIncludesInNormalHeaders(input_api, output_api))
-  results.extend(_CheckMissingFiles(input_api, output_api))
+  results.extend(_CheckJSONFiles(input_api, output_api))
+  results.extend(_CheckMacroUndefs(input_api, output_api))
+  results.extend(input_api.RunTests(
+    input_api.canned_checks.CheckVPythonSpec(input_api, output_api)))
   return results
 
 
@@ -276,6 +327,111 @@
   return input_api.environ.get('PRESUBMIT_TREE_CHECK') == 'skip'
 
 
+def _CheckCommitMessageBugEntry(input_api, output_api):
+  """Check that bug entries are well-formed in commit message."""
+  bogus_bug_msg = (
+      'Bogus BUG entry: %s. Please specify the issue tracker prefix and the '
+      'issue number, separated by a colon, e.g. v8:123 or chromium:12345.')
+  results = []
+  for bug in (input_api.change.BUG or '').split(','):
+    bug = bug.strip()
+    if 'none'.startswith(bug.lower()):
+      continue
+    if ':' not in bug:
+      try:
+        if int(bug) > 100000:
+          # Rough indicator for current chromium bugs.
+          prefix_guess = 'chromium'
+        else:
+          prefix_guess = 'v8'
+        results.append('BUG entry requires issue tracker prefix, e.g. %s:%s' %
+                       (prefix_guess, bug))
+      except ValueError:
+        results.append(bogus_bug_msg % bug)
+    elif not re.match(r'\w+:\d+', bug):
+      results.append(bogus_bug_msg % bug)
+  return [output_api.PresubmitError(r) for r in results]
+
+
+def _CheckJSONFiles(input_api, output_api):
+  def FilterFile(affected_file):
+    return input_api.FilterSourceFile(
+        affected_file,
+        white_list=(r'.+\.json',))
+
+  results = []
+  for f in input_api.AffectedFiles(
+      file_filter=FilterFile, include_deletes=False):
+    with open(f.LocalPath()) as j:
+      try:
+        json.load(j)
+      except Exception as e:
+        results.append(
+            'JSON validation failed for %s. Error:\n%s' % (f.LocalPath(), e))
+
+  return [output_api.PresubmitError(r) for r in results]
+
+
+def _CheckMacroUndefs(input_api, output_api):
+  """
+  Checks that each #define in a .cc file is eventually followed by an #undef.
+
+  TODO(clemensh): This check should eventually be enabled for all cc files via
+  tools/presubmit.py (https://crbug.com/v8/6811).
+  """
+  def FilterFile(affected_file):
+    # Skip header files, as they often define type lists which are used in
+    # other files.
+    white_list = (r'.+\.cc',r'.+\.cpp',r'.+\.c')
+    return input_api.FilterSourceFile(affected_file, white_list=white_list)
+
+  def TouchesMacros(f):
+    for line in f.GenerateScmDiff().splitlines():
+      if not line.startswith('+') and not line.startswith('-'):
+        continue
+      if define_pattern.match(line[1:]) or undef_pattern.match(line[1:]):
+        return True
+    return False
+
+  define_pattern = input_api.re.compile(r'#define (\w+)')
+  undef_pattern = input_api.re.compile(r'#undef (\w+)')
+  errors = []
+  for f in input_api.AffectedFiles(
+      file_filter=FilterFile, include_deletes=False):
+    if not TouchesMacros(f):
+      continue
+
+    defined_macros = dict()
+    with open(f.LocalPath()) as fh:
+      line_nr = 0
+      for line in fh:
+        line_nr += 1
+
+        define_match = define_pattern.match(line)
+        if define_match:
+          name = define_match.group(1)
+          defined_macros[name] = line_nr
+
+        undef_match = undef_pattern.match(line)
+        if undef_match:
+          name = undef_match.group(1)
+          if not name in defined_macros:
+            errors.append('{}:{}: Macro named \'{}\' was not defined before.'
+                          .format(f.LocalPath(), line_nr, name))
+          else:
+            del defined_macros[name]
+    for name, line_nr in sorted(defined_macros.items(), key=lambda e: e[1]):
+      errors.append('{}:{}: Macro missing #undef: {}'
+                    .format(f.LocalPath(), line_nr, name))
+
+  if errors:
+    return [output_api.PresubmitPromptOrNotify(
+        'Detected mismatches in #define / #undef in the file(s) where you '
+        'modified preprocessor macros.',
+        errors)]
+  return []
+
+
 def CheckChangeOnUpload(input_api, output_api):
   results = []
   results.extend(_CommonChecks(input_api, output_api))
@@ -292,3 +448,19 @@
         input_api, output_api,
         json_url='http://v8-status.appspot.com/current?format=json'))
   return results
+
+def PostUploadHook(cl, change, output_api):
+  """git cl upload will call this hook after the issue is created/modified.
+
+  This hook adds a noi18n bot if the patch affects Intl.
+  """
+  def affects_intl(f):
+    return 'intl' in f.LocalPath() or 'test262' in f.LocalPath()
+  if not change.AffectedFiles(file_filter=affects_intl):
+    return []
+  return output_api.EnsureCQIncludeTrybotsAreAdded(
+      cl,
+      [
+        'luci.v8.try:v8_linux_noi18n_rel_ng'
+      ],
+      'Automatically added noi18n trybots to run tests on CQ.')