You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
depot_tools/presubmit_support.py

1630 lines
58 KiB
Python

#!/usr/bin/env python3
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Enables directory-specific presubmit checks to run at upload and/or commit.
"""
from __future__ import print_function
__version__ = '2.0.0'
# TODO(joi) Add caching where appropriate/needed. The API is designed to allow
# caching (between all different invocations of presubmit scripts for a given
# change). We should add it as our presubmit scripts start feeling slow.
import argparse
import ast # Exposed through the API.
import contextlib
import cpplint
import fnmatch # Exposed through the API.
import glob
import inspect
import itertools
import json # Exposed through the API.
import logging
import multiprocessing
import os # Somewhat exposed through the API.
import random
import re # Exposed through the API.
import signal
import six
import sys # Parts exposed through API.
import tempfile # Exposed through the API.
import threading
import time
presubmit: Make ThreadPool surface exceptions on CallCommand. Exceptions other than OSError are not surfaced. This caused errors like this to be printed, but not block presubmit, allowing bugs to sneak in. Exception in thread Thread-8: Traceback (most recent call last): File "C:\b\s\w\ir\cipd_bin_packages\cpython\bin\Lib\threading.py", line 801, in __bootstrap_inner self.run() File "C:\b\s\w\ir\cipd_bin_packages\cpython\bin\Lib\threading.py", line 754, in run self.__target(*self.__args, **self.__kwargs) File "C:\b\s\w\ir\kitchen-checkout\depot_tools\presubmit_support.py", line 199, in _WorkerFn result = self.CallCommand(test) File "C:\b\s\w\ir\kitchen-checkout\depot_tools\presubmit_support.py", line 170, in CallCommand p = subprocess.Popen(cmd, **test.kwargs) File "C:\b\s\w\ir\kitchen-checkout\depot_tools\subprocess2.py", line 143, in __init__ super(Popen, self).__init__(args, **kwargs) File "C:\b\s\w\ir\cipd_bin_packages\cpython\bin\Lib\subprocess.py", line 390, in __init__ errread, errwrite) File "C:\b\s\w\ir\cipd_bin_packages\cpython\bin\Lib\subprocess.py", line 640, in _execute_child startupinfo) TypeError: environment can only contain strings https://logs.chromium.org/logs/infra/buildbucket/cr-buildbucket.appspot.com/8898840708364523888/+/steps/presubmit/0/stdout Change-Id: I34e65d8c0050eed7ed26fd782e0a5dc8616f30f7 Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/1877051 Commit-Queue: Edward Lesmes <ehmaldonado@chromium.org> Reviewed-by: Anthony Polito <apolito@google.com> Reviewed-by: Dirk Pranke <dpranke@chromium.org>
6 years ago
import traceback
import unittest # Exposed through the API.
from warnings import warn
# Local imports.
import fix_encoding
import gclient_paths # Exposed through the API
import gclient_utils
import gerrit_util
import owners_client
import owners_finder
import presubmit_canned_checks
import rdb_wrapper
from lib import scm
import subprocess2 as subprocess # Exposed through the API.
from lib import utils
from lib import change as libchange
if sys.version_info.major == 2:
# TODO(1009814): Expose urllib2 only through urllib_request and urllib_error
import urllib2 # Exposed through the API.
import urlparse
import urllib2 as urllib_request
import urllib2 as urllib_error
else:
import urllib.parse as urlparse
import urllib.request as urllib_request
import urllib.error as urllib_error
# Ask for feedback only once in program lifetime.
_ASKED_FOR_FEEDBACK = False
Show PresubmitResult call stacks in verbose mode When a presubmit message, warning, or error strikes it is sometimes due to a bug or weakness in the presubmit. Examining the presubmit or fixing it can be important. However it can be hard to find the relevant code (hint: many presubmits are in depot_tools/presubmit_canned_checks.py). With this change you can just run the presubmits with -v -v (double verbose) and a call stack will be recorded when each presubmit result object is created. For instance: >git cl presubmit --force --files ash/public/cpp/app_list/vector_icons/google_black.icon -v -v ** Presubmit Messages: 1 ** Trademarked images should not be added to the public repo. See crbug.com/944754 ash/public/cpp/app_list/vector_icons/google_black.icon *************** Presubmit result call stack is: File "depot_tools/presubmit_support.py", line 2098, in <module> sys.exit(main()) File "depot_tools/presubmit_support.py", line 2074, in main return DoPresubmitChecks( File "depot_tools/presubmit_support.py", line 1771, in DoPresubmitChecks results += executer.ExecPresubmitScript(presubmit_script, filename) File "depot_tools/presubmit_support.py", line 1612, in ExecPresubmitScript self._run_check_function(function_name, context, sink, File "depot_tools/presubmit_support.py", line 1653, in _run_check_function result = eval(function_name + '(*__args)', context) File "<string>", line 1, in <module> File "chromium/src/PRESUBMIT.py", line 2225, in CheckNoProductIconsAddedToPublicRepo message_type( File "depot_tools/presubmit_support.py", line 352, in __init__ self._long_text += ' '.join(traceback.format_stack(None, 8)) This changes tracking down presubmits from a dark art to a trivial operation. Bug: 1309977 Change-Id: Ia0a6adfbbab04041f97c56cd2064a1627e252561 Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/3896076 Reviewed-by: Dmitrii Kuragin <kuragin@chromium.org> Commit-Queue: Dmitrii Kuragin <kuragin@chromium.org>
3 years ago
# Set if super-verbose mode is requested, for tracking where presubmit messages
# are coming from.
_SHOW_CALLSTACKS = False
_PRESUBMIT_FILE_REGEX = r'PRESUBMIT.*\.py$'
_PRESUBMIT_FILE_EXCLUDE = r'PRESUBMIT_test'
Show PresubmitResult call stacks in verbose mode When a presubmit message, warning, or error strikes it is sometimes due to a bug or weakness in the presubmit. Examining the presubmit or fixing it can be important. However it can be hard to find the relevant code (hint: many presubmits are in depot_tools/presubmit_canned_checks.py). With this change you can just run the presubmits with -v -v (double verbose) and a call stack will be recorded when each presubmit result object is created. For instance: >git cl presubmit --force --files ash/public/cpp/app_list/vector_icons/google_black.icon -v -v ** Presubmit Messages: 1 ** Trademarked images should not be added to the public repo. See crbug.com/944754 ash/public/cpp/app_list/vector_icons/google_black.icon *************** Presubmit result call stack is: File "depot_tools/presubmit_support.py", line 2098, in <module> sys.exit(main()) File "depot_tools/presubmit_support.py", line 2074, in main return DoPresubmitChecks( File "depot_tools/presubmit_support.py", line 1771, in DoPresubmitChecks results += executer.ExecPresubmitScript(presubmit_script, filename) File "depot_tools/presubmit_support.py", line 1612, in ExecPresubmitScript self._run_check_function(function_name, context, sink, File "depot_tools/presubmit_support.py", line 1653, in _run_check_function result = eval(function_name + '(*__args)', context) File "<string>", line 1, in <module> File "chromium/src/PRESUBMIT.py", line 2225, in CheckNoProductIconsAddedToPublicRepo message_type( File "depot_tools/presubmit_support.py", line 352, in __init__ self._long_text += ' '.join(traceback.format_stack(None, 8)) This changes tracking down presubmits from a dark art to a trivial operation. Bug: 1309977 Change-Id: Ia0a6adfbbab04041f97c56cd2064a1627e252561 Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/3896076 Reviewed-by: Dmitrii Kuragin <kuragin@chromium.org> Commit-Queue: Dmitrii Kuragin <kuragin@chromium.org>
3 years ago
def time_time():
  """Returns the current time in seconds since the epoch.

  Thin wrapper around time.time() so tests can mock presubmit timing
  without patching the interpreter-wide time module.
  """
  now = time.time()
  return now
class PresubmitFailure(Exception):
  """Error raised by the presubmit machinery."""
class CommandData(object):
  """Describes one external command for the presubmit test runner.

  Captures the command line plus subprocess keyword arguments, forcing the
  child's stdout/stderr to be captured and its stdin to be a pipe. The
  caller-supplied stdin payload is stashed on `self.stdin` so the runner
  can feed it to the process later.
  """

  def __init__(self, name, cmd, kwargs, message, python3=False):
    self.name = name
    self.cmd = cmd
    # Remember the caller's stdin payload; the actual subprocess stdin
    # handle is always a pipe (forced below).
    self.stdin = kwargs.get('stdin', None)
    # Copy so the caller's dict is never mutated.
    self.kwargs = dict(kwargs)
    self.kwargs.update({
        'stdout': subprocess.PIPE,
        'stderr': subprocess.STDOUT,
        'stdin': subprocess.PIPE,
    })
    self.message = message
    self.info = None
    self.python3 = python3
# Adapted from
# https://github.com/google/gtest-parallel/blob/master/gtest_parallel.py#L37
#
# An object that catches SIGINT sent to the Python process and notices
# if processes passed to wait() die by SIGINT (we need to look for
# both of those cases, because pressing Ctrl+C can result in either
# the main process or one of the subprocesses getting the signal).
#
# Before a SIGINT is seen, wait(p) will simply call p.wait() and
# return the result. Once a SIGINT has been seen (in the main process
# or a subprocess, including the one the current call is waiting for),
# wait(p) will call p.terminate().
class SigintHandler(object):
  """Translates SIGINT into cooperative termination of child processes.

  Once a SIGINT is observed -- either delivered to this process or reported
  as the exit status of a waited-on child -- every tracked subprocess is
  terminated, and subsequent wait() calls terminate their process up front.
  """
  # Return codes indicating a child process died from SIGINT.
  sigint_returncodes = {-signal.SIGINT,  # Unix
                        -1073741510,  # Windows
                        }
  def __init__(self):
    self.__lock = threading.Lock()
    # Live Popen objects currently being waited on.
    self.__processes = set()
    self.__got_sigint = False
    # Previously-installed SIGINT handler, chained to in interrupt().
    self.__previous_signal = signal.signal(signal.SIGINT, self.interrupt)
  def __on_sigint(self):
    # Must be called with self.__lock held.
    self.__got_sigint = True
    while self.__processes:
      try:
        self.__processes.pop().terminate()
      except OSError:
        # The process may already have exited; nothing left to terminate.
        pass
  def interrupt(self, signal_num, frame):
    """SIGINT handler: terminates children, then runs the previous handler."""
    with self.__lock:
      self.__on_sigint()
    self.__previous_signal(signal_num, frame)
  def got_sigint(self):
    """Returns True once a SIGINT has been observed."""
    with self.__lock:
      return self.__got_sigint
  def wait(self, p, stdin):
    """Waits for process p while feeding it stdin; returns (stdout, stderr).

    If a SIGINT has already been seen, p is terminated immediately. If p's
    exit code indicates death-by-SIGINT, all other tracked processes are
    terminated too.
    """
    with self.__lock:
      if self.__got_sigint:
        p.terminate()
      self.__processes.add(p)
    stdout, stderr = p.communicate(stdin)
    code = p.returncode
    with self.__lock:
      self.__processes.discard(p)
      if code in self.sigint_returncodes:
        self.__on_sigint()
    return stdout, stderr
sigint_handler = SigintHandler()
class Timer(object):
  """Context manager that fires a callback once a timeout elapses.

  A falsy timeout disables the timer entirely. `completed` reads True only
  after the callback has actually run.
  """

  def __init__(self, timeout, fn):
    self.completed = False
    self._fn = fn
    if timeout:
      self._timer = threading.Timer(timeout, self._onTimer)
    else:
      self._timer = None

  def __enter__(self):
    if self._timer is not None:
      self._timer.start()
    return self

  def __exit__(self, _type, _value, _traceback):
    if self._timer is not None:
      self._timer.cancel()

  def _onTimer(self):
    # Invoke the callback first so `completed` is only set once the
    # callback has finished executing.
    self._fn()
    self.completed = True
class ThreadPool(object):
  """Runs CommandData entries, optionally across daemon worker threads."""

  def __init__(self, pool_size=None, timeout=None):
    self.timeout = timeout
    self._pool_size = pool_size or multiprocessing.cpu_count()
    if sys.platform == 'win32':
      # TODO(crbug.com/1190269) - we can't use more than 56 child processes on
      # Windows or Python3 may hang.
      self._pool_size = min(self._pool_size, 56)
    self._messages = []
    self._messages_lock = threading.Lock()
    self._tests = []
    self._tests_lock = threading.Lock()
    self._nonparallel_tests = []

  def _GetCommand(self, test):
    """Returns test.cmd rewritten to invoke the appropriate vpython."""
    vpython = 'vpython3' if test.python3 else 'vpython'
    if sys.platform == 'win32':
      vpython += '.bat'

    cmd = test.cmd
    if cmd[0] == 'python':
      cmd = list(cmd)
      cmd[0] = vpython
    elif cmd[0].endswith('.py'):
      cmd = [vpython] + cmd

    # On Windows, scripts on the current directory take precedence over PATH, so
    # that when testing depot_tools on Windows, calling `vpython.bat` will
    # execute the copy of vpython of the depot_tools under test instead of the
    # one in the bot.
    # As a workaround, we run the tests from the parent directory instead.
    if (cmd[0] == vpython and
        'cwd' in test.kwargs and
        os.path.basename(test.kwargs['cwd']) == 'depot_tools'):
      test.kwargs['cwd'] = os.path.dirname(test.kwargs['cwd'])
      cmd[1] = os.path.join('depot_tools', cmd[1])

    return cmd

  def _RunWithTimeout(self, cmd, stdin, kwargs):
    """Runs cmd, enforcing self.timeout; returns (returncode, stdout text)."""
    proc = subprocess.Popen(cmd, **kwargs)
    with Timer(self.timeout, proc.terminate) as timer:
      stdout, _ = sigint_handler.wait(proc, stdin)
      stdout = stdout.decode('utf-8', 'ignore')
      if timer.completed:
        stdout = 'Process timed out after %ss\n%s' % (self.timeout, stdout)
    return proc.returncode, stdout

  def CallCommand(self, test):
    """Runs an external program.

    This function converts invocation of .py files and invocations of 'python'
    to vpython invocations.

    Returns a result message on failure (or an info message when test.info is
    set); returns None on silent success.
    """
    cmd = self._GetCommand(test)
    try:
      start = time_time()
      returncode, stdout = self._RunWithTimeout(cmd, test.stdin, test.kwargs)
      duration = time_time() - start
    except Exception:
      # Surface any failure (not just OSError) as a presubmit message so bugs
      # in test invocations cannot sneak past presubmit.
      duration = time_time() - start
      return test.message(
          '%s\n%s exec failure (%4.2fs)\n%s' % (
              test.name, ' '.join(cmd), duration, traceback.format_exc()))

    if returncode != 0:
      return test.message(
          '%s\n%s (%4.2fs) failed\n%s' % (
              test.name, ' '.join(cmd), duration, stdout))

    if test.info:
      return test.info('%s\n%s (%4.2fs)' % (test.name, ' '.join(cmd), duration))

  def AddTests(self, tests, parallel=True):
    """Queues tests for RunAsync; nonparallel tests run first, serially."""
    bucket = self._tests if parallel else self._nonparallel_tests
    bucket.extend(tests)

  def RunAsync(self):
    """Runs all queued tests and returns the accumulated result messages."""
    self._messages = []

    def _WorkerFn():
      # Each worker pops tests until the shared queue is drained.
      while True:
        pending = None
        with self._tests_lock:
          if not self._tests:
            break
          pending = self._tests.pop()
        result = self.CallCommand(pending)
        if result:
          with self._messages_lock:
            self._messages.append(result)

    def _StartDaemon():
      worker = threading.Thread(target=_WorkerFn)
      worker.daemon = True
      worker.start()
      return worker

    # Serial phase: tests that must not run concurrently.
    while self._nonparallel_tests:
      result = self.CallCommand(self._nonparallel_tests.pop())
      if result:
        self._messages.append(result)

    # Parallel phase.
    if self._tests:
      threads = [_StartDaemon() for _ in range(self._pool_size)]
      for thread in threads:
        thread.join()

    return self._messages
def prompt_should_continue(prompt_string):
  """Prints prompt_string and reads one line of input.

  Returns True iff the (case-insensitive, stripped) reply is 'y' or 'yes'.
  """
  sys.stdout.write(prompt_string)
  sys.stdout.flush()
  return sys.stdin.readline().strip().lower() in ('y', 'yes')
def _ShouldRunPresubmit(script_text, use_python3):
"""Try to figure out whether these presubmit checks should be run under
python2 or python3. We need to do this without actually trying to
compile the text, since the text might compile in one but not the
other.
Args:
script_text: The text of the presubmit script.
use_python3: if true, will use python3 instead of python2 by default
if USE_PYTHON3 is not specified.
Return:
A boolean if presubmit should be executed
"""
if os.getenv('LUCI_OMIT_PYTHON2') == 'true':
# If LUCI omits python2, run all presubmits with python3, regardless of
# USE_PYTHON3 variable.
return True
m = re.search('^USE_PYTHON3 = (True|False)$', script_text, flags=re.MULTILINE)
if m:
use_python3 = m.group(1) == 'True'
return ((sys.version_info.major == 2) and not use_python3) or \
((sys.version_info.major == 3) and use_python3)
# Top level object so multiprocessing can pickle
# Public access through OutputApi object.
class _PresubmitResult(object):
  """Base class for result objects."""
  fatal = False
  should_prompt = False

  def __init__(self, message, items=None, long_text=''):
    """
    message: A short one-line message to indicate errors.
    items: A list of short strings to indicate where errors occurred.
    long_text: multi-line text output, e.g. from another tool
    """
    self._message = _PresubmitResult._ensure_str(message)
    self._items = items or []
    self._long_text = _PresubmitResult._ensure_str(long_text.rstrip())
    if _SHOW_CALLSTACKS:
      # Double-verbose mode: record where this result was created, to make
      # it easy to locate the presubmit check that produced it.
      self._long_text += 'Presubmit result call stack is:\n'
      self._long_text += ''.join(traceback.format_stack(None, 8))

  @staticmethod
  def _ensure_str(val):
    """
    val: A "stringish" value. Can be any of str, unicode or bytes.
    returns: A str after applying encoding/decoding as needed.
    Assumes/uses UTF-8 for relevant inputs/outputs.

    We'd prefer to use six.ensure_str but our copy of six is old :(
    """
    if isinstance(val, str):
      return val
    if six.PY3 and isinstance(val, bytes):
      return val.decode()
    if six.PY2 and isinstance(val, unicode):
      return val.encode()
    raise ValueError("Unknown string type %s" % type(val))

  def handle(self):
    """Writes this result (message, items, long text) to stdout."""
    sys.stdout.write(self._message)
    sys.stdout.write('\n')
    for item in self._items:
      sys.stdout.write(' ')
      # Write separately in case it's unicode.
      sys.stdout.write(str(item))
      sys.stdout.write('\n')
    if not self._long_text:
      return
    sys.stdout.write('\n***************\n')
    # Write separately in case it's unicode.
    sys.stdout.write(self._long_text)
    sys.stdout.write('\n***************\n')

  def json_format(self):
    """Returns a JSON-serializable dict view of this result."""
    return {
        'message': self._message,
        'items': [str(entry) for entry in self._items],
        'long_text': self._long_text,
        'fatal': self.fatal,
    }
# Top level object so multiprocessing can pickle
# Public access through OutputApi object.
class _PresubmitError(_PresubmitResult):
  """A hard presubmit error."""
  # Fatal results are treated as blocking by the presubmit machinery.
  fatal = True
# Top level object so multiprocessing can pickle
# Public access through OutputApi object.
class _PresubmitPromptWarning(_PresubmitResult):
  """A warning that prompts the user if they want to continue."""
  should_prompt = True
# Top level object so multiprocessing can pickle
# Public access through OutputApi object.
class _PresubmitNotifyResult(_PresubmitResult):
  """Just print something to the screen -- but it's not even a warning."""
# Top level object so multiprocessing can pickle
# Public access through OutputApi object.
class _MailTextResult(_PresubmitResult):
  """A warning that should be included in the review request email."""
  def __init__(self, *args, **kwargs):
    # Intentionally unusable: constructing one always raises.
    super(_MailTextResult, self).__init__()
    raise NotImplementedError()
class GerritAccessor(object):
  """Limited Gerrit functionality for canned presubmit checks to work.

  To avoid excessive Gerrit calls, caches the results.
  """
  def __init__(self, url=None, project=None, branch=None):
    # Gerrit host (netloc) extracted from the given URL, or None.
    self.host = urlparse.urlparse(url).netloc if url else None
    self.project = project
    self.branch = branch
    # Maps issue number (int) -> change detail dict fetched from Gerrit.
    self.cache = {}
    # Lazily resolved by IsCodeOwnersEnabledOnRepo; None means "unknown".
    self.code_owners_enabled = None
  def _FetchChangeDetail(self, issue):
    # Separate function to be easily mocked in tests.
    try:
      return gerrit_util.GetChangeDetail(
          self.host, str(issue),
          ['ALL_REVISIONS', 'DETAILED_LABELS', 'ALL_COMMITS'])
    except gerrit_util.GerritError as e:
      if e.http_status == 404:
        raise Exception('Either Gerrit issue %s doesn\'t exist, or '
                        'no credentials to fetch issue details' % issue)
      raise
  def GetChangeInfo(self, issue):
    """Returns labels and all revisions (patchsets) for this issue.

    The result is a dictionary according to Gerrit REST Api.
    https://gerrit-review.googlesource.com/Documentation/rest-api.html

    However, API isn't very clear what's inside, so see tests for example.
    """
    assert issue
    cache_key = int(issue)
    if cache_key not in self.cache:
      self.cache[cache_key] = self._FetchChangeDetail(issue)
    return self.cache[cache_key]
  def GetChangeDescription(self, issue, patchset=None):
    """If patchset is none, fetches current patchset."""
    info = self.GetChangeInfo(issue)
    # info is a reference to cache. We'll modify it here adding description to
    # it to the right patchset, if it is not yet there.

    # Find revision info for the patchset we want.
    if patchset is not None:
      for rev, rev_info in info['revisions'].items():
        if str(rev_info['_number']) == str(patchset):
          break
      else:
        # for/else: no revision matched the requested patchset number.
        raise Exception('patchset %s doesn\'t exist in issue %s' % (
            patchset, issue))
    else:
      rev = info['current_revision']
      rev_info = info['revisions'][rev]
    return rev_info['commit']['message']
  def GetDestRef(self, issue):
    """Returns the fully-qualified destination ref of the change's branch."""
    ref = self.GetChangeInfo(issue)['branch']
    if not ref.startswith('refs/'):
      # NOTE: it is possible to create 'refs/x' branch,
      # aka 'refs/heads/refs/x'. However, this is ill-advised.
      ref = 'refs/heads/%s' % ref
    return ref
  def _GetApproversForLabel(self, issue, label):
    # Returns entries from the label's 'all' list whose vote equals the
    # label's maximum defined value.
    change_info = self.GetChangeInfo(issue)
    label_info = change_info.get('labels', {}).get(label, {})
    values = label_info.get('values', {}).keys()
    if not values:
      return []
    max_value = max(int(v) for v in values)
    return [v for v in label_info.get('all', [])
            if v.get('value', 0) == max_value]
  def IsBotCommitApproved(self, issue):
    """Returns True if anyone max-voted the Bot-Commit label."""
    return bool(self._GetApproversForLabel(issue, 'Bot-Commit'))
  def IsOwnersOverrideApproved(self, issue):
    """Returns True if anyone max-voted the Owners-Override label."""
    return bool(self._GetApproversForLabel(issue, 'Owners-Override'))
  def GetChangeOwner(self, issue):
    """Returns the email address of the change owner."""
    return self.GetChangeInfo(issue)['owner']['email']
  def GetChangeReviewers(self, issue, approving_only=True):
    """Returns reviewer emails; only max Code-Review voters if approving_only."""
    changeinfo = self.GetChangeInfo(issue)
    if approving_only:
      reviewers = self._GetApproversForLabel(issue, 'Code-Review')
    else:
      reviewers = changeinfo.get('reviewers', {}).get('REVIEWER', [])
    return [r.get('email') for r in reviewers]
  def UpdateDescription(self, description, issue):
    """Replaces the commit message of the change, without notifying anyone."""
    gerrit_util.SetCommitMessage(self.host, issue, description, notify='NONE')
  def IsCodeOwnersEnabledOnRepo(self):
    """Returns whether the code-owners plugin is enabled; result is cached."""
    if self.code_owners_enabled is None:
      self.code_owners_enabled = gerrit_util.IsCodeOwnersEnabledOnRepo(
          self.host, self.project)
    return self.code_owners_enabled
class OutputApi(object):
  """An instance of OutputApi gets passed to presubmit scripts so that they
  can output various types of results.
  """
  PresubmitResult = _PresubmitResult
  PresubmitError = _PresubmitError
  PresubmitPromptWarning = _PresubmitPromptWarning
  PresubmitNotifyResult = _PresubmitNotifyResult
  MailTextResult = _MailTextResult

  def __init__(self, is_committing):
    self.is_committing = is_committing
    # Extra addresses to CC on this change, populated via AppendCC.
    self.more_cc = []

  def AppendCC(self, cc):
    """Appends a user to cc for this change."""
    if cc in self.more_cc:
      return
    self.more_cc.append(cc)

  def PresubmitPromptOrNotify(self, *args, **kwargs):
    """Warn the user when uploading, but only notify if committing."""
    result_type = (self.PresubmitNotifyResult if self.is_committing
                   else self.PresubmitPromptWarning)
    return result_type(*args, **kwargs)
class InputApi(object):
  """An instance of this object is passed to presubmit scripts so they can
  know stuff about the change they're looking at.
  """
  # Method could be a function
  # pylint: disable=no-self-use

  # File extensions that are considered source files from a style guide
  # perspective. Don't modify this list from a presubmit script!
  #
  # Files without an extension aren't included in the list. If you want to
  # filter them as source files, add r'(^|.*?[\\\/])[^.]+$' to the allow list.
  # Note that ALL CAPS files are skipped in DEFAULT_FILES_TO_SKIP below.
  DEFAULT_FILES_TO_CHECK = (
      # C++ and friends
      r'.+\.c$', r'.+\.cc$', r'.+\.cpp$', r'.+\.h$', r'.+\.m$', r'.+\.mm$',
      r'.+\.inl$', r'.+\.asm$', r'.+\.hxx$', r'.+\.hpp$', r'.+\.s$', r'.+\.S$',
      # Scripts
      r'.+\.js$', r'.+\.ts$', r'.+\.py$', r'.+\.sh$', r'.+\.rb$', r'.+\.pl$',
      r'.+\.pm$',
      # Other
      r'.+\.java$', r'.+\.mk$', r'.+\.am$', r'.+\.css$', r'.+\.mojom$',
      r'.+\.fidl$', r'.+\.rs$',
  )

  # Path regexp that should be excluded from being considered containing source
  # files. Don't modify this list from a presubmit script!
  DEFAULT_FILES_TO_SKIP = (
      r'testing_support[\\\/]google_appengine[\\\/].*',
      r'.*\bexperimental[\\\/].*',
      # Exclude third_party/.* but NOT third_party/{WebKit,blink}
      # (crbug.com/539768 and crbug.com/836555).
      r'.*\bthird_party[\\\/](?!(WebKit|blink)[\\\/]).*',
      # Output directories (just in case)
      r'.*\bDebug[\\\/].*',
      r'.*\bRelease[\\\/].*',
      r'.*\bxcodebuild[\\\/].*',
      r'.*\bout[\\\/].*',
      # All caps files like README and LICENCE.
      r'.*\b[A-Z0-9_]{2,}$',
      # SCM (can happen in dual SCM configuration). (Slightly over aggressive)
      r'(|.*[\\\/])\.git[\\\/].*',
      r'(|.*[\\\/])\.svn[\\\/].*',
      # There is no point in processing a patch file.
      r'.+\.diff$',
      r'.+\.patch$',
  )

  def __init__(self, change, presubmit_path, is_committing,
               verbose, gerrit_obj, dry_run=None, thread_pool=None,
               parallel=False, no_diffs=False):
    """Builds an InputApi object.

    Args:
      change: A presubmit.Change object.
      presubmit_path: The path to the presubmit script being processed.
      is_committing: True if the change is about to be committed.
      verbose: Prints debug info.
      gerrit_obj: provides basic Gerrit codereview functionality.
      dry_run: if true, some Checks will be skipped.
      thread_pool: shared ThreadPool for input_api.RunTests; a private pool
        is created when none is supplied.
      parallel: if true, all tests reported via input_api.RunTests for all
        PRESUBMIT files will be run in parallel.
      no_diffs: if true, implies that --files or --all was specified so some
        checks can be skipped, and some errors will be messages.
    """
    # Version number of the presubmit_support script.
    self.version = [int(x) for x in __version__.split('.')]
    self.change = change
    self.is_committing = is_committing
    self.gerrit = gerrit_obj
    self.dry_run = dry_run
    self.no_diffs = no_diffs

    self.parallel = parallel
    self.thread_pool = thread_pool or ThreadPool()

    # We expose various modules and functions as attributes of the input_api
    # so that presubmit scripts don't have to import them.
    self.ast = ast
    self.basename = os.path.basename
    self.cpplint = cpplint
    self.fnmatch = fnmatch
    self.gclient_paths = gclient_paths
    # TODO(yyanagisawa): stop exposing this when python3 become default.
    # Since python3's tempfile has TemporaryDirectory, we do not need this.
    self.temporary_directory = gclient_utils.temporary_directory
    self.glob = glob.glob
    self.json = json
    self.logging = logging.getLogger('PRESUBMIT')
    self.os_listdir = os.listdir
    self.os_path = os.path
    self.os_stat = os.stat
    self.os_walk = os.walk
    self.re = re
    self.subprocess = subprocess
    self.sys = sys
    self.tempfile = tempfile
    self.time = time
    self.unittest = unittest
    # urllib2 only exists on Python 2; the urllib_request/urllib_error
    # aliases are expected to be importable on both versions.
    if sys.version_info.major == 2:
      self.urllib2 = urllib2
    self.urllib_request = urllib_request
    self.urllib_error = urllib_error

    self.is_windows = sys.platform == 'win32'

    # Set python_executable to 'vpython' in order to allow scripts in other
    # repos (e.g. src.git) to automatically pick up that repo's .vpython file,
    # instead of inheriting the one in depot_tools.
    self.python_executable = 'vpython'
    # Offer a python 3 executable for use during the migration off of python 2.
    self.python3_executable = 'vpython3'
    self.environ = os.environ

    # InputApi.platform is the platform you're currently running on.
    self.platform = sys.platform

    self.cpu_count = multiprocessing.cpu_count()
    if self.is_windows:
      # TODO(crbug.com/1190269) - we can't use more than 56 child processes on
      # Windows or Python3 may hang.
      self.cpu_count = min(self.cpu_count, 56)

    # The local path of the currently-being-processed presubmit script.
    self._current_presubmit_path = os.path.dirname(presubmit_path)

    # We carry the canned checks so presubmit scripts can easily use them.
    self.canned_checks = presubmit_canned_checks

    # Temporary files we must manually remove at the end of a run.
    self._named_temporary_files = []

    # Best-effort: failure to build the owners client must not break
    # presubmits, so only a message is printed.
    self.owners_client = None
    if self.gerrit and not 'PRESUBMIT_SKIP_NETWORK' in self.environ:
      try:
        self.owners_client = owners_client.GetCodeOwnersClient(
            host=self.gerrit.host,
            project=self.gerrit.project,
            branch=self.gerrit.branch)
      except Exception as e:
        print('Failed to set owners_client - %s' % str(e))
    self.owners_finder = owners_finder.OwnersFinder
    self.verbose = verbose
    self.Command = CommandData

    # Replace <hash_map> and <hash_set> as headers that need to be included
    # with 'base/containers/hash_tables.h' instead.
    # Access to a protected member _XX of a client class
    # pylint: disable=protected-access
    self.cpplint._re_pattern_templates = [
        (a, b, 'base/containers/hash_tables.h')
        if header in ('<hash_map>', '<hash_set>') else (a, b, header)
        for (a, b, header) in cpplint._re_pattern_templates
    ]

  def SetTimeout(self, timeout):
    """Sets the timeout (in seconds) used by the shared test thread pool."""
    self.thread_pool.timeout = timeout

  def PresubmitLocalPath(self):
    """Returns the local path of the presubmit script currently being run.

    This is useful if you don't want to hard-code absolute paths in the
    presubmit script. For example, It can be used to find another file
    relative to the PRESUBMIT.py script, so the whole tree can be branched and
    the presubmit script still works, without editing its content.
    """
    return self._current_presubmit_path

  def AffectedFiles(self, include_deletes=True, file_filter=None):
    """Same as input_api.change.AffectedFiles() except only lists files
    (and optionally directories) in the same directory as the current presubmit
    script, or subdirectories thereof. Note that files are listed using the OS
    path separator, so backslashes are used as separators on Windows.
    """
    dir_with_slash = utils.normpath(self.PresubmitLocalPath())
    # normpath strips trailing path separators, so the trailing separator has to
    # be added after the normpath call.
    if len(dir_with_slash) > 0:
      dir_with_slash += os.path.sep

    return list(
        filter(
            lambda x: utils.normpath(x.AbsoluteLocalPath()).startswith(
                dir_with_slash),
            self.change.AffectedFiles(include_deletes, file_filter)))

  def LocalPaths(self):
    """Returns local paths of input_api.AffectedFiles()."""
    paths = [af.LocalPath() for af in self.AffectedFiles()]
    logging.debug('LocalPaths: %s', paths)
    return paths

  def AbsoluteLocalPaths(self):
    """Returns absolute local paths of input_api.AffectedFiles()."""
    return [af.AbsoluteLocalPath() for af in self.AffectedFiles()]

  def AffectedTestableFiles(self, include_deletes=None, **kwargs):
    """Same as input_api.change.AffectedTestableFiles() except only lists files
    in the same directory as the current presubmit script, or subdirectories
    thereof.
    """
    if include_deletes is not None:
      warn('AffectedTestableFiles(include_deletes=%s)'
           ' is deprecated and ignored' % str(include_deletes),
           category=DeprecationWarning,
           stacklevel=2)
    # pylint: disable=consider-using-generator
    return [
        x for x in self.AffectedFiles(include_deletes=False, **kwargs)
        if x.IsTestableFile()
    ]

  def AffectedTextFiles(self, include_deletes=None):
    """An alias to AffectedTestableFiles for backwards compatibility."""
    return self.AffectedTestableFiles(include_deletes=include_deletes)

  def FilterSourceFile(self,
                       affected_file,
                       files_to_check=None,
                       files_to_skip=None,
                       allow_list=None,
                       block_list=None):
    """Filters out files that aren't considered 'source file'.

    If files_to_check or files_to_skip is None, InputApi.DEFAULT_FILES_TO_CHECK
    and InputApi.DEFAULT_FILES_TO_SKIP is used respectively.

    affected_file.LocalPath() needs to re.match an entry in the files_to_check
    list and not re.match any entries in the files_to_skip list.
    '/' path separators should be used in the regular expressions and will work
    on Windows as well as other platforms.

    Note: Copy-paste this function to suit your needs or use a lambda function.
    """
    if files_to_check is None:
      files_to_check = self.DEFAULT_FILES_TO_CHECK
    if files_to_skip is None:
      files_to_skip = self.DEFAULT_FILES_TO_SKIP

    def Find(affected_file, items):
      # Returns True if local_path re.match-es any regex in |items|.
      local_path = affected_file.LocalPath()
      for item in items:
        if self.re.match(item, local_path):
          return True
        # Handle the cases where the files regex only handles /, but the local
        # path uses \.
        if self.is_windows and self.re.match(item, local_path.replace(
            '\\', '/')):
          return True
      return False
    return (Find(affected_file, files_to_check) and
            not Find(affected_file, files_to_skip))

  def AffectedSourceFiles(self, source_file):
    """Filter the list of AffectedTestableFiles by the function source_file.

    If source_file is None, InputApi.FilterSourceFile() is used.
    """
    if not source_file:
      source_file = self.FilterSourceFile
    return list(filter(source_file, self.AffectedTestableFiles()))

  def RightHandSideLines(self, source_file_filter=None):
    """An iterator over all text lines in 'new' version of changed files.

    Only lists lines from new or modified text files in the change that are
    contained by the directory of the currently executing presubmit script.

    This is useful for doing line-by-line regex checks, like checking for
    trailing whitespace.

    Yields:
      a 3 tuple:
        the change.AffectedFile instance of the current file;
        integer line number (1-based); and
        the contents of the line as a string.

    Note: The carriage return (LF or CR) is stripped off.
    """
    files = self.AffectedSourceFiles(source_file_filter)
    return libchange.RightHandSideLinesImpl(files)

  def ReadFile(self, file_item, mode='r'):
    """Reads an arbitrary file.

    Deny reading anything outside the repository.
    """
    if isinstance(file_item, libchange.AffectedFile):
      file_item = file_item.AbsoluteLocalPath()
    if not file_item.startswith(self.change.RepositoryRoot()):
      raise IOError('Access outside the repository root is denied.')
    return gclient_utils.FileRead(file_item, mode)

  def CreateTemporaryFile(self, **kwargs):
    """Returns a named temporary file that must be removed with a call to
    RemoveTemporaryFiles().

    All keyword arguments are forwarded to tempfile.NamedTemporaryFile(),
    except for |delete|, which is always set to False.

    Presubmit checks that need to create a temporary file and pass it for
    reading should use this function instead of NamedTemporaryFile(), as
    Windows fails to open a file that is already open for writing.

      with input_api.CreateTemporaryFile() as f:
        f.write('xyz')
        f.close()
        input_api.subprocess.check_output(['script-that', '--reads-from',
                                           f.name])

    Note that callers of CreateTemporaryFile() should not worry about removing
    any temporary file; this is done transparently by the presubmit handling
    code.
    """
    if 'delete' in kwargs:
      # Prevent users from passing |delete|; we take care of file deletion
      # ourselves and this prevents unintuitive error messages when we pass
      # delete=False and 'delete' is also in kwargs.
      raise TypeError('CreateTemporaryFile() does not take a "delete" '
                      'argument, file deletion is handled automatically by '
                      'the same presubmit_support code that creates InputApi '
                      'objects.')
    temp_file = self.tempfile.NamedTemporaryFile(delete=False, **kwargs)
    self._named_temporary_files.append(temp_file.name)
    return temp_file

  @property
  def tbr(self):
    """Returns if a change is TBR'ed."""
    return 'TBR' in self.change.tags or self.change.TBRsFromDescription()

  def RunTests(self, tests_mix, parallel=True):
    """Queues tests for execution and returns any immediate messages.

    Plain PresubmitResult entries in |tests_mix| are collected as messages;
    actual tests are added to the shared thread pool. When --parallel was
    passed (self.parallel), the pool is drained later by the caller, so only
    the messages are returned here.
    """
    tests = []
    msgs = []
    for t in tests_mix:
      if isinstance(t, OutputApi.PresubmitResult) and t:
        msgs.append(t)
      else:
        assert issubclass(t.message, _PresubmitResult)
        tests.append(t)
        if self.verbose:
          t.info = _PresubmitNotifyResult
        if not t.kwargs.get('cwd'):
          t.kwargs['cwd'] = self.PresubmitLocalPath()
    self.thread_pool.AddTests(tests, parallel)
    # When self.parallel is True (i.e. --parallel is passed as an option)
    # RunTests doesn't actually run tests. It adds them to a ThreadPool that
    # will run all tests once all PRESUBMIT files are processed.
    # Otherwise, it will run them and return the results.
    if not self.parallel:
      msgs.extend(self.thread_pool.RunAsync())
    return msgs
class GetPostUploadExecuter(object):
  """Loads PRESUBMIT.py scripts and runs their PostUploadHook(), if present."""

  def __init__(self, change, gerrit_obj, use_python3):
    """
    Args:
      change: The Change object.
      gerrit_obj: provides basic Gerrit codereview functionality.
      use_python3: if true, will use python3 instead of python2 by default
        if USE_PYTHON3 is not specified.
    """
    self.change = change
    self.gerrit = gerrit_obj
    self.use_python3 = use_python3

  def ExecPresubmitScript(self, script_text, presubmit_path):
    """Executes PostUploadHook() from a single presubmit script.

    Caller is responsible for validating whether the hook should be executed
    and should only call this function if it should be.

    Args:
      script_text: The text of the presubmit script.
      presubmit_path: Project script to run.

    Return:
      A list of results objects.
    """
    # Change to the presubmit file's directory to support local imports.
    presubmit_dir = os.path.dirname(presubmit_path)
    main_path = os.getcwd()
    try:
      os.chdir(presubmit_dir)
      return self._execute_with_local_working_directory(script_text,
                                                        presubmit_dir,
                                                        presubmit_path)
    finally:
      # Return the process to the original working directory.
      os.chdir(main_path)

  def _execute_with_local_working_directory(self, script_text, presubmit_dir,
                                            presubmit_path):
    """Execs |script_text| and invokes its PostUploadHook, if defined.

    Raises:
      PresubmitFailure: if the script raises, or if PostUploadHook does not
        take exactly three positional arguments.
    """
    context = {}
    try:
      exec(compile(script_text, presubmit_path, 'exec', dont_inherit=True),
           context)
    except Exception as e:
      raise PresubmitFailure('"%s" had an exception.\n%s'
                             % (presubmit_path, e))

    function_name = 'PostUploadHook'
    if function_name not in context:
      # Fix: return a list (as documented above) rather than a dict, so
      # callers always receive a consistent type to extend/concatenate.
      return []
    post_upload_hook = context[function_name]
    # Fix: inspect.getargspec was removed in Python 3.11. Use getfullargspec
    # when available while staying compatible with Python 2.
    get_argspec = getattr(inspect, 'getfullargspec', None) or inspect.getargspec
    if len(get_argspec(post_upload_hook)[0]) != 3:
      raise PresubmitFailure(
          'Expected function "PostUploadHook" to take three arguments.')
    return post_upload_hook(self.gerrit, self.change, OutputApi(False))
def _MergeMasters(masters1, masters2):
"""Merges two master maps. Merges also the tests of each builder."""
result = {}
for (master, builders) in itertools.chain(masters1.items(),
masters2.items()):
new_builders = result.setdefault(master, {})
for (builder, tests) in builders.items():
new_builders.setdefault(builder, set([])).update(tests)
return result
def DoPostUploadExecuter(change, gerrit_obj, verbose, use_python3=False):
  """Execute the post upload hook.

  Args:
    change: The Change object.
    gerrit_obj: The GerritAccessor object.
    verbose: Prints debug info.
    use_python3: if true, default to using Python3 for presubmit checks
      rather than Python2.

  Returns:
    1 if any hook result was fatal, 0 otherwise.
  """
  python_version = 'Python %s' % sys.version_info.major
  sys.stdout.write('Running %s post upload checks ...\n' % python_version)
  presubmit_files = utils.ListRelevantFilesInSourceCheckout(
      change.LocalPaths(), change.RepositoryRoot(), _PRESUBMIT_FILE_REGEX,
      _PRESUBMIT_FILE_EXCLUDE)
  if verbose and not presubmit_files:
    sys.stdout.write('Warning, no PRESUBMIT.py found.\n')

  executer = GetPostUploadExecuter(change, gerrit_obj, use_python3)
  results = []
  # The root presubmit file should be executed after the ones in
  # subdirectories, i.e. the specific post upload hooks should run before the
  # general ones; hence iterate in reverse order.
  for script_path in reversed(presubmit_files):
    script_path = os.path.abspath(script_path)
    # Accept CRLF presubmit script.
    presubmit_script = gclient_utils.FileRead(script_path).replace('\r\n', '\n')
    if not _ShouldRunPresubmit(presubmit_script, use_python3):
      continue
    if sys.version_info[0] == 2:
      sys.stdout.write(
          'Running %s under Python 2. Add USE_PYTHON3 = True to prevent '
          'this.\n' % script_path)
    elif verbose:
      sys.stdout.write('Running %s\n' % script_path)
    results.extend(executer.ExecPresubmitScript(presubmit_script, script_path))

  if not results:
    return 0

  sys.stdout.write('\n')
  sys.stdout.write('** Post Upload Hook Messages **\n')

  exit_code = 0
  for result in results:
    if result.fatal:
      exit_code = 1
    result.handle()
    sys.stdout.write('\n')

  return exit_code
class PresubmitExecuter(object):
  """Executes PRESUBMIT.py scripts for a change and collects their results."""

  def __init__(self, change, committing, verbose, gerrit_obj, dry_run=None,
               thread_pool=None, parallel=False, use_python3=False,
               no_diffs=False):
    """
    Args:
      change: The Change object.
      committing: True if 'git cl land' is running, False if 'git cl upload' is.
      gerrit_obj: provides basic Gerrit codereview functionality.
      dry_run: if true, some Checks will be skipped.
      parallel: if true, all tests reported via input_api.RunTests for all
        PRESUBMIT files will be run in parallel.
      use_python3: if true, will use python3 instead of python2 by default
        if USE_PYTHON3 is not specified.
      no_diffs: if true, implies that --files or --all was specified so some
        checks can be skipped, and some errors will be messages.
    """
    self.change = change
    self.committing = committing
    self.gerrit = gerrit_obj
    self.verbose = verbose
    self.dry_run = dry_run
    # CC addresses accumulated from each executed script's output_api.
    self.more_cc = []
    self.thread_pool = thread_pool
    self.parallel = parallel
    self.use_python3 = use_python3
    self.no_diffs = no_diffs

  def ExecPresubmitScript(self, script_text, presubmit_path):
    """Executes a single presubmit script.

    Caller is responsible for validating whether the hook should be executed
    and should only call this function if it should be.

    Args:
      script_text: The text of the presubmit script.
      presubmit_path: The path to the presubmit file (this will be reported via
        input_api.PresubmitLocalPath()).

    Return:
      A list of result objects, empty if no problems.
    """
    # Change to the presubmit file's directory to support local imports.
    presubmit_dir = os.path.dirname(presubmit_path)
    main_path = os.getcwd()
    try:
      os.chdir(presubmit_dir)
      return self._execute_with_local_working_directory(script_text,
                                                        presubmit_dir,
                                                        presubmit_path)
    finally:
      # Return the process to the original working directory.
      os.chdir(main_path)

  def _execute_with_local_working_directory(self, script_text, presubmit_dir,
                                            presubmit_path):
    # Load the presubmit script into context.
    input_api = InputApi(self.change, presubmit_path, self.committing,
                         self.verbose, gerrit_obj=self.gerrit,
                         dry_run=self.dry_run, thread_pool=self.thread_pool,
                         parallel=self.parallel, no_diffs=self.no_diffs)
    output_api = OutputApi(self.committing)
    context = {}
    try:
      exec(compile(script_text, presubmit_path, 'exec', dont_inherit=True),
           context)
    except Exception as e:
      raise PresubmitFailure('"%s" had an exception.\n%s' % (presubmit_path, e))

    # Arguments passed to each check function via eval in
    # _run_check_function below.
    context['__args'] = (input_api, output_api)

    # Get path of presubmit directory relative to repository root.
    # Always use forward slashes, so that path is same in *nix and Windows
    root = input_api.change.RepositoryRoot()
    rel_path = os.path.relpath(presubmit_dir, root)
    rel_path = rel_path.replace(os.path.sep, '/')

    # Get the URL of git remote origin and use it to identify host and project
    host = project = ''
    if self.gerrit:
      host = self.gerrit.host or ''
      project = self.gerrit.project or ''

    # Prefix for test names
    prefix = 'presubmit:%s/%s:%s/' % (host, project, rel_path)

    # Perform all the desired presubmit checks.
    results = []
    try:
      version = [
          int(x) for x in context.get('PRESUBMIT_VERSION', '0.0.0').split('.')
      ]

      with rdb_wrapper.client(prefix) as sink:
        if version >= [2, 0, 0]:
          # In version 2.0+, discover check functions by name: every global
          # named Check* runs, with *Commit/*Upload restricted to the
          # corresponding mode.
          # Copy the keys to prevent "dictionary changed size during iteration"
          # exception if checks add globals to context. E.g. sometimes the
          # Python runtime will add __warningregistry__.
          for function_name in list(context.keys()):
            if not function_name.startswith('Check'):
              continue
            if function_name.endswith('Commit') and not self.committing:
              continue
            if function_name.endswith('Upload') and self.committing:
              continue
            logging.debug('Running %s in %s', function_name, presubmit_path)
            results.extend(
                self._run_check_function(function_name, context, sink,
                                         presubmit_path))
            logging.debug('Running %s done.', function_name)
          self.more_cc.extend(output_api.more_cc)

        else:  # Old format
          if self.committing:
            function_name = 'CheckChangeOnCommit'
          else:
            function_name = 'CheckChangeOnUpload'
          if function_name in list(context.keys()):
            logging.debug('Running %s in %s', function_name, presubmit_path)
            results.extend(
                self._run_check_function(function_name, context, sink,
                                         presubmit_path))
            logging.debug('Running %s done.', function_name)
            self.more_cc.extend(output_api.more_cc)
    finally:
      # Clean up files created via input_api.CreateTemporaryFile().
      for f in input_api._named_temporary_files:
        os.remove(f)

    return results

  def _run_check_function(self, function_name, context, sink, presubmit_path):
    """Evaluates and returns the result of a given presubmit function.

    If sink is given, the result of the presubmit function will be reported
    to the ResultSink.

    Args:
      function_name: the name of the presubmit function to evaluate
      context: a context dictionary in which the function will be evaluated
      sink: an instance of ResultSink. None, by default.
    Returns:
      the result of the presubmit function call.
    """
    start_time = time_time()
    try:
      result = eval(function_name + '(*__args)', context)
      self._check_result_type(result)
    except Exception:
      # Any exception raised by a check is converted into a PresubmitError so
      # that one broken check does not halt the remaining presubmits.
      _, e_value, _ = sys.exc_info()
      result = [
          OutputApi.PresubmitError(
              'Evaluation of %s failed: %s, %s' %
              (function_name, e_value, traceback.format_exc()))
      ]

    elapsed_time = time_time() - start_time
    # Flag slow checks so authors can find and speed them up.
    if elapsed_time > 10.0:
      sys.stdout.write('%6.1fs to run %s from %s.\n' %
                       (elapsed_time, function_name, presubmit_path))
    if sink:
      failure_reason = None
      status = rdb_wrapper.STATUS_PASS
      if any(r.fatal for r in result):
        status = rdb_wrapper.STATUS_FAIL
        failure_reasons = []
        for r in result:
          fields = r.json_format()
          message = fields['message']
          items = '\n'.join(' %s' % item for item in fields['items'])
          failure_reasons.append('\n'.join([message, items]))
        if failure_reasons:
          failure_reason = '\n'.join(failure_reasons)
      sink.report(function_name, status, elapsed_time, failure_reason)

    return result

  def _check_result_type(self, result):
    """Helper function which ensures result is a list, and all elements are
    instances of OutputApi.PresubmitResult"""
    if not isinstance(result, (tuple, list)):
      raise PresubmitFailure('Presubmit functions must return a tuple or list')
    if not all(isinstance(res, OutputApi.PresubmitResult) for res in result):
      raise PresubmitFailure(
          'All presubmit results must be of types derived from '
          'output_api.PresubmitResult')
def DoPresubmitChecks(change,
                      committing,
                      verbose,
                      default_presubmit,
                      may_prompt,
                      gerrit_obj,
                      dry_run=None,
                      parallel=False,
                      json_output=None,
                      use_python3=False,
                      no_diffs=False):
  """Runs all presubmit checks that apply to the files in the change.

  This finds all PRESUBMIT.py files in directories enclosing the files in the
  change (up to the repository root) and calls the relevant entrypoint function
  depending on whether the change is being committed or uploaded.

  Prints errors, warnings and notifications.  Prompts the user for warnings
  when needed.

  Args:
    change: The Change object.
    committing: True if 'git cl land' is running, False if 'git cl upload' is.
    verbose: Prints debug info.
    default_presubmit: A default presubmit script to execute in any case.
    may_prompt: Enable (y/n) questions on warning or error. If False,
      any questions are answered with yes by default.
    gerrit_obj: provides basic Gerrit codereview functionality.
    dry_run: if true, some Checks will be skipped.
    parallel: if true, all tests specified by input_api.RunTests in all
      PRESUBMIT files will be run in parallel.
    use_python3: if true, default to using Python3 for presubmit checks
      rather than Python2.
    no_diffs: if true, implies that --files or --all was specified so some
      checks can be skipped, and some errors will be messages.
  Return:
    1 if presubmit checks failed or 0 otherwise.
  """
  old_environ = os.environ
  try:
    # Make sure python subprocesses won't generate .pyc files.
    os.environ = os.environ.copy()
    os.environ['PYTHONDONTWRITEBYTECODE'] = '1'

    python_version = 'Python %s' % sys.version_info.major
    if committing:
      sys.stdout.write('Running %s presubmit commit checks ...\n' %
                       python_version)
    else:
      sys.stdout.write('Running %s presubmit upload checks ...\n' %
                       python_version)
    start_time = time_time()
    presubmit_files = utils.ListRelevantFilesInSourceCheckout(
        change.AbsoluteLocalPaths(), change.RepositoryRoot(),
        _PRESUBMIT_FILE_REGEX, _PRESUBMIT_FILE_EXCLUDE)
    if not presubmit_files and verbose:
      sys.stdout.write('Warning, no PRESUBMIT.py found.\n')
    results = []
    # Location where child Python 2 invocations are logged (written by
    # vpython.bat) so that lingering Python 2 usage can be reported below.
    if sys.platform == 'win32':
      temp = os.environ['TEMP']
    else:
      temp = '/tmp'
    python2_usage_log_file = os.path.join(temp, 'python2_usage.txt')
    # Start with a clean log so only this run's usage is reported.
    if os.path.exists(python2_usage_log_file):
      os.remove(python2_usage_log_file)
    thread_pool = ThreadPool()
    executer = PresubmitExecuter(change, committing, verbose, gerrit_obj,
                                 dry_run, thread_pool, parallel, use_python3,
                                 no_diffs)
    skipped_count = 0;
    if default_presubmit:
      if verbose:
        sys.stdout.write('Running default presubmit script.\n')
      fake_path = os.path.join(change.RepositoryRoot(), 'PRESUBMIT.py')
      if _ShouldRunPresubmit(default_presubmit, use_python3):
        results += executer.ExecPresubmitScript(default_presubmit, fake_path)
      else:
        skipped_count += 1
    for filename in presubmit_files:
      filename = os.path.abspath(filename)
      # Accept CRLF presubmit script.
      presubmit_script = gclient_utils.FileRead(filename).replace('\r\n', '\n')
      if _ShouldRunPresubmit(presubmit_script, use_python3):
        if sys.version_info[0] == 2:
          sys.stdout.write(
              'Running %s under Python 2. Add USE_PYTHON3 = True to prevent '
              'this.\n' % filename)
        elif verbose:
          sys.stdout.write('Running %s\n' % filename)
        results += executer.ExecPresubmitScript(presubmit_script, filename)
      else:
        skipped_count += 1

    # Drain any tests queued via input_api.RunTests (--parallel mode).
    results += thread_pool.RunAsync()

    # Report any Python 2 scripts that were run during the presubmits.
    if os.path.exists(python2_usage_log_file):
      with open(python2_usage_log_file) as f:
        python2_usage = [x.strip() for x in f.readlines()]
      results.append(
          OutputApi(committing).PresubmitPromptWarning(
              'Python 2 scripts were run during %s presubmits. Please see '
              'https://bugs.chromium.org/p/chromium/issues/detail?id=1313804'
              '#c61 for tips on resolving this.'
              % python_version,
              items=python2_usage))

    # Bucket results by severity.
    messages = {}
    should_prompt = False
    presubmits_failed = False
    for result in results:
      if result.fatal:
        presubmits_failed = True
        messages.setdefault('ERRORS', []).append(result)
      elif result.should_prompt:
        should_prompt = True
        messages.setdefault('Warnings', []).append(result)
      else:
        messages.setdefault('Messages', []).append(result)

    # Print the different message types in a consistent order. ERRORS go last
    # so that they will be most visible in the local-presubmit output.
    for name in ['Messages', 'Warnings', 'ERRORS']:
      if name in messages:
        items = messages[name]
        sys.stdout.write('** Presubmit %s: %d **\n' % (name, len(items)))
        for item in items:
          item.handle()
          sys.stdout.write('\n')

    total_time = time_time() - start_time
    if total_time > 1.0:
      sys.stdout.write(
          'Presubmit checks took %.1fs to calculate.\n' % total_time)

    if not should_prompt and not presubmits_failed:
      sys.stdout.write('%s presubmit checks passed.\n\n' % python_version)
    elif should_prompt and not presubmits_failed:
      sys.stdout.write('There were %s presubmit warnings. ' % python_version)
      if may_prompt:
        presubmits_failed = not prompt_should_continue(
            'Are you sure you wish to continue? (y/N): ')
      else:
        sys.stdout.write('\n')
    else:
      sys.stdout.write('There were %s presubmit errors.\n' % python_version)

    if json_output:
      # Write the presubmit results to json output
      presubmit_results = {
          'errors': [
              error.json_format()
              for error in messages.get('ERRORS', [])
          ],
          'notifications': [
              notification.json_format()
              for notification in messages.get('Messages', [])
          ],
          'warnings': [
              warning.json_format()
              for warning in messages.get('Warnings', [])
          ],
          'more_cc': executer.more_cc,
          'skipped_presubmits': skipped_count,
      }

      gclient_utils.FileWrite(
          json_output, json.dumps(presubmit_results, sort_keys=True))

    global _ASKED_FOR_FEEDBACK
    # Ask for feedback one time out of 5.
    if (results and random.randint(0, 4) == 0 and not _ASKED_FOR_FEEDBACK):
      sys.stdout.write(
          'Was the presubmit check useful? If not, run "git cl presubmit -v"\n'
          'to figure out which PRESUBMIT.py was run, then run git blame\n'
          'on the file to figure out who to ask for help.\n')
      _ASKED_FOR_FEEDBACK = True

    return 1 if presubmits_failed else 0
  finally:
    # Restore the environment mutated at the top of this function.
    os.environ = old_environ
def _scan_sub_dirs(mask, recursive):
if not recursive:
return [x for x in glob.glob(mask) if x not in ('.svn', '.git')]
results = []
for root, dirs, files in os.walk('.'):
if '.svn' in dirs:
dirs.remove('.svn')
if '.git' in dirs:
dirs.remove('.git')
for name in files:
if fnmatch.fnmatch(name, mask):
results.append(os.path.join(root, name))
return results
def _parse_files(args, recursive):
  """Returns ('M', path) tuples for every file matching any mask in |args|."""
  logging.debug('Searching for %s', args)
  found = []
  for mask in args:
    found += [('M', path) for path in _scan_sub_dirs(mask, recursive)]
  return found
def _parse_change(parser, options):
  """Process change options.

  Args:
    parser: The parser used to parse the arguments from command line.
    options: The arguments parsed from command line.
  Returns:
    A change.GitChange if the change root is a git repository, or a Change
    otherwise.
  """
  if options.files and options.all_files:
    parser.error('<files> cannot be specified when --all-files is set.')

  change_scm = scm.determine_scm(options.root)
  if change_scm != 'git' and not options.files:
    parser.error('<files> is not optional for unversioned directories.')

  if options.files:
    if options.source_controlled_only:
      # Keep only SCM-tracked files that match at least one of the masks.
      change_files = [
          ('M', path) for path in scm.GIT.GetAllFiles(options.root)
          if any(fnmatch.fnmatch(path, mask) for mask in options.files)
      ]
    else:
      # Get the filtered set of files from a directory scan.
      change_files = _parse_files(options.files, options.recursive)
  elif options.all_files:
    change_files = [('M', path) for path in scm.GIT.GetAllFiles(options.root)]
  else:
    change_files = scm.GIT.CaptureStatus(
        options.root, options.upstream or None)

  logging.info('Found %d file(s).', len(change_files))

  change_class = (
      libchange.GitChange if change_scm == 'git' else libchange.Change)
  return change_class(
      options.name,
      options.description,
      options.root,
      change_files,
      options.issue,
      options.patchset,
      options.author,
      upstream=options.upstream)
def _parse_gerrit_options(parser, options):
"""Process gerrit options.
SIDE EFFECTS: Modifies options.author and options.description from Gerrit if
options.gerrit_fetch is set.
Args:
parser: The parser used to parse the arguments from command line.
options: The arguments parsed from command line.
Returns:
A GerritAccessor object if options.gerrit_url is set, or None otherwise.
"""
gerrit_obj = None
if options.gerrit_url:
gerrit_obj = GerritAccessor(
url=options.gerrit_url,
project=options.gerrit_project,
branch=options.gerrit_branch)
if not options.gerrit_fetch:
return gerrit_obj
if not options.gerrit_url or not options.issue or not options.patchset:
parser.error(
'--gerrit_fetch requires --gerrit_url, --issue and --patchset.')
options.author = gerrit_obj.GetChangeOwner(options.issue)
options.description = gerrit_obj.GetChangeDescription(
options.issue, options.patchset)
logging.info('Got author: "%s"', options.author)
logging.info('Got description: """\n%s\n"""', options.description)
return gerrit_obj
@contextlib.contextmanager
def canned_check_filter(method_names):
  """Context manager that temporarily disables the named canned checks.

  Each named attribute of presubmit_canned_checks is replaced with a stub
  that returns no results; the originals are restored on exit, even if the
  body raises. Unknown names are logged and skipped.
  """
  saved = {}
  try:
    for name in method_names:
      if not hasattr(presubmit_canned_checks, name):
        logging.warning('Skipping unknown "canned" check %s' % name)
        continue
      saved[name] = getattr(presubmit_canned_checks, name)
      setattr(presubmit_canned_checks, name, lambda *_a, **_kw: [])
    yield
  finally:
    for name, original in saved.items():
      setattr(presubmit_canned_checks, name, original)
def main(argv=None):
  """Entry point for presubmit checks.

  Parses command line options, builds the Change object, then dispatches to
  either the post-upload hooks or the upload/commit presubmit checks.

  Args:
    argv: Command line arguments, or None to use sys.argv[1:].
  Returns:
    0 if all checks passed, 1 if checks failed, 2 on internal failure.
  """
  parser = argparse.ArgumentParser(usage='%(prog)s [options] <files...>')
  hooks = parser.add_mutually_exclusive_group()
  hooks.add_argument('-c', '--commit', action='store_true',
                     help='Use commit instead of upload checks.')
  hooks.add_argument('-u', '--upload', action='store_false', dest='commit',
                     help='Use upload instead of commit checks.')
  hooks.add_argument('--post_upload', action='store_true',
                     help='Run post-upload commit hooks.')
  parser.add_argument('-r', '--recursive', action='store_true',
                      help='Act recursively.')
  parser.add_argument('-v', '--verbose', action='count', default=0,
                      help='Use 2 times for more debug info.')
  parser.add_argument('--name', default='no name')
  parser.add_argument('--author')
  desc = parser.add_mutually_exclusive_group()
  desc.add_argument('--description', default='', help='The change description.')
  desc.add_argument('--description_file',
                    help='File to read change description from.')
  parser.add_argument('--issue', type=int, default=0)
  parser.add_argument('--patchset', type=int, default=0)
  parser.add_argument('--root', default=os.getcwd(),
                      help='Search for PRESUBMIT.py up to this directory. '
                      'If inherit-review-settings-ok is present in this '
                      'directory, parent directories up to the root file '
                      'system directories will also be searched.')
  parser.add_argument('--upstream',
                      help='Git only: the base ref or upstream branch against '
                      'which the diff should be computed.')
  parser.add_argument('--default_presubmit')
  parser.add_argument('--may_prompt', action='store_true', default=False)
  parser.add_argument('--skip_canned', action='append', default=[],
                      help='A list of checks to skip which appear in '
                      'presubmit_canned_checks. Can be provided multiple times '
                      'to skip multiple canned checks.')
  parser.add_argument('--dry_run', action='store_true', help=argparse.SUPPRESS)
  parser.add_argument('--gerrit_url', help=argparse.SUPPRESS)
  parser.add_argument('--gerrit_project', help=argparse.SUPPRESS)
  parser.add_argument('--gerrit_branch', help=argparse.SUPPRESS)
  parser.add_argument('--gerrit_fetch', action='store_true',
                      help=argparse.SUPPRESS)
  parser.add_argument('--parallel', action='store_true',
                      help='Run all tests specified by input_api.RunTests in '
                      'all PRESUBMIT files in parallel.')
  parser.add_argument('--json_output',
                      help='Write presubmit errors to json output.')
  parser.add_argument('--all_files', action='store_true',
                      help='Mark all files under source control as modified.')
  parser.add_argument('files', nargs='*',
                      help='List of files to be marked as modified when '
                      'executing presubmit or post-upload hooks. fnmatch '
                      'wildcards can also be used.')
  parser.add_argument('--source_controlled_only', action='store_true',
                      help='Constrain \'files\' to those in source control.')
  parser.add_argument('--use-python3', action='store_true',
                      help='Use python3 for presubmit checks by default')
  parser.add_argument('--no_diffs', action='store_true',
                      help='Assume that all "modified" files have no diffs.')
  options = parser.parse_args(argv)

  log_level = logging.ERROR
  if options.verbose >= 2:
    log_level = logging.DEBUG
  elif options.verbose:
    log_level = logging.INFO
  log_format = ('[%(levelname).1s%(asctime)s %(process)d %(thread)d '
                '%(filename)s] %(message)s')
  logging.basicConfig(format=log_format, level=log_level)

  # Print call stacks when _PresubmitResult objects are created with -v -v is
  # specified. This helps track down where presubmit messages are coming from.
  if options.verbose >= 2:
    global _SHOW_CALLSTACKS
    _SHOW_CALLSTACKS = True

  if options.description_file:
    options.description = gclient_utils.FileRead(options.description_file)
  gerrit_obj = _parse_gerrit_options(parser, options)
  change = _parse_change(parser, options)

  try:
    if options.post_upload:
      return DoPostUploadExecuter(change, gerrit_obj, options.verbose,
                                  options.use_python3)
    with canned_check_filter(options.skip_canned):
      return DoPresubmitChecks(
          change,
          options.commit,
          options.verbose,
          options.default_presubmit,
          options.may_prompt,
          gerrit_obj,
          options.dry_run,
          options.parallel,
          options.json_output,
          options.use_python3,
          options.no_diffs)
  except PresubmitFailure as e:
    print(e, file=sys.stderr)
    print('Maybe your depot_tools is out of date?', file=sys.stderr)
    print('depot_tools version: %s' % utils.depot_tools_version(),
          file=sys.stderr)
    return 2
if __name__ == '__main__':
  # Normalize stdout/stderr encodings before anything prints, then run the
  # checks. A Ctrl-C is reported as exit code 2, matching the internal-failure
  # code returned by main() on PresubmitFailure.
  fix_encoding.fix_encoding()
  try:
    sys.exit(main())
  except KeyboardInterrupt:
    sys.stderr.write('interrupted\n')
    sys.exit(2)