From edba22d4eb12b5db5bdee7dc921da7857be1b687 Mon Sep 17 00:00:00 2001
From: Gavin Mak
Date: Tue, 2 Jan 2024 20:19:55 +0000
Subject: [PATCH] Fix multiline comment formatting

Many incorrectly formatted comments exist from the switch to 4 space indent: https://crrev.com/c/4836379

Bug: 1514505
Change-Id: I6366f9da812919bd35b999f18fa8a49b7a66c09b
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/5153633
Commit-Queue: Gavin Mak
Reviewed-by: Josip Sokcevic
---
 PRESUBMIT.py | 2 +-
 auth.py | 54 +-
 bootstrap/bootstrap.py | 92 +-
 cpplint.py | 1663 ++++++++----
 detect_host_arch.py | 2 +-
 download_from_google_storage.py | 32 +-
 fetch.py | 32 +-
 fix_encoding.py | 58 +-
 gclient.py | 148 +-
 gclient_eval.py | 52 +-
 gclient_scm.py | 2 +-
 gclient_utils.py | 249 +--
 gerrit_client.py | 21 +-
 gerrit_util.py | 201 +-
 git_cache.py | 47 +-
 git_cl.py | 423 +++--
 git_common.py | 267 +--
 git_dates.py | 16 +-
 git_footers.py | 60 +-
 git_hyper_blame.py | 33 +-
 git_map_branches.py | 15 +-
 git_number.py | 56 +-
 git_rebase_update.py | 8 +-
 git_retry.py | 10 +-
 lockfile.py | 14 +-
 metrics.py | 17 +-
 metrics_utils.py | 28 +-
 my_activity.py | 24 +-
 owners_client.py | 42 +-
 presubmit_canned_checks.py | 369 ++--
 presubmit_support.py | 456 ++---
 rdb_wrapper.py | 37 +-
 reclient_helper.py | 72 +-
 reclient_metrics.py | 2 +-
 roll_dep.py | 6 +-
 scm.py | 30 +-
 setup_color.py | 4 +-
 split_cl.py | 126 +-
 subcommand.py | 49 +-
 subprocess2.py | 68 +-
 testing_support/coverage_utils.py | 20 +-
 testing_support/fake_repos.py | 12 +-
 testing_support/filesystem_mock.py | 8 +-
 testing_support/git_test_utils.py | 190 +-
 .../presubmit_canned_checks_test_mocks.py | 46 +-
 testing_support/test_case_utils.py | 2 +-
 testing_support/trial_dir.py | 8 +-
 tests/bot_update_coverage_test.py | 30 +-
 tests/cipd_bootstrap_test.py | 18 +-
 tests/gclient_scm_test.py | 15 +-
 tests/gclient_smoketest_base.py | 12 +-
 tests/gclient_test.py | 86 +-
 tests/gclient_transitions_smoketest.py | 10 +-
 tests/git_cl_test.py | 16 +-
 tests/git_common_test.py | 4 +-
 tests/git_hyper_blame_test.py | 26 +-
 tests/presubmit_canned_checks_test.py | 4 +-
 tests/presubmit_unittest.py | 40 +-
 tests/subprocess2_test.py | 8 +-
 watchlists.py | 24 +-
 win_toolchain/get_toolchain_if_necessary.py | 30 +-
 win_toolchain/package_from_installed.py | 10 +-
 62 files changed, 2762 insertions(+), 2744 deletions(-)

diff --git a/PRESUBMIT.py b/PRESUBMIT.py
index 63d3cdaf5a..4b3e0d1c3e 100644
--- a/PRESUBMIT.py
+++ b/PRESUBMIT.py
@@ -100,7 +100,7 @@ def CheckJsonFiles(input_api, output_api):
 def CheckUnitTestsOnCommit(input_api, output_api):
-    """ Do not run integration tests on upload since they are way too slow."""
+    """Do not run integration tests on upload since they are way too slow."""
     input_api.SetTimeout(TEST_TIMEOUT_S)
diff --git a/auth.py b/auth.py
index 285fd6e969..de9ecfd415 100644
--- a/auth.py
+++ b/auth.py
@@ -69,11 +69,11 @@ def has_luci_context_local_auth():
 class Authenticator(object):
     """Object that knows how to refresh access tokens or id tokens when needed.
- Args:
- scopes: space separated oauth scopes. It's used to generate access tokens. Defaults to OAUTH_SCOPE_EMAIL.
- audience: An audience in ID tokens to claim which clients should accept it.
- """
+ Args:
+ scopes: space separated oauth scopes. It's used to generate access tokens. Defaults to OAUTH_SCOPE_EMAIL.
+ audience: An audience in ID tokens to claim which clients should accept it.
+ """ def __init__(self, scopes=OAUTH_SCOPE_EMAIL, audience=None): self._access_token = None self._scopes = scopes @@ -83,20 +83,20 @@ class Authenticator(object): def has_cached_credentials(self): """Returns True if credentials can be obtained. - If returns False, get_access_token() or get_id_token() later will probably - ask for interactive login by raising LoginRequiredError. + If returns False, get_access_token() or get_id_token() later will probably + ask for interactive login by raising LoginRequiredError. - If returns True, get_access_token() or get_id_token() won't ask for - interactive login. - """ + If returns True, get_access_token() or get_id_token() won't ask for + interactive login. + """ return bool(self._get_luci_auth_token()) def get_access_token(self): """Returns AccessToken, refreshing it if necessary. - Raises: - LoginRequiredError if user interaction is required. - """ + Raises: + LoginRequiredError if user interaction is required. + """ if self._access_token and not self._access_token.needs_refresh(): return self._access_token @@ -113,12 +113,12 @@ class Authenticator(object): def get_id_token(self): """Returns id token, refreshing it if necessary. - Returns: - A Token object. + Returns: + A Token object. - Raises: - LoginRequiredError if user interaction is required. - """ + Raises: + LoginRequiredError if user interaction is required. + """ if self._id_token and not self._id_token.needs_refresh(): return self._id_token @@ -133,15 +133,15 @@ class Authenticator(object): def authorize(self, http, use_id_token=False): """Monkey patches authentication logic of httplib2.Http instance. - The modified http.request method will add authentication headers to each - request. + The modified http.request method will add authentication headers to each + request. - Args: - http: An instance of httplib2.Http. + Args: + http: An instance of httplib2.Http. - Returns: - A modified instance of http that was passed in. - """ + Returns: + A modified instance of http that was passed in. + """ # Adapted from oauth2client.OAuth2Credentials.authorize. request_orig = http.request @@ -167,9 +167,9 @@ class Authenticator(object): def _run_luci_auth_login(self): """Run luci-auth login. - Returns: - AccessToken with credentials. - """ + Returns: + AccessToken with credentials. + """ logging.debug('Running luci-auth login') subprocess2.check_call(['luci-auth', 'login', '-scopes', self._scopes]) return self._get_luci_auth_token() diff --git a/bootstrap/bootstrap.py b/bootstrap/bootstrap.py index 7afa2b237c..a15656826d 100644 --- a/bootstrap/bootstrap.py +++ b/bootstrap/bootstrap.py @@ -54,15 +54,15 @@ class Template( def maybe_install(self, name, dst_path): """Installs template |name| to |dst_path| if it has changed. - This loads the template |name| from THIS_DIR, resolves template parameters, - and installs it to |dst_path|. See `maybe_update` for more information. + This loads the template |name| from THIS_DIR, resolves template parameters, + and installs it to |dst_path|. See `maybe_update` for more information. - Args: - name (str): The name of the template to install. - dst_path (str): The destination filesystem path. + Args: + name (str): The name of the template to install. + dst_path (str): The destination filesystem path. - Returns (bool): True if |dst_path| was updated, False otherwise. - """ + Returns (bool): True if |dst_path| was updated, False otherwise. 
+ """ template_path = os.path.join(THIS_DIR, name) with open(template_path, 'r', encoding='utf8') as fd: t = string.Template(fd.read()) @@ -72,17 +72,17 @@ class Template( def maybe_update(content, dst_path): """Writes |content| to |dst_path| if |dst_path| does not already match. - This function will ensure that there is a file at |dst_path| containing - |content|. If |dst_path| already exists and contains |content|, no operation - will be performed, preserving filesystem modification times and avoiding - potential write contention. + This function will ensure that there is a file at |dst_path| containing + |content|. If |dst_path| already exists and contains |content|, no operation + will be performed, preserving filesystem modification times and avoiding + potential write contention. - Args: - content (str): The file content. - dst_path (str): The destination filesystem path. + Args: + content (str): The file content. + dst_path (str): The destination filesystem path. - Returns (bool): True if |dst_path| was updated, False otherwise. - """ + Returns (bool): True if |dst_path| was updated, False otherwise. + """ # If the path already exists and matches the new content, refrain from # writing a new one. if os.path.exists(dst_path): @@ -100,14 +100,14 @@ def maybe_update(content, dst_path): def maybe_copy(src_path, dst_path): """Writes the content of |src_path| to |dst_path| if needed. - See `maybe_update` for more information. + See `maybe_update` for more information. - Args: - src_path (str): The content source filesystem path. - dst_path (str): The destination filesystem path. + Args: + src_path (str): The content source filesystem path. + dst_path (str): The destination filesystem path. - Returns (bool): True if |dst_path| was updated, False otherwise. - """ + Returns (bool): True if |dst_path| was updated, False otherwise. + """ with open(src_path, 'r', encoding='utf-8') as fd: content = fd.read() return maybe_update(content, dst_path) @@ -116,21 +116,21 @@ def maybe_copy(src_path, dst_path): def call_if_outdated(stamp_path, stamp_version, fn): """Invokes |fn| if the stamp at |stamp_path| doesn't match |stamp_version|. - This can be used to keep a filesystem record of whether an operation has been - performed. The record is stored at |stamp_path|. To invalidate a record, - change the value of |stamp_version|. + This can be used to keep a filesystem record of whether an operation has been + performed. The record is stored at |stamp_path|. To invalidate a record, + change the value of |stamp_version|. - After |fn| completes successfully, |stamp_path| will be updated to match - |stamp_version|, preventing the same update from happening in the future. + After |fn| completes successfully, |stamp_path| will be updated to match + |stamp_version|, preventing the same update from happening in the future. - Args: - stamp_path (str): The filesystem path of the stamp file. - stamp_version (str): The desired stamp version. - fn (callable): A callable to invoke if the current stamp version doesn't - match |stamp_version|. + Args: + stamp_path (str): The filesystem path of the stamp file. + stamp_version (str): The desired stamp version. + fn (callable): A callable to invoke if the current stamp version doesn't + match |stamp_version|. - Returns (bool): True if an update occurred. - """ + Returns (bool): True if an update occurred. 
+ """ stamp_version = stamp_version.strip() if os.path.isfile(stamp_path): @@ -149,13 +149,13 @@ def call_if_outdated(stamp_path, stamp_version, fn): def _in_use(path): """Checks if a Windows file is in use. - When Windows is using an executable, it prevents other writers from - modifying or deleting that executable. We can safely test for an in-use - file by opening it in write mode and checking whether or not there was - an error. + When Windows is using an executable, it prevents other writers from + modifying or deleting that executable. We can safely test for an in-use + file by opening it in write mode and checking whether or not there was + an error. - Returns (bool): True if the file was in use, False if not. - """ + Returns (bool): True if the file was in use, False if not. + """ try: with open(path, 'r+'): return False @@ -165,7 +165,7 @@ def _in_use(path): def _toolchain_in_use(toolchain_path): """Returns (bool): True if a toolchain rooted at |path| is in use. - """ + """ # Look for Python files that may be in use. for python_dir in ( os.path.join(toolchain_path, 'python', 'bin'), # CIPD @@ -225,11 +225,11 @@ def _safe_rmtree(path): def clean_up_old_installations(skip_dir): """Removes Python installations other than |skip_dir|. - This includes an "in-use" check against the "python.exe" in a given directory - to avoid removing Python executables that are currently ruinning. We need - this because our Python bootstrap may be run after (and by) other software - that is using the bootstrapped Python! - """ + This includes an "in-use" check against the "python.exe" in a given directory + to avoid removing Python executables that are currently ruinning. We need + this because our Python bootstrap may be run after (and by) other software + that is using the bootstrapped Python! + """ root_contents = os.listdir(ROOT_DIR) for f in ('win_tools-*_bin', 'python27*_bin', 'git-*_bin', 'bootstrap-*_bin'): diff --git a/cpplint.py b/cpplint.py index a8a8761243..b1677886fe 100755 --- a/cpplint.py +++ b/cpplint.py @@ -777,16 +777,16 @@ _global_error_suppressions = {} def ParseNolintSuppressions(filename, raw_line, linenum, error): """Updates the global list of line error-suppressions. - Parses any NOLINT comments on the current line, updating the global - error_suppressions store. Reports an error if the NOLINT comment - was malformed. + Parses any NOLINT comments on the current line, updating the global + error_suppressions store. Reports an error if the NOLINT comment + was malformed. - Args: - filename: str, the name of the input file. - raw_line: str, the line of input text, with comments. - linenum: int, the number of the current line. - error: function, an error handler. - """ + Args: + filename: str, the name of the input file. + raw_line: str, the line of input text, with comments. + linenum: int, the number of the current line. + error: function, an error handler. + """ matched = Search(r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?', raw_line) if matched: if matched.group(1): @@ -810,12 +810,12 @@ def ParseNolintSuppressions(filename, raw_line, linenum, error): def ProcessGlobalSuppresions(lines): """Updates the list of global error suppressions. - Parses any lint directives in the file that have global effect. + Parses any lint directives in the file that have global effect. - Args: - lines: An array of strings, each representing a line of the file, with the - last element being empty if the file is terminated with a newline. 
- """ + Args: + lines: An array of strings, each representing a line of the file, with the + last element being empty if the file is terminated with a newline. + """ for line in lines: if _SEARCH_C_FILE.search(line): for category in _DEFAULT_C_SUPPRESSED_CATEGORIES: @@ -834,16 +834,16 @@ def ResetNolintSuppressions(): def IsErrorSuppressedByNolint(category, linenum): """Returns true if the specified error category is suppressed on this line. - Consults the global error_suppressions map populated by - ParseNolintSuppressions/ProcessGlobalSuppresions/ResetNolintSuppressions. + Consults the global error_suppressions map populated by + ParseNolintSuppressions/ProcessGlobalSuppresions/ResetNolintSuppressions. - Args: - category: str, the category of the error. - linenum: int, the current line number. - Returns: - bool, True iff the error should be suppressed due to a NOLINT comment or - global suppression. - """ + Args: + category: str, the category of the error. + linenum: int, the current line number. + Returns: + bool, True iff the error should be suppressed due to a NOLINT comment or + global suppression. + """ return (_global_error_suppressions.get(category, False) or linenum in _error_suppressions.get(category, set()) or linenum in _error_suppressions.get(None, set())) @@ -862,16 +862,16 @@ def Match(pattern, s): def ReplaceAll(pattern, rep, s): """Replaces instances of pattern in a string with a replacement. - The compiled regex is kept in a cache shared by Match and Search. + The compiled regex is kept in a cache shared by Match and Search. - Args: - pattern: regex pattern - rep: replacement text - s: search string + Args: + pattern: regex pattern + rep: replacement text + s: search string - Returns: - string with replacements made (or original string if no replacements) - """ + Returns: + string with replacements made (or original string if no replacements) + """ if pattern not in _regexp_compile_cache: _regexp_compile_cache[pattern] = sre_compile.compile(pattern) return _regexp_compile_cache[pattern].sub(rep, s) @@ -892,15 +892,15 @@ def _IsSourceExtension(s): class _IncludeState(object): """Tracks line numbers for includes, and the order in which includes appear. - include_list contains list of lists of (header, line number) pairs. - It's a lists of lists rather than just one flat list to make it - easier to update across preprocessor boundaries. + include_list contains list of lists of (header, line number) pairs. + It's a lists of lists rather than just one flat list to make it + easier to update across preprocessor boundaries. - Call CheckNextIncludeOrder() once for each header in the file, passing - in the type constants defined above. Calls in an illegal order will - raise an _IncludeError with an appropriate error message. + Call CheckNextIncludeOrder() once for each header in the file, passing + in the type constants defined above. Calls in an illegal order will + raise an _IncludeError with an appropriate error message. - """ + """ # self._section will move monotonically through this set. If it ever # needs to move backwards, CheckNextIncludeOrder will raise an error. _INITIAL_SECTION = 0 @@ -931,12 +931,12 @@ class _IncludeState(object): def FindHeader(self, header): """Check if a header has already been included. - Args: - header: header to check. - Returns: - Line number of previous occurrence, or -1 if the header has not - been seen before. - """ + Args: + header: header to check. 
+ Returns: + Line number of previous occurrence, or -1 if the header has not + been seen before. + """ for section_list in self.include_list: for f in section_list: if f[0] == header: @@ -946,9 +946,9 @@ class _IncludeState(object): def ResetSection(self, directive): """Reset section checking for preprocessor directive. - Args: - directive: preprocessor directive (e.g. "if", "else"). - """ + Args: + directive: preprocessor directive (e.g. "if", "else"). + """ # The name of the current section. self._section = self._INITIAL_SECTION # The path of last found header. @@ -967,29 +967,29 @@ class _IncludeState(object): def CanonicalizeAlphabeticalOrder(self, header_path): """Returns a path canonicalized for alphabetical comparison. - - replaces "-" with "_" so they both cmp the same. - - removes '-inl' since we don't require them to be after the main header. - - lowercase everything, just in case. + - replaces "-" with "_" so they both cmp the same. + - removes '-inl' since we don't require them to be after the main header. + - lowercase everything, just in case. - Args: - header_path: Path to be canonicalized. + Args: + header_path: Path to be canonicalized. - Returns: - Canonicalized path. - """ + Returns: + Canonicalized path. + """ return header_path.replace('-inl.h', '.h').replace('-', '_').lower() def IsInAlphabeticalOrder(self, clean_lines, linenum, header_path): """Check if a header is in alphabetical order with the previous header. - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - header_path: Canonicalized header to be checked. + Args: + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + header_path: Canonicalized header to be checked. - Returns: - Returns true if the header is in alphabetical order. - """ + Returns: + Returns true if the header is in alphabetical order. + """ # If previous section is different from current section, _last_header # will be reset to empty string, so it's always less than current # header. @@ -1004,17 +1004,17 @@ class _IncludeState(object): def CheckNextIncludeOrder(self, header_type): """Returns a non-empty error message if the next header is out of order. - This function also updates the internal state to be ready to check - the next include. + This function also updates the internal state to be ready to check + the next include. - Args: - header_type: One of the _XXX_HEADER constants defined above. + Args: + header_type: One of the _XXX_HEADER constants defined above. - Returns: - The empty string if the header is in the right order, or an - error message describing what's wrong. + Returns: + The empty string if the header is in the right order, or an + error message describing what's wrong. - """ + """ error_message = ( 'Found %s after %s' % (self._TYPE_NAMES[header_type], self._SECTION_NAMES[self._section])) @@ -1089,23 +1089,23 @@ class _CppLintState(object): def SetFilters(self, filters): """Sets the error-message filters. - These filters are applied when deciding whether to emit a given - error message. + These filters are applied when deciding whether to emit a given + error message. - Args: - filters: A string of comma-separated filters (eg "+whitespace/indent"). - Each filter should start with + or -; else we die. + Args: + filters: A string of comma-separated filters (eg "+whitespace/indent"). + Each filter should start with + or -; else we die. 
- Raises: - ValueError: The comma-separated filters did not all start with '+' or '-'. - E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter" - """ + Raises: + ValueError: The comma-separated filters did not all start with '+' or '-'. + E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter" + """ # Default filters always have less priority than the flag ones. self.filters = _DEFAULT_FILTERS[:] self.AddFilters(filters) def AddFilters(self, filters): - """ Adds more filters to the existing list of error-message filters. """ + """Adds more filters to the existing list of error-message filters.""" for filt in filters.split(','): clean_filt = filt.strip() if clean_filt: @@ -1117,11 +1117,11 @@ class _CppLintState(object): ' (%s does not)' % filt) def BackupFilters(self): - """ Saves the current filter list to backup storage.""" + """Saves the current filter list to backup storage.""" self._filters_backup = self.filters[:] def RestoreFilters(self): - """ Restores filters previously backed up.""" + """Restores filters previously backed up.""" self.filters = self._filters_backup[:] def ResetErrorCounts(self): @@ -1183,36 +1183,36 @@ def _Filters(): def _SetFilters(filters): """Sets the module's error-message filters. - These filters are applied when deciding whether to emit a given - error message. + These filters are applied when deciding whether to emit a given + error message. - Args: - filters: A string of comma-separated filters (eg "whitespace/indent"). - Each filter should start with + or -; else we die. - """ + Args: + filters: A string of comma-separated filters (eg "whitespace/indent"). + Each filter should start with + or -; else we die. + """ _cpplint_state.SetFilters(filters) def _AddFilters(filters): """Adds more filter overrides. - Unlike _SetFilters, this function does not reset the current list of filters - available. + Unlike _SetFilters, this function does not reset the current list of filters + available. - Args: - filters: A string of comma-separated filters (eg "whitespace/indent"). - Each filter should start with + or -; else we die. - """ + Args: + filters: A string of comma-separated filters (eg "whitespace/indent"). + Each filter should start with + or -; else we die. + """ _cpplint_state.AddFilters(filters) def _BackupFilters(): - """ Saves the current filter list to backup storage.""" + """Saves the current filter list to backup storage.""" _cpplint_state.BackupFilters() def _RestoreFilters(): - """ Restores filters previously backed up.""" + """Restores filters previously backed up.""" _cpplint_state.RestoreFilters() @@ -1230,9 +1230,9 @@ class _FunctionState(object): def Begin(self, function_name): """Start analyzing function body. - Args: - function_name: The name of the function being tracked. - """ + Args: + function_name: The name of the function being tracked. + """ self.in_a_function = True self.lines_in_function = 0 self.current_function = function_name @@ -1245,11 +1245,11 @@ class _FunctionState(object): def Check(self, error, filename, linenum): """Report if too many lines in function body. - Args: - error: The function to call with any errors found. - filename: The name of the current file. - linenum: The number of the line to check. - """ + Args: + error: The function to call with any errors found. + filename: The name of the current file. + linenum: The number of the line to check. + """ if not self.in_a_function: return @@ -1285,9 +1285,9 @@ class _IncludeError(Exception): class FileInfo(object): """Provides utility functions for filenames. 
- FileInfo provides easy access to the components of a file's path - relative to the project root. - """ + FileInfo provides easy access to the components of a file's path + relative to the project root. + """ def __init__(self, filename): self._filename = filename @@ -1298,13 +1298,13 @@ class FileInfo(object): def RepositoryName(self): r"""FullName after removing the local path to the repository. - If we have a real absolute path name here we can try to do something smart: - detecting the root of the checkout and truncating /path/to/checkout from - the name so that we get header guards that don't include things like - "C:\Documents and Settings\..." or "/home/username/..." in them and thus - people on different computers who have checked the source out to different - locations won't see bogus errors. - """ + If we have a real absolute path name here we can try to do something smart: + detecting the root of the checkout and truncating /path/to/checkout from + the name so that we get header guards that don't include things like + "C:\Documents and Settings\..." or "/home/username/..." in them and thus + people on different computers who have checked the source out to different + locations won't see bogus errors. + """ fullname = self.FullName() if os.path.exists(fullname): @@ -1348,12 +1348,12 @@ class FileInfo(object): def Split(self): """Splits the file into the directory, basename, and extension. - For 'chrome/browser/browser.cc', Split() would - return ('chrome/browser', 'browser', '.cc') + For 'chrome/browser/browser.cc', Split() would + return ('chrome/browser', 'browser', '.cc') - Returns: - A tuple of (directory, basename, extension). - """ + Returns: + A tuple of (directory, basename, extension). + """ googlename = self.RepositoryName() project, rest = os.path.split(googlename) @@ -1407,25 +1407,25 @@ def _ShouldPrintError(category, confidence, linenum): def Error(filename, linenum, category, confidence, message): """Logs the fact we've found a lint error. - We log where the error was found, and also our confidence in the error, - that is, how certain we are this is a legitimate style regression, and - not a misidentification or a use that's sometimes justified. + We log where the error was found, and also our confidence in the error, + that is, how certain we are this is a legitimate style regression, and + not a misidentification or a use that's sometimes justified. - False positives can be suppressed by the use of - "cpplint(category)" comments on the offending line. These are - parsed into _error_suppressions. + False positives can be suppressed by the use of + "cpplint(category)" comments on the offending line. These are + parsed into _error_suppressions. - Args: - filename: The name of the file containing the error. - linenum: The number of the line containing the error. - category: A string used to describe the "category" this bug - falls under: "whitespace", say, or "runtime". Categories - may have a hierarchy separated by slashes: "whitespace/indent". - confidence: A number from 1-5 representing a confidence score for - the error, with 5 meaning that we are certain of the problem, - and 1 meaning that it could be a legitimate construct. - message: The error message. - """ + Args: + filename: The name of the file containing the error. + linenum: The number of the line containing the error. + category: A string used to describe the "category" this bug + falls under: "whitespace", say, or "runtime". Categories + may have a hierarchy separated by slashes: "whitespace/indent". 
+ confidence: A number from 1-5 representing a confidence score for + the error, with 5 meaning that we are certain of the problem, + and 1 meaning that it could be a legitimate construct. + message: The error message. + """ if _ShouldPrintError(category, confidence, linenum): _cpplint_state.IncrementErrorCount(category) if _cpplint_state.output_format == 'vs7': @@ -1465,15 +1465,15 @@ _RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile(r'(\s*' + def IsCppString(line): """Does line terminate so, that the next symbol is in string constant. - This function does not consider single-line nor multi-line comments. + This function does not consider single-line nor multi-line comments. - Args: - line: is a partial line of code starting from the 0..n. + Args: + line: is a partial line of code starting from the 0..n. - Returns: - True, if next character appended to 'line' is inside a - string constant. - """ + Returns: + True, if next character appended to 'line' is inside a + string constant. + """ line = line.replace(r'\\', 'XX') # after this, \\" does not match to \" return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1 @@ -1492,12 +1492,12 @@ def CleanseRawStrings(raw_lines): (replaced by blank line) ""; - Args: - raw_lines: list of raw lines. + Args: + raw_lines: list of raw lines. - Returns: - list of lines with C++11 raw strings replaced by empty strings. - """ + Returns: + list of lines with C++11 raw strings replaced by empty strings. + """ delimiter = None lines_without_raw_strings = [] @@ -1604,12 +1604,12 @@ def RemoveMultiLineComments(filename, lines, error): def CleanseComments(line): """Removes //-comments and single-line C-style /* */ comments. - Args: - line: A line of C++ source. + Args: + line: A line of C++ source. - Returns: - The line with single-line comments removed. - """ + Returns: + The line with single-line comments removed. + """ commentpos = line.find('//') if commentpos != -1 and not IsCppString(line[:commentpos]): line = line[:commentpos].rstrip() @@ -1620,13 +1620,13 @@ def CleanseComments(line): class CleansedLines(object): """Holds 4 copies of all lines with different preprocessing applied to them. - 1) elided member contains lines without strings and comments. - 2) lines member contains lines without comments. - 3) raw_lines member contains all the lines without processing. - 4) lines_without_raw_strings member is same as raw_lines, but with C++11 raw - strings removed. - All these members are of , and of the same length. - """ + 1) elided member contains lines without strings and comments. + 2) lines member contains lines without comments. + 3) raw_lines member contains all the lines without processing. + 4) lines_without_raw_strings member is same as raw_lines, but with C++11 raw + strings removed. + All these members are of , and of the same length. + """ def __init__(self, lines): self.elided = [] self.lines = [] @@ -1648,14 +1648,14 @@ class CleansedLines(object): def _CollapseStrings(elided): """Collapses strings and chars on a line to simple "" or '' blocks. - We nix strings first so we're not fooled by text like '"http://"' + We nix strings first so we're not fooled by text like '"http://"' - Args: - elided: The line being processed. + Args: + elided: The line being processed. - Returns: - The line with collapsed strings. - """ + Returns: + The line with collapsed strings. 
+ """ if _RE_PATTERN_INCLUDE.match(elided): return elided @@ -1717,16 +1717,16 @@ class CleansedLines(object): def FindEndOfExpressionInLine(line, startpos, stack): """Find the position just after the end of current parenthesized expression. - Args: - line: a CleansedLines line. - startpos: start searching at this position. - stack: nesting stack at startpos. + Args: + line: a CleansedLines line. + startpos: start searching at this position. + stack: nesting stack at startpos. - Returns: - On finding matching end: (index just after matching end, None) - On finding an unclosed expression: (-1, None) - Otherwise: (-1, new stack at end of this line) - """ + Returns: + On finding matching end: (index just after matching end, None) + On finding an unclosed expression: (-1, None) + Otherwise: (-1, new stack at end of this line) + """ for i in range(startpos, len(line)): char = line[i] if char in '([{': @@ -1796,25 +1796,25 @@ def FindEndOfExpressionInLine(line, startpos, stack): def CloseExpression(clean_lines, linenum, pos): """If input points to ( or { or [ or <, finds the position that closes it. - If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the - linenum/pos that correspond to the closing of the expression. + If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the + linenum/pos that correspond to the closing of the expression. - TODO(unknown): cpplint spends a fair bit of time matching parentheses. - Ideally we would want to index all opening and closing parentheses once - and have CloseExpression be just a simple lookup, but due to preprocessor - tricks, this is not so easy. + TODO(unknown): cpplint spends a fair bit of time matching parentheses. + Ideally we would want to index all opening and closing parentheses once + and have CloseExpression be just a simple lookup, but due to preprocessor + tricks, this is not so easy. - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - pos: A position on the line. + Args: + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + pos: A position on the line. - Returns: - A tuple (line, linenum, pos) pointer *past* the closing brace, or - (line, len(lines), -1) if we never find a close. Note we ignore - strings and comments when matching; and the line we return is the - 'cleansed' line at linenum. - """ + Returns: + A tuple (line, linenum, pos) pointer *past* the closing brace, or + (line, len(lines), -1) if we never find a close. Note we ignore + strings and comments when matching; and the line we return is the + 'cleansed' line at linenum. + """ line = clean_lines.elided[linenum] if (line[pos] not in '({[<') or Match(r'<[<=]', line[pos:]): @@ -1840,19 +1840,19 @@ def CloseExpression(clean_lines, linenum, pos): def FindStartOfExpressionInLine(line, endpos, stack): """Find position at the matching start of current expression. - This is almost the reverse of FindEndOfExpressionInLine, but note - that the input position and returned position differs by 1. + This is almost the reverse of FindEndOfExpressionInLine, but note + that the input position and returned position differs by 1. - Args: - line: a CleansedLines line. - endpos: start searching at this position. - stack: nesting stack at endpos. + Args: + line: a CleansedLines line. + endpos: start searching at this position. + stack: nesting stack at endpos. 
- Returns: - On finding matching start: (index at matching start, None) - On finding an unclosed expression: (-1, None) - Otherwise: (-1, new stack at beginning of this line) - """ + Returns: + On finding matching start: (index at matching start, None) + On finding an unclosed expression: (-1, None) + Otherwise: (-1, new stack at beginning of this line) + """ i = endpos while i >= 0: char = line[i] @@ -1916,20 +1916,20 @@ def FindStartOfExpressionInLine(line, endpos, stack): def ReverseCloseExpression(clean_lines, linenum, pos): """If input points to ) or } or ] or >, finds the position that opens it. - If lines[linenum][pos] points to a ')' or '}' or ']' or '>', finds the - linenum/pos that correspond to the opening of the expression. + If lines[linenum][pos] points to a ')' or '}' or ']' or '>', finds the + linenum/pos that correspond to the opening of the expression. - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - pos: A position on the line. + Args: + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + pos: A position on the line. - Returns: - A tuple (line, linenum, pos) pointer *at* the opening brace, or - (line, 0, -1) if we never find the matching opening brace. Note - we ignore strings and comments when matching; and the line we - return is the 'cleansed' line at linenum. - """ + Returns: + A tuple (line, linenum, pos) pointer *at* the opening brace, or + (line, 0, -1) if we never find the matching opening brace. Note + we ignore strings and comments when matching; and the line we + return is the 'cleansed' line at linenum. + """ line = clean_lines.elided[linenum] if line[pos] not in ')}]>': return (line, 0, -1) @@ -1969,12 +1969,12 @@ def CheckForCopyright(filename, lines, error): def GetIndentLevel(line): """Return the number of leading spaces in line. - Args: - line: A string to check. + Args: + line: A string to check. - Returns: - An integer count of leading spaces, possibly zero. - """ + Returns: + An integer count of leading spaces, possibly zero. + """ indent = Match(r'^( *)\S', line) if indent: return len(indent.group(1)) @@ -1985,12 +1985,12 @@ def GetIndentLevel(line): def PathSplitToList(path): """Returns the path split into a list by the separator. - Args: - path: An absolute or relative path (e.g. '/a/b/c/' or '../a') + Args: + path: An absolute or relative path (e.g. '/a/b/c/' or '../a') - Returns: - A list of path components (e.g. ['a', 'b', 'c]). - """ + Returns: + A list of path components (e.g. ['a', 'b', 'c]). + """ lst = [] while True: (head, tail) = os.path.split(path) @@ -2011,14 +2011,13 @@ def PathSplitToList(path): def GetHeaderGuardCPPVariable(filename): """Returns the CPP variable that should be used as a header guard. - Args: - filename: The name of a C++ header file. + Args: + filename: The name of a C++ header file. - Returns: - The CPP variable that should be used as a header guard in the - named file. - - """ + Returns: + The CPP variable that should be used as a header guard in the + named file. + """ # Restores original filename in case that cpplint is invoked from Emacs's # flymake. @@ -2092,14 +2091,14 @@ def GetHeaderGuardCPPVariable(filename): def CheckForHeaderGuard(filename, clean_lines, error): """Checks that the file contains a header guard. - Logs an error if no #ifndef header guard is present. For other - headers, checks that the full pathname is used. + Logs an error if no #ifndef header guard is present. 
For other + headers, checks that the full pathname is used. - Args: - filename: The name of the C++ header file. - clean_lines: A CleansedLines instance containing the file. - error: The function to call with any errors found. - """ + Args: + filename: The name of the C++ header file. + clean_lines: A CleansedLines instance containing the file. + error: The function to call with any errors found. + """ # Don't check for header guards if there are error suppression # comments somewhere in this file. @@ -2219,20 +2218,20 @@ def CheckHeaderFileIncluded(filename, include_state, error): def CheckForBadCharacters(filename, lines, error): """Logs an error for each line containing bad characters. - Two kinds of bad characters: + Two kinds of bad characters: - 1. Unicode replacement characters: These indicate that either the file - contained invalid UTF-8 (likely) or Unicode replacement characters (which - it shouldn't). Note that it's possible for this to throw off line - numbering if the invalid UTF-8 occurred adjacent to a newline. + 1. Unicode replacement characters: These indicate that either the file + contained invalid UTF-8 (likely) or Unicode replacement characters (which + it shouldn't). Note that it's possible for this to throw off line + numbering if the invalid UTF-8 occurred adjacent to a newline. - 2. NUL bytes. These are problematic for some tools. + 2. NUL bytes. These are problematic for some tools. - Args: - filename: The name of the current file. - lines: An array of strings, each representing a line of the file. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + lines: An array of strings, each representing a line of the file. + error: The function to call with any errors found. + """ for linenum, line in enumerate(lines): if u'\ufffd' in line: error( @@ -2247,11 +2246,11 @@ def CheckForBadCharacters(filename, lines, error): def CheckForNewlineAtEOF(filename, lines, error): """Logs an error if there is no newline char at the end of the file. - Args: - filename: The name of the current file. - lines: An array of strings, each representing a line of the file. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + lines: An array of strings, each representing a line of the file. + error: The function to call with any errors found. + """ # The array lines() was created by adding two newlines to the # original file (go figure), then splitting on \n. @@ -2266,20 +2265,20 @@ def CheckForNewlineAtEOF(filename, lines, error): def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error): """Logs an error if we see /* ... */ or "..." that extend past one line. - /* ... */ comments are legit inside macros, for one line. - Otherwise, we prefer // comments, so it's ok to warn about the - other. Likewise, it's ok for strings to extend across multiple - lines, as long as a line continuation character (backslash) - terminates each line. Although not currently prohibited by the C++ - style guide, it's ugly and unnecessary. We don't do well with either - in this lint program, so we warn about both. + /* ... */ comments are legit inside macros, for one line. + Otherwise, we prefer // comments, so it's ok to warn about the + other. Likewise, it's ok for strings to extend across multiple + lines, as long as a line continuation character (backslash) + terminates each line. Although not currently prohibited by the C++ + style guide, it's ugly and unnecessary. 
We don't do well with either + in this lint program, so we warn about both. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] # Remove all \\ (escaped backslashes) from the line. They are OK, and the @@ -2335,18 +2334,18 @@ _THREADING_LIST = ( def CheckPosixThreading(filename, clean_lines, linenum, error): """Checks for calls to thread-unsafe functions. - Much code has been originally written without consideration of - multi-threading. Also, engineers are relying on their old experience; - they have learned posix before threading extensions were added. These - tests guide the engineers to use thread-safe functions (when using - posix directly). + Much code has been originally written without consideration of + multi-threading. Also, engineers are relying on their old experience; + they have learned posix before threading extensions were added. These + tests guide the engineers to use thread-safe functions (when using + posix directly). - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] for single_thread_func, multithread_safe_func, pattern in _THREADING_LIST: # Additional pattern matching check to confirm that this is the @@ -2361,15 +2360,15 @@ def CheckPosixThreading(filename, clean_lines, linenum, error): def CheckVlogArguments(filename, clean_lines, linenum, error): """Checks that VLOG() is only used for defining a logging level. - For example, VLOG(2) is correct. VLOG(INFO), VLOG(WARNING), VLOG(ERROR), and - VLOG(FATAL) are not. + For example, VLOG(2) is correct. VLOG(INFO), VLOG(WARNING), VLOG(ERROR), and + VLOG(FATAL) are not. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] if Search(r'\bVLOG\((INFO|ERROR|WARNING|DFATAL|FATAL)\)', line): error( @@ -2386,19 +2385,19 @@ _RE_PATTERN_INVALID_INCREMENT = re.compile(r'^\s*\*\w+(\+\+|--);') def CheckInvalidIncrement(filename, clean_lines, linenum, error): """Checks for invalid increment *count++. - For example following function: - void increment_counter(int* count) { - *count++; - } - is invalid, because it effectively does count++, moving pointer, and should - be replaced with ++*count, (*count)++ or *count += 1. 
+ For example following function: + void increment_counter(int* count) { + *count++; + } + is invalid, because it effectively does count++, moving pointer, and should + be replaced with ++*count, (*count)++ or *count += 1. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] if _RE_PATTERN_INVALID_INCREMENT.match(line): error( @@ -2432,40 +2431,40 @@ class _BlockInfo(object): def CheckBegin(self, filename, clean_lines, linenum, error): """Run checks that applies to text up to the opening brace. - This is mostly for checking the text after the class identifier - and the "{", usually where the base class is specified. For other - blocks, there isn't much to check, so we always pass. + This is mostly for checking the text after the class identifier + and the "{", usually where the base class is specified. For other + blocks, there isn't much to check, so we always pass. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ pass def CheckEnd(self, filename, clean_lines, linenum, error): """Run checks that applies to text after the closing brace. - This is mostly used for checking end of namespace comments. + This is mostly used for checking end of namespace comments. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ pass def IsBlockInfo(self): """Returns true if this block is a _BlockInfo. - This is convenient for verifying that an object is an instance of - a _BlockInfo, but not an instance of any of the derived classes. + This is convenient for verifying that an object is an instance of + a _BlockInfo, but not an instance of any of the derived classes. - Returns: - True for this class, False for derived classes. - """ + Returns: + True for this class, False for derived classes. + """ return self.__class__ == _BlockInfo @@ -2650,54 +2649,54 @@ class NestingState(object): def SeenOpenBrace(self): """Check if we have seen the opening brace for the innermost block. - Returns: - True if we have seen the opening brace, False if the innermost - block is still expecting an opening brace. - """ + Returns: + True if we have seen the opening brace, False if the innermost + block is still expecting an opening brace. + """ return (not self.stack) or self.stack[-1].seen_open_brace def InNamespaceBody(self): """Check if we are currently one level inside a namespace body. - Returns: - True if top of the stack is a namespace block, False otherwise. 
- """ + Returns: + True if top of the stack is a namespace block, False otherwise. + """ return self.stack and isinstance(self.stack[-1], _NamespaceInfo) def InExternC(self): """Check if we are currently one level inside an 'extern "C"' block. - Returns: - True if top of the stack is an extern block, False otherwise. - """ + Returns: + True if top of the stack is an extern block, False otherwise. + """ return self.stack and isinstance(self.stack[-1], _ExternCInfo) def InClassDeclaration(self): """Check if we are currently one level inside a class or struct declaration. - Returns: - True if top of the stack is a class/struct, False otherwise. - """ + Returns: + True if top of the stack is a class/struct, False otherwise. + """ return self.stack and isinstance(self.stack[-1], _ClassInfo) def InAsmBlock(self): """Check if we are currently one level inside an inline ASM block. - Returns: - True if the top of the stack is a block containing inline ASM. - """ + Returns: + True if the top of the stack is a block containing inline ASM. + """ return self.stack and self.stack[-1].inline_asm != _NO_ASM def InTemplateArgumentList(self, clean_lines, linenum, pos): """Check if current position is inside template argument list. - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - pos: position just after the suspected template argument. - Returns: - True if (linenum, pos) is inside template arguments. - """ + Args: + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + pos: position just after the suspected template argument. + Returns: + True if (linenum, pos) is inside template arguments. + """ while linenum < clean_lines.NumLines(): # Find the earliest character that might indicate a template # argument @@ -2745,24 +2744,24 @@ class NestingState(object): def UpdatePreprocessor(self, line): """Update preprocessor stack. - We need to handle preprocessors due to classes like this: - #ifdef SWIG - struct ResultDetailsPageElementExtensionPoint { - #else - struct ResultDetailsPageElementExtensionPoint : public Extension { - #endif + We need to handle preprocessors due to classes like this: + #ifdef SWIG + struct ResultDetailsPageElementExtensionPoint { + #else + struct ResultDetailsPageElementExtensionPoint : public Extension { + #endif - We make the following assumptions (good enough for most files): - - Preprocessor condition evaluates to true from #if up to first - #else/#elif/#endif. + We make the following assumptions (good enough for most files): + - Preprocessor condition evaluates to true from #if up to first + #else/#elif/#endif. - - Preprocessor condition evaluates to false from #else/#elif up - to #endif. We still perform lint checks on these lines, but - these do not affect nesting stack. + - Preprocessor condition evaluates to false from #else/#elif up + to #endif. We still perform lint checks on these lines, but + these do not affect nesting stack. - Args: - line: current line to check. - """ + Args: + line: current line to check. + """ if Match(r'^\s*#\s*(if|ifdef|ifndef)\b', line): # Beginning of #if block, save the nesting stack here. The saved # stack will allow us to restore the parsing state in the #else @@ -2804,12 +2803,12 @@ class NestingState(object): def Update(self, filename, clean_lines, linenum, error): """Update nesting state with current line. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. 
- linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] # Remember top of the previous nesting stack. @@ -2969,9 +2968,9 @@ class NestingState(object): def InnermostClass(self): """Get class info on the top of the stack. - Returns: - A _ClassInfo object if we are inside a class, or None otherwise. - """ + Returns: + A _ClassInfo object if we are inside a class, or None otherwise. + """ for i in range(len(self.stack), 0, -1): classinfo = self.stack[i - 1] if isinstance(classinfo, _ClassInfo): @@ -2981,11 +2980,11 @@ class NestingState(object): def CheckCompletedBlocks(self, filename, error): """Checks that all classes and namespaces have been completely parsed. - Call this when all lines in a file have been processed. - Args: - filename: The name of the current file. - error: The function to call with any errors found. - """ + Call this when all lines in a file have been processed. + Args: + filename: The name of the current file. + error: The function to call with any errors found. + """ # Note: This test can result in false positives if #ifdef constructs # get in the way of brace matching. See the testBuildClass test in # cpplint_unittest.py for an example of this. @@ -3004,32 +3003,32 @@ class NestingState(object): def CheckForNonStandardConstructs(filename, clean_lines, linenum, nesting_state, error): - r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2. + """Logs an error if we see certain non-ANSI constructs ignored by gcc-2. - Complain about several constructs which gcc-2 accepts, but which are - not standard C++. Warning about these in lint is one way to ease the - transition to new compilers. - - put storage class first (e.g. "static const" instead of "const static"). - - "%lld" instead of %qd" in printf-type functions. - - "%1$d" is non-standard in printf-type functions. - - "\%" is an undefined character escape sequence. - - text after #endif is not allowed. - - invalid inner-style forward declaration. - - >? and ?= and ? and ?= and = 0: @@ -3985,12 +3984,12 @@ def GetPreviousNonBlankLine(clean_lines, linenum): def CheckBraces(filename, clean_lines, linenum, error): """Looks for misplaced braces (e.g. at the end of line). - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] # get rid of comments and strings @@ -4115,12 +4114,12 @@ def CheckBraces(filename, clean_lines, linenum, error): def CheckTrailingSemicolon(filename, clean_lines, linenum, error): """Looks for redundant trailing semicolon. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. 
+ error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] @@ -4260,12 +4259,12 @@ def CheckTrailingSemicolon(filename, clean_lines, linenum, error): def CheckEmptyBlockBody(filename, clean_lines, linenum, error): """Look for empty loop/conditional body with only a single semicolon. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ # Search for loop keywords at the beginning of the line. Because only # whitespaces are allowed before the keywords, this will also ignore most @@ -4377,12 +4376,12 @@ def CheckEmptyBlockBody(filename, clean_lines, linenum, error): def FindCheckMacro(line): """Find a replaceable CHECK-like macro. - Args: - line: line to search on. - Returns: - (macro name, start position), or (None, -1) if no replaceable - macro is found. - """ + Args: + line: line to search on. + Returns: + (macro name, start position), or (None, -1) if no replaceable + macro is found. + """ for macro in _CHECK_MACROS: i = line.find(macro) if i >= 0: @@ -4400,12 +4399,12 @@ def FindCheckMacro(line): def CheckCheck(filename, clean_lines, linenum, error): """Checks the use of CHECK and EXPECT macros. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ # Decide the set of replacement macros that should be suggested lines = clean_lines.elided @@ -4519,12 +4518,12 @@ def CheckCheck(filename, clean_lines, linenum, error): def CheckAltTokens(filename, clean_lines, linenum, error): """Check alternative keywords being used in boolean expressions. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] # Avoid preprocessor lines @@ -4553,19 +4552,19 @@ def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state, error): """Checks rules from the 'C++ style rules' section of cppguide.html. - Most of these rules are hard to test (naming, comment style), but we - do what we can. In particular we check for 2-space indents, line lengths, - tab usage, spaces inside code, etc. + Most of these rules are hard to test (naming, comment style), but we + do what we can. In particular we check for 2-space indents, line lengths, + tab usage, spaces inside code, etc. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - file_extension: The extension (without the dot) of the filename. 
- nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + file_extension: The extension (without the dot) of the filename. + nesting_state: A NestingState instance which maintains information about + the current stack of nested blocks being parsed. + error: The function to call with any errors found. + """ # Don't use "elided" lines here, otherwise we can't check commented lines. # Don't want to use "raw" either, because we don't want to check inside @@ -4680,22 +4679,22 @@ _RE_FIRST_COMPONENT = re.compile(r'^[^-_.]+') def _DropCommonSuffixes(filename): """Drops common suffixes like _test.cc or -inl.h from filename. - For example: - >>> _DropCommonSuffixes('foo/foo-inl.h') - 'foo/foo' - >>> _DropCommonSuffixes('foo/bar/foo.cc') - 'foo/bar/foo' - >>> _DropCommonSuffixes('foo/foo_internal.h') - 'foo/foo' - >>> _DropCommonSuffixes('foo/foo_unusualinternal.h') - 'foo/foo_unusualinternal' + For example: + >>> _DropCommonSuffixes('foo/foo-inl.h') + 'foo/foo' + >>> _DropCommonSuffixes('foo/bar/foo.cc') + 'foo/bar/foo' + >>> _DropCommonSuffixes('foo/foo_internal.h') + 'foo/foo' + >>> _DropCommonSuffixes('foo/foo_unusualinternal.h') + 'foo/foo_unusualinternal' - Args: - filename: The input filename. + Args: + filename: The input filename. - Returns: - The filename with the common suffix removed. - """ + Returns: + The filename with the common suffix removed. + """ for suffix in ('test.cc', 'regtest.cc', 'unittest.cc', 'inl.h', 'impl.h', 'internal.h'): if (filename.endswith(suffix) and len(filename) > len(suffix) @@ -4707,27 +4706,27 @@ def _DropCommonSuffixes(filename): def _ClassifyInclude(fileinfo, include, is_system): """Figures out what kind of header 'include' is. - Args: - fileinfo: The current file cpplint is running over. A FileInfo instance. - include: The path to a #included file. - is_system: True if the #include used <> rather than "". + Args: + fileinfo: The current file cpplint is running over. A FileInfo instance. + include: The path to a #included file. + is_system: True if the #include used <> rather than "". - Returns: - One of the _XXX_HEADER constants. + Returns: + One of the _XXX_HEADER constants. - For example: - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'stdio.h', True) - _C_SYS_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True) - _CPP_SYS_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False) - _LIKELY_MY_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'), - ... 'bar/foo_other_ext.h', False) - _POSSIBLE_MY_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/bar.h', False) - _OTHER_HEADER - """ + For example: + >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'stdio.h', True) + _C_SYS_HEADER + >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True) + _CPP_SYS_HEADER + >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False) + _LIKELY_MY_HEADER + >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'), + ... 'bar/foo_other_ext.h', False) + _POSSIBLE_MY_HEADER + >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/bar.h', False) + _OTHER_HEADER + """ # This is a list of all standard c++ header files, except # those already checked for above. 
is_cpp_h = include in _CPP_HEADERS
@@ -4766,17 +4765,17 @@ def _ClassifyInclude(fileinfo, include, is_system):
def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
"""Check rules that are applicable to #include lines.

- Strings on #include lines are NOT removed from elided line, to make
- certain tasks easier. However, to prevent false positives, checks
- applicable to #include lines in CheckLanguage must be put here.
+ Strings on #include lines are NOT removed from elided line, to make
+ certain tasks easier. However, to prevent false positives, checks
+ applicable to #include lines in CheckLanguage must be put here.

- Args:
- filename: The name of the current file.
- clean_lines: A CleansedLines instance containing the file.
- linenum: The number of the line to check.
- include_state: An _IncludeState instance in which the headers are inserted.
- error: The function to call with any errors found.
- """
+ Args:
+ filename: The name of the current file.
+ clean_lines: A CleansedLines instance containing the file.
+ linenum: The number of the line to check.
+ include_state: An _IncludeState instance in which the headers are inserted.
+ error: The function to call with any errors found.
+ """
fileinfo = FileInfo(filename)
line = clean_lines.lines[linenum]
@@ -4840,25 +4839,25 @@ def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
def _GetTextInside(text, start_pattern):
- r"""Retrieves all the text between matching open and close parentheses.
+ r"""Retrieves all the text between matching open and close parentheses.

- Given a string of lines and a regular expression string, retrieve all the text
- following the expression and between opening punctuation symbols like
- (, [, or {, and the matching close-punctuation symbol. This properly nested
- occurrences of the punctuations, so for the text like
- printf(a(), b(c()));
- a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
- start_pattern must match string having an open punctuation symbol at the end.
+ Given a string of lines and a regular expression string, retrieve all the text
+ following the expression and between opening punctuation symbols like
+ (, [, or {, and the matching close-punctuation symbol. This properly handles
+ nested occurrences of the punctuation, so for text like
+ printf(a(), b(c()));
+ a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
+ start_pattern must match a string having an open punctuation symbol at the end.

- Args:
- text: The lines to extract text. Its comments and strings must be elided.
- It can be single line and can span multiple lines.
- start_pattern: The regexp string indicating where to start extracting
- the text.
- Returns:
- The extracted text.
- None if either the opening string or ending punctuation could not be found.
- """
+ Args:
+ text: The lines to extract text. Its comments and strings must be elided.
+ It can be single line and can span multiple lines.
+ start_pattern: The regexp string indicating where to start extracting
+ the text.
+ Returns:
+ The extracted text.
+ None if either the opening string or ending punctuation could not be found.
+ """
# TODO(unknown): Audit cpplint.py to see what places could be profitably
# rewritten to use _GetTextInside (and use inferior regexp matching today).
@@ -4929,19 +4928,19 @@ def CheckLanguage(filename, clean_lines, linenum, file_extension,
include_state, nesting_state, error):
"""Checks rules from the 'C++ language rules' section of cppguide.html.
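
The balanced-punctuation scan that _GetTextInside's docstring describes can be sketched in a few lines. This is a simplified stand-in for illustration, not the patched implementation: it tracks a single punctuation pair with a depth counter, where cpplint keeps a full stack of openers.

    import re

    _MATCHING = {'(': ')', '[': ']', '{': '}'}

    def get_text_inside_sketch(text, start_pattern):
        # Locate the opening punctuation right after the start pattern.
        match = re.search(start_pattern, text)
        if not match or text[match.end() - 1] not in _MATCHING:
            return None
        opener = text[match.end() - 1]
        closer = _MATCHING[opener]
        depth = 1
        # Walk forward until the matching closer brings depth back to zero.
        for i in range(match.end(), len(text)):
            if text[i] == opener:
                depth += 1
            elif text[i] == closer:
                depth -= 1
                if depth == 0:
                    return text[match.end():i]
        return None

    # get_text_inside_sketch('printf(a(), b(c()));', r'printf\(')
    # returns 'a(), b(c())', matching the docstring's example.
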
- Some of these rules are hard to test (function overloading, using - uint32 inappropriately), but we do the best we can. + Some of these rules are hard to test (function overloading, using + uint32 inappropriately), but we do the best we can. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - file_extension: The extension (without the dot) of the filename. - include_state: An _IncludeState instance in which the headers are inserted. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + file_extension: The extension (without the dot) of the filename. + include_state: An _IncludeState instance in which the headers are inserted. + nesting_state: A NestingState instance which maintains information about + the current stack of nested blocks being parsed. + error: The function to call with any errors found. + """ # If the line is empty or consists of entirely a comment, no need to # check it. line = clean_lines.elided[linenum] @@ -5097,12 +5096,12 @@ def CheckLanguage(filename, clean_lines, linenum, file_extension, include_state, def CheckGlobalStatic(filename, clean_lines, linenum, error): """Check for unsafe global or static objects. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] # Match two lines at a time to support multiline declarations @@ -5158,12 +5157,12 @@ def CheckGlobalStatic(filename, clean_lines, linenum, error): def CheckPrintf(filename, clean_lines, linenum, error): """Check for printf related issues. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] # When snprintf is used, the second argument shouldn't be a literal. @@ -5188,13 +5187,13 @@ def CheckPrintf(filename, clean_lines, linenum, error): def IsDerivedFunction(clean_lines, linenum): """Check if current line contains an inherited function. - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - Returns: - True if current line contains a function with "override" - virt-specifier. - """ + Args: + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + Returns: + True if current line contains a function with "override" + virt-specifier. 
+ """ # Scan back a few lines for start of current function for i in range(linenum, max(-1, linenum - 10), -1): match = Match(r'^([^()]*\w+)\(', clean_lines.elided[i]) @@ -5210,12 +5209,12 @@ def IsDerivedFunction(clean_lines, linenum): def IsOutOfLineMethodDefinition(clean_lines, linenum): """Check if current line contains an out-of-line method definition. - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - Returns: - True if current line contains an out-of-line method definition. - """ + Args: + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + Returns: + True if current line contains an out-of-line method definition. + """ # Scan back a few lines for start of current function for i in range(linenum, max(-1, linenum - 10), -1): if Match(r'^([^()]*\w+)\(', clean_lines.elided[i]): @@ -5227,13 +5226,13 @@ def IsOutOfLineMethodDefinition(clean_lines, linenum): def IsInitializerList(clean_lines, linenum): """Check if current line is inside constructor initializer list. - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - Returns: - True if current line appears to be inside constructor initializer - list, False otherwise. - """ + Args: + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + Returns: + True if current line appears to be inside constructor initializer + list, False otherwise. + """ for i in range(linenum, 1, -1): line = clean_lines.elided[i] if i == linenum: @@ -5271,17 +5270,17 @@ def CheckForNonConstReference(filename, clean_lines, linenum, nesting_state, error): """Check for non-const references. - Separate from CheckLanguage since it scans backwards from current - line, instead of scanning forward. + Separate from CheckLanguage since it scans backwards from current + line, instead of scanning forward. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + nesting_state: A NestingState instance which maintains information about + the current stack of nested blocks being parsed. + error: The function to call with any errors found. + """ # Do nothing if there is no '&' on current line. line = clean_lines.elided[linenum] if '&' not in line: @@ -5411,12 +5410,12 @@ def CheckForNonConstReference(filename, clean_lines, linenum, nesting_state, def CheckCasts(filename, clean_lines, linenum, error): """Various cast related checks. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] # Check to see if they're using an conversion function cast. 
@@ -5532,19 +5531,19 @@ def CheckCasts(filename, clean_lines, linenum, error): def CheckCStyleCast(filename, clean_lines, linenum, cast_type, pattern, error): """Checks for a C-style cast by looking for the pattern. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - cast_type: The string for the C++ cast to recommend. This is either - reinterpret_cast, static_cast, or const_cast, depending. - pattern: The regular expression used to find C-style casts. - error: The function to call with any errors found. + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + cast_type: The string for the C++ cast to recommend. This is either + reinterpret_cast, static_cast, or const_cast, depending. + pattern: The regular expression used to find C-style casts. + error: The function to call with any errors found. - Returns: - True if an error was emitted. - False otherwise. - """ + Returns: + True if an error was emitted. + False otherwise. + """ line = clean_lines.elided[linenum] match = Search(pattern, line) if not match: @@ -5586,14 +5585,14 @@ def CheckCStyleCast(filename, clean_lines, linenum, cast_type, pattern, error): def ExpectingFunctionArgs(clean_lines, linenum): """Checks whether where function type arguments are expected. - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. + Args: + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. - Returns: - True if the line at 'linenum' is inside something that expects arguments - of function types. - """ + Returns: + True if the line at 'linenum' is inside something that expects arguments + of function types. + """ line = clean_lines.elided[linenum] return (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or _TYPE_TRAITS_RE.search(line) @@ -5720,32 +5719,32 @@ for _header, _templates in _HEADERS_CONTAINING_TEMPLATES: def FilesBelongToSameModule(filename_cc, filename_h): """Check if these two filenames belong to the same module. - The concept of a 'module' here is a as follows: - foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the - same 'module' if they are in the same directory. - some/path/public/xyzzy and some/path/internal/xyzzy are also considered - to belong to the same module here. + The concept of a 'module' here is a as follows: + foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the + same 'module' if they are in the same directory. + some/path/public/xyzzy and some/path/internal/xyzzy are also considered + to belong to the same module here. - If the filename_cc contains a longer path than the filename_h, for example, - '/absolute/path/to/base/sysinfo.cc', and this file would include - 'base/sysinfo.h', this function also produces the prefix needed to open the - header. This is used by the caller of this function to more robustly open the - header file. We don't have access to the real include paths in this context, - so we need this guesswork here. + If the filename_cc contains a longer path than the filename_h, for example, + '/absolute/path/to/base/sysinfo.cc', and this file would include + 'base/sysinfo.h', this function also produces the prefix needed to open the + header. This is used by the caller of this function to more robustly open the + header file. 
We don't have access to the real include paths in this context,
+ so we need this guesswork here.

- Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
- according to this implementation. Because of this, this function gives
- some false positives. This should be sufficiently rare in practice.
+ Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
+ according to this implementation. Because of this, this function gives
+ some false positives. This should be sufficiently rare in practice.

- Args:
- filename_cc: is the path for the .cc file
- filename_h: is the path for the header path
+ Args:
+ filename_cc: is the path for the .cc file
+ filename_h: is the path for the header path

- Returns:
- Tuple with a bool and a string:
- bool: True if filename_cc and filename_h belong to the same module.
- string: the additional prefix needed to open the header file.
- """
+ Returns:
+ Tuple with a bool and a string:
+ bool: True if filename_cc and filename_h belong to the same module.
+ string: the additional prefix needed to open the header file.
+ """
fileinfo = FileInfo(filename_cc)
if not fileinfo.IsSource():
@@ -5775,14 +5774,14 @@ def FilesBelongToSameModule(filename_cc, filename_h):
def UpdateIncludeState(filename, include_dict, io=codecs):
"""Fill up the include_dict with new includes found from the file.

- Args:
- filename: the name of the header to read.
- include_dict: a dictionary in which the headers are inserted.
- io: The io factory to use to read the file. Provided for testability.
+ Args:
+ filename: the name of the header to read.
+ include_dict: a dictionary in which the headers are inserted.
+ io: The io factory to use to read the file. Provided for testability.

- Returns:
- True if a header was successfully added. False otherwise.
- """
+ Returns:
+ True if a header was successfully added. False otherwise.
+ """
headerfile = None
try:
headerfile = io.open(filename, 'r', 'utf8', 'replace')
@@ -5818,20 +5817,20 @@ def CheckForIncludeWhatYouUse(filename,
io=codecs):
"""Reports for missing stl includes.

- This function will output warnings to make sure you are including the headers
- necessary for the stl containers and functions that you use. We only give one
- reason to include a header. For example, if you use both equal_to<> and
- less<> in a .h file, only one (the latter in the file) of these will be
- reported as a reason to include the <functional>.
+ This function will output warnings to make sure you are including the headers
+ necessary for the stl containers and functions that you use. We only give one
+ reason to include a header. For example, if you use both equal_to<> and
+ less<> in a .h file, only one (the latter in the file) of these will be
+ reported as a reason to include the <functional>.

- Args:
- filename: The name of the current file.
- clean_lines: A CleansedLines instance containing the file.
- include_state: An _IncludeState instance.
- error: The function to call with any errors found.
- io: The IO factory to use to read the header file. Provided for unittest
- injection.
- """
+ Args:
+ filename: The name of the current file.
+ clean_lines: A CleansedLines instance containing the file.
+ include_state: An _IncludeState instance.
+ error: The function to call with any errors found.
+ io: The IO factory to use to read the header file. Provided for unittest
+ injection.
+ """
# A map of header name to linenumber and the template entity.
# Example of required: { '<functional>': (1219, 'less<>') }
required = {}
@@ -5914,15 +5913,15 @@ _RE_PATTERN_EXPLICIT_MAKEPAIR = re.compile(r'\bmake_pair\s*<')
def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error):
"""Check that make_pair's template arguments are deduced.

- G++ 4.6 in C++11 mode fails badly if make_pair's template arguments are
- specified explicitly, and such use isn't intended in any case.
+ G++ 4.6 in C++11 mode fails badly if make_pair's template arguments are
+ specified explicitly, and such use isn't intended in any case.

- Args:
- filename: The name of the current file.
- clean_lines: A CleansedLines instance containing the file.
- linenum: The number of the line to check.
- error: The function to call with any errors found.
- """
+ Args:
+ filename: The name of the current file.
+ clean_lines: A CleansedLines instance containing the file.
+ linenum: The number of the line to check.
+ error: The function to call with any errors found.
+ """
line = clean_lines.elided[linenum]
match = _RE_PATTERN_EXPLICIT_MAKEPAIR.search(line)
if match:
@@ -5939,12 +5938,12 @@ def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error):
def CheckRedundantVirtual(filename, clean_lines, linenum, error):
"""Check if line contains a redundant "virtual" function-specifier.

- Args:
- filename: The name of the current file.
- clean_lines: A CleansedLines instance containing the file.
- linenum: The number of the line to check.
- error: The function to call with any errors found.
- """
+ Args:
+ filename: The name of the current file.
+ clean_lines: A CleansedLines instance containing the file.
+ linenum: The number of the line to check.
+ error: The function to call with any errors found.
+ """
# Look for "virtual" on current line.
line = clean_lines.elided[linenum]
virtual = Match(r'^(.*)(\bvirtual\b)(.*)$', line)
@@ -6004,12 +6003,12 @@ def CheckRedundantVirtual(filename, clean_lines, linenum, error):
def CheckRedundantOverrideOrFinal(filename, clean_lines, linenum, error):
"""Check if line contains a redundant "override" or "final" virt-specifier.

- Args:
- filename: The name of the current file.
- clean_lines: A CleansedLines instance containing the file.
- linenum: The number of the line to check.
- error: The function to call with any errors found.
- """
+ Args:
+ filename: The name of the current file.
+ clean_lines: A CleansedLines instance containing the file.
+ linenum: The number of the line to check.
+ error: The function to call with any errors found.
+ """
# Look for closing parenthesis nearby. We need one to confirm where
# the declarator ends and where the virt-specifier starts to avoid
# false positives.
@@ -6035,12 +6034,12 @@ def CheckRedundantOverrideOrFinal(filename, clean_lines, linenum, error):
def IsBlockInNameSpace(nesting_state, is_forward_declaration):
"""Checks that the new block is directly in a namespace.

- Args:
- nesting_state: The _NestingState object that contains info about our state.
- is_forward_declaration: If the class is a forward declared class.
- Returns:
- Whether or not the new block is directly in a namespace.
- """
+ Args:
+ nesting_state: The _NestingState object that contains info about our state.
+ is_forward_declaration: If the class is a forward declared class.
+ Returns:
+ Whether or not the new block is directly in a namespace.
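
CheckMakePairUsesDeduction keys off the _RE_PATTERN_EXPLICIT_MAKEPAIR pattern shown in the hunk above; a quick standalone demonstration of the pattern (the sample lines are invented for illustration):

    import re

    explicit_makepair = re.compile(r'\bmake_pair\s*<')

    for line in ['m.insert(make_pair<int, int>(1, 2));',  # flagged
                 'm.insert(std::make_pair(1, 2));']:      # deduced, fine
        if explicit_makepair.search(line):
            print('Omit template arguments from make_pair:', line)
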
+ """ if is_forward_declaration: if len(nesting_state.stack) >= 1 and (isinstance( nesting_state.stack[-1], _NamespaceInfo)): @@ -6057,18 +6056,18 @@ def ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item, raw_lines_no_comments, linenum): """This method determines if we should apply our namespace indentation check. - Args: - nesting_state: The current nesting state. - is_namespace_indent_item: If we just put a new class on the stack, True. - If the top of the stack is not a class, or we did not recently - add the class, False. - raw_lines_no_comments: The lines without the comments. - linenum: The current line number we are processing. + Args: + nesting_state: The current nesting state. + is_namespace_indent_item: If we just put a new class on the stack, True. + If the top of the stack is not a class, or we did not recently + add the class, False. + raw_lines_no_comments: The lines without the comments. + linenum: The current line number we are processing. - Returns: - True if we should apply our namespace indentation check. Currently, it - only works for classes and namespaces inside of a namespace. - """ + Returns: + True if we should apply our namespace indentation check. Currently, it + only works for classes and namespaces inside of a namespace. + """ is_forward_declaration = IsForwardClassDeclaration(raw_lines_no_comments, linenum) @@ -6105,22 +6104,22 @@ def ProcessLine(filename, extra_check_functions=[]): """Processes a single line in the file. - Args: - filename: Filename of the file that is being processed. - file_extension: The extension (dot not included) of the file. - clean_lines: An array of strings, each representing a line of the file, - with comments stripped. - line: Number of line being processed. - include_state: An _IncludeState instance in which the headers are inserted. - function_state: A _FunctionState instance which counts function lines, etc. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: A callable to which errors are reported, which takes 4 arguments: - filename, line number, error level, and message - extra_check_functions: An array of additional check functions that will be - run on each source line. Each function takes 4 - arguments: filename, clean_lines, line, error - """ + Args: + filename: Filename of the file that is being processed. + file_extension: The extension (dot not included) of the file. + clean_lines: An array of strings, each representing a line of the file, + with comments stripped. + line: Number of line being processed. + include_state: An _IncludeState instance in which the headers are inserted. + function_state: A _FunctionState instance which counts function lines, etc. + nesting_state: A NestingState instance which maintains information about + the current stack of nested blocks being parsed. + error: A callable to which errors are reported, which takes 4 arguments: + filename, line number, error level, and message + extra_check_functions: An array of additional check functions that will be + run on each source line. Each function takes 4 arguments: filename, + clean_lines, line, error + """ raw_lines = clean_lines.raw_lines ParseNolintSuppressions(filename, raw_lines[line], line, error) nesting_state.Update(filename, clean_lines, line, error) @@ -6149,12 +6148,12 @@ def ProcessLine(filename, def FlagCxx11Features(filename, clean_lines, linenum, error): """Flag those c++11 features that we only allow in certain places. 
- Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line) @@ -6203,12 +6202,12 @@ def FlagCxx11Features(filename, clean_lines, linenum, error): def FlagCxx14Features(filename, clean_lines, linenum, error): """Flag those C++14 features that we restrict. - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ + Args: + filename: The name of the current file. + clean_lines: A CleansedLines instance containing the file. + linenum: The number of the line to check. + error: The function to call with any errors found. + """ line = clean_lines.elided[linenum] include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line) @@ -6226,17 +6225,17 @@ def ProcessFileData(filename, extra_check_functions=[]): """Performs lint checks and reports any errors to the given error function. - Args: - filename: Filename of the file that is being processed. - file_extension: The extension (dot not included) of the file. - lines: An array of strings, each representing a line of the file, with the - last element being empty if the file is terminated with a newline. - error: A callable to which errors are reported, which takes 4 arguments: - filename, line number, error level, and message - extra_check_functions: An array of additional check functions that will be - run on each source line. Each function takes 4 - arguments: filename, clean_lines, line, error - """ + Args: + filename: Filename of the file that is being processed. + file_extension: The extension (dot not included) of the file. + lines: An array of strings, each representing a line of the file, with the + last element being empty if the file is terminated with a newline. + error: A callable to which errors are reported, which takes 4 arguments: + filename, line number, error level, and message + extra_check_functions: An array of additional check functions that will be + run on each source line. Each function takes 4 arguments: filename, + clean_lines, line, error + """ lines = (['// marker so line numbers and indices both start at 1'] + lines + ['// marker so line numbers end in a known way']) @@ -6274,14 +6273,14 @@ def ProcessFileData(filename, def ProcessConfigOverrides(filename): - """ Loads the configuration files and processes the config overrides. + """Loads the configuration files and processes the config overrides. - Args: - filename: The name of the file being processed by the linter. + Args: + filename: The name of the file being processed by the linter. - Returns: - False if the current |filename| should not be processed further. - """ + Returns: + False if the current |filename| should not be processed further. + """ abs_filename = os.path.abspath(filename) cfg_filters = [] @@ -6355,16 +6354,16 @@ def ProcessConfigOverrides(filename): def ProcessFile(filename, vlevel, extra_check_functions=[]): """Does google-lint on a single file. - Args: - filename: The name of the file to parse. + Args: + filename: The name of the file to parse. 
- vlevel: The level of errors to report. Every error of confidence - >= verbose_level will be reported. 0 is a good default. + vlevel: The level of errors to report. Every error of confidence + >= verbose_level will be reported. 0 is a good default. - extra_check_functions: An array of additional check functions that will be - run on each source line. Each function takes 4 - arguments: filename, clean_lines, line, error - """ + extra_check_functions: An array of additional check functions that will be + run on each source line. Each function takes 4 arguments: filename, + clean_lines, line, error + """ _SetVerboseLevel(vlevel) _BackupFilters() @@ -6443,9 +6442,9 @@ def ProcessFile(filename, vlevel, extra_check_functions=[]): def PrintUsage(message): """Prints a brief usage string and exits, optionally with an error message. - Args: - message: The optional error message. - """ + Args: + message: The optional error message. + """ sys.stderr.write(_USAGE) if message: sys.exit('\nFATAL ERROR: ' + message) @@ -6456,8 +6455,8 @@ def PrintUsage(message): def PrintCategories(): """Prints a list of all the error-categories used by error messages. - These are the categories used to filter messages via --filter. - """ + These are the categories used to filter messages via --filter. + """ sys.stderr.write(''.join(' %s\n' % cat for cat in _ERROR_CATEGORIES)) sys.exit(0) @@ -6465,14 +6464,14 @@ def PrintCategories(): def ParseArguments(args): """Parses the command line arguments. - This may set the output format and verbosity level as side-effects. + This may set the output format and verbosity level as side-effects. - Args: - args: The command line arguments: + Args: + args: The command line arguments: - Returns: - The list of filenames to lint. - """ + Returns: + The list of filenames to lint. + """ try: (opts, filenames) = getopt.getopt( args, diff --git a/detect_host_arch.py b/detect_host_arch.py index 266b457bf7..317b12a792 100755 --- a/detect_host_arch.py +++ b/detect_host_arch.py @@ -59,7 +59,7 @@ def HostArch(): def DoMain(_): """Hook to be called from gyp without starting a separate python - interpreter.""" + interpreter.""" return HostArch() diff --git a/download_from_google_storage.py b/download_from_google_storage.py index 1952815bda..f7e74e2bf1 100755 --- a/download_from_google_storage.py +++ b/download_from_google_storage.py @@ -48,7 +48,7 @@ class InvalidPlatformError(Exception): def GetNormalizedPlatform(): """Returns the result of sys.platform accounting for cygwin. - Under cygwin, this will always return "win32" like the native Python.""" + Under cygwin, this will always return "win32" like the native Python.""" if sys.platform == 'cygwin': return 'win32' return sys.platform @@ -57,11 +57,11 @@ def GetNormalizedPlatform(): # Common utilities class Gsutil(object): """Call gsutil with some predefined settings. This is a convenience object, - and is also immutable. + and is also immutable. - HACK: This object is used directly by the external script - `/win_toolchain/get_toolchain_if_necessary.py` - """ + HACK: This object is used directly by the external script + `/win_toolchain/get_toolchain_if_necessary.py` + """ MAX_TRIES = 5 RETRY_BASE_DELAY = 5.0 @@ -402,18 +402,18 @@ class PrinterThread(threading.Thread): def _data_exists(input_sha1_sum, output_filename, extract): """Returns True if the data exists locally and matches the sha1. - This conservatively returns False for error cases. + This conservatively returns False for error cases. 
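
In the unextracted case, the local-data check that _data_exists documents amounts to hashing the file and comparing digests. A minimal sketch of that step (helper name hypothetical), mirroring the conservative return-False-on-error behavior by treating a missing file as a mismatch:

    import hashlib
    import os

    def local_sha1_matches(input_sha1_sum, output_filename):
        # Missing file: conservatively report no match.
        if not os.path.exists(output_filename):
            return False
        digest = hashlib.sha1()
        with open(output_filename, 'rb') as f:
            for chunk in iter(lambda: f.read(1024 * 1024), b''):
                digest.update(chunk)
        return digest.hexdigest() == input_sha1_sum
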
- Args:
- input_sha1_sum: Expected sha1 stored on disk.
- output_filename: The file to potentially download later. Its sha1 will be
- compared to input_sha1_sum.
- extract: Whether or not a downloaded file should be extracted. If the file
- is not extracted, this just compares the sha1 of the file. If the file
- is to be extracted, this only compares the sha1 of the target archive if
- the target directory already exists. The content of the target directory
- is not checked.
- """
+ Args:
+ input_sha1_sum: Expected sha1 stored on disk.
+ output_filename: The file to potentially download later. Its sha1 will
+ be compared to input_sha1_sum.
+ extract: Whether or not a downloaded file should be extracted. If the
+ file is not extracted, this just compares the sha1 of the file. If
+ the file is to be extracted, this only compares the sha1 of the
+ target archive if the target directory already exists. The content
+ of the target directory is not checked.
+ """
extract_dir = None
if extract:
if not output_filename.endswith('.tar.gz'):
diff --git a/fetch.py b/fetch.py
index f20b1e8f80..8ae37e3145 100755
--- a/fetch.py
+++ b/fetch.py
@@ -6,7 +6,7 @@
Tool to perform checkouts in one easy command line!
Usage:
- fetch [--property=value [--property2=value2 ...]]
+ fetch [--property=value [--property2=value2 ...]]
This script is a wrapper around various version control and repository
checkout commands. It requires a |config| name, fetches data from that
@@ -37,13 +37,13 @@ SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))
class Checkout(object):
"""Base class for implementing different types of checkouts.

- Attributes:
- |base|: the absolute path of the directory in which this script is run.
- |spec|: the spec for this checkout as returned by the config. Different
- subclasses will expect different keys in this dictionary.
- |root|: the directory into which the checkout will be performed, as returned
- by the config. This is a relative path from |base|.
- """
+ Attributes:
+ |base|: the absolute path of the directory in which this script is run.
+ |spec|: the spec for this checkout as returned by the config. Different
+ subclasses will expect different keys in this dictionary.
+ |root|: the directory into which the checkout will be performed, as
+ returned by the config. This is a relative path from |base|.
+ """
def __init__(self, options, spec, root):
self.base = os.getcwd()
self.options = options
@@ -51,7 +51,7 @@ class Checkout(object):
self.root = root
def exists(self):
- """Check does this checkout already exist on desired location"""
+ """Checks whether this checkout already exists at the desired location."""
def init(self):
pass
@@ -239,7 +239,7 @@ def handle_args(argv):
def run_config_fetch(config, props, aliased=False):
"""Invoke a config's fetch method with the passed-through args
- and return its json output as a python object."""
+ and return its json output as a python object."""
config_path = os.path.abspath(
os.path.join(SCRIPT_PATH, 'fetch_configs', config))
if not os.path.exists(config_path + '.py'):
@@ -264,12 +264,12 @@ def run_config_fetch(config, props, aliased=False):
def run(options, spec, root):
"""Perform a checkout with the given type and configuration.

- Args:
- options: Options instance.
- spec: Checkout configuration returned by the the config's fetch_spec
- method (checkout type, repository url, etc.).
- root: The directory into which the repo expects to be checkout out.
- """
+ Args:
+ options: Options instance.
+ spec: Checkout configuration returned by the the config's fetch_spec + method (checkout type, repository url, etc.). + root: The directory into which the repo expects to be checkout out. + """ assert 'type' in spec checkout_type = spec['type'] checkout_spec = spec['%s_spec' % checkout_type] diff --git a/fix_encoding.py b/fix_encoding.py index f23bda7bf4..3218373660 100644 --- a/fix_encoding.py +++ b/fix_encoding.py @@ -13,10 +13,10 @@ import sys def complain(message): """If any exception occurs in this file, we'll probably try to print it - on stderr, which makes for frustrating debugging if stderr is directed - to our wrapper. So be paranoid about catching errors and reporting them - to sys.__stderr__, so that the user has a higher chance to see them. - """ + on stderr, which makes for frustrating debugging if stderr is directed + to our wrapper. So be paranoid about catching errors and reporting them + to sys.__stderr__, so that the user has a higher chance to see them. + """ print(isinstance(message, str) and message or repr(message), file=sys.__stderr__) @@ -24,11 +24,11 @@ def complain(message): def fix_default_encoding(): """Forces utf8 solidly on all platforms. - By default python execution environment is lazy and defaults to ascii - encoding. + By default python execution environment is lazy and defaults to ascii + encoding. - http://uucode.com/blog/2007/03/23/shut-up-you-dummy-7-bit-python/ - """ + http://uucode.com/blog/2007/03/23/shut-up-you-dummy-7-bit-python/ + """ if sys.getdefaultencoding() == 'utf-8': return False @@ -79,10 +79,10 @@ def fix_win_codec(): class WinUnicodeOutputBase(object): """Base class to adapt sys.stdout or sys.stderr to behave correctly on - Windows. + Windows. - Setting encoding to utf-8 is recommended. - """ + Setting encoding to utf-8 is recommended. + """ def __init__(self, fileno, name, encoding): # Corresponding file handle. self._fileno = fileno @@ -122,8 +122,8 @@ class WinUnicodeOutputBase(object): class WinUnicodeConsoleOutput(WinUnicodeOutputBase): """Output adapter to a Windows Console. - Understands how to use the win32 console API. - """ + Understands how to use the win32 console API. + """ def __init__(self, console_handle, fileno, stream_name, encoding): super(WinUnicodeConsoleOutput, self).__init__(fileno, '' % stream_name, @@ -183,9 +183,9 @@ class WinUnicodeConsoleOutput(WinUnicodeOutputBase): class WinUnicodeOutput(WinUnicodeOutputBase): """Output adaptor to a file output on Windows. - If the standard FileWrite function is used, it will be encoded in the current - code page. WriteConsoleW() permits writing any character. - """ + If the standard FileWrite function is used, it will be encoded in the + current code page. WriteConsoleW() permits writing any character. + """ def __init__(self, stream, fileno, encoding): super(WinUnicodeOutput, self).__init__(fileno, '' % stream.name, @@ -247,11 +247,11 @@ def win_handle_is_a_console(handle): def win_get_unicode_stream(stream, excepted_fileno, output_handle, encoding): """Returns a unicode-compatible stream. - This function will return a direct-Console writing object only if: - - the file number is the expected console file number - - the handle the expected file handle - - the 'real' handle is in fact a handle to a console. - """ + This function will return a direct-Console writing object only if: + - the file number is the expected console file number + - the handle the expected file handle + - the 'real' handle is in fact a handle to a console. 
+ """ old_fileno = getattr(stream, 'fileno', lambda: None)() if old_fileno == excepted_fileno: # These types are available on linux but not Mac. @@ -276,12 +276,12 @@ def win_get_unicode_stream(stream, excepted_fileno, output_handle, encoding): def fix_win_console(encoding): """Makes Unicode console output work independently of the current code page. - This also fixes . - Credit to Michael Kaplan - and - TZOmegaTZIOY - . - """ + This also fixes . + Credit to Michael Kaplan + and + TZOmegaTZIOY + . + """ if (isinstance(sys.stdout, WinUnicodeOutputBase) or isinstance(sys.stderr, WinUnicodeOutputBase)): return False @@ -308,8 +308,8 @@ def fix_win_console(encoding): def fix_encoding(): """Fixes various encoding problems on all platforms. - Should be called at the very beginning of the process. - """ + Should be called at the very beginning of the process. + """ ret = True if sys.platform == 'win32': ret &= fix_win_codec() diff --git a/gclient.py b/gclient.py index ead059627d..59e2b4c519 100755 --- a/gclient.py +++ b/gclient.py @@ -1682,7 +1682,7 @@ solutions = %(solution_list)s def _CheckConfig(self): """Verify that the config matches the state of the existing checked-out - solutions.""" + solutions.""" for dep in self.dependencies: if dep.managed and dep.url: scm = dep.CreateSCM() @@ -1806,7 +1806,7 @@ it or fix the checkout. def LoadCurrentConfig(options): # type: (optparse.Values) -> GClient """Searches for and loads a .gclient file relative to the current working - dir.""" + dir.""" if options.spec: client = GClient('.', options) client.SetConfig(options.spec) @@ -1862,8 +1862,8 @@ it or fix the checkout. def _SaveEntries(self): """Creates a .gclient_entries file to record the list of unique checkouts. - The .gclient_entries file lives in the same directory as .gclient. - """ + The .gclient_entries file lives in the same directory as .gclient. + """ # Sometimes pprint.pformat will use {', sometimes it'll use { ' ... It # makes testing a bit too fun. result = 'entries = {\n' @@ -1878,10 +1878,10 @@ it or fix the checkout. def _ReadEntries(self): """Read the .gclient_entries file for the given client. - Returns: - A sequence of solution names, which will be empty if there is the - entries file hasn't been created yet. - """ + Returns: + A sequence of solution names, which will be empty if there is the + entries file hasn't been created yet. + """ scope = {} filename = os.path.join(self.root_dir, self._options.entries_filename) if not os.path.exists(filename): @@ -2047,12 +2047,12 @@ it or fix the checkout. def _RemoveUnversionedGitDirs(self): """Remove directories that are no longer part of the checkout. - Notify the user if there is an orphaned entry in their working copy. - Only delete the directory if there are no changes in it, and - delete_unversioned_trees is set to true. + Notify the user if there is an orphaned entry in their working copy. + Only delete the directory if there are no changes in it, and + delete_unversioned_trees is set to true. - Returns CIPD packages that are no longer versioned. - """ + Returns CIPD packages that are no longer versioned. + """ entry_names_and_sync = [(i.name, i._should_sync) for i in self.root.subtree(False) if i.url] @@ -2227,10 +2227,10 @@ it or fix the checkout. progress=True): """Runs a command on each dependency in a client and its dependencies. - Args: - command: The command to use (e.g., 'status' or 'diff') - args: list of str - extra arguments to add to the command line. 
- """ + Args: + command: The command to use (e.g., 'status' or 'diff') + args: list of str - extra arguments to add to the command line. + """ if not self.dependencies: raise gclient_utils.Error('No solution specified') @@ -2613,16 +2613,16 @@ class CipdDependency(Dependency): def CMDrecurse(parser, args): """Operates [command args ...] on all the dependencies. - Change directory to each dependency's directory, and call [command - args ...] there. Sets GCLIENT_DEP_PATH environment variable as the - dep's relative location to root directory of the checkout. + Change directory to each dependency's directory, and call [command + args ...] there. Sets GCLIENT_DEP_PATH environment variable as the + dep's relative location to root directory of the checkout. - Examples: - * `gclient recurse --no-progress -j1 sh -c 'echo "$GCLIENT_DEP_PATH"'` - print the relative path of each dependency. - * `gclient recurse --no-progress -j1 sh -c "pwd"` - print the absolute path of each dependency. - """ + Examples: + * `gclient recurse --no-progress -j1 sh -c 'echo "$GCLIENT_DEP_PATH"'` + print the relative path of each dependency. + * `gclient recurse --no-progress -j1 sh -c "pwd"` + print the absolute path of each dependency. + """ # Stop parsing at the first non-arg so that these go through to the command parser.disable_interspersed_args() parser.add_option('-s', @@ -2676,8 +2676,8 @@ def CMDrecurse(parser, args): def CMDfetch(parser, args): """Fetches upstream commits for all modules. - Completely git-specific. Simply runs 'git fetch [args ...]' for each module. - """ + Completely git-specific. Simply runs 'git fetch [args ...]' for each module. + """ (options, args) = parser.parse_args(args) return CMDrecurse( OptionParser(), @@ -2689,11 +2689,11 @@ class Flattener(object): def __init__(self, client, pin_all_deps=False): """Constructor. - Arguments: - client (GClient): client to flatten - pin_all_deps (bool): whether to pin all deps, even if they're not pinned - in DEPS - """ + Arguments: + client (GClient): client to flatten + pin_all_deps (bool): whether to pin all deps, even if they're not pinned + in DEPS + """ self._client = client self._deps_string = None @@ -2725,9 +2725,9 @@ class Flattener(object): def _pin_dep(self, dep): """Pins a dependency to specific full revision sha. - Arguments: - dep (Dependency): dependency to process - """ + Arguments: + dep (Dependency): dependency to process + """ if dep.url is None: return @@ -2742,10 +2742,10 @@ class Flattener(object): def _flatten(self, pin_all_deps=False): """Runs the flattener. Saves resulting DEPS string. - Arguments: - pin_all_deps (bool): whether to pin all deps, even if they're not pinned - in DEPS - """ + Arguments: + pin_all_deps (bool): whether to pin all deps, even if they're not pinned + in DEPS + """ for solution in self._client.dependencies: self._add_dep(solution) self._flatten_dep(solution) @@ -2791,9 +2791,9 @@ class Flattener(object): def _add_dep(self, dep): """Helper to add a dependency to flattened DEPS. - Arguments: - dep (Dependency): dependency to add - """ + Arguments: + dep (Dependency): dependency to add + """ assert dep.name not in self._deps or self._deps.get( dep.name) == dep, (dep.name, self._deps.get(dep.name)) if dep.url: @@ -2802,9 +2802,9 @@ class Flattener(object): def _flatten_dep(self, dep): """Visits a dependency in order to flatten it (see CMDflatten). 
- Arguments: - dep (Dependency): dependency to process - """ + Arguments: + dep (Dependency): dependency to process + """ logging.debug('_flatten_dep(%s)', dep.name) assert dep.deps_parsed, ( @@ -2854,10 +2854,10 @@ class Flattener(object): def CMDgitmodules(parser, args): """Adds or updates Git Submodules based on the contents of the DEPS file. - This command should be run in the root directory of the repo. - It will create or update the .gitmodules file and include - `gclient-condition` values. Commits in gitlinks will also be updated. - """ + This command should be run in the root directory of the repo. + It will create or update the .gitmodules file and include + `gclient-condition` values. Commits in gitlinks will also be updated. + """ parser.add_option('--output-gitmodules', help='name of the .gitmodules file to write to', default='.gitmodules') @@ -3028,7 +3028,7 @@ def _DepsToLines(deps): def _DepsToDotGraphLines(deps): # type: (Mapping[str, Dependency]) -> Sequence[str] - """Converts |deps| dict to list of lines for dot graphs""" + """Converts |deps| dict to list of lines for dot graphs.""" if not deps: return [] graph_lines = ["digraph {\n\trankdir=\"LR\";"] @@ -3137,8 +3137,8 @@ def _VarsToLines(variables): def CMDgrep(parser, args): """Greps through git repos managed by gclient. - Runs 'git grep [args...]' for each module. - """ + Runs 'git grep [args...]' for each module. + """ # We can't use optparse because it will try to parse arguments sent # to git grep and throw an error. :-( if not args or re.match('(-h|--help)$', args[0]): @@ -3180,12 +3180,12 @@ def CMDroot(parser, args): def CMDconfig(parser, args): """Creates a .gclient file in the current directory. - This specifies the configuration for further commands. After update/sync, - top-level DEPS files in each module are read to determine dependent - modules to operate on as well. If optional [url] parameter is - provided, then configuration is read from a specified Subversion server - URL. - """ + This specifies the configuration for further commands. After update/sync, + top-level DEPS files in each module are read to determine dependent + modules to operate on as well. If optional [url] parameter is + provided, then configuration is read from a specified Subversion server + URL. + """ # We do a little dance with the --gclientfile option. 'gclient config' is # the only command where it's acceptable to have both '--gclientfile' and # '--spec' arguments. So, we temporarily stash any --gclientfile parameter @@ -3279,11 +3279,11 @@ def CMDconfig(parser, args): def CMDpack(parser, args): """Generates a patch which can be applied at the root of the tree. - Internally, runs 'git diff' on each checked out module and - dependencies, and performs minimal postprocessing of the output. The - resulting patch is printed to stdout and can be applied to a freshly - checked out tree via 'patch -p0 < patchfile'. - """ + Internally, runs 'git diff' on each checked out module and + dependencies, and performs minimal postprocessing of the output. The + resulting patch is printed to stdout and can be applied to a freshly + checked out tree via 'patch -p0 < patchfile'. + """ parser.add_option('--deps', dest='deps_os', metavar='OS_LIST', @@ -3582,8 +3582,8 @@ def CMDdiff(parser, args): def CMDrevert(parser, args): """Reverts all modifications in every dependencies. - That's the nuclear option to get back to a 'clean' state. It removes anything - that shows up in git status.""" + That's the nuclear option to get back to a 'clean' state. 
It removes anything + that shows up in git status.""" parser.add_option('--deps', dest='deps_os', metavar='OS_LIST', @@ -3670,11 +3670,11 @@ def CMDinstallhooks(parser, args): def CMDrevinfo(parser, args): """Outputs revision info mapping for the client and its dependencies. - This allows the capture of an overall 'revision' for the source tree that - can be used to reproduce the same tree in the future. It is only useful for - 'unpinned dependencies', i.e. DEPS/deps references without a git hash. - A git branch name isn't 'pinned' since the actual commit can change. - """ + This allows the capture of an overall 'revision' for the source tree that + can be used to reproduce the same tree in the future. It is only useful for + 'unpinned dependencies', i.e. DEPS/deps references without a git hash. + A git branch name isn't 'pinned' since the actual commit can change. + """ parser.add_option('--deps', dest='deps_os', metavar='OS_LIST', @@ -3719,8 +3719,8 @@ def CMDrevinfo(parser, args): def CMDgetdep(parser, args): """Gets revision information and variable values from a DEPS file. - If key doesn't exist or is incorrectly declared, this script exits with exit - code 2.""" + If key doesn't exist or is incorrectly declared, this script exits with exit + code 2.""" parser.add_option('--var', action='append', dest='vars', @@ -4124,7 +4124,7 @@ def can_run_gclient_and_helpers(): def main(argv): """Doesn't parse the arguments here, just find the right subcommand to - execute.""" + execute.""" if not can_run_gclient_and_helpers(): return 2 fix_encoding.fix_encoding() diff --git a/gclient_eval.py b/gclient_eval.py index d01b52aa1a..c12d074535 100644 --- a/gclient_eval.py +++ b/gclient_eval.py @@ -444,11 +444,11 @@ def Exec(content, filename='', vars_override=None, builtin_vars=None): def _StandardizeDeps(deps_dict, vars_dict): """"Standardizes the deps_dict. - For each dependency: - - Expands the variable in the dependency name. - - Ensures the dependency is a dictionary. - - Set's the 'dep_type' to be 'git' by default. - """ + For each dependency: + - Expands the variable in the dependency name. + - Ensures the dependency is a dictionary. + - Set's the 'dep_type' to be 'git' by default. + """ new_deps_dict = {} for dep_name, dep_info in deps_dict.items(): dep_name = dep_name.format(**vars_dict) @@ -462,10 +462,10 @@ def _StandardizeDeps(deps_dict, vars_dict): def _MergeDepsOs(deps_dict, os_deps_dict, os_name): """Merges the deps in os_deps_dict into conditional dependencies in deps_dict. - The dependencies in os_deps_dict are transformed into conditional dependencies - using |'checkout_' + os_name|. - If the dependency is already present, the URL and revision must coincide. - """ + The dependencies in os_deps_dict are transformed into conditional dependencies + using |'checkout_' + os_name|. + If the dependency is already present, the URL and revision must coincide. + """ for dep_name, dep_info in os_deps_dict.items(): # Make this condition very visible, so it's not a silent failure. # It's unclear how to support None override in deps_os. @@ -493,8 +493,8 @@ def _MergeDepsOs(deps_dict, os_deps_dict, os_name): def UpdateCondition(info_dict, op, new_condition): """Updates info_dict's condition with |new_condition|. - An absent value is treated as implicitly True. - """ + An absent value is treated as implicitly True. + """ curr_condition = info_dict.get('condition') # Easy case: Both are present. 
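
UpdateCondition's merge rules follow from treating an absent condition as implicitly True, as its docstring above says. A sketch of the resulting logic (reconstructed for illustration; only the both-present case is visible in this hunk):

    def update_condition_sketch(info_dict, op, new_condition):
        curr = info_dict.get('condition')
        if curr and new_condition:
            # Both present: combine them under the operator.
            info_dict['condition'] = '(%s) %s (%s)' % (curr, op, new_condition)
        elif op == 'and' and (curr or new_condition):
            # True and X == X: keep whichever side is present.
            info_dict['condition'] = curr or new_condition
        else:
            # True or X == True: no condition is needed at all.
            info_dict.pop('condition', None)

    d = {'condition': 'checkout_linux'}
    update_condition_sketch(d, 'and', 'checkout_x64')
    # d['condition'] == '(checkout_linux) and (checkout_x64)'
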
if curr_condition and new_condition: @@ -511,23 +511,23 @@ def UpdateCondition(info_dict, op, new_condition): def Parse(content, filename, vars_override=None, builtin_vars=None): """Parses DEPS strings. - Executes the Python-like string stored in content, resulting in a Python - dictionary specified by the schema above. Supports syntax validation and - variable expansion. + Executes the Python-like string stored in content, resulting in a Python + dictionary specified by the schema above. Supports syntax validation and + variable expansion. - Args: - content: str. DEPS file stored as a string. - filename: str. The name of the DEPS file, or a string describing the source - of the content, e.g. '', ''. - vars_override: dict, optional. A dictionary with overrides for the variables - defined by the DEPS file. - builtin_vars: dict, optional. A dictionary with variables that are provided - by default. + Args: + content: str. DEPS file stored as a string. + filename: str. The name of the DEPS file, or a string describing the source + of the content, e.g. '', ''. + vars_override: dict, optional. A dictionary with overrides for the variables + defined by the DEPS file. + builtin_vars: dict, optional. A dictionary with variables that are provided + by default. - Returns: - A Python dict with the parsed contents of the DEPS file, as specified by the - schema above. - """ + Returns: + A Python dict with the parsed contents of the DEPS file, as specified by the + schema above. + """ result = Exec(content, filename, vars_override, builtin_vars) vars_dict = result.get('vars', {}) diff --git a/gclient_scm.py b/gclient_scm.py index 574da8c3de..70d59d9c36 100644 --- a/gclient_scm.py +++ b/gclient_scm.py @@ -1586,7 +1586,7 @@ class GitWrapper(SCMWrapper): def _AutoFetchRef(self, options, revision, depth=None): """Attempts to fetch |revision| if not available in local repo. - Returns possibly updated revision.""" + Returns possibly updated revision.""" if not scm.GIT.IsValidRevision(self.checkout_path, revision): self._Fetch(options, refspec=revision, depth=depth) revision = self._Capture(['rev-parse', 'FETCH_HEAD']) diff --git a/gclient_utils.py b/gclient_utils.py index e017f99691..95006cf0be 100644 --- a/gclient_utils.py +++ b/gclient_utils.py @@ -85,14 +85,15 @@ def FuzzyMatchRepo(repo, candidates): # type: (str, Union[Collection[str], Mapping[str, Any]]) -> Optional[str] """Attempts to find a representation of repo in the candidates. - Args: - repo: a string representation of a repo in the form of a url or the - name and path of the solution it represents. - candidates: The candidates to look through which may contain `repo` in - in any of the forms mentioned above. - Returns: - The matching string, if any, which may be in a different form from `repo`. - """ + Args: + repo: a string representation of a repo in the form of a url or the + name and path of the solution it represents. + candidates: The candidates to look through which may contain `repo` in + in any of the forms mentioned above. + Returns: + The matching string, if any, which may be in a different form from + `repo`. 
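
The lookup FuzzyMatchRepo documents can be sketched compactly from the code that follows; the suffix-adding branch here is an assumption mirroring the suffix-stripping one that is visible:

    def fuzzy_match_repo_sketch(repo, candidates):
        if repo in candidates:
            return repo
        # Retry without a trailing .git suffix...
        if repo.endswith('.git') and repo[:-len('.git')] in candidates:
            return repo[:-len('.git')]
        # ...and with one added (assumed symmetric behavior).
        if repo + '.git' in candidates:
            return repo + '.git'
        return None

    print(fuzzy_match_repo_sketch('https://example.com/src.git',
                                  {'https://example.com/src': 1}))
    # https://example.com/src
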
+ """ if repo in candidates: return repo if repo.endswith('.git') and repo[:-len('.git')] in candidates: @@ -103,7 +104,7 @@ def FuzzyMatchRepo(repo, candidates): def SplitUrlRevision(url): - """Splits url and returns a two-tuple: url, rev""" + """Splits url and returns a two-tuple: url, rev.""" if url.startswith('ssh:'): # Make sure ssh://user-name@example.com/~/test.git@stable works regex = r'(ssh://(?:[-.\w]+@)?[-\w:\.]+/[-~\w\./]+)(?:@(.+))?' @@ -129,12 +130,12 @@ def ExtractRefName(remote, full_refs_str): def IsGitSha(revision): - """Returns true if the given string is a valid hex-encoded sha""" + """Returns true if the given string is a valid hex-encoded sha.""" return re.match('^[a-fA-F0-9]{6,40}$', revision) is not None def IsFullGitSha(revision): - """Returns true if the given string is a valid hex-encoded full sha""" + """Returns true if the given string is a valid hex-encoded full sha.""" return re.match('^[a-fA-F0-9]{40}$', revision) is not None @@ -145,12 +146,12 @@ def IsDateRevision(revision): def MakeDateRevision(date): """Returns a revision representing the latest revision before the given - date.""" + date.""" return "{" + date + "}" def SyntaxErrorToError(filename, e): - """Raises a gclient_utils.Error exception with the human readable message""" + """Raises a gclient_utils.Error exception with a human readable message.""" try: # Try to construct a human readable error message if filename: @@ -223,23 +224,23 @@ def temporary_directory(**kwargs): def temporary_file(): """Creates a temporary file. - On Windows, a file must be closed before it can be opened again. This function - allows to write something like: + On Windows, a file must be closed before it can be opened again. This + function allows to write something like: - with gclient_utils.temporary_file() as tmp: - gclient_utils.FileWrite(tmp, foo) - useful_stuff(tmp) + with gclient_utils.temporary_file() as tmp: + gclient_utils.FileWrite(tmp, foo) + useful_stuff(tmp) - Instead of something like: + Instead of something like: - with tempfile.NamedTemporaryFile(delete=False) as tmp: - tmp.write(foo) - tmp.close() - try: - useful_stuff(tmp) - finally: - os.remove(tmp.name) - """ + with tempfile.NamedTemporaryFile(delete=False) as tmp: + tmp.write(foo) + tmp.close() + try: + useful_stuff(tmp) + finally: + os.remove(tmp.name) + """ handle, name = tempfile.mkstemp() os.close(handle) try: @@ -251,11 +252,11 @@ def temporary_file(): def safe_rename(old, new): """Renames a file reliably. - Sometimes os.rename does not work because a dying git process keeps a handle - on it for a few seconds. An exception is then thrown, which make the program - give up what it was doing and remove what was deleted. - The only solution is to catch the exception and try again until it works. - """ + Sometimes os.rename does not work because a dying git process keeps a handle + on it for a few seconds. An exception is then thrown, which make the program + give up what it was doing and remove what was deleted. + The only solution is to catch the exception and try again until it works. + """ # roughly 10s retries = 100 for i in range(retries): @@ -282,28 +283,28 @@ def rm_file_or_tree(path): def rmtree(path): """shutil.rmtree() on steroids. - Recursively removes a directory, even if it's marked read-only. + Recursively removes a directory, even if it's marked read-only. - shutil.rmtree() doesn't work on Windows if any of the files or directories - are read-only. 
-  deletable) as we traverse the tree.
+    shutil.rmtree() doesn't work on Windows if any of the files or directories
+    are read-only. We need to be able to force the files to be writable (i.e.,
+    deletable) as we traverse the tree.

-  Even with all this, Windows still sometimes fails to delete a file, citing
-  a permission error (maybe something to do with antivirus scans or disk
-  indexing). The best suggestion any of the user forums had was to wait a
-  bit and try again, so we do that too. It's hand-waving, but sometimes it
-  works. :/
+    Even with all this, Windows still sometimes fails to delete a file, citing
+    a permission error (maybe something to do with antivirus scans or disk
+    indexing). The best suggestion any of the user forums had was to wait a
+    bit and try again, so we do that too. It's hand-waving, but sometimes it
+    works. :/

-  On POSIX systems, things are a little bit simpler. The modes of the files
-  to be deleted doesn't matter, only the modes of the directories containing
-  them are significant. As the directory tree is traversed, each directory
-  has its mode set appropriately before descending into it. This should
-  result in the entire tree being removed, with the possible exception of
-  *path itself, because nothing attempts to change the mode of its parent.
-  Doing so would be hazardous, as it's not a directory slated for removal.
-  In the ordinary case, this is not a problem: for our purposes, the user
-  will never lack write permission on *path's parent.
-  """
+    On POSIX systems, things are a little bit simpler. The modes of the files
+    to be deleted don't matter, only the modes of the directories containing
+    them are significant. As the directory tree is traversed, each directory
+    has its mode set appropriately before descending into it. This should
+    result in the entire tree being removed, with the possible exception of
+    *path itself, because nothing attempts to change the mode of its parent.
+    Doing so would be hazardous, as it's not a directory slated for removal.
+    In the ordinary case, this is not a problem: for our purposes, the user
+    will never lack write permission on *path's parent.
+    """
     if not os.path.exists(path):
         return
@@ -349,9 +350,9 @@ def rmtree(path):

 def safe_makedirs(tree):
     """Creates the directory in a safe manner.

-  Because multiple threads can create these directories concurrently, trap the
-  exception and pass on.
-  """
+    Because multiple threads can create these directories concurrently, trap the
+    exception and pass on.
+    """
     count = 0
     while not os.path.exists(tree):
         count += 1
@@ -373,8 +374,8 @@ def CommandToStr(args):

 class Wrapper(object):
     """Wraps an object, acting as a transparent proxy for all properties by
-  default.
-  """
+    default.
+    """
     def __init__(self, wrapped):
         self._wrapped = wrapped
@@ -567,20 +568,21 @@ def CheckCallAndFilter(args,
                        **kwargs):
     """Runs a command and calls back a filter function if needed.

-  Accepts all subprocess2.Popen() parameters plus:
-    print_stdout: If True, the command's stdout is forwarded to stdout.
-    filter_fn: A function taking a single string argument called with each line
-      of the subprocess2's output. Each line has the trailing newline
-      character trimmed.
-    show_header: Whether to display a header before the command output.
-    always_show_header: Show header even when the command produced no output.
-    retry: If the process exits non-zero, sleep for a brief interval and try
-      again, up to RETRY_MAX times.
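# The standard-library idiom for the Windows read-only problem the rmtree
# docstring above describes (a minimal sketch, not the depot_tools
# implementation, which also waits and retries on flaky permission errors):
import os
import stat

def _make_writable_and_retry(func, path, exc_info):
    os.chmod(path, stat.S_IWRITE)  # Clear the read-only bit...
    func(path)                     # ...and retry the failing operation once.

# Usage (checkout_dir is a hypothetical path):
#     shutil.rmtree(checkout_dir, onerror=_make_writable_and_retry)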
+    Accepts all subprocess2.Popen() parameters plus:
+        print_stdout: If True, the command's stdout is forwarded to stdout.
+        filter_fn: A function taking a single string argument called with each
+            line of the subprocess2's output. Each line has the trailing
+            newline character trimmed.
+        show_header: Whether to display a header before the command output.
+        always_show_header: Show header even when the command produced no
+            output.
+        retry: If the process exits non-zero, sleep for a brief interval and
+            try again, up to RETRY_MAX times.

-  stderr is always redirected to stdout.
+    stderr is always redirected to stdout.

-  Returns the output of the command as a binary string.
-  """
+    Returns the output of the command as a binary string.
+    """
     def show_header_if_necessary(needs_header, attempt):
         """Show the header at most once."""
         if not needs_header[0]:
@@ -716,21 +718,22 @@ def CheckCallAndFilter(args,

 class GitFilter(object):
     """A filter_fn implementation for quieting down git output messages.

-  Allows a custom function to skip certain lines (predicate), and will throttle
-  the output of percentage completed lines to only output every X seconds.
-  """
+    Allows a custom function to skip certain lines (predicate), and will
+    throttle the output of percentage completed lines to only output every X
+    seconds.
+    """
     PERCENT_RE = re.compile('(.*) ([0-9]{1,3})% .*')

     def __init__(self, time_throttle=0, predicate=None, out_fh=None):
         """
-    Args:
-      time_throttle (int): GitFilter will throttle 'noisy' output (such as the
-        XX% complete messages) to only be printed at least |time_throttle|
-        seconds apart.
-      predicate (f(line)): An optional function which is invoked for every line.
-        The line will be skipped if predicate(line) returns False.
-      out_fh: File handle to write output to.
-    """
+        Args:
+            time_throttle (int): GitFilter will throttle 'noisy' output (such as the
+                XX% complete messages) to only be printed at least |time_throttle|
+                seconds apart.
+            predicate (f(line)): An optional function which is invoked for every
+                line. The line will be skipped if predicate(line) returns False.
+            out_fh: File handle to write output to.
+        """
         self.first_line = True
         self.last_time = 0
         self.time_throttle = time_throttle
@@ -762,8 +765,8 @@ class GitFilter(object):

 def FindFileUpwards(filename, path=None):
     """Search upwards from the a directory (default: current) to find a file.

-  Returns nearest upper-level directory with the passed in file.
-  """
+    Returns nearest upper-level directory with the passed in file.
+    """
     if not path:
         path = os.getcwd()
     path = os.path.realpath(path)
@@ -844,7 +847,7 @@ class WorkItem(object):
     def run(self, work_queue):
         """work_queue is passed as keyword argument so it should be
-    the last parameters of the function when you override it."""
+        the last parameter of the function when you override it."""

     @property
     def name(self):
@@ -853,16 +856,16 @@ class WorkItem(object):
 class ExecutionQueue(object):
     """Runs a set of WorkItem that have interdependencies and were WorkItem are
-  added as they are processed.
+    added as they are processed.

-  This class manages that all the required dependencies are run
-  before running each one.
+    This class manages that all the required dependencies are run
+    before running each one.

-  Methods of this class are thread safe.
-  """
+    Methods of this class are thread safe.
+    """
     def __init__(self, jobs, progress, ignore_requirements, verbose=False):
         """jobs specifies the number of concurrent tasks to allow. progress is a
-  Progress instance."""
+        Progress instance."""
         # Set when a thread is done or a new item is enqueued.
         self.ready_cond = threading.Condition()
         # Maximum number of concurrent tasks.
@@ -887,8 +890,8 @@ class ExecutionQueue(object):
     def enqueue(self, d):
         """Enqueue one Dependency to be executed later once its requirements are
-    satisfied.
-    """
+        satisfied.
+        """
         assert isinstance(d, WorkItem)
         self.ready_cond.acquire()
         try:
@@ -1126,16 +1129,16 @@ class ExecutionQueue(object):

 def GetEditor(git_editor=None):
     """Returns the most plausible editor to use.

-  In order of preference:
-  - GIT_EDITOR environment variable
-  - core.editor git configuration variable (if supplied by git-cl)
-  - VISUAL environment variable
-  - EDITOR environment variable
-  - vi (non-Windows) or notepad (Windows)
+    In order of preference:
+    - GIT_EDITOR environment variable
+    - core.editor git configuration variable (if supplied by git-cl)
+    - VISUAL environment variable
+    - EDITOR environment variable
+    - vi (non-Windows) or notepad (Windows)

-  In the case of git-cl, this matches git's behaviour, except that it does not
-  include dumb terminal detection.
-  """
+    In the case of git-cl, this matches git's behaviour, except that it does not
+    include dumb terminal detection.
+    """
     editor = os.environ.get('GIT_EDITOR') or git_editor
     if not editor:
         editor = os.environ.get('VISUAL')
@@ -1201,10 +1204,10 @@ def RunEditor(content, git, git_editor=None):

 def UpgradeToHttps(url):
     """Upgrades random urls to https://.

-  Do not touch unknown urls like ssh:// or git://.
-  Do not touch http:// urls with a port number,
-  Fixes invalid GAE url.
-  """
+    Do not touch unknown urls like ssh:// or git://.
+    Do not touch http:// urls with a port number.
+    Fixes invalid GAE urls.
+    """
     if not url:
         return url
     if not re.match(r'[a-z\-]+\://.*', url):
@@ -1240,10 +1243,10 @@ def ParseCodereviewSettingsContent(content):

 def NumLocalCpus():
     """Returns the number of processors.

-  multiprocessing.cpu_count() is permitted to raise NotImplementedError, and
-  is known to do this on some Windows systems and OSX 10.6. If we can't get the
-  CPU count, we will fall back to '1'.
-  """
+    multiprocessing.cpu_count() is permitted to raise NotImplementedError, and
+    is known to do this on some Windows systems and OSX 10.6. If we can't get
+    the CPU count, we will fall back to '1'.
+    """
     # Surround the entire thing in try/except; no failure here should stop
     # gclient from working.
     try:
@@ -1272,10 +1275,10 @@ def NumLocalCpus():

 def DefaultDeltaBaseCacheLimit():
     """Return a reasonable default for the git config core.deltaBaseCacheLimit.

-  The primary constraint is the address space of virtual memory. The cache
-  size limit is per-thread, and 32-bit systems can hit OOM errors if this
-  parameter is set too high.
-  """
+    The primary constraint is the address space of virtual memory. The cache
+    size limit is per-thread, and 32-bit systems can hit OOM errors if this
+    parameter is set too high.
+    """
     if platform.architecture()[0].startswith('64'):
         return '2g'

@@ -1285,8 +1288,8 @@ def DefaultDeltaBaseCacheLimit():
 def DefaultIndexPackConfig(url=''):
     """Return reasonable default values for configuring git-index-pack.
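# The editor-resolution order from the GetEditor docstring above, reduced
# to a sketch (git-cl's core.editor lookup and dumb-terminal handling are
# elided; the function name is illustrative):
import os
import sys

def get_editor_sketch(git_editor=None):
    editor = os.environ.get('GIT_EDITOR') or git_editor
    if not editor:
        editor = os.environ.get('VISUAL') or os.environ.get('EDITOR')
    if not editor:
        editor = 'notepad' if sys.platform.startswith('win') else 'vi'
    return editor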
-  Experiments suggest that higher values for pack.threads don't improve
-  performance."""
+    Experiments suggest that higher values for pack.threads don't improve
+    performance."""
     cache_limit = DefaultDeltaBaseCacheLimit()
     result = ['-c', 'core.deltaBaseCacheLimit=%s' % cache_limit]
     if url in THREADED_INDEX_PACK_BLOCKLIST:
@@ -1316,15 +1319,15 @@ def FindExecutable(executable):

 def freeze(obj):
     """Takes a generic object ``obj``, and returns an immutable version of it.

-  Supported types:
-    * dict / OrderedDict -> FrozenDict
-    * list -> tuple
-    * set -> frozenset
-    * any object with a working __hash__ implementation (assumes that hashable
-      means immutable)
+    Supported types:
+      * dict / OrderedDict -> FrozenDict
+      * list -> tuple
+      * set -> frozenset
+      * any object with a working __hash__ implementation (assumes that
+        hashable means immutable)

-  Will raise TypeError if you pass an object which is not hashable.
-  """
+    Will raise TypeError if you pass an object which is not hashable.
+    """
     if isinstance(obj, collections.abc.Mapping):
         return FrozenDict((freeze(k), freeze(v)) for k, v in obj.items())

@@ -1341,8 +1344,8 @@ def freeze(obj):
 class FrozenDict(collections.abc.Mapping):
     """An immutable OrderedDict.

-  Modified From: http://stackoverflow.com/a/2704866
-  """
+    Modified From: http://stackoverflow.com/a/2704866
+    """
     def __init__(self, *args, **kwargs):
         self._d = collections.OrderedDict(*args, **kwargs)
diff --git a/gerrit_client.py b/gerrit_client.py
index c33f7fab8b..d669118bd3 100755
--- a/gerrit_client.py
+++ b/gerrit_client.py
@@ -5,7 +5,7 @@
 """Simple client for the Gerrit REST API.

 Example usage:
-  ./gerrit_client.py [command] [args]
+    ./gerrit_client.py [command] [args]
 """

 import json
@@ -415,17 +415,18 @@ def CMDabandon(parser, args):
 def CMDmass_abandon(parser, args):
     """Mass abandon changes

-  Abandons CLs that match search criteria provided by user. Before any change is
-  actually abandoned, user is presented with a list of CLs that will be affected
-  if user confirms. User can skip confirmation by passing --force parameter.
+    Abandons CLs that match search criteria provided by user. Before any change
+    is actually abandoned, user is presented with a list of CLs that will be
+    affected if user confirms. User can skip confirmation by passing --force
+    parameter.

-  The script can abandon up to 100 CLs per invocation.
+    The script can abandon up to 100 CLs per invocation.

-  Examples:
-    gerrit_client.py mass-abandon --host https://HOST -p 'project=repo2'
-    gerrit_client.py mass-abandon --host https://HOST -p 'message=testing'
-    gerrit_client.py mass-abandon --host https://HOST -p 'is=wip' -p 'age=1y'
-  """
+    Examples:
+        gerrit_client.py mass-abandon --host https://HOST -p 'project=repo2'
+        gerrit_client.py mass-abandon --host https://HOST -p 'message=testing'
+        gerrit_client.py mass-abandon --host https://HOST -p 'is=wip' -p 'age=1y'
+    """
     parser.add_option('-p',
                       '--param',
                       dest='params',
diff --git a/gerrit_util.py b/gerrit_util.py
index cd5f282054..7fe1bf0303 100644
--- a/gerrit_util.py
+++ b/gerrit_util.py
@@ -83,8 +83,8 @@ class GerritError(Exception):

 def _QueryString(params, first_param=None):
     """Encodes query parameters in the key:val[+key:val...] format specified here:
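# What the freeze() table above means in practice, using a tuple of pairs
# as a stand-in for FrozenDict (my sketch; the function name is illustrative):
import collections.abc

def freeze_sketch(obj):
    if isinstance(obj, collections.abc.Mapping):
        return tuple((freeze_sketch(k), freeze_sketch(v))
                     for k, v in obj.items())
    if isinstance(obj, list):
        return tuple(freeze_sketch(i) for i in obj)
    if isinstance(obj, set):
        return frozenset(freeze_sketch(i) for i in obj)
    hash(obj)  # Raises TypeError for unhashable, i.e. mutable, leftovers.
    return obj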
-  https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
-  """
+    https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
+    """
     q = [urllib.parse.quote(first_param)] if first_param else []
     q.extend(['%s:%s' % (key, val.replace(" ", "+")) for key, val in params])
     return '+'.join(q)
@@ -99,9 +99,9 @@ class Authenticator(object):
     def get():
         """Returns: (Authenticator) The identified Authenticator to use.

-    Probes the local system and its environment and identifies the
-    Authenticator instance to use.
-    """
+        Probes the local system and its environment and identifies the
+        Authenticator instance to use.
+        """
         # LUCI Context takes priority since it's normally present only on bots,
         # which then must use it.
         if LuciContextAuthenticator.is_luci():
@@ -116,8 +116,8 @@ class Authenticator(object):
 class CookiesAuthenticator(Authenticator):
     """Authenticator implementation that uses ".netrc" or ".gitcookies" for token.

-  Expected case for developer workstations.
-  """
+    Expected case for developer workstations.
+    """

     _EMPTY = object()

@@ -286,7 +286,7 @@ NetrcAuthenticator = CookiesAuthenticator

 class GceAuthenticator(Authenticator):
     """Authenticator implementation that uses GCE metadata service for token.
-  """
+    """

     _INFO_URL = 'http://metadata.google.internal'
     _ACQUIRE_URL = ('%s/computeMetadata/v1/instance/'
@@ -361,7 +361,7 @@ class GceAuthenticator(Authenticator):

 class LuciContextAuthenticator(Authenticator):
     """Authenticator implementation that uses LUCI_CONTEXT ambient local auth.
-  """
+    """
     @staticmethod
     def is_luci():
         return auth.has_luci_context_local_auth()
@@ -429,12 +429,13 @@ def CreateHttpConn(host,

 def ReadHttpResponse(conn, accept_statuses=frozenset([200])):
     """Reads an HTTP response from a connection into a string buffer.

-  Args:
-    conn: An Http object created by CreateHttpConn above.
-    accept_statuses: Treat any of these statuses as success. Default: [200]
-      Common additions include 204, 400, and 404.
-  Returns: A string buffer containing the connection's reply.
-  """
+    Args:
+        conn: An Http object created by CreateHttpConn above.
+        accept_statuses: Treat any of these statuses as success. Default: [200]
+            Common additions include 204, 400, and 404.
+    Returns:
+        A string buffer containing the connection's reply.
+    """
     sleep_time = SLEEP_TIME
     for idx in range(TRY_LIMIT):
         before_response = time.time()
@@ -534,21 +535,21 @@ def QueryChanges(host,
                  o_params=None,
                  start=None):
     """
-  Queries a gerrit-on-borg server for changes matching query terms.
+    Queries a gerrit-on-borg server for changes matching query terms.

-  Args:
-    params: A list of key:value pairs for search parameters, as documented
-      here (e.g. ('is', 'owner') for a parameter 'is:owner'):
-      https://gerrit-review.googlesource.com/Documentation/user-search.html#search-operators
-    first_param: A change identifier
-    limit: Maximum number of results to return.
-    start: how many changes to skip (starting with the most recent)
-    o_params: A list of additional output specifiers, as documented here:
-      https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
+    Args:
+        params: A list of key:value pairs for search parameters, as documented
+            here (e.g. ('is', 'owner') for a parameter 'is:owner'):
+            https://gerrit-review.googlesource.com/Documentation/user-search.html#search-operators
+        first_param: A change identifier
+        limit: Maximum number of results to return.
+        start: how many changes to skip (starting with the most recent)
+        o_params: A list of additional output specifiers, as documented here:
+            https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes

-  Returns:
-    A list of json-decoded query results.
-  """
+    Returns:
+        A list of json-decoded query results.
+    """
     # Note that no attempt is made to escape special characters; YMMV.
     if not params and not first_param:
         raise RuntimeError('QueryChanges requires search parameters')
@@ -569,24 +570,24 @@ def GenerateAllChanges(host,
                        o_params=None,
                        start=None):
     """Queries a gerrit-on-borg server for all the changes matching the query
-  terms.
+    terms.

-  WARNING: this is unreliable if a change matching the query is modified while
-  this function is being called.
+    WARNING: this is unreliable if a change matching the query is modified while
+    this function is being called.

-  A single query to gerrit-on-borg is limited on the number of results by the
-  limit parameter on the request (see QueryChanges) and the server maximum
-  limit.
+    A single query to gerrit-on-borg is limited on the number of results by the
+    limit parameter on the request (see QueryChanges) and the server maximum
+    limit.

-  Args:
-    params, first_param: Refer to QueryChanges().
-    limit: Maximum number of requested changes per query.
-    o_params: Refer to QueryChanges().
-    start: Refer to QueryChanges().
+    Args:
+        params, first_param: Refer to QueryChanges().
+        limit: Maximum number of requested changes per query.
+        o_params: Refer to QueryChanges().
+        start: Refer to QueryChanges().

-  Returns:
-    A generator object to the list of returned changes.
-  """
+    Returns:
+        A generator object to the list of returned changes.
+    """
     already_returned = set()

     def at_most_once(cls):
@@ -666,7 +667,7 @@ def GetGerritFetchUrl(host):
 def GetCodeReviewTbrScore(host, project):
     """Given a Gerrit host name and project, return the Code-Review score for TBR.
-  """
+    """
     conn = CreateHttpConn(host,
                           '/projects/%s' % urllib.parse.quote(project, ''))
     project = ReadHttpJsonResponse(conn)
@@ -836,8 +837,8 @@ def DeletePendingChangeEdit(host, change):

 def CherryPick(host, change, destination, revision='current'):
     """Create a cherry-pick commit from the given change, onto the given
-  destination.
-  """
+    destination.
+    """
     path = 'changes/%s/revisions/%s/cherrypick' % (change, revision)
     body = {'destination': destination}
     conn = CreateHttpConn(host, path, reqtype='POST', body=body)
@@ -847,9 +848,9 @@ def CherryPick(host, change, destination, revision='current'):

 def GetFileContents(host, change, path):
     """Get the contents of a file with the given path in the given revision.

-  Returns:
-    A bytes object with the file's contents.
-  """
+    Returns:
+        A bytes object with the file's contents.
+    """
     path = 'changes/%s/revisions/current/files/%s/content' % (
         change, urllib.parse.quote(path, ''))
     conn = CreateHttpConn(host, path, reqtype='GET')
@@ -874,11 +875,11 @@ def SetCommitMessage(host, change, description, notify='ALL'):

 def GetCommitIncludedIn(host, project, commit):
     """Retrieves the branches and tags for a given commit.

-  https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-included-in
+    https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-included-in

-  Returns:
-    A JSON object with keys of 'branches' and 'tags'.
-  """
+    Returns:
+        A JSON object with keys of 'branches' and 'tags'.
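# The shape of the offset-based pagination GenerateAllChanges documents,
# with de-duplication for results that shift mid-iteration (sketch only;
# query_page and the 'id' field are illustrative stand-ins, not this
# module's API):
def generate_all_changes_sketch(query_page, limit=500):
    seen = set()
    start = 0
    while True:
        page = query_page(start=start, limit=limit)
        for change in page:
            if change['id'] not in seen:
                seen.add(change['id'])
                yield change
        if len(page) < limit:
            return
        start += limit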
+ """ path = 'projects/%s/commits/%s/in' % (urllib.parse.quote(project, ''), commit) conn = CreateHttpConn(host, path, reqtype='GET') @@ -1073,16 +1074,16 @@ def ResetReviewLabels(host, def CreateChange(host, project, branch='main', subject='', params=()): """ - Creates a new change. + Creates a new change. - Args: - params: A list of additional ChangeInput specifiers, as documented here: - (e.g. ('is_private', 'true') to mark the change private. - https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#change-input + Args: + params: A list of additional ChangeInput specifiers, as documented here: + (e.g. ('is_private', 'true') to mark the change private. + https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#change-input - Returns: - ChangeInfo for the new change. - """ + Returns: + ChangeInfo for the new change. + """ path = 'changes/' body = {'project': project, 'branch': branch, 'subject': subject} body.update(dict(params)) @@ -1097,11 +1098,11 @@ def CreateChange(host, project, branch='main', subject='', params=()): def CreateGerritBranch(host, project, branch, commit): """Creates a new branch from given project and commit - https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#create-branch + https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#create-branch - Returns: - A JSON object with 'ref' key. - """ + Returns: + A JSON object with 'ref' key. + """ path = 'projects/%s/branches/%s' % (project, branch) body = {'revision': commit} conn = CreateHttpConn(host, path, reqtype='PUT', body=body) @@ -1114,11 +1115,11 @@ def CreateGerritBranch(host, project, branch, commit): def CreateGerritTag(host, project, tag, commit): """Creates a new tag at the given commit. - https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#create-tag + https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#create-tag - Returns: - A JSON object with 'ref' key. - """ + Returns: + A JSON object with 'ref' key. + """ path = 'projects/%s/tags/%s' % (project, tag) body = {'revision': commit} conn = CreateHttpConn(host, path, reqtype='PUT', body=body) @@ -1131,11 +1132,11 @@ def CreateGerritTag(host, project, tag, commit): def GetHead(host, project): """Retrieves current HEAD of Gerrit project - https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-head + https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-head - Returns: - A JSON object with 'ref' key. - """ + Returns: + A JSON object with 'ref' key. + """ path = 'projects/%s/HEAD' % (project) conn = CreateHttpConn(host, path, reqtype='GET') response = ReadHttpJsonResponse(conn, accept_statuses=[200]) @@ -1147,11 +1148,11 @@ def GetHead(host, project): def UpdateHead(host, project, branch): """Updates Gerrit HEAD to point to branch - https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#set-head + https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#set-head - Returns: - A JSON object with 'ref' key. - """ + Returns: + A JSON object with 'ref' key. + """ path = 'projects/%s/HEAD' % (project) body = {'ref': branch} conn = CreateHttpConn(host, path, reqtype='PUT', body=body) @@ -1164,12 +1165,12 @@ def UpdateHead(host, project, branch): def GetGerritBranch(host, project, branch): """Gets a branch info from given project and branch name. 
-  See:
-  https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-branch
+    See:
+    https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-branch

-  Returns:
-    A JSON object with 'revision' key if the branch exists, otherwise None.
-  """
+    Returns:
+        A JSON object with 'revision' key if the branch exists, otherwise None.
+    """
     path = 'projects/%s/branches/%s' % (project, branch)
     conn = CreateHttpConn(host, path, reqtype='GET')
     return ReadHttpJsonResponse(conn, accept_statuses=[200, 404])
@@ -1184,14 +1185,14 @@ def GetProjectHead(host, project):

 def GetAccountDetails(host, account_id='self'):
     """Returns details of the account.

-  If account_id is not given, uses magic value 'self' which corresponds to
-  whichever account user is authenticating as.
+    If account_id is not given, uses magic value 'self' which corresponds to
+    whichever account user is authenticating as.

-  Documentation:
-  https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#get-account
+    Documentation:
+    https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#get-account

-  Returns None if account is not found (i.e., Gerrit returned 404).
-  """
+    Returns None if account is not found (i.e., Gerrit returned 404).
+    """
     conn = CreateHttpConn(host, '/accounts/%s' % account_id)
     return ReadHttpJsonResponse(conn, accept_statuses=[200, 404])

@@ -1199,9 +1200,9 @@ def GetAccountDetails(host, account_id='self'):
 def ValidAccounts(host, accounts, max_threads=10):
     """Returns a mapping from valid account to its details.

-  Invalid accounts, either not existing or without unique match,
-  are not present as returned dictionary keys.
-  """
+    Invalid accounts, either not existing or without unique match,
+    are not present as returned dictionary keys.
+    """
     assert not isinstance(accounts, str), type(accounts)
     accounts = list(set(accounts))
     if not accounts:
@@ -1225,14 +1226,14 @@ def ValidAccounts(host, accounts, max_threads=10):

 def PercentEncodeForGitRef(original):
     """Applies percent-encoding for strings sent to Gerrit via git ref metadata.

-  The encoding used is based on but stricter than URL encoding (Section 2.1 of
-  RFC 3986). The only non-escaped characters are alphanumerics, and 'SPACE'
-  (U+0020) can be represented as 'LOW LINE' (U+005F) or 'PLUS SIGN' (U+002B).
+    The encoding used is based on but stricter than URL encoding (Section 2.1 of
+    RFC 3986). The only non-escaped characters are alphanumerics, and 'SPACE'
+    (U+0020) can be represented as 'LOW LINE' (U+005F) or 'PLUS SIGN' (U+002B).

-  For more information, see the Gerrit docs here:
+    For more information, see the Gerrit docs here:

-  https://gerrit-review.googlesource.com/Documentation/user-upload.html#message
-  """
+    https://gerrit-review.googlesource.com/Documentation/user-upload.html#message
+    """
     safe = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 '
     encoded = ''.join(c if c in safe else '%%%02X' % ord(c) for c in original)

@@ -1255,10 +1256,10 @@ def tempdir():

 def ChangeIdentifier(project, change_number):
     """Returns change identifier "project~number" suitable for |change| arg of
-  this module API.
+    this module API.

-  Such format is allows for more efficient Gerrit routing of HTTP requests,
-  comparing to specifying just change_number.
-  """
+    Such a format allows for more efficient Gerrit routing of HTTP requests,
+    compared to specifying just change_number.
+ """ assert int(change_number) return '%s~%s' % (urllib.parse.quote(project, ''), change_number) diff --git a/git_cache.py b/git_cache.py index a15b3f939d..45324d7ea4 100755 --- a/git_cache.py +++ b/git_cache.py @@ -54,23 +54,23 @@ def exponential_backoff_retry(fn, printerr=None): """Executes |fn| up to |count| times, backing off exponentially. - Args: - fn (callable): The function to execute. If this raises a handled - exception, the function will retry with exponential backoff. - excs (tuple): A tuple of Exception types to handle. If one of these is - raised by |fn|, a retry will be attempted. If |fn| raises an Exception - that is not in this list, it will immediately pass through. If |excs| - is empty, the Exception base class will be used. - name (str): Optional operation name to print in the retry string. - count (int): The number of times to try before allowing the exception to - pass through. - sleep_time (float): The initial number of seconds to sleep in between - retries. This will be doubled each retry. - printerr (callable): Function that will be called with the error string upon - failures. If None, |logging.warning| will be used. + Args: + fn (callable): The function to execute. If this raises a handled + exception, the function will retry with exponential backoff. + excs (tuple): A tuple of Exception types to handle. If one of these is + raised by |fn|, a retry will be attempted. If |fn| raises an + Exception that is not in this list, it will immediately pass + through. If |excs| is empty, the Exception base class will be used. + name (str): Optional operation name to print in the retry string. + count (int): The number of times to try before allowing the exception + to pass through. + sleep_time (float): The initial number of seconds to sleep in between + retries. This will be doubled each retry. + printerr (callable): Function that will be called with the error string + upon failures. If None, |logging.warning| will be used. - Returns: The return value of the successful fn. - """ + Returns: The return value of the successful fn. + """ printerr = printerr or logging.warning for i in range(count): try: @@ -101,9 +101,9 @@ class Mirror(object): def parse_fetch_spec(spec): """Parses and canonicalizes a fetch spec. - Returns (fetchspec, value_regex), where value_regex can be used - with 'git config --replace-all'. - """ + Returns (fetchspec, value_regex), where value_regex can be used + with 'git config --replace-all'. + """ parts = spec.split(':', 1) src = parts[0].lstrip('+').rstrip('/') if not src.startswith('refs/'): @@ -290,8 +290,9 @@ class Mirror(object): def bootstrap_repo(self, directory): """Bootstrap the repo from Google Storage if possible. - More apt-ly named bootstrap_repo_from_cloud_if_possible_else_do_nothing(). - """ + More apt-ly named + bootstrap_repo_from_cloud_if_possible_else_do_nothing(). + """ if not self.bootstrap_bucket: return False @@ -369,8 +370,8 @@ class Mirror(object): def _preserve_fetchspec(self): """Read and preserve remote.origin.fetch from an existing mirror. - This modifies self.fetch_specs. - """ + This modifies self.fetch_specs. + """ if not self.exists(): return try: diff --git a/git_cl.py b/git_cl.py index 5155703590..6201fd73cd 100755 --- a/git_cl.py +++ b/git_cl.py @@ -372,11 +372,11 @@ def _get_counterpart_host(host): def _trigger_tryjobs(changelist, jobs, options, patchset): """Sends a request to Buildbucket to trigger tryjobs for a changelist. - Args: - changelist: Changelist that the tryjobs are associated with. 
-    jobs: A list of (project, bucket, builder).
-    options: Command-line options.
-  """
+    Args:
+        changelist: Changelist that the tryjobs are associated with.
+        jobs: A list of (project, bucket, builder).
+        options: Command-line options.
+    """
     print('Scheduling jobs on:')
     for project, bucket, builder in jobs:
         print('  %s/%s: %s' % (project, bucket, builder))
@@ -460,8 +460,8 @@ def _make_tryjob_schedule_requests(changelist, jobs, options, patchset):

 def _fetch_tryjobs(changelist, buildbucket_host, patchset=None):
     """Fetches tryjobs from buildbucket.

-  Returns list of buildbucket.v2.Build with the try jobs for the changelist.
-  """
+    Returns list of buildbucket.v2.Build with the try jobs for the changelist.
+    """
     fields = ['id', 'builder', 'status', 'createTime', 'tags']
     request = {
         'predicate': {
@@ -488,17 +488,17 @@ def _fetch_tryjobs(changelist, buildbucket_host, patchset=None):

 def _fetch_latest_builds(changelist, buildbucket_host, latest_patchset=None):
     """Fetches builds from the latest patchset that has builds (within
-  the last few patchsets).
+    the last few patchsets).

-  Args:
-    changelist (Changelist): The CL to fetch builds for
-    buildbucket_host (str): Buildbucket host, e.g. "cr-buildbucket.appspot.com"
-    lastest_patchset(int|NoneType): the patchset to start fetching builds from.
-      If None (default), starts with the latest available patchset.
-  Returns:
-    A tuple (builds, patchset) where builds is a list of buildbucket.v2.Build,
-    and patchset is the patchset number where those builds came from.
-  """
+    Args:
+        changelist (Changelist): The CL to fetch builds for.
+        buildbucket_host (str): Buildbucket host, e.g. "cr-buildbucket.appspot.com"
+        latest_patchset (int|NoneType): the patchset to start fetching builds from.
+            If None (default), starts with the latest available patchset.
+    Returns:
+        A tuple (builds, patchset) where builds is a list of buildbucket.v2.Build,
+        and patchset is the patchset number where those builds came from.
+    """
     assert buildbucket_host
     assert changelist.GetIssue(), 'CL must be uploaded first'
     assert changelist.GetCodereviewServer(), 'CL must be uploaded first'
@@ -521,15 +521,15 @@ def _fetch_latest_builds(changelist, buildbucket_host, latest_patchset=None):

 def _filter_failed_for_retry(all_builds):
     """Returns a list of buckets/builders that are worth retrying.

-  Args:
-    all_builds (list): Builds, in the format returned by _fetch_tryjobs,
-      i.e. a list of buildbucket.v2.Builds which includes status and builder
-      info.
+    Args:
+        all_builds (list): Builds, in the format returned by _fetch_tryjobs,
+            i.e. a list of buildbucket.v2.Builds which includes status and builder
+            info.

-  Returns:
-    A dict {(proj, bucket): [builders]}. This is the same format accepted by
-    _trigger_tryjobs.
-  """
+    Returns:
+        A dict {(proj, bucket): [builders]}. This is the same format accepted by
+        _trigger_tryjobs.
+    """
     grouped = {}
     for build in all_builds:
         builder = build['builder']
@@ -634,10 +634,10 @@ def _print_tryjobs(options, builds):

 def _ComputeFormatDiffLineRanges(files, upstream_commit):
     """Gets the changed line ranges for each file since upstream_commit.

-  Parses a git diff on provided files and returns a dict that maps a file name
-  to an ordered list of range tuples in the form (start_line, count).
-  Ranges are in the same format as a git diff.
-  """
+    Parses a git diff on provided files and returns a dict that maps a file name
+    to an ordered list of range tuples in the form (start_line, count).
+    Ranges are in the same format as a git diff.
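# The {(project, bucket): [builders]} grouping _filter_failed_for_retry
# returns, in sketch form (only the FAILURE filter is shown; the real
# function applies further criteria, and the name here is illustrative):
def filter_failed_sketch(all_builds):
    grouped = {}
    for build in all_builds:
        if build['status'] == 'FAILURE':
            b = build['builder']
            grouped.setdefault((b['project'], b['bucket']),
                               []).append(b['builder'])
    return grouped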
+ """ # If files is empty then diff_output will be a full diff. if len(files) == 0: return {} @@ -690,10 +690,10 @@ def _ComputeFormatDiffLineRanges(files, upstream_commit): def _FindYapfConfigFile(fpath, yapf_config_cache, top_dir=None): """Checks if a yapf file is in any parent directory of fpath until top_dir. - Recursively checks parent directories to find yapf file and if no yapf file - is found returns None. Uses yapf_config_cache as a cache for previously found - configs. - """ + Recursively checks parent directories to find yapf file and if no yapf file + is found returns None. Uses yapf_config_cache as a cache for previously found + configs. + """ fpath = os.path.abspath(fpath) # Return result if we've already computed it. if fpath in yapf_config_cache: @@ -721,19 +721,19 @@ def _FindYapfConfigFile(fpath, yapf_config_cache, top_dir=None): def _GetYapfIgnorePatterns(top_dir): """Returns all patterns in the .yapfignore file. - yapf is supposed to handle the ignoring of files listed in .yapfignore itself, - but this functionality appears to break when explicitly passing files to - yapf for formatting. According to - https://github.com/google/yapf/blob/HEAD/README.rst#excluding-files-from-formatting-yapfignore, - the .yapfignore file should be in the directory that yapf is invoked from, - which we assume to be the top level directory in this case. + yapf is supposed to handle the ignoring of files listed in .yapfignore itself, + but this functionality appears to break when explicitly passing files to + yapf for formatting. According to + https://github.com/google/yapf/blob/HEAD/README.rst#excluding-files-from-formatting-yapfignore, + the .yapfignore file should be in the directory that yapf is invoked from, + which we assume to be the top level directory in this case. - Args: - top_dir: The top level directory for the repository being formatted. + Args: + top_dir: The top level directory for the repository being formatted. - Returns: - A set of all fnmatch patterns to be ignored. - """ + Returns: + A set of all fnmatch patterns to be ignored. + """ yapfignore_file = os.path.join(top_dir, '.yapfignore') ignore_patterns = set() if not os.path.exists(yapfignore_file): @@ -751,14 +751,14 @@ def _GetYapfIgnorePatterns(top_dir): def _FilterYapfIgnoredFiles(filepaths, patterns): """Filters out any filepaths that match any of the given patterns. - Args: - filepaths: An iterable of strings containing filepaths to filter. - patterns: An iterable of strings containing fnmatch patterns to filter on. + Args: + filepaths: An iterable of strings containing filepaths to filter. + patterns: An iterable of strings containing fnmatch patterns to filter on. - Returns: - A list of strings containing all the elements of |filepaths| that did not - match any of the patterns in |patterns|. - """ + Returns: + A list of strings containing all the elements of |filepaths| that did not + match any of the patterns in |patterns|. + """ # Not inlined so that tests can use the same implementation. return [ f for f in filepaths @@ -770,8 +770,8 @@ def _GetCommitCountSummary(begin_commit: str, end_commit: str = "HEAD") -> Optional[str]: """Generate a summary of the number of commits in (begin_commit, end_commit). - Returns a string containing the summary, or None if the range is empty. - """ + Returns a string containing the summary, or None if the range is empty. 
+ """ count = int( RunGitSilent(['rev-list', '--count', f'{begin_commit}..{end_commit}'])) @@ -876,8 +876,8 @@ class Settings(object): def GetSquashGerritUploadsOverride(self): """Return True or False if codereview.settings should be overridden. - Returns None if no override has been defined. - """ + Returns None if no override has been defined. + """ # See also http://crbug.com/611892#c23 result = self._GetConfig('gerrit.override-squash-uploads').lower() if result == 'true': @@ -894,7 +894,7 @@ class Settings(object): def GetGerritSkipEnsureAuthenticated(self): """Return True if EnsureAuthenticated should not be done for Gerrit - uploads.""" + uploads.""" if self.gerrit_skip_ensure_authenticated is None: self.gerrit_skip_ensure_authenticated = self._GetConfig( 'gerrit.skip-ensure-authenticated').lower() == 'true' @@ -1121,9 +1121,9 @@ class ChangeDescription(object): def update_reviewers(self, reviewers): """Rewrites the R= line(s) as a single line each. - Args: - reviewers (list(str)) - list of additional emails to use for reviewers. - """ + Args: + reviewers (list(str)) - list of additional emails to use for reviewers. + """ if not reviewers: return @@ -1200,10 +1200,10 @@ class ChangeDescription(object): def append_footer(self, line): """Adds a footer line to the description. - Differentiates legacy "KEY=xxx" footers (used to be called tags) and - Gerrit's footers in the form of "Footer-Key: footer any value" and ensures - that Gerrit footers are always at the end. - """ + Differentiates legacy "KEY=xxx" footers (used to be called tags) and + Gerrit's footers in the form of "Footer-Key: footer any value" and ensures + that Gerrit footers are always at the end. + """ parsed_footer_line = git_footers.parse_footer(line) if parsed_footer_line: # Line is a gerrit footer in the form: Footer-Key: any value. @@ -1285,19 +1285,19 @@ class ChangeDescription(object): def sanitize_hash_tag(cls, tag): """Returns a sanitized Gerrit hash tag. - A sanitized hashtag can be used as a git push refspec parameter value. - """ + A sanitized hashtag can be used as a git push refspec parameter value. + """ return re.sub(cls.BAD_HASH_TAG_CHUNK, '-', tag).strip('-').lower() class Changelist(object): """Changelist works with one changelist in local branch. - Notes: - * Not safe for concurrent multi-{thread,process} use. - * Caches values from current branch. Therefore, re-use after branch change - with great care. - """ + Notes: + * Not safe for concurrent multi-{thread,process} use. + * Caches values from current branch. Therefore, re-use after branch change + with great care. + """ def __init__(self, branchref=None, issue=None, @@ -1305,8 +1305,8 @@ class Changelist(object): commit_date=None): """Create a new ChangeList instance. - **kwargs will be passed directly to Gerrit implementation. - """ + **kwargs will be passed directly to Gerrit implementation. + """ # Poke settings so we get the "configure your server" message if # necessary. global settings @@ -1358,9 +1358,9 @@ class Changelist(object): def GetCCList(self): """Returns the users cc'd on this CL. - The return value is a string suitable for passing to git cl with the --cc - flag. - """ + The return value is a string suitable for passing to git cl with the --cc + flag. + """ if self.cc is None: base_cc = settings.GetDefaultCCList() more_cc = ','.join(self.more_cc) @@ -1405,8 +1405,8 @@ class Changelist(object): @staticmethod def FetchUpstreamTuple(branch): """Returns a tuple containing remote and remote ref, - e.g. 
-    """
+        e.g. 'origin', 'refs/heads/main'
+        """
         remote, upstream_branch = scm.GIT.FetchUpstreamTuple(
             settings.GetRoot(), branch)
         if not remote or not upstream_branch:
@@ -1477,8 +1477,8 @@ class Changelist(object):
     def GetRemoteUrl(self) -> Optional[str]:
         """Return the configured remote URL, e.g. 'git://example.org/foo.git/'.

-    Returns None if there is no remote.
-    """
+        Returns None if there is no remote.
+        """
         is_cached, value = self._cached_remote_url
         if is_cached:
             return value
@@ -1939,16 +1939,15 @@ class Changelist(object):
                               end_commit: Optional[str] = None) -> _NewUpload:
         """Create a squashed commit to upload.

-
-    Args:
-      parent: The commit to use as the parent for the new squashed.
-      orig_parent: The commit that is an actual ancestor of `end_commit`. It
-        is part of the same original tree as end_commit, which does not
-        contain squashed commits. This is used to create the change
-        description for the new squashed commit with:
-        `git log orig_parent..end_commit`.
-      end_commit: The commit to use as the end of the new squashed commit.
-    """
+        Args:
+            parent: The commit to use as the parent for the new squashed commit.
+            orig_parent: The commit that is an actual ancestor of `end_commit`. It
+                is part of the same original tree as end_commit, which does not
+                contain squashed commits. This is used to create the change
+                description for the new squashed commit with:
+                `git log orig_parent..end_commit`.
+            end_commit: The commit to use as the end of the new squashed commit.
+        """
         if end_commit is None:
             end_commit = RunGit(['rev-parse', self.branchref]).strip()

@@ -2173,8 +2172,8 @@ class Changelist(object):
     def SetCQState(self, new_state):
         """Updates the CQ state for the latest patchset.

-    Issue must have been already uploaded and known.
-    """
+        Issue must have been already uploaded and known.
+        """
         assert new_state in _CQState.ALL_STATES
         assert self.GetIssue()
         try:
@@ -2276,9 +2275,9 @@ class Changelist(object):
     def _GerritChangeIdentifier(self):
         """Handy method for gerrit_util.ChangeIdentifier for a given CL.

-    Not to be confused by value of "Change-Id:" footer.
-    If Gerrit project can be determined, this will speed up Gerrit HTTP API RPC.
-    """
+        Not to be confused with the value of the "Change-Id:" footer.
+        If Gerrit project can be determined, this will speed up Gerrit HTTP API RPC.
+        """
         project = self.GetGerritProject()
         if project:
             return gerrit_util.ChangeIdentifier(project, self.GetIssue())
@@ -2395,18 +2394,18 @@ class Changelist(object):
     def GetStatus(self):
         """Applies a rough heuristic to give a simple summary of an issue's review
-  or CQ status, assuming adherence to a common workflow.
-  Returns None if no issue for this branch, or one of the following keywords:
-    * 'error'   - error from review tool (including deleted issues)
-    * 'unsent'  - no reviewers added
-    * 'waiting' - waiting for review
-    * 'reply'   - waiting for uploader to reply to review
-    * 'lgtm'    - Code-Review label has been set
-    * 'dry-run' - dry-running in the CQ
-    * 'commit'  - in the CQ
-    * 'closed'  - successfully submitted or abandoned
-  """
+        or CQ status, assuming adherence to a common workflow.
+
+        Returns None if no issue for this branch, or one of the following keywords:
+            * 'error'   - error from review tool (including deleted issues)
+            * 'unsent'  - no reviewers added
+            * 'waiting' - waiting for review
+            * 'reply'   - waiting for uploader to reply to review
+            * 'lgtm'    - Code-Review label has been set
+            * 'dry-run' - dry-running in the CQ
+            * 'commit'  - in the CQ
+            * 'closed'  - successfully submitted or abandoned
+        """
         if not self.GetIssue():
             return None

@@ -2464,7 +2463,7 @@ class Changelist(object):
     def _IsPatchsetRangeSignificant(self, lower, upper):
         """Returns True if the inclusive range of patchsets contains any reworks or
-    rebases."""
+        rebases."""
         if not self.GetIssue():
             return False

@@ -2481,8 +2480,8 @@ class Changelist(object):
     def GetMostRecentDryRunPatchset(self):
         """Get patchsets equivalent to the most recent patchset and return
-    the patchset with the latest dry run. If none have been dry run, return
-    the latest patchset."""
+        the patchset with the latest dry run. If none have been dry run, return
+        the latest patchset."""
         if not self.GetIssue():
             return None

@@ -3010,7 +3009,7 @@ class Changelist(object):
     def CMDUploadChange(self, options, git_diff_args, custom_cl_base,
                         change_desc):
         """Upload the current branch to Gerrit, retry if new remote HEAD is
-    found. options and change_desc may be mutated."""
+        found. options and change_desc may be mutated."""
         remote, remote_branch = self.GetRemoteBranch()
         branch = GetTargetRef(remote, remote_branch, options.target_branch)

@@ -3226,8 +3225,8 @@ class Changelist(object):
                                change_desc):
         """Computes parent of the generated commit to be uploaded to Gerrit.

-    Returns revision or a ref name.
-    """
+        Returns revision or a ref name.
+        """
         if custom_cl_base:
             # Try to avoid creating additional unintended CLs when uploading,
             # unless user wants to take this risk.
@@ -3291,8 +3290,8 @@ class Changelist(object):
     def _UpdateWithExternalChanges(self):
         """Updates workspace with external changes.

-    Returns the commit hash that should be used as the merge base on upload.
-    """
+        Returns the commit hash that should be used as the merge base on upload.
+        """
         local_ps = self.GetPatchset()
         if local_ps is None:
             return
@@ -3427,8 +3426,8 @@ class Changelist(object):
     def _AddChangeIdToCommitMessage(self, log_desc, args):
         """Re-commits using the current message, assumes the commit hook is in
-    place.
-    """
+        place.
+        """
         RunGit(['commit', '--amend', '-m', log_desc])
         new_log_desc = _create_description_from_log(args)
         if git_footers.get_footer_change_id(new_log_desc):
@@ -3479,18 +3478,18 @@ class Changelist(object):

 def _get_bug_line_values(default_project_prefix, bugs):
     """Given default_project_prefix and comma separated list of bugs, yields bug
-  line values.
+    line values.

-  Each bug can be either:
-    * a number, which is combined with default_project_prefix
-    * string, which is left as is.
+    Each bug can be either:
+        * a number, which is combined with default_project_prefix
+        * string, which is left as is.

-  This function may produce more than one line, because bugdroid expects one
-  project per line.
+    This function may produce more than one line, because bugdroid expects one
+    project per line.

-  >>> list(_get_bug_line_values('v8:', '123,chromium:789'))
-  ['v8:123', 'chromium:789']
-  """
+    >>> list(_get_bug_line_values('v8:', '123,chromium:789'))
+    ['v8:123', 'chromium:789']
+    """
     default_bugs = []
     others = []
     for bug in bugs.split(','):
@@ -3518,9 +3517,9 @@ def _get_bug_line_values(default_project_prefix, bugs):

 def FindCodereviewSettingsFile(filename='codereview.settings'):
     """Finds the given file starting in the cwd and going up.

-  Only looks up to the top of the repository unless an
-  'inherit-review-settings-ok' file exists in the root of the repository.
-  """
+    Only looks up to the top of the repository unless an
+    'inherit-review-settings-ok' file exists in the root of the repository.
+    """
     inherit_ok_file = 'inherit-review-settings-ok'
     cwd = os.getcwd()
     root = settings.GetRoot()
@@ -3595,8 +3594,8 @@ def LoadCodereviewSettingsFromFile(fileobj):

 def urlretrieve(source, destination):
     """Downloads a network object to a local file, like urllib.urlretrieve.

-  This is necessary because urllib is broken for SSL connections via a proxy.
-  """
+    This is necessary because urllib is broken for SSL connections via a proxy.
+    """
     with open(destination, 'wb') as f:
         f.write(urllib.request.urlopen(source).read())

@@ -3610,9 +3609,9 @@ def hasSheBang(fname):

 def DownloadGerritHook(force):
     """Downloads and installs a Gerrit commit-msg hook.

-  Args:
-    force: True to update hooks. False to install hooks if not present.
-  """
+    Args:
+        force: True to update hooks. False to install hooks if not present.
+    """
     src = 'https://gerrit-review.googlesource.com/tools/hooks/commit-msg'
     dst = os.path.join(settings.GetRoot(), '.git', 'hooks', 'commit-msg')
     if not os.access(dst, os.X_OK):
@@ -3645,7 +3644,7 @@ class _GitCookiesChecker(object):
     def ensure_configured_gitcookies(self):
         """Runs checks and suggests fixes to make git use .gitcookies from default
-    path."""
+        path."""
         default = gerrit_util.CookiesAuthenticator.get_gitcookies_path()
         configured_path = RunGitSilent(
             ['config', '--global', 'http.cookiefile']).strip()
@@ -3746,8 +3745,8 @@ class _GitCookiesChecker(object):
     def has_generic_host(self):
         """Returns whether generic .googlesource.com has been configured.

-    Chrome Infra recommends to use explicit ${host}.googlesource.com instead.
-    """
+        Chrome Infra recommends using explicit ${host}.googlesource.com instead.
+        """
         for host, _, _ in self.get_hosts_with_creds(include_netrc=False):
             if host == '.' + _GOOGLESOURCE:
                 return True
@@ -3756,8 +3755,8 @@ class _GitCookiesChecker(object):
     def _get_git_gerrit_identity_pairs(self):
         """Returns map from canonic host to pair of identities (Git, Gerrit).

-    One of identities might be None, meaning not configured.
-    """
+        One of the identities might be None, meaning not configured.
+        """
         host_to_identity_pairs = {}
         for host, identity, _ in self.get_hosts_with_creds():
             canonical = _canonical_git_googlesource_host(host)
@@ -3924,15 +3923,15 @@ def color_for_status(status):

 def get_cl_statuses(changes, fine_grained, max_processes=None):
     """Returns a blocking iterable of (cl, status) for given branches.

-  If fine_grained is true, this will fetch CL statuses from the server.
-  Otherwise, simply indicate if there's a matching url for the given branches.
+    If fine_grained is true, this will fetch CL statuses from the server.
+    Otherwise, simply indicate if there's a matching url for the given branches.

-  If max_processes is specified, it is used as the maximum number of processes
-  to spawn to fetch CL status from the server. Otherwise 1 process per branch is
-  spawned.
+    If max_processes is specified, it is used as the maximum number of processes
+    to spawn to fetch CL status from the server. Otherwise 1 process per branch is
+    spawned.

-  See GetStatus() for a list of possible statuses.
-  """
+    See GetStatus() for a list of possible statuses.
+    """
     if not changes:
         return

@@ -3986,23 +3985,23 @@ def get_cl_statuses(changes, fine_grained, max_processes=None):

 def upload_branch_deps(cl, args, force=False):
     """Uploads CLs of local branches that are dependents of the current branch.

-  If the local branch dependency tree looks like:
+    If the local branch dependency tree looks like:

-    test1 -> test2.1 -> test3.1
-                     -> test3.2
-          -> test2.2 -> test3.3
+        test1 -> test2.1 -> test3.1
+                         -> test3.2
+              -> test2.2 -> test3.3

-  and you run "git cl upload --dependencies" from test1 then "git cl upload" is
-  run on the dependent branches in this order:
-  test2.1, test3.1, test3.2, test2.2, test3.3
+    and you run "git cl upload --dependencies" from test1 then "git cl upload" is
+    run on the dependent branches in this order:
+    test2.1, test3.1, test3.2, test2.2, test3.3

-  Note: This function does not rebase your local dependent branches. Use it
+    Note: This function does not rebase your local dependent branches. Use it
     when you make a change to the parent branch that will not conflict with its
     dependent branches, and you would like their dependencies updated in
     Gerrit. If the new stacked change flow is used, and ancestor diverged,
     upload will fail. To recover, `git rebase-update [-n]` must be executed.
-  """
+    """
     if git_common.is_dirty_git_tree('upload-branch-deps'):
         return 1

@@ -4086,8 +4085,8 @@ def upload_branch_deps(cl, args, force=False):

 def GetArchiveTagForBranch(issue_num, branch_name, existing_tags, pattern):
     """Given a proposed tag name, returns a tag name that is guaranteed to be
-  unique. If 'foo' is proposed but already exists, then 'foo-2' is used,
-  or 'foo-3', and so on."""
+    unique. If 'foo' is proposed but already exists, then 'foo-2' is used,
+    or 'foo-3', and so on."""
     proposed_tag = pattern.format(**{'issue': issue_num, 'branch': branch_name})

     for suffix_num in itertools.count(1):
@@ -4205,17 +4204,17 @@ def CMDarchive(parser, args):

 def CMDstatus(parser, args):
     """Show status of changelists.

-  Colors are used to tell the state of the CL unless --fast is used:
-    - Blue     waiting for review
-    - Yellow   waiting for you to reply to review, or not yet sent
-    - Green    LGTM'ed
-    - Red      'not LGTM'ed
-    - Magenta  in the CQ
-    - Cyan     was committed, branch can be deleted
-    - White    error, or unknown status
+    Colors are used to tell the state of the CL unless --fast is used:
+        - Blue     waiting for review
+        - Yellow   waiting for you to reply to review, or not yet sent
+        - Green    LGTM'ed
+        - Red      'not LGTM'ed
+        - Magenta  in the CQ
+        - Cyan     was committed, branch can be deleted
+        - White    error, or unknown status

-  Also see 'git cl comments'.
-  """
+    Also see 'git cl comments'.
+    """
     parser.add_option('--no-branch-color',
                       action='store_true',
                       help='Disable colorized branch names')
@@ -4290,7 +4289,7 @@ def CMDstatus(parser, args):

 def FormatBranchName(branch, colorize=False):
     """Simulates 'git branch' behavior. Colorizes and prefixes branch name with
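# The 'foo', 'foo-2', 'foo-3', ... probing that GetArchiveTagForBranch
# describes, as a self-contained sketch (the function name is illustrative):
import itertools

def unique_tag_sketch(proposed_tag, existing_tags):
    if proposed_tag not in existing_tags:
        return proposed_tag
    for n in itertools.count(2):
        candidate = '%s-%d' % (proposed_tag, n)
        if candidate not in existing_tags:
            return candidate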
Colorizes and prefixes branch name with - an asterisk when it is the current branch.""" + an asterisk when it is the current branch.""" asterisk = "" color = Fore.RESET @@ -4387,8 +4386,8 @@ def write_json(path, contents): def CMDissue(parser, args): """Sets or displays the current code review issue number. - Pass issue number 0 to clear the current issue. - """ + Pass issue number 0 to clear the current issue. + """ parser.add_option('-r', '--reverse', action='store_true', @@ -4730,13 +4729,13 @@ def CMDpresubmit(parser, args): def GenerateGerritChangeId(message): """Returns the Change ID footer value (Ixxxxxx...xxx). - Works the same way as - https://gerrit-review.googlesource.com/tools/hooks/commit-msg - but can be called on demand on all platforms. + Works the same way as + https://gerrit-review.googlesource.com/tools/hooks/commit-msg + but can be called on demand on all platforms. - The basic idea is to generate git hash of a state of the tree, original - commit message, author/committer info and timestamps. - """ + The basic idea is to generate git hash of a state of the tree, original + commit message, author/committer info and timestamps. + """ lines = [] tree_hash = RunGitSilent(['write-tree']) lines.append('tree %s' % tree_hash.strip()) @@ -4761,11 +4760,11 @@ def GenerateGerritChangeId(message): def GetTargetRef(remote, remote_branch, target_branch): """Computes the remote branch ref to use for the CL. - Args: - remote (str): The git remote for the CL. - remote_branch (str): The git remote branch for the CL. - target_branch (str): The target branch specified by the user. - """ + Args: + remote (str): The git remote for the CL. + remote_branch (str): The git remote branch for the CL. + target_branch (str): The target branch specified by the user. + """ if not (remote and remote_branch): return None @@ -4818,9 +4817,9 @@ def GetTargetRef(remote, remote_branch, target_branch): def cleanup_list(l): """Fixes a list so that comma separated items are put as individual items. - So that "--reviewers joe@c,john@c --reviewers joa@c" results in - options.reviewers == sorted(['joe@c', 'john@c', 'joa@c']). - """ + So that "--reviewers joe@c,john@c --reviewers joa@c" results in + options.reviewers == sorted(['joe@c', 'john@c', 'joa@c']). + """ items = sum((i.split(',') for i in l), []) stripped_items = (i.strip() for i in items) return sorted(filter(None, stripped_items)) @@ -4831,22 +4830,22 @@ def cleanup_list(l): def CMDupload(parser, args): """Uploads the current changelist to codereview. - Can skip dependency patchset uploads for a branch by running: - git config branch.branch_name.skip-deps-uploads True - To unset, run: - git config --unset branch.branch_name.skip-deps-uploads - Can also set the above globally by using the --global flag. + Can skip dependency patchset uploads for a branch by running: + git config branch.branch_name.skip-deps-uploads True + To unset, run: + git config --unset branch.branch_name.skip-deps-uploads + Can also set the above globally by using the --global flag. - If the name of the checked out branch starts with "bug-" or "fix-" followed - by a bug number, this bug number is automatically populated in the CL - description. + If the name of the checked out branch starts with "bug-" or "fix-" followed + by a bug number, this bug number is automatically populated in the CL + description. - If subject contains text in square brackets or has ": " prefix, such - text(s) is treated as Gerrit hashtags. 
For example, CLs with subjects: - [git-cl] add support for hashtags - Foo bar: implement foo - will be hashtagged with "git-cl" and "foo-bar" respectively. - """ + If subject contains text in square brackets or has ": " prefix, such + text(s) is treated as Gerrit hashtags. For example, CLs with subjects: + [git-cl] add support for hashtags + Foo bar: implement foo + will be hashtagged with "git-cl" and "foo-bar" respectively. + """ parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks', @@ -5249,10 +5248,10 @@ def _UploadAllPrecheck(options, orig_args): # bool] """Checks the state of the tree and gives the user uploading options - Returns: A tuple of the ordered list of changes that have new commits - since their last upload and a boolean of whether the user wants to - cherry-pick and upload the current branch instead of uploading all cls. - """ + Returns: A tuple of the ordered list of changes that have new commits + since their last upload and a boolean of whether the user wants to + cherry-pick and upload the current branch instead of uploading all cls. + """ cl = Changelist() if cl.GetBranch() is None: DieWithError('Can\'t upload from detached HEAD state. Get on a branch!') @@ -5378,11 +5377,11 @@ def _UploadAllPrecheck(options, orig_args): def CMDsplit(parser, args): """Splits a branch into smaller branches and uploads CLs. - Creates a branch and uploads a CL for each group of files modified in the - current branch that share a common OWNERS file. In the CL description and - comment, the string '$directory', is replaced with the directory containing - the shared OWNERS file. - """ + Creates a branch and uploads a CL for each group of files modified in the + current branch that share a common OWNERS file. In the CL description and + comment, the string '$directory', is replaced with the directory containing + the shared OWNERS file. + """ parser.add_option('-d', '--description', dest='description_file', @@ -5466,9 +5465,9 @@ def CMDdcommit(parser, args): def CMDland(parser, args): """Commits the current changelist via git. - In case of Gerrit, uses Gerrit REST api to "submit" the issue, which pushes - upstream and closes the issue automatically and atomically. - """ + In case of Gerrit, uses Gerrit REST api to "submit" the issue, which pushes + upstream and closes the issue automatically and atomically. + """ parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks', @@ -5591,7 +5590,7 @@ def CMDpatch(parser, args): def GetTreeStatus(url=None): """Fetches the tree status and returns either 'open', 'closed', - 'unknown' or 'unset'.""" + 'unknown' or 'unset'.""" url = url or settings.GetTreeStatusUrl(error_ok=True) if url: status = str(urllib.request.urlopen(url).read().lower()) @@ -5607,7 +5606,7 @@ def GetTreeStatus(url=None): def GetTreeStatusReason(): """Fetches the tree status from a json url and returns the message - with the reason for the tree to be opened or closed.""" + with the reason for the tree to be opened or closed.""" url = settings.GetTreeStatusUrl() json_url = urllib.parse.urljoin(url, '/current?format=json') connection = urllib.request.urlopen(json_url) @@ -6198,7 +6197,7 @@ def _RunGoogleJavaFormat(opts, paths, top_dir, upstream_commit): def _RunRustFmt(opts, rust_diff_files, top_dir, upstream_commit): """Runs rustfmt. Just like _RunClangFormatDiff returns 2 to indicate that - presubmit checks have failed (and returns 0 otherwise).""" + presubmit checks have failed (and returns 0 otherwise).""" # Locate the rustfmt binary. 
try:
        rustfmt_tool = rustfmt.FindRustfmtToolInChromiumTree()
@@ -6221,7 +6220,7 @@ def _RunRustFmt(opts, rust_diff_files, top_dir, upstream_commit):
 def _RunSwiftFormat(opts, swift_diff_files, top_dir, upstream_commit):
     """Runs swift-format. Just like _RunClangFormatDiff returns 2 to indicate
-    that presubmit checks have failed (and returns 0 otherwise)."""
+    that presubmit checks have failed (and returns 0 otherwise)."""
     if sys.platform != 'darwin':
         DieWithError('swift-format is only supported on macOS.')
     # Locate the swift-format binary.
diff --git a/git_common.py b/git_common.py
index fe80c5f938..1c025d111d 100644
--- a/git_common.py
+++ b/git_common.py
@@ -137,19 +137,21 @@ class BadCommitRefException(Exception):
 def memoize_one(**kwargs):
     """Memoizes a single-argument pure function.

-    Values of None are not cached.
+    Values of None are not cached.

-    Kwargs:
-    threadsafe (bool) - REQUIRED. Specifies whether to use locking around
-    cache manipulation functions. This is a kwarg so that users of memoize_one
-    are forced to explicitly and verbosely pick True or False.
+    Kwargs:
+    threadsafe (bool) - REQUIRED. Specifies whether to use locking around
+    cache manipulation functions. This is a kwarg so that users of
+    memoize_one are forced to explicitly and verbosely pick True or
+    False.

-    Adds three methods to the decorated function:
-    * get(key, default=None) - Gets the value for this key from the cache.
-    * set(key, value) - Sets the value for this key from the cache.
-    * clear() - Drops the entire contents of the cache. Useful for unittests.
-    * update(other) - Updates the contents of the cache from another dict.
-    """
+    Adds four methods to the decorated function:
+    * get(key, default=None) - Gets the value for this key from the cache.
+    * set(key, value) - Sets the value for this key from the cache.
+    * clear() - Drops the entire contents of the cache. Useful for
+    unittests.
+    * update(other) - Updates the contents of the cache from another dict.
+    """
     assert 'threadsafe' in kwargs, 'Must specify threadsafe={True,False}'
     threadsafe = kwargs['threadsafe']
@@ -203,8 +205,8 @@ def memoize_one(**kwargs):
 def _ScopedPool_initer(orig, orig_args):  # pragma: no cover
     """Initializer method for ScopedPool's subprocesses.

-    This helps ScopedPool handle Ctrl-C's correctly.
-    """
+    This helps ScopedPool handle Ctrl-C's correctly.
+    """
     signal.signal(signal.SIGINT, signal.SIG_IGN)
     if orig:
         orig(*orig_args)
@@ -213,14 +215,14 @@ def _ScopedPool_initer(orig, orig_args):  # pragma: no cover
 @contextlib.contextmanager
 def ScopedPool(*args, **kwargs):
     """Context Manager which returns a multiprocessing.pool instance which
-    correctly deals with thrown exceptions.
+    correctly deals with thrown exceptions.

-    *args - Arguments to multiprocessing.pool
+    *args - Arguments to multiprocessing.pool

-    Kwargs:
-    kind ('threads', 'procs') - The type of underlying coprocess to use.
-    **etc - Arguments to multiprocessing.pool
-    """
+    Kwargs:
+    kind ('threads', 'procs') - The type of underlying coprocess to use.
+    **etc - Arguments to multiprocessing.pool
+    """
     if kwargs.pop('kind', None) == 'threads':
         pool = multiprocessing.pool.ThreadPool(*args, **kwargs)
     else:
@@ -244,23 +246,23 @@ class ProgressPrinter(object):
     def __init__(self, fmt, enabled=None, fout=sys.stderr, period=0.5):
         """Create a ProgressPrinter.
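
A much-reduced sketch of the memoize_one contract documented above (None never cached, a mandatory threadsafe kwarg, get/set/clear/update helpers); this stands in for, and is not, git_common's implementation:

    import contextlib
    import threading

    def memoize_one(threadsafe):
        def decorator(fn):
            cache = {}
            guard = threading.Lock() if threadsafe else contextlib.nullcontext()

            def wrapper(arg):
                with guard:
                    if arg in cache:
                        return cache[arg]
                value = fn(arg)
                if value is not None:  # None results are never cached.
                    with guard:
                        cache[arg] = value
                return value

            wrapper.get = lambda key, default=None: cache.get(key, default)
            wrapper.set = lambda key, value: cache.__setitem__(key, value)
            wrapper.clear = cache.clear
            wrapper.update = cache.update
            return wrapper
        return decorator

    @memoize_one(threadsafe=True)
    def double(x):
        return x * 2

    assert double(2) == 4 and double.get(2) == 4
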
-        Use it as a context manager which produces a simple 'increment' method:
+        Use it as a context manager which produces a simple 'increment' method:

-        with ProgressPrinter('(%%(count)d/%d)' % 1000) as inc:
-        for i in xrange(1000):
-        # do stuff
-        if i % 10 == 0:
-        inc(10)
+        with ProgressPrinter('(%%(count)d/%d)' % 1000) as inc:
+        for i in xrange(1000):
+        # do stuff
+        if i % 10 == 0:
+        inc(10)

-        Args:
-        fmt - String format with a single '%(count)d' where the counter value
-        should go.
-        enabled (bool) - If this is None, will default to True if
-        logging.getLogger() is set to INFO or more verbose.
-        fout (file-like) - The stream to print status messages to.
-        period (float) - The time in seconds for the printer thread to wait
-        between printing.
+        Args:
+        fmt - String format with a single '%(count)d' where the counter value
+        should go.
+        enabled (bool) - If this is None, will default to True if
+        logging.getLogger() is set to INFO or more verbose.
+        fout (file-like) - The stream to print status messages to.
+        period (float) - The time in seconds for the printer thread to wait
+        between printing.
+        """
        self.fmt = fmt
        if enabled is None:  # pragma: no cover
            self.enabled = logging.getLogger().isEnabledFor(logging.INFO)
@@ -303,7 +305,7 @@ class ProgressPrinter(object):
 def once(function):
     """@Decorates |function| so that it only performs its action once, no matter
-    how many times the decorated |function| is called."""
+    how many times the decorated |function| is called."""
     has_run = [False]

     def _wrapper(*args, **kwargs):
@@ -556,9 +558,9 @@ def freeze():
 def get_branch_tree(use_limit=False):
     """Get the dictionary of {branch: parent}, compatible with topo_iter.

-    Returns a tuple of (skipped, <branch_tree>) where skipped is a set of
-    branches without upstream branches defined.
-    """
+    Returns a tuple of (skipped, <branch_tree>) where skipped is a set of
+    branches without upstream branches defined.
+    """
     skipped = set()
     branch_tree = {}
@@ -575,8 +577,8 @@ def get_branch_tree(use_limit=False):
 def get_or_create_merge_base(branch, parent=None):
     """Finds the configured merge base for branch.

-    If parent is supplied, it's used instead of calling upstream(branch).
-    """
+    If parent is supplied, it's used instead of calling upstream(branch).
+    """
     base = branch_config(branch, 'base')
     base_upstream = branch_config(branch, 'base-upstream')
     parent = parent or upstream(branch)
@@ -630,12 +632,12 @@ def in_rebase():
 def intern_f(f, kind='blob'):
     """Interns a file object into the git object store.

-    Args:
-    f (file-like object) - The file-like object to intern
-    kind (git object type) - One of 'blob', 'commit', 'tree', 'tag'.
+    Args:
+    f (file-like object) - The file-like object to intern
+    kind (git object type) - One of 'blob', 'commit', 'tree', 'tag'.

-    Returns the git hash of the interned object (hex encoded).
-    """
+    Returns the git hash of the interned object (hex encoded).
+    """
     ret = run('hash-object', '-t', kind, '-w', '--stdin', stdin=f)
     f.close()
     return ret
@@ -660,11 +662,11 @@ def manual_merge_base(branch, base, parent):
 def mktree(treedict):
     """Makes a git tree object and returns its hash.

-    See |tree()| for the values of mode, type, and ref.
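
The once() decorator above is truncated by this hunk; its run-once behavior amounts to the following self-contained sketch (whether the wrapped return value is propagated is an assumption):

    def once(function):
        has_run = [False]

        def _wrapper(*args, **kwargs):
            if not has_run[0]:
                has_run[0] = True
                return function(*args, **kwargs)

        return _wrapper

    calls = []

    @once
    def announce():
        calls.append('ran')

    announce()
    announce()
    assert calls == ['ran']  # The body executed a single time.
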
- Args: - treedict - { name: (mode, type, ref) } - """ + Args: + treedict - { name: (mode, type, ref) } + """ with tempfile.TemporaryFile() as f: for name, (mode, typ, ref) in treedict.items(): f.write(('%s %s %s\t%s\0' % (mode, typ, ref, name)).encode('utf-8')) @@ -675,11 +677,11 @@ def mktree(treedict): def parse_commitrefs(*commitrefs): """Returns binary encoded commit hashes for one or more commitrefs. - A commitref is anything which can resolve to a commit. Popular examples: - * 'HEAD' - * 'origin/main' - * 'cool_branch~2' - """ + A commitref is anything which can resolve to a commit. Popular examples: + * 'HEAD' + * 'origin/main' + * 'cool_branch~2' + """ try: return [binascii.unhexlify(h) for h in hash_multi(*commitrefs)] except subprocess2.CalledProcessError: @@ -692,26 +694,26 @@ RebaseRet = collections.namedtuple('RebaseRet', 'success stdout stderr') def rebase(parent, start, branch, abort=False, allow_gc=False): """Rebases |start|..|branch| onto the branch |parent|. - Sets 'gc.auto=0' for the duration of this call to prevent the rebase from - running a potentially slow garbage collection cycle. + Sets 'gc.auto=0' for the duration of this call to prevent the rebase from + running a potentially slow garbage collection cycle. - Args: - parent - The new parent ref for the rebased commits. - start - The commit to start from - branch - The branch to rebase - abort - If True, will call git-rebase --abort in the event that the rebase - doesn't complete successfully. - allow_gc - If True, sets "-c gc.auto=1" on the rebase call, rather than - "-c gc.auto=0". Usually if you're doing a series of rebases, - you'll only want to run a single gc pass at the end of all the - rebase activity. + Args: + parent - The new parent ref for the rebased commits. + start - The commit to start from + branch - The branch to rebase + abort - If True, will call git-rebase --abort in the event that the + rebase doesn't complete successfully. + allow_gc - If True, sets "-c gc.auto=1" on the rebase call, rather than + "-c gc.auto=0". Usually if you're doing a series of rebases, + you'll only want to run a single gc pass at the end of all the + rebase activity. - Returns a namedtuple with fields: - success - a boolean indicating that the rebase command completed - successfully. - message - if the rebase failed, this contains the stdout of the failed - rebase. - """ + Returns a namedtuple with fields: + success - a boolean indicating that the rebase command completed + successfully. + message - if the rebase failed, this contains the stdout of the failed + rebase. + """ try: args = [ '-c', @@ -770,11 +772,11 @@ def root(): def less(): # pragma: no cover """Runs 'less' as context manager yielding its stdin as a PIPE. - Automatically checks if sys.stdout is a non-TTY stream. If so, it avoids - running less and just yields sys.stdout. + Automatically checks if sys.stdout is a non-TTY stream. If so, it avoids + running less and just yields sys.stdout. - The returned PIPE is opened on binary mode. - """ + The returned PIPE is opened on binary mode. + """ if not setup_color.IS_TTY: # On Python 3, sys.stdout doesn't accept bytes, and sys.stdout.buffer # must be used. @@ -815,9 +817,9 @@ def run_with_retcode(*cmd, **kwargs): def run_stream(*cmd, **kwargs): """Runs a git command. Returns stdout as a PIPE (file-like object). - stderr is dropped to avoid races if the process outputs to both stdout and - stderr. - """ + stderr is dropped to avoid races if the process outputs to both stdout and + stderr. 
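
mktree's record format above ('mode type ref<TAB>name<NUL>' per entry) is the one `git mktree -z` consumes; a standalone equivalent using plain subprocess in place of depot_tools' run() helper:

    import subprocess

    def make_tree(treedict):
        # treedict = { name: (mode, type, ref) }, per the docstring above.
        data = b''.join(
            ('%s %s %s\t%s\0' % (mode, typ, ref, name)).encode('utf-8')
            for name, (mode, typ, ref) in treedict.items())
        out = subprocess.run(['git', 'mktree', '-z', '--missing'],
                             input=data, stdout=subprocess.PIPE, check=True)
        return out.stdout.decode('utf-8').strip()
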
+ """ kwargs.setdefault('stderr', subprocess2.DEVNULL) kwargs.setdefault('stdout', subprocess2.PIPE) kwargs.setdefault('shell', False) @@ -830,11 +832,11 @@ def run_stream(*cmd, **kwargs): def run_stream_with_retcode(*cmd, **kwargs): """Runs a git command as context manager yielding stdout as a PIPE. - stderr is dropped to avoid races if the process outputs to both stdout and - stderr. + stderr is dropped to avoid races if the process outputs to both stdout and + stderr. - Raises subprocess2.CalledProcessError on nonzero return code. - """ + Raises subprocess2.CalledProcessError on nonzero return code. + """ kwargs.setdefault('stderr', subprocess2.DEVNULL) kwargs.setdefault('stdout', subprocess2.PIPE) kwargs.setdefault('shell', False) @@ -852,12 +854,12 @@ def run_stream_with_retcode(*cmd, **kwargs): def run_with_stderr(*cmd, **kwargs): """Runs a git command. - Returns (stdout, stderr) as a pair of strings. + Returns (stdout, stderr) as a pair of strings. - kwargs - autostrip (bool) - Strip the output. Defaults to True. - indata (str) - Specifies stdin data for the process. - """ + kwargs + autostrip (bool) - Strip the output. Defaults to True. + indata (str) - Specifies stdin data for the process. + """ kwargs.setdefault('stdin', subprocess2.PIPE) kwargs.setdefault('stdout', subprocess2.PIPE) kwargs.setdefault('stderr', subprocess2.PIPE) @@ -920,17 +922,17 @@ def is_dirty_git_tree(cmd): def status(ignore_submodules=None): """Returns a parsed version of git-status. - Args: - ignore_submodules (str|None): "all", "none", or None. - None is equivalent to "none". + Args: + ignore_submodules (str|None): "all", "none", or None. + None is equivalent to "none". - Returns a generator of (current_name, (lstat, rstat, src)) pairs where: - * current_name is the name of the file - * lstat is the left status code letter from git-status - * rstat is the right status code letter from git-status - * src is the current name of the file, or the original name of the file - if lstat == 'R' - """ + Returns a generator of (current_name, (lstat, rstat, src)) pairs where: + * current_name is the name of the file + * lstat is the left status code letter from git-status + * rstat is the right status code letter from git-status + * src is the current name of the file, or the original name of the file + if lstat == 'R' + """ ignore_submodules = ignore_submodules or 'none' assert ignore_submodules in ( @@ -1024,27 +1026,27 @@ def thaw(): def topo_iter(branch_tree, top_down=True): """Generates (branch, parent) in topographical order for a branch tree. - Given a tree: + Given a tree: - A1 - B1 B2 - C1 C2 C3 - D1 + A1 + B1 B2 + C1 C2 C3 + D1 - branch_tree would look like: { - 'D1': 'C3', - 'C3': 'B2', - 'B2': 'A1', - 'C1': 'B1', - 'C2': 'B1', - 'B1': 'A1', - } + branch_tree would look like: { + 'D1': 'C3', + 'C3': 'B2', + 'B2': 'A1', + 'C1': 'B1', + 'C2': 'B1', + 'B1': 'A1', + } - It is OK to have multiple 'root' nodes in your graph. + It is OK to have multiple 'root' nodes in your graph. - if top_down is True, items are yielded from A->D. Otherwise they're yielded - from D->A. Within a layer the branches will be yielded in sorted order. - """ + if top_down is True, items are yielded from A->D. Otherwise they're yielded + from D->A. Within a layer the branches will be yielded in sorted order. + """ branch_tree = branch_tree.copy() # TODO(iannucci): There is probably a more efficient way to do these. 
@@ -1074,26 +1076,27 @@ def topo_iter(branch_tree, top_down=True): def tree(treeref, recurse=False): """Returns a dict representation of a git tree object. - Args: - treeref (str) - a git ref which resolves to a tree (commits count as trees). - recurse (bool) - include all of the tree's descendants too. File names will - take the form of 'some/path/to/file'. + Args: + treeref (str) - a git ref which resolves to a tree (commits count as + trees). + recurse (bool) - include all of the tree's descendants too. File names + will take the form of 'some/path/to/file'. - Return format: - { 'file_name': (mode, type, ref) } + Return format: + { 'file_name': (mode, type, ref) } - mode is an integer where: - * 0040000 - Directory - * 0100644 - Regular non-executable file - * 0100664 - Regular non-executable group-writeable file - * 0100755 - Regular executable file - * 0120000 - Symbolic link - * 0160000 - Gitlink + mode is an integer where: + * 0040000 - Directory + * 0100644 - Regular non-executable file + * 0100664 - Regular non-executable group-writeable file + * 0100755 - Regular executable file + * 0120000 - Symbolic link + * 0160000 - Gitlink - type is a string where it's one of 'blob', 'commit', 'tree', 'tag'. + type is a string where it's one of 'blob', 'commit', 'tree', 'tag'. - ref is the hex encoded hash of the entry. - """ + ref is the hex encoded hash of the entry. + """ ret = {} opts = ['ls-tree', '--full-tree'] if recurse: @@ -1125,7 +1128,7 @@ def upstream(branch): def get_git_version(): """Returns a tuple that contains the numeric components of the current git - version.""" + version.""" version_string = run('--version') return _extract_git_tuple(version_string) diff --git a/git_dates.py b/git_dates.py index 140e267f5f..a4dcb77ad8 100644 --- a/git_dates.py +++ b/git_dates.py @@ -9,16 +9,16 @@ import datetime def timestamp_offset_to_datetime(timestamp, offset): """Converts a timestamp + offset into a datetime.datetime. - Useful for dealing with the output of porcelain commands, which provide times - as timestamp and offset strings. + Useful for dealing with the output of porcelain commands, which provide + times as timestamp and offset strings. - Args: - timestamp: An int UTC timestamp, or a string containing decimal digits. - offset: A str timezone offset. e.g., '-0800'. + Args: + timestamp: An int UTC timestamp, or a string containing decimal digits. + offset: A str timezone offset. e.g., '-0800'. - Returns: - A tz-aware datetime.datetime for this timestamp. - """ + Returns: + A tz-aware datetime.datetime for this timestamp. + """ timestamp = int(timestamp) tz = FixedOffsetTZ.from_offset_string(offset) return datetime.datetime.fromtimestamp(timestamp, tz) diff --git a/git_footers.py b/git_footers.py index b70b248f0d..7bfe0ac29e 100755 --- a/git_footers.py +++ b/git_footers.py @@ -44,8 +44,8 @@ def parse_footers(message): def matches_footer_key(line, key): """Returns whether line is a valid footer whose key matches a given one. - Keys are compared in normalized form. - """ + Keys are compared in normalized form. + """ r = parse_footer(line) if r is None: return False @@ -55,13 +55,14 @@ def matches_footer_key(line, key): def split_footers(message): """Returns (non_footer_lines, footer_lines, parsed footers). - Guarantees that: - (non_footer_lines + footer_lines) ~= message.splitlines(), with at - most one new newline, if the last paragraph is text followed by footers. - parsed_footers is parse_footer applied on each line of footer_lines. 
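
git_dates.timestamp_offset_to_datetime above relies on a FixedOffsetTZ helper this hunk does not show; the same conversion can be sketched with the standard library alone:

    import datetime

    def timestamp_offset_to_dt(timestamp, offset):
        # offset is a git-style string such as '-0800' or '+0530'.
        sign = -1 if offset[0] == '-' else 1
        delta = datetime.timedelta(hours=int(offset[1:3]),
                                   minutes=int(offset[3:5]))
        return datetime.datetime.fromtimestamp(int(timestamp),
                                               datetime.timezone(sign * delta))

    assert (timestamp_offset_to_dt('1704230395', '-0800').utcoffset() ==
            datetime.timedelta(hours=-8))
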
- There could be fewer parsed_footers than footer lines if some lines in - last paragraph are malformed. - """ + Guarantees that: + (non_footer_lines + footer_lines) ~= message.splitlines(), with at + most one new newline, if the last paragraph is text followed by + footers. + parsed_footers is parse_footer applied on each line of footer_lines. + There could be fewer parsed_footers than footer lines if some lines + in last paragraph are malformed. + """ message_lines = list(message.rstrip().splitlines()) footer_lines = [] maybe_footer_lines = [] @@ -102,10 +103,10 @@ def get_footer_change_id(message): def add_footer_change_id(message, change_id): """Returns message with Change-ID footer in it. - Assumes that Change-Id is not yet in footers, which is then inserted at - earliest footer line which is after all of these footers: - Bug|Issue|Test|Feature. - """ + Assumes that Change-Id is not yet in footers, which is then inserted at + earliest footer line which is after all of these footers: + Bug|Issue|Test|Feature. + """ assert 'Change-Id' not in parse_footers(message) return add_footer(message, 'Change-Id', @@ -116,18 +117,19 @@ def add_footer_change_id(message, change_id): def add_footer(message, key, value, after_keys=None, before_keys=None): """Returns a message with given footer appended. - If after_keys and before_keys are both None (default), appends footer last. - If after_keys is provided and matches footers already present, inserts footer - as *early* as possible while still appearing after all provided keys, even - if doing so conflicts with before_keys. - If before_keys is provided, inserts footer as late as possible while still - appearing before all provided keys. + If after_keys and before_keys are both None (default), appends footer last. + If after_keys is provided and matches footers already present, inserts + footer as *early* as possible while still appearing after all provided + keys, even if doing so conflicts with before_keys. + If before_keys is provided, inserts footer as late as possible while still + appearing before all provided keys. - For example, given - message='Header.\n\nAdded: 2016\nBug: 123\nVerified-By: CQ' - after_keys=['Bug', 'Issue'] - the new footer will be inserted between Bug and Verified-By existing footers. - """ + For example, given + message='Header.\n\nAdded: 2016\nBug: 123\nVerified-By: CQ' + after_keys=['Bug', 'Issue'] + the new footer will be inserted between Bug and Verified-By existing + footers. + """ assert key == normalize_name(key), 'Use normalized key' new_footer = '%s: %s' % (key, value) if not FOOTER_PATTERN.match(new_footer): @@ -192,13 +194,13 @@ def get_unique(footers, key): def get_position(footers): """Get the commit position from the footers multimap using a heuristic. - Returns: - A tuple of the branch and the position on that branch. For example, + Returns: + A tuple of the branch and the position on that branch. For example, - Cr-Commit-Position: refs/heads/main@{#292272} + Cr-Commit-Position: refs/heads/main@{#292272} - would give the return value ('refs/heads/main', 292272). - """ + would give the return value ('refs/heads/main', 292272). + """ position = get_unique(footers, 'Cr-Commit-Position') if position: diff --git a/git_hyper_blame.py b/git_hyper_blame.py index e335c788ca..773edc3dfc 100755 --- a/git_hyper_blame.py +++ b/git_hyper_blame.py @@ -91,10 +91,10 @@ def parse_blame(blameoutput): def print_table(outbuf, table, align): """Print a 2D rectangular array, aligning columns with spaces. 
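
The Cr-Commit-Position example in get_position's docstring above implies a parse along these lines (the helper name and regex are illustrative, not the shipped code):

    import re

    def parse_commit_position(value):
        m = re.match(r'(?P<ref>refs/[^@]+)@\{#(?P<number>\d+)\}', value)
        return (m.group('ref'), int(m.group('number'))) if m else None

    assert parse_commit_position('refs/heads/main@{#292272}') == (
        'refs/heads/main', 292272)
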
- Args: - align: string of 'l' and 'r', designating whether each column is left- or - right-aligned. - """ + Args: + align: string of 'l' and 'r', designating whether each column is + left- or right-aligned. + """ if len(table) == 0: return @@ -189,20 +189,21 @@ def approx_lineno_across_revs(filename, newfilename, revision, newrevision, lineno): """Computes the approximate movement of a line number between two revisions. - Consider line |lineno| in |filename| at |revision|. This function computes the - line number of that line in |newfilename| at |newrevision|. This is - necessarily approximate. + Consider line |lineno| in |filename| at |revision|. This function computes + the line number of that line in |newfilename| at |newrevision|. This is + necessarily approximate. - Args: - filename: The file (within the repo) at |revision|. - newfilename: The name of the same file at |newrevision|. - revision: A git revision. - newrevision: Another git revision. Note: Can be ahead or behind |revision|. - lineno: Line number within |filename| at |revision|. + Args: + filename: The file (within the repo) at |revision|. + newfilename: The name of the same file at |newrevision|. + revision: A git revision. + newrevision: Another git revision. Note: Can be ahead or behind + |revision|. + lineno: Line number within |filename| at |revision|. - Returns: - Line number within |newfilename| at |newrevision|. - """ + Returns: + Line number within |newfilename| at |newrevision|. + """ # This doesn't work that well if there are a lot of line changes within the # hunk (demonstrated by # GitHyperBlameLineMotionTest.testIntraHunkLineMotion). A fuzzy heuristic diff --git a/git_map_branches.py b/git_map_branches.py index eb68074195..ec9c86baf7 100755 --- a/git_map_branches.py +++ b/git_map_branches.py @@ -77,7 +77,7 @@ class OutputManager(object): class OutputLine(object): """A single line of data. - This consists of an equal number of columns, colors and separators.""" + This consists of an equal number of columns, colors and separators.""" def __init__(self): self.columns = [] self.separators = [] @@ -90,7 +90,7 @@ class OutputLine(object): def as_padded_string(self, max_column_lengths): """"Returns the data as a string with each column padded to - |max_column_lengths|.""" + |max_column_lengths|.""" output_string = '' for i, (color, data, separator) in enumerate( zip(self.colors, self.columns, self.separators)): @@ -106,10 +106,11 @@ class OutputLine(object): class BranchMapper(object): """A class which constructs output representing the tree's branch structure. - Attributes: - __branches_info: a map of branches to their BranchesInfo objects which - consist of the branch hash, upstream and ahead/behind status. - __gone_branches: a set of upstreams which are not fetchable by git""" + Attributes: + __branches_info: a map of branches to their BranchesInfo objects which + consist of the branch hash, upstream and ahead/behind status. 
+        __gone_branches: a set of upstreams which are not fetchable by git
+    """
     def __init__(self):
         self.verbosity = 0
         self.maxjobs = 0
@@ -228,7 +229,7 @@ class BranchMapper(object):
     def __append_branch(self, branch, output, depth=0):
         """Recurses through the tree structure and appends an OutputLine to the
-        OutputManager for each branch."""
+        OutputManager for each branch."""
         child_output = OutputManager()
         for child in sorted(self.__parent_map.pop(branch, ())):
             self.__append_branch(child, child_output, depth=depth + 1)
diff --git a/git_number.py b/git_number.py
index beae4ac7a6..541a15841a 100755
--- a/git_number.py
+++ b/git_number.py
@@ -51,24 +51,24 @@ POOL_KIND = 'procs'
 def pathlify(hash_prefix):
     """Converts a binary object hash prefix into a posix path, one folder per
-    byte.
+    byte.

-    >>> pathlify('\xDE\xAD')
-    'de/ad'
-    """
+    >>> pathlify('\xDE\xAD')
+    'de/ad'
+    """
     return '/'.join('%02x' % b for b in hash_prefix)


 @git.memoize_one(threadsafe=False)
 def get_number_tree(prefix_bytes):
     """Returns a dictionary of the git-number registry specified by
-    |prefix_bytes|.
+    |prefix_bytes|.

-    This is in the form of {<binary hash>: <generation number>...}
+    This is in the form of {<binary hash>: <generation number>...}

-    >>> get_number_tree('\x83\xb4')
-    {'\x83\xb4\xe3\xe4W\xf9J*\x8f/c\x16\xecD\xd1\x04\x8b\xa9qz': 169, ...}
-    """
+    >>> get_number_tree('\x83\xb4')
+    {'\x83\xb4\xe3\xe4W\xf9J*\x8f/c\x16\xecD\xd1\x04\x8b\xa9qz': 169, ...}
+    """
     ref = '%s:%s' % (REF, pathlify(prefix_bytes))

     try:
@@ -84,9 +84,9 @@ def get_number_tree(prefix_bytes):
 def get_num(commit_hash):
     """Returns the generation number for a commit.

-    Returns None if the generation number for this commit hasn't been calculated
-    yet (see load_generation_numbers()).
-    """
+    Returns None if the generation number for this commit hasn't been calculated
+    yet (see load_generation_numbers()).
+    """
     return get_number_tree(commit_hash[:PREFIX_LEN]).get(commit_hash)


@@ -100,14 +100,14 @@ def clear_caches(on_disk=False):
 def intern_number_tree(tree):
     """Transforms a number tree (in the form returned by |get_number_tree|) into
-    a git blob.
+    a git blob.

-    Returns the git blob id as hex-encoded string.
+    Returns the git blob id as hex-encoded string.

-    >>> d = {'\x83\xb4\xe3\xe4W\xf9J*\x8f/c\x16\xecD\xd1\x04\x8b\xa9qz': 169}
-    >>> intern_number_tree(d)
-    'c552317aa95ca8c3f6aae3357a4be299fbcb25ce'
-    """
+    >>> d = {'\x83\xb4\xe3\xe4W\xf9J*\x8f/c\x16\xecD\xd1\x04\x8b\xa9qz': 169}
+    >>> intern_number_tree(d)
+    'c552317aa95ca8c3f6aae3357a4be299fbcb25ce'
+    """
     with tempfile.TemporaryFile() as f:
         for k, v in sorted(tree.items()):
             f.write(struct.pack(CHUNK_FMT, k, v))
@@ -124,11 +124,11 @@ def leaf_map_fn(pre_tree):
 def finalize(targets):
     """Saves all cache data to the git repository.

-    After calculating the generation number for |targets|, call finalize() to
-    save all the work to the git repository.
+    After calculating the generation number for |targets|, call finalize() to
+    save all the work to the git repository.

-    This in particular saves the trees referred to by DIRTY_TREES.
-    """
+    This in particular saves the trees referred to by DIRTY_TREES.
+    """
     if not DIRTY_TREES:
         return

@@ -197,14 +197,14 @@ def all_prefixes(depth=PREFIX_LEN):
 def load_generation_numbers(targets):
     """Populates the caches of get_num and get_number_tree so they contain
-    the results for |targets|.
+    the results for |targets|.

-    Loads cached numbers from disk, and calculates missing numbers if one or
-    more of |targets| is newer than the cached calculations.
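
pathlify's body is shown in full above; one detail worth noting is that under Python 3 it requires a bytes prefix, since iterating bytes yields the ints '%02x' expects (the docstring's str literal predates that):

    def pathlify(hash_prefix):
        return '/'.join('%02x' % b for b in hash_prefix)

    assert pathlify(b'\xde\xad') == 'de/ad'
    assert pathlify(b'\x83\xb4') == '83/b4'
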
+ Loads cached numbers from disk, and calculates missing numbers if one or + more of |targets| is newer than the cached calculations. - Args: - targets - An iterable of binary-encoded full git commit hashes. - """ + Args: + targets - An iterable of binary-encoded full git commit hashes. + """ # In case they pass us a generator, listify targets. targets = list(targets) diff --git a/git_rebase_update.py b/git_rebase_update.py index f587dd02a1..8b1fb8d441 100755 --- a/git_rebase_update.py +++ b/git_rebase_update.py @@ -24,11 +24,11 @@ STARTING_WORKDIR_KEY = 'depot-tools.rebase-update.starting-workdir' def find_return_branch_workdir(): """Finds the branch and working directory which we should return to after - rebase-update completes. + rebase-update completes. - These values may persist across multiple invocations of rebase-update, if - rebase-update runs into a conflict mid-way. - """ + These values may persist across multiple invocations of rebase-update, if + rebase-update runs into a conflict mid-way. + """ return_branch = git.get_config(STARTING_BRANCH_KEY) workdir = git.get_config(STARTING_WORKDIR_KEY) if not return_branch: diff --git a/git_retry.py b/git_retry.py index f8db8b518a..3648aadd80 100755 --- a/git_retry.py +++ b/git_retry.py @@ -77,12 +77,12 @@ class GitRetry(object): def computeDelay(self, iteration): """Returns: the delay (in seconds) for a given iteration - The first iteration has a delay of '0'. + The first iteration has a delay of '0'. - Args: - iteration: (int) The iteration index (starting with zero as the first - iteration) - """ + Args: + iteration: (int) The iteration index (starting with zero as the + first iteration) + """ if (not self.delay) or (iteration == 0): return 0 if self.delay_factor == 0: diff --git a/lockfile.py b/lockfile.py index 3eedae172a..4ea9e6f31e 100644 --- a/lockfile.py +++ b/lockfile.py @@ -80,7 +80,7 @@ def _try_lock(lockfile): def _lock(path, timeout=0): """_lock returns function to release the lock if locking was successful. - _lock also implements simple retry logic.""" + _lock also implements simple retry logic.""" elapsed = 0 while True: try: @@ -101,13 +101,13 @@ def _lock(path, timeout=0): def lock(path, timeout=0): """Get exclusive lock to path. - Usage: - import lockfile - with lockfile.lock(path, timeout): - # Do something - pass + Usage: + import lockfile + with lockfile.lock(path, timeout): + # Do something + pass - """ + """ release_fn = _lock(path, timeout) try: yield diff --git a/metrics.py b/metrics.py index 5645f28587..9ba1083d61 100644 --- a/metrics.py +++ b/metrics.py @@ -267,9 +267,9 @@ class MetricsCollector(object): def collect_metrics(self, command_name): """A decorator used to collect metrics over the life of a function. - This decorator executes the function and collects metrics about the system - environment and the function performance. - """ + This decorator executes the function and collects metrics about the + system environment and the function performance. + """ def _decorator(func): if not self.config.should_collect_metrics: return func @@ -287,11 +287,12 @@ class MetricsCollector(object): def print_notice_and_exit(self): """A context manager used to print the notice and terminate execution. - This decorator executes the function and prints the monitoring notice if - necessary. If an exception is raised, we will catch it, and print it before - printing the metrics collection notice. 
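
computeDelay's guard clauses are visible in the git_retry hunk above; a hypothetical completion of the policy its docstring describes (no wait before the first attempt, constant delay when delay_factor is 0, otherwise exponential growth; the real arithmetic lives in git_retry.py):

    def compute_delay(delay, delay_factor, iteration):
        if not delay or iteration == 0:
            return 0
        if delay_factor == 0:
            return delay  # Constant delay between retries.
        return delay * (delay_factor ** (iteration - 1))

    assert compute_delay(2.0, 0, 3) == 2.0
    assert compute_delay(2.0, 2, 3) == 8.0
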
- This will call sys.exit() with an appropriate exit code to ensure the notice - is the last thing printed.""" + This decorator executes the function and prints the monitoring notice if + necessary. If an exception is raised, we will catch it, and print it + before printing the metrics collection notice. + This will call sys.exit() with an appropriate exit code to ensure the + notice is the last thing printed. + """ # Needed to preserve the __name__ and __doc__ attributes of func. try: yield diff --git a/metrics_utils.py b/metrics_utils.py index ee7bb3212a..11ba04f754 100644 --- a/metrics_utils.py +++ b/metrics_utils.py @@ -211,20 +211,20 @@ def extract_known_subcommand_args(args): def extract_http_metrics(request_uri, method, status, response_time): """Extract metrics from the request URI. - Extracts the host, path, and arguments from the request URI, and returns them - along with the method, status and response time. + Extracts the host, path, and arguments from the request URI, and returns + them along with the method, status and response time. - The host, method, path and arguments must be in the KNOWN_HTTP_* constants - defined above. + The host, method, path and arguments must be in the KNOWN_HTTP_* constants + defined above. - Arguments are the values of the o= url parameter. In Gerrit, additional fields - can be obtained by adding o parameters, each option requires more database - lookups and slows down the query response time to the client, so we make an - effort to collect them. + Arguments are the values of the o= url parameter. In Gerrit, additional + fields can be obtained by adding o parameters, each option requires more + database lookups and slows down the query response time to the client, so + we make an effort to collect them. - The regex defined in KNOWN_HTTP_PATH_RES are checked against the path, and - those that match will be returned. - """ + The regex defined in KNOWN_HTTP_PATH_RES are checked against the path, and + those that match will be returned. + """ http_metrics = { 'status': status, 'response_time': response_time, @@ -256,9 +256,9 @@ def extract_http_metrics(request_uri, method, status, response_time): def get_repo_timestamp(path_to_repo): """Get an approximate timestamp for the upstream of |path_to_repo|. - Returns the top two bits of the timestamp of the HEAD for the upstream of the - branch path_to_repo is checked out at. - """ + Returns the top two bits of the timestamp of the HEAD for the upstream of + the branch path_to_repo is checked out at. + """ # Get the upstream for the current branch. If we're not in a branch, # fallback to HEAD. try: diff --git a/my_activity.py b/my_activity.py index e30849170b..59d378e3aa 100755 --- a/my_activity.py +++ b/my_activity.py @@ -320,19 +320,21 @@ class MyActivity(object): def filter_modified_monorail_issue(self, issue): """Precisely checks if an issue has been modified in the time range. - This fetches all issue comments to check if the issue has been modified in - the time range specified by user. This is needed because monorail only - allows filtering by last updated and published dates, which is not - sufficient to tell whether a given issue has been modified at some specific - time range. Any update to the issue is a reported as comment on Monorail. + This fetches all issue comments to check if the issue has been modified + in the time range specified by user. 
This is needed because monorail
+    only allows filtering by last updated and published dates, which is not
+    sufficient to tell whether a given issue has been modified at some
+    specific time range. Any update to the issue is reported as a comment
+    on Monorail.

-    Args:
-    issue: Issue dict as returned by monorail_query_issues method. In
-    particular, must have a key 'uid' formatted as 'project:issue_id'.
+    Args:
+    issue: Issue dict as returned by monorail_query_issues method. In
+    particular, must have a key 'uid' formatted as
+    'project:issue_id'.

-    Returns:
-    Passed issue if modified, None otherwise.
-    """
+    Returns:
+    Passed issue if modified, None otherwise.
+    """
     http = self.monorail_get_auth_http()
     project, issue_id = issue['uid'].split(':')
     url = ('https://monorail-prod.appspot.com/_ah/api/monorail/v1/projects'
diff --git a/owners_client.py b/owners_client.py
index 5f948e87eb..7a523db308 100644
--- a/owners_client.py
+++ b/owners_client.py
@@ -12,17 +12,17 @@ import git_common
 class OwnersClient(object):
     """Interact with OWNERS files in a repository.

-    This class allows you to interact with OWNERS files in a repository both the
-    Gerrit Code-Owners plugin REST API, and the owners database implemented by
-    Depot Tools in owners.py:
+    This class allows you to interact with OWNERS files in a repository via
+    both the Gerrit Code-Owners plugin REST API and the owners database
+    implemented by Depot Tools in owners.py:

-    - List all the owners for a group of files.
-    - Check if files have been approved.
-    - Suggest owners for a group of files.
+    - List all the owners for a group of files.
+    - Check if files have been approved.
+    - Suggest owners for a group of files.

-    All code should use this class to interact with OWNERS files instead of the
-    owners database in owners.py
-    """
+    All code should use this class to interact with OWNERS files instead of the
+    owners database in owners.py.
+    """
     # '*' means that everyone can approve.
     EVERYONE = '*'

@@ -39,15 +39,15 @@ class OwnersClient(object):
     def ListOwners(self, path):
         """List all owners for a file.

-        The returned list is sorted so that better owners appear first.
-        """
+        The returned list is sorted so that better owners appear first.
+        """
         raise Exception('Not implemented')

     def BatchListOwners(self, paths):
         """List all owners for a group of files.

-        Returns a dictionary {path: [owners]}.
-        """
+        Returns a dictionary {path: [owners]}.
+        """
         with git_common.ScopedPool(kind='threads') as pool:
             return dict(
                 pool.imap_unordered(lambda p: (p, self.ListOwners(p)), paths))

     def GetFilesApprovalStatus(self, paths, approvers, reviewers):
         """Check the approval status for the given paths.

-        Utility method to check for approval status when a change has not yet been
-        created, given reviewers and approvers.
+        Utility method to check for approval status when a change has not yet
+        been created, given reviewers and approvers.

-        See GetChangeApprovalStatus for description of the returned value.
-        """
+        See GetChangeApprovalStatus for description of the returned value.
+        """
         approvers = set(approvers)
         if approvers:
             approvers.add(self.EVERYONE)
@@ -167,8 +167,8 @@ class GerritClient(OwnersClient):
     def BatchListBestOwners(self, paths):
         """List only the higest-scoring owners for a group of files.

-        Returns a dictionary {path: [owners]}.
-        """
+        Returns a dictionary {path: [owners]}.
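
BatchListOwners' fan-out above is an ordinary thread-pool map; the same shape with a plain multiprocessing ThreadPool standing in for git_common.ScopedPool:

    from multiprocessing.pool import ThreadPool

    def batch_list_owners(list_owners, paths):
        with ThreadPool() as pool:
            return dict(pool.imap_unordered(lambda p: (p, list_owners(p)),
                                            paths))

    owners = batch_list_owners(lambda p: ['owner@example.com'], ['a/OWNERS'])
    assert owners == {'a/OWNERS': ['owner@example.com']}
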
+ """ with git_common.ScopedPool(kind='threads') as pool: return dict( pool.imap_unordered(lambda p: (p, self.ListBestOwners(p)), @@ -178,8 +178,8 @@ class GerritClient(OwnersClient): def GetCodeOwnersClient(host, project, branch): """Get a new OwnersClient. - Uses GerritClient and raises an exception if code-owners plugin is not - available.""" + Uses GerritClient and raises an exception if code-owners plugin is not + available.""" if gerrit_util.IsCodeOwnersEnabledOnHost(host): return GerritClient(host, project, branch) raise Exception( diff --git a/presubmit_canned_checks.py b/presubmit_canned_checks.py index f3fb999c81..ce685fdc0a 100644 --- a/presubmit_canned_checks.py +++ b/presubmit_canned_checks.py @@ -73,7 +73,7 @@ _CORP_LINK_KEYWORD = '.corp.google' def CheckChangeHasBugFieldFromChange(change, output_api, show_suggestions=True): """Requires that the changelist have a Bug: field. If show_suggestions is - False then only report on incorrect tags, not missing tags.""" + False then only report on incorrect tags, not missing tags.""" bugs = change.BugsFromDescription() results = [] if bugs: @@ -122,7 +122,7 @@ def CheckChangeHasNoUnwantedTags(input_api, output_api): def CheckDoNotSubmitInDescription(input_api, output_api): """Checks that the user didn't add 'DO NOT ''SUBMIT' to the CL description. - """ + """ # Keyword is concatenated to avoid presubmit check rejecting the CL. keyword = 'DO NOT ' + 'SUBMIT' if keyword in input_api.change.DescriptionText(): @@ -171,11 +171,11 @@ def CheckChangeWasUploaded(input_api, output_api): def CheckDescriptionUsesColonInsteadOfEquals(input_api, output_api): """Checks that the CL description uses a colon after 'Bug' and 'Fixed' tags - instead of equals. + instead of equals. - crbug.com only interprets the lines "Bug: xyz" and "Fixed: xyz" but not - "Bug=xyz" or "Fixed=xyz". - """ + crbug.com only interprets the lines "Bug: xyz" and "Fixed: xyz" but not + "Bug=xyz" or "Fixed=xyz". + """ text = input_api.change.DescriptionText() if input_api.re.search(r'^(Bug|Fixed)=', text, @@ -192,8 +192,8 @@ def CheckDescriptionUsesColonInsteadOfEquals(input_api, output_api): def CheckAuthorizedAuthor(input_api, output_api, bot_allowlist=None): """For non-googler/chromites committers, verify the author's email address is - in AUTHORS. - """ + in AUTHORS. + """ if input_api.is_committing or input_api.no_diffs: error_type = output_api.PresubmitError else: @@ -376,8 +376,8 @@ def CheckChangeHasNoCrAndHasOnlyOneEol(input_api, source_file_filter=None): """Runs both CheckChangeHasNoCR and CheckChangeHasOnlyOneEOL in one pass. - It is faster because it is reading the file only once. - """ + It is faster because it is reading the file only once. + """ cr_files = [] eof_files = [] for f in input_api.AffectedSourceFiles(source_file_filter): @@ -403,8 +403,8 @@ def CheckChangeHasNoCrAndHasOnlyOneEol(input_api, def CheckGenderNeutral(input_api, output_api, source_file_filter=None): """Checks that there are no gendered pronouns in any of the text files to be - submitted. - """ + submitted. + """ if input_api.no_diffs: return [] @@ -434,16 +434,16 @@ def _ReportErrorFileAndLine(filename, line_num, dummy_line): def _GenerateAffectedFileExtList(input_api, source_file_filter): """Generate a list of (file, extension) tuples from affected files. - The result can be fed to _FindNewViolationsOfRule() directly, or - could be filtered before doing that. + The result can be fed to _FindNewViolationsOfRule() directly, or + could be filtered before doing that. 
-    Args:
-    input_api: object to enumerate the affected files.
-    source_file_filter: a filter to be passed to the input api.
-    Yields:
-    A list of (file, extension) tuples, where |file| is an affected
-    file, and |extension| its file path extension.
-    """
+    Args:
+    input_api: object to enumerate the affected files.
+    source_file_filter: a filter to be passed to the input api.
+    Yields:
+    A list of (file, extension) tuples, where |file| is an affected
+    file, and |extension| its file path extension.
+    """
     for f in input_api.AffectedFiles(include_deletes=False,
                                      file_filter=source_file_filter):
         extension = str(f.LocalPath()).rsplit('.', 1)[-1]
@@ -456,19 +456,19 @@ def _FindNewViolationsOfRuleForList(callable_rule,
     """Find all newly introduced violations of a per-line rule (a callable).

     Prefer calling _FindNewViolationsOfRule() instead of this function, unless
-    the list of affected files need to be filtered in a special way.
+    the list of affected files needs to be filtered in a special way.

-    Arguments:
-    callable_rule: a callable taking a file extension and line of input and
-    returning True if the rule is satisfied and False if there was a problem.
-    file_ext_list: a list of input (file, extension) tuples, as returned by
-    _GenerateAffectedFileExtList().
-    error_formatter: a callable taking (filename, line_number, line) and
-    returning a formatted error string.
+    Arguments:
+    callable_rule: a callable taking a file extension and line of input and
+    returning True if the rule is satisfied and False if there was a problem.
+    file_ext_list: a list of input (file, extension) tuples, as returned by
+    _GenerateAffectedFileExtList().
+    error_formatter: a callable taking (filename, line_number, line) and
+    returning a formatted error string.

-    Returns:
-    A list of the newly-introduced violations reported by the rule.
-    """
+    Returns:
+    A list of the newly-introduced violations reported by the rule.
+    """
     errors = []
     for f, extension in file_ext_list:
         # For speed, we do two passes, checking first the full file.  Shelling
@@ -491,17 +491,17 @@ def _FindNewViolationsOfRule(callable_rule,
                              error_formatter=_ReportErrorFileAndLine):
     """Find all newly introduced violations of a per-line rule (a callable).

-    Arguments:
-    callable_rule: a callable taking a file extension and line of input and
-    returning True if the rule is satisfied and False if there was a problem.
-    input_api: object to enumerate the affected files.
-    source_file_filter: a filter to be passed to the input api.
-    error_formatter: a callable taking (filename, line_number, line) and
-    returning a formatted error string.
+    Arguments:
+    callable_rule: a callable taking a file extension and line of input and
+    returning True if the rule is satisfied and False if there was a problem.
+    input_api: object to enumerate the affected files.
+    source_file_filter: a filter to be passed to the input api.
+    error_formatter: a callable taking (filename, line_number, line) and
+    returning a formatted error string.

-    Returns:
-    A list of the newly-introduced violations reported by the rule.
-    """
+    Returns:
+    A list of the newly-introduced violations reported by the rule.
+    """
     if input_api.no_diffs:
         return []
     return _FindNewViolationsOfRuleForList(
@@ -512,8 +512,8 @@ def _FindNewViolationsOfRule(callable_rule,
 def CheckChangeHasNoTabs(input_api, output_api, source_file_filter=None):
     """Checks that there are no tab characters in any of the text files to be
-    submitted.
-    """
+    submitted.
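
A worked example of the callable_rule shape these helpers expect; the rule returns True when a line is acceptable, and violations are collected per file:line (the data here is hypothetical):

    def no_tabs_rule(extension, line):
        return '\t' not in line

    affected = {'foo.py': ['x = 1', '\ty = 2']}  # Hypothetical affected files.
    violations = ['%s:%d' % (name, num)
                  for name, lines in affected.items()
                  for num, line in enumerate(lines, 1)
                  if not no_tabs_rule(name.rsplit('.', 1)[-1], line)]
    assert violations == ['foo.py:2']
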
+ """ # In addition to the filter, make sure that makefiles are skipped. if not source_file_filter: # It's the default filter. @@ -569,8 +569,8 @@ def CheckChangeHasNoStrayWhitespace(input_api, def CheckLongLines(input_api, output_api, maxlen, source_file_filter=None): """Checks that there aren't any lines longer than maxlen characters in any of - the text files to be submitted. - """ + the text files to be submitted. + """ if input_api.no_diffs: return [] maxlens = { @@ -726,8 +726,7 @@ def CheckLicense(input_api, project_name=None, source_file_filter=None, accept_empty_files=True): - """Verifies the license header. - """ + """Verifies the license header.""" # Early-out if the license_re is guaranteed to match everything. if license_re_param and license_re_param == '.*': @@ -851,11 +850,11 @@ def CheckLicense(input_api, def CheckChromiumDependencyMetadata(input_api, output_api, file_filter=None): """Check files for Chromium third party dependency metadata have sufficient - information, and are correctly formatted. + information, and are correctly formatted. - See the README.chromium.template at - https://chromium.googlesource.com/chromium/src/+/main/third_party/README.chromium.template - """ + See the README.chromium.template at + https://chromium.googlesource.com/chromium/src/+/main/third_party/README.chromium.template + """ # If the file filter is unspecified, filter to known Chromium metadata # files. if file_filter is None: @@ -901,17 +900,17 @@ def CheckTreeIsOpen(input_api, json_url=None): """Check whether to allow commit without prompt. - Supports two styles: - 1. Checks that an url's content doesn't match a regexp that would mean that - the tree is closed. (old) - 2. Check the json_url to decide whether to allow commit without prompt. - Args: - input_api: input related apis. - output_api: output related apis. - url: url to use for regex based tree status. - closed: regex to match for closed status. - json_url: url to download json style status. - """ + Supports two styles: + 1. Checks that an url's content doesn't match a regexp that would mean that + the tree is closed. (old) + 2. Check the json_url to decide whether to allow commit without prompt. + Args: + input_api: input related apis. + output_api: output related apis. + url: url to use for regex based tree status. + closed: regex to match for closed status. + json_url: url to download json style status. + """ if not input_api.is_committing or \ 'PRESUBMIT_SKIP_NETWORK' in _os.environ: return [] @@ -963,11 +962,11 @@ def GetUnitTestsInDirectory(input_api, blocklist=None): """Lists all files in a directory and runs them. Doesn't recurse. - It's mainly a wrapper for RunUnitTests. Use allowlist and blocklist to filter - tests accordingly. run_on_python2, run_on_python3, and skip_shebang_check are - no longer used but have to be retained because of the many callers in other - repos that pass them in. - """ + It's mainly a wrapper for RunUnitTests. Use allowlist and blocklist to filter + tests accordingly. run_on_python2, run_on_python3, and skip_shebang_check are + no longer used but have to be retained because of the many callers in other + repos that pass them in. + """ del run_on_python2 del run_on_python3 del skip_shebang_check @@ -1011,11 +1010,11 @@ def GetUnitTests(input_api, skip_shebang_check=True): """Runs all unit tests in a directory. - On Windows, sys.executable is used for unit tests ending with ".py". 
- run_on_python2, run_on_python3, and skip_shebang_check are no longer used but - have to be retained because of the many callers in other repos that pass them - in. - """ + On Windows, sys.executable is used for unit tests ending with ".py". + run_on_python2, run_on_python3, and skip_shebang_check are no longer used but + have to be retained because of the many callers in other repos that pass them + in. + """ del run_on_python2 del run_on_python3 del skip_shebang_check @@ -1060,11 +1059,11 @@ def GetUnitTestsRecursively(input_api, skip_shebang_check=True): """Gets all files in the directory tree (git repo) that match files_to_check. - Restricts itself to only find files within the Change's source repo, not - dependencies. run_on_python2, run_on_python3, and skip_shebang_check are no - longer used but have to be retained because of the many callers in other repos - that pass them in. - """ + Restricts itself to only find files within the Change's source repo, not + dependencies. run_on_python2, run_on_python3, and skip_shebang_check are no + longer used but have to be retained because of the many callers in other repos + that pass them in. + """ del run_on_python2 del run_on_python3 del skip_shebang_check @@ -1094,10 +1093,10 @@ def GetUnitTestsRecursively(input_api, def GetPythonUnitTests(input_api, output_api, unit_tests, python3=False): """Run the unit tests out of process, capture the output and use the result - code to determine success. + code to determine success. - DEPRECATED. - """ + DEPRECATED. + """ # We don't want to hinder users from uploading incomplete patches. if input_api.is_committing or input_api.no_diffs: message_type = output_api.PresubmitError @@ -1144,9 +1143,9 @@ def GetPythonUnitTests(input_api, output_api, unit_tests, python3=False): def RunUnitTestsInDirectory(input_api, *args, **kwargs): """Run tests in a directory serially. - For better performance, use GetUnitTestsInDirectory and then - pass to input_api.RunTests. - """ + For better performance, use GetUnitTestsInDirectory and then + pass to input_api.RunTests. + """ return input_api.RunTests( GetUnitTestsInDirectory(input_api, *args, **kwargs), False) @@ -1154,17 +1153,17 @@ def RunUnitTestsInDirectory(input_api, *args, **kwargs): def RunUnitTests(input_api, *args, **kwargs): """Run tests serially. - For better performance, use GetUnitTests and then pass to - input_api.RunTests. - """ + For better performance, use GetUnitTests and then pass to + input_api.RunTests. + """ return input_api.RunTests(GetUnitTests(input_api, *args, **kwargs), False) def RunPythonUnitTests(input_api, *args, **kwargs): """Run python tests in a directory serially. - DEPRECATED - """ + DEPRECATED + """ return input_api.RunTests(GetPythonUnitTests(input_api, *args, **kwargs), False) @@ -1212,10 +1211,10 @@ def GetPylint(input_api, version='2.7'): """Run pylint on python files. - The default files_to_check enforces looking only at *.py files. + The default files_to_check enforces looking only at *.py files. - Currently only pylint version '2.6' and '2.7' are supported. - """ + Currently only pylint version '2.6' and '2.7' are supported. + """ files_to_check = tuple(files_to_check or (r'.*\.py$', )) files_to_skip = tuple(files_to_skip or input_api.DEFAULT_FILES_TO_SKIP) @@ -1344,9 +1343,9 @@ def GetPylint(input_api, def RunPylint(input_api, *args, **kwargs): """Legacy presubmit function. - For better performance, get all tests and then pass to - input_api.RunTests. 
- """ + For better performance, get all tests and then pass to + input_api.RunTests. + """ return input_api.RunTests(GetPylint(input_api, *args, **kwargs), False) @@ -1426,8 +1425,8 @@ def CheckNoNewMetadataInOwners(input_api, output_api): def CheckOwnersDirMetadataExclusive(input_api, output_api): """Check that metadata in OWNERS files and DIR_METADATA files are mutually - exclusive. - """ + exclusive. + """ _METADATA_LINE_RE = input_api.re.compile( r'^#\s*(TEAM|COMPONENT|OS|WPT-NOTIFY)+\s*:\s*\S+$', input_api.re.MULTILINE) @@ -1498,9 +1497,9 @@ def GetCodereviewOwnerAndReviewers(input_api, approval_needed=True): """Return the owner and reviewers of a change, if any. - If approval_needed is True, only reviewers who have approved the change - will be returned. - """ + If approval_needed is True, only reviewers who have approved the change + will be returned. + """ # Recognizes 'X@Y' email addresses. Very simplistic. EMAIL_REGEXP = input_api.re.compile(r'^[\w\-\+\%\.]+\@[\w\-\+\%\.]+$') issue = input_api.change.issue @@ -1550,26 +1549,26 @@ def PanProjectChecks(input_api, global_checks=True): """Checks that ALL chromium orbit projects should use. - These are checks to be run on all Chromium orbit project, including: - Chromium - Native Client - V8 - When you update this function, please take this broad scope into account. - Args: - input_api: Bag of input related interfaces. - output_api: Bag of output related interfaces. - excluded_paths: Don't include these paths in common checks. - text_files: Which file are to be treated as documentation text files. - license_header: What license header should be on files. - project_name: What is the name of the project as it appears in the license. - global_checks: If True run checks that are unaffected by other options or by - the PRESUBMIT script's location, such as CheckChangeHasDescription. - global_checks should be passed as False when this function is called from - locations other than the project's root PRESUBMIT.py, to avoid redundant - checking. - Returns: - A list of warning or error objects. - """ + These are checks to be run on all Chromium orbit project, including: + Chromium + Native Client + V8 + When you update this function, please take this broad scope into account. + Args: + input_api: Bag of input related interfaces. + output_api: Bag of output related interfaces. + excluded_paths: Don't include these paths in common checks. + text_files: Which file are to be treated as documentation text files. + license_header: What license header should be on files. + project_name: What is the name of the project as it appears in the license. + global_checks: If True run checks that are unaffected by other options or by + the PRESUBMIT script's location, such as CheckChangeHasDescription. + global_checks should be passed as False when this function is called from + locations other than the project's root PRESUBMIT.py, to avoid redundant + checking. + Returns: + A list of warning or error objects. + """ excluded_paths = tuple(excluded_paths or []) text_files = tuple(text_files or ( r'.+\.txt$', @@ -1764,13 +1763,13 @@ def CheckGNFormatted(input_api, output_api): def CheckCIPDManifest(input_api, output_api, path=None, content=None): """Verifies that a CIPD ensure file manifest is valid against all platforms. - Exactly one of "path" or "content" must be provided. An assertion will occur - if neither or both are provided. + Exactly one of "path" or "content" must be provided. An assertion will occur + if neither or both are provided. 
@@ -1764,13 +1763,13 @@ def CheckGNFormatted(input_api, output_api):
 def CheckCIPDManifest(input_api, output_api, path=None, content=None):
 """Verifies that a CIPD ensure file manifest is valid against all platforms.
- Exactly one of "path" or "content" must be provided. An assertion will occur
- if neither or both are provided.
- Args:
- path (str): If provided, the filesystem path to the manifest to verify.
- content (str): If provided, the raw content of the manifest to veirfy.
- """
+ Exactly one of "path" or "content" must be provided. An assertion will occur
+ if neither or both are provided.
+ Args:
+ path (str): If provided, the filesystem path to the manifest to verify.
+ content (str): If provided, the raw content of the manifest to verify.
+ """
 cipd_bin = 'cipd' if not input_api.is_windows else 'cipd.bat'
 cmd = [cipd_bin, 'ensure-file-verify']
 kwargs = {}
@@ -1804,12 +1803,12 @@ def CheckCIPDManifest(input_api, output_api, path=None, content=None):
 def CheckCIPDPackages(input_api, output_api, platforms, packages):
 """Verifies that all named CIPD packages can be resolved against all supplied
- platforms.
+ platforms.
- Args:
- platforms (list): List of CIPD platforms to verify.
- packages (dict): Mapping of package name to version.
- """
+ Args:
+ platforms (list): List of CIPD platforms to verify.
+ packages (dict): Mapping of package name to version.
+ """
 manifest = []
 for p in platforms:
 manifest.append('$VerifiedPlatform %s' % (p, ))
@@ -1821,13 +1820,13 @@ def CheckCIPDClientDigests(input_api, output_api, client_version_file):
 """Verifies that *.digests file was correctly regenerated.
- .digests file contains pinned hashes of the CIPD client.
- It is consulted during CIPD client bootstrap and self-update. It should be
- regenerated each time CIPD client version file changes.
+ .digests file contains pinned hashes of the CIPD client.
+ It is consulted during CIPD client bootstrap and self-update. It should be
+ regenerated each time the CIPD client version file changes.
- Args:
- client_version_file (str): Path to a text file with CIPD client version.
- """
+ Args:
+ client_version_file (str): Path to a text file with CIPD client version.
+ """
 cmd = [
 'cipd' if not input_api.is_windows else 'cipd.bat',
 'selfupdate-roll',
@@ -1847,17 +1846,17 @@ def CheckForCommitObjects(input_api, output_api):
 """Validates that commit objects match DEPS.
- Commit objects are put into the git tree typically by submodule tooling.
- Because we use gclient to handle external repository references instead,
- we want to ensure DEPS content and Git are in sync when desired.
+ Commit objects are put into the git tree typically by submodule tooling.
+ Because we use gclient to handle external repository references instead,
+ we want to ensure DEPS content and Git are in sync when desired.
- Args:
- input_api: Bag of input related interfaces.
- output_api: Bag of output related interfaces.
+ Args:
+ input_api: Bag of input related interfaces.
+ output_api: Bag of output related interfaces.
- Returns:
- A presubmit error if a commit object is not expected.
- """
+ Returns:
+ A presubmit error if a commit object is not expected.
+ """
 # Get DEPS file.
 deps_file = input_api.os_path.join(input_api.PresubmitLocalPath(), 'DEPS')
 if not input_api.os_path.isfile(deps_file):
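The parse_tree_entry helper reformatted in the next hunk splits tree entries of the "filemode type hash\tname" form produced by git ls-tree, which is where the commit objects (gitlinks, file mode 160000) that CheckForCommitObjects validates show up. A standalone sketch of the same split, on a sample entry invented for illustration:

    # Mirrors the parse_tree_entry split: "filemode type hash\tname" -> 4-tuple.
    def split_tree_entry(ent):
        tabparts = ent.split('\t', 1)
        spaceparts = tabparts[0].split(' ', 2)
        return (spaceparts[0], spaceparts[1], spaceparts[2], tabparts[1])

    # Sample git ls-tree line (hash shortened for illustration); type 'commit'
    # with mode 160000 marks a gitlink, i.e. a submodule-style commit object.
    mode, otype, ohash, name = split_tree_entry(
        '160000 commit deadbeef\tthird_party/example')
    assert (mode, otype, name) == ('160000', 'commit', 'third_party/example')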
@@ -1880,12 +1879,12 @@ def CheckForCommitObjects(input_api, output_api):
 def parse_tree_entry(ent):
 """Splits a tree entry into components
- Args:
- ent: a tree entry in the form "filemode type hash\tname"
+ Args:
+ ent: a tree entry in the form "filemode type hash\tname"
- Returns:
- The tree entry split into component parts
- """
+ Returns:
+ The tree entry split into component parts
+ """
 tabparts = ent.split('\t', 1)
 spaceparts = tabparts[0].split(' ', 2)
 return (spaceparts[0], spaceparts[1], spaceparts[2], tabparts[1])
@@ -2002,19 +2001,19 @@ def _ParseDeps(contents):
 def CheckVPythonSpec(input_api, output_api, file_filter=None):
 """Validates any changed .vpython and .vpython3 files with vpython
- verification tool.
+ verification tool.
- Args:
- input_api: Bag of input related interfaces.
- output_api: Bag of output related interfaces.
- file_filter: Custom function that takes a path (relative to client root) and
- returns boolean, which is used to filter files for which to apply the
- verification to. Defaults to any path ending with .vpython, which captures
- both global .vpython and