Merge remote-tracking branch 'upstream/master' into mixed-line-ending

This commit is contained in:
Morgan Courbet 2017-07-18 19:40:39 +02:00
commit 55658c4bbc
No known key found for this signature in database
GPG key ID: 467299D324A21B24
47 changed files with 923 additions and 302 deletions

View file

@ -51,7 +51,7 @@ def main(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument(
'filenames', nargs='*',
help='Filenames pre-commit believes are changed.'
help='Filenames pre-commit believes are changed.',
)
parser.add_argument(
'--maxkb', type=int, default=500,

View file

@ -4,7 +4,7 @@ from __future__ import unicode_literals
import argparse
import ast
import os.path
import platform
import sys
import traceback
@ -14,19 +14,19 @@ def check_ast(argv=None):
parser.add_argument('filenames', nargs='*')
args = parser.parse_args(argv)
_, interpreter = os.path.split(sys.executable)
retval = 0
for filename in args.filenames:
try:
ast.parse(open(filename, 'rb').read(), filename=filename)
except SyntaxError:
print('{}: failed parsing with {}:'.format(
filename, interpreter,
print('{}: failed parsing with {} {}:'.format(
filename,
platform.python_implementation(),
sys.version.partition(' ')[0],
))
print('\n{}'.format(
' ' + traceback.format_exc().replace('\n', '\n ')
' ' + traceback.format_exc().replace('\n', '\n '),
))
retval = 1
return retval

View file

@ -45,7 +45,7 @@ def main(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument(
'filenames', nargs='*',
help='Filenames pre-commit believes are changed.'
help='Filenames pre-commit believes are changed.',
)
args = parser.parse_args(argv)

View file

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import argparse
@ -30,7 +31,7 @@ def check_docstring_first(src, filename='<unknown>'):
'{}:{} Multiple module docstrings '
'(first docstring on line {}).'.format(
filename, sline, found_docstring_line,
)
),
)
return 1
elif found_code_line is not None:
@ -38,7 +39,7 @@ def check_docstring_first(src, filename='<unknown>'):
'{}:{} Module docstring appears after code '
'(code seen on line {}).'.format(
filename, sline, found_code_line,
)
),
)
return 1
else:

View file

@ -0,0 +1,40 @@
"""Check that executable text files have a shebang."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import pipes
import sys
def check_has_shebang(path):
    """Return 0 if *path* begins with the two bytes ``#!``, else print a
    diagnostic to stderr and return 1.

    :param path: filename of an executable text file to check
    :return: 0 on success, 1 when the shebang is missing/invalid
    """
    # pipes.quote was an undocumented alias of shlex.quote and the pipes
    # module was removed in Python 3.13 -- use shlex directly.
    import shlex

    # Only the first two bytes matter; read in binary so encoding is moot.
    with open(path, 'rb') as f:
        first_bytes = f.read(2)

    if first_bytes != b'#!':
        print(
            '{path}: marked executable but has no (or invalid) shebang!\n'
            "    If it isn't supposed to be executable, try: chmod -x {quoted}\n"
            '    If it is supposed to be executable, double-check its shebang.'.format(
                path=path,
                quoted=shlex.quote(path),
            ),
            file=sys.stderr,
        )
        return 1
    else:
        return 0
def main(argv=None):
    """CLI entry point: check each given filename, OR-ing the results.

    :return: 0 when every file has a shebang, 1 otherwise
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('filenames', nargs='*')
    filenames = parser.parse_args(argv).filenames

    status = 0
    for path in filenames:
        if check_has_shebang(path):
            status = 1
    return status

View file

@ -7,7 +7,7 @@ CONFLICT_PATTERNS = [
b'<<<<<<< ',
b'======= ',
b'=======\n',
b'>>>>>>> '
b'>>>>>>> ',
]
WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'
@ -15,7 +15,11 @@ WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'
def is_in_merge():
    """Best-effort check for an in-progress merge in the current directory.

    MERGE_MSG exists during both merges and rebases; MERGE_HEAD only during
    a merge proper, and the rebase-* directories during a rebase.  The
    rendered diff interleaved the removed line with its replacement -- this
    is the post-change version of the expression.
    """
    return (
        os.path.exists(os.path.join('.git', 'MERGE_MSG')) and
        (
            os.path.exists(os.path.join('.git', 'MERGE_HEAD')) or
            os.path.exists(os.path.join('.git', 'rebase-apply')) or
            os.path.exists(os.path.join('.git', 'rebase-merge'))
        )
    )

View file

@ -29,7 +29,7 @@ class ImportStatementParser(ast.NodeVisitor):
def visit_ImportFrom(self, node):
if node.module in DEBUG_STATEMENTS:
self.debug_import_statements.append(
DebugStatement(node.module, node.lineno, node.col_offset)
DebugStatement(node.module, node.lineno, node.col_offset),
)
@ -52,7 +52,7 @@ def check_file_for_debug_statements(filename):
debug_statement.line,
debug_statement.col,
debug_statement.name,
)
),
)
return 1
else:

View file

@ -12,7 +12,7 @@ def get_aws_credential_files_from_env():
files = set()
for env_var in (
'AWS_CONFIG_FILE', 'AWS_CREDENTIAL_FILE', 'AWS_SHARED_CREDENTIALS_FILE',
'BOTO_CONFIG'
'BOTO_CONFIG',
):
if env_var in os.environ:
files.add(os.environ[env_var])
@ -23,7 +23,7 @@ def get_aws_secrets_from_env():
"""Extract AWS secrets from environment variables."""
keys = set()
for env_var in (
'AWS_SECRET_ACCESS_KEY', 'AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN'
'AWS_SECRET_ACCESS_KEY', 'AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN',
):
if env_var in os.environ:
keys.add(os.environ[env_var])
@ -50,7 +50,7 @@ def get_aws_secrets_from_file(credentials_file):
for section in parser.sections():
for var in (
'aws_secret_access_key', 'aws_security_token',
'aws_session_token'
'aws_session_token',
):
try:
keys.add(parser.get(section, var))
@ -93,13 +93,13 @@ def main(argv=None):
help=(
'Location of additional AWS credential files from which to get '
'secret keys from'
)
),
)
parser.add_argument(
'--allow-missing-credentials',
dest='allow_missing_credentials',
action='store_true',
help='Allow hook to pass when no credentials are detected.'
help='Allow hook to pass when no credentials are detected.',
)
args = parser.parse_args(argv)
@ -124,7 +124,7 @@ def main(argv=None):
print(
'No AWS keys were found in the configured credential files and '
'environment variables.\nPlease ensure you have the correct '
'setting for --credentials-file'
'setting for --credentials-file',
)
return 2

View file

@ -0,0 +1,52 @@
"""
A very simple pre-commit hook that, when passed one or more filenames
as arguments, will sort the lines in those files.
An example use case for this: you have a deploy-whitelist.txt file
in a repo that contains a list of filenames that is used to specify
files to be included in a docker container. This file has one filename
per line. Various users are adding/removing lines from this file; using
this hook on that file should reduce the instances of git merge
conflicts and keep the file nicely ordered.
"""
from __future__ import print_function
import argparse
PASS = 0
FAIL = 1


def sort_file_contents(f):
    """Sort the lines of the open binary file *f* in place.

    Blank lines are dropped and the output always ends with exactly one
    newline.  Returns PASS when the file was already in order, FAIL after
    rewriting it.
    """
    raw_lines = f.readlines()
    ordered = sorted(line.strip(b'\n\r') for line in raw_lines if line.strip())

    current = b''.join(raw_lines)
    desired = b'\n'.join(ordered) + b'\n'

    if current == desired:
        return PASS

    # Rewrite from the top and drop any leftover tail.
    f.seek(0)
    f.write(desired)
    f.truncate()
    return FAIL
def main(argv=None):
    """Sort every file named on the command line.

    :return: PASS when no file changed, FAIL if any file was rewritten
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('filenames', nargs='+', help='Files to sort')
    args = parser.parse_args(argv)

    result = PASS
    for path in args.filenames:
        with open(path, 'rb+') as handle:
            outcome = sort_file_contents(handle)
            if outcome:
                print('Sorting {}'.format(path))
            result |= outcome
    return result

View file

@ -15,7 +15,8 @@ def is_on_branch(protected):
def main(argv=[]):
    """Return 1 (block the commit) when HEAD is on the protected branch.

    The rendered diff duplicated the ``add_argument`` line (removed +
    added form); this keeps only the post-change version.

    NOTE(review): argv defaults to [] rather than None, so running the
    script without an explicit argv ignores sys.argv -- confirm intended.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-b', '--branch', default='master', help='branch to disallow commits to',
    )
    args = parser.parse_args(argv)
    return int(is_on_branch(args.branch))

View file

@ -120,7 +120,7 @@ def pretty_format_json(argv=None):
except simplejson.JSONDecodeError:
print(
"Input File {} is not a valid JSON, consider using check-json"
.format(json_file)
.format(json_file),
)
return 1

View file

@ -3,6 +3,10 @@ from __future__ import print_function
import argparse
PASS = 0
FAIL = 1
class Requirement(object):
def __init__(self):
@ -30,21 +34,25 @@ class Requirement(object):
def fix_requirements(f):
requirements = []
before = []
before = tuple(f)
after = []
for line in f:
before.append(line)
before_string = b''.join(before)
# If the most recent requirement object has a value, then it's time to
# start building the next requirement object.
# If the file is empty (i.e. only whitespace/newlines) exit early
if before_string.strip() == b'':
return PASS
for line in before:
# If the most recent requirement object has a value, then it's
# time to start building the next requirement object.
if not len(requirements) or requirements[-1].value is not None:
requirements.append(Requirement())
requirement = requirements[-1]
# If we see a newline before any requirements, then this is a top of
# file comment.
# If we see a newline before any requirements, then this is a
# top of file comment.
if len(requirements) == 1 and line.strip() == b'':
if len(requirement.comments) and requirement.comments[0].startswith(b'#'):
requirement.value = b'\n'
@ -56,20 +64,18 @@ def fix_requirements(f):
requirement.value = line
for requirement in sorted(requirements):
for comment in requirement.comments:
after.append(comment)
after.extend(requirement.comments)
after.append(requirement.value)
before_string = b''.join(before)
after_string = b''.join(after)
if before_string == after_string:
return 0
return PASS
else:
f.seek(0)
f.write(after_string)
f.truncate()
return 1
return FAIL
def fix_requirements_txt(argv=None):
@ -77,7 +83,7 @@ def fix_requirements_txt(argv=None):
parser.add_argument('filenames', nargs='*', help='Filenames to fix')
args = parser.parse_args(argv)
retv = 0
retv = PASS
for arg in args.filenames:
with open(arg, 'rb+') as file_obj:

View file

@ -0,0 +1,123 @@
#!/usr/bin/env python
"""Sort a simple YAML file, keeping blocks of comments and definitions
together.
We assume a strict subset of YAML that looks like:
# block of header comments
# here that should always
# be at the top of the file
# optional comments
# can go here
key: value
key: value
key: value
In other words, we don't sort deeper than the top layer, and might corrupt
complicated YAML files.
"""
from __future__ import print_function
import argparse
QUOTES = ["'", '"']
def sort(lines):
    """Sort a YAML file in alphabetical order, keeping blocks together.

    :param lines: array of strings (without newlines)
    :return: sorted array of strings
    """
    remaining = list(lines)  # the parse_* helpers consume their input

    result = parse_block(remaining, header=True)
    for block in sorted(parse_blocks(remaining), key=first_key):
        if result:
            result.append('')  # blank separator between blocks
        result.extend(block)
    return result
def parse_block(lines, header=False):
    """Pop one block off the front of *lines* and return it.

    A header block ends at the first non-comment line; an ordinary block
    ends at the first empty line (or when *lines* is exhausted).

    :param lines: list of lines (consumed destructively)
    :param header: whether we are parsing a header block
    :return: list of lines that form the single block
    """
    taken = []
    while lines:
        candidate = lines[0]
        if not candidate:
            break
        if header and not candidate.startswith('#'):
            break
        taken.append(lines.pop(0))
    return taken
def parse_blocks(lines):
    """Consume all of *lines* and return every block found.

    :param lines: list of lines (consumed destructively)
    :return: list of blocks, where each block is a list of lines
    """
    collected = []
    while lines:
        if lines[0] == '':
            lines.pop(0)  # discard blank separator lines
            continue
        collected.append(parse_block(lines))
    return collected
def first_key(lines):
    """Return the sort key of a block: its first non-comment line, with a
    single leading quote stripped.

    >>> print(test)
    # some comment
    'foo': true
    >>> first_key(test)
    'foo'
    """
    for candidate in lines:
        if candidate.startswith('#'):
            continue  # comments never determine ordering
        if candidate[:1] in QUOTES:
            return candidate[1:]
        return candidate
def main(argv=None):
    """Sort each YAML file named on the command line in place.

    :return: 0 when no file changed, 1 if any file was rewritten
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('filenames', nargs='*', help='Filenames to fix')
    args = parser.parse_args(argv)
    retval = 0

    for filename in args.filenames:
        with open(filename, 'r+') as f:
            lines = [line.rstrip() for line in f.readlines()]
            new_lines = sort(lines)

            if lines != new_lines:
                # Bug fix: the format string had no {filename} placeholder,
                # so the message never named the file being rewritten.
                print('Fixing file `{filename}`'.format(filename=filename))
                f.seek(0)
                f.write("\n".join(new_lines) + "\n")
                f.truncate()
                retval = 1

    return retval
if __name__ == '__main__':
exit(main())

View file

@ -11,7 +11,7 @@ def validate_files(argv=None):
parser.add_argument('filenames', nargs='*')
parser.add_argument(
'--django', default=False, action='store_true',
help='Use Django-style test naming pattern (test*.py)'
help='Use Django-style test naming pattern (test*.py)',
)
args = parser.parse_args(argv)
@ -27,8 +27,8 @@ def validate_files(argv=None):
retcode = 1
print(
'{} does not match pattern "{}"'.format(
filename, test_name_pattern
)
filename, test_name_pattern,
),
)
return retcode

View file

@ -36,7 +36,7 @@ def fix_trailing_whitespace(argv=None):
const=[],
default=argparse.SUPPRESS,
dest='markdown_linebreak_ext',
help='Do not preserve linebreak spaces in Markdown'
help='Do not preserve linebreak spaces in Markdown',
)
parser.add_argument(
'--markdown-linebreak-ext',
@ -45,7 +45,7 @@ def fix_trailing_whitespace(argv=None):
default=['md,markdown'],
metavar='*|EXT[,EXT,...]',
nargs='?',
help='Markdown extensions (or *) for linebreak spaces'
help='Markdown extensions (or *) for linebreak spaces',
)
parser.add_argument('filenames', nargs='*', help='Filenames to fix')
args = parser.parse_args(argv)
@ -69,7 +69,7 @@ def fix_trailing_whitespace(argv=None):
parser.error(
"bad --markdown-linebreak-ext extension '{}' (has . / \\ :)\n"
" (probably filename; use '--markdown-linebreak-ext=EXT')"
.format(ext)
.format(ext),
)
return_code = 0