Use default flake8 config

Anthony Sottile 2019-02-11 19:56:15 -08:00
parent 634383cffd
commit 4575652bd2
16 changed files with 119 additions and 81 deletions

View File

@@ -21,22 +21,23 @@ BUILTIN_TYPES = {
}
BuiltinTypeCall = collections.namedtuple('BuiltinTypeCall', ['name', 'line', 'column'])
Call = collections.namedtuple('Call', ['name', 'line', 'column'])
class BuiltinTypeVisitor(ast.NodeVisitor):
class Visitor(ast.NodeVisitor):
def __init__(self, ignore=None, allow_dict_kwargs=True):
# type: (Optional[Sequence[str]], bool) -> None
self.builtin_type_calls = [] # type: List[BuiltinTypeCall]
self.builtin_type_calls = [] # type: List[Call]
self.ignore = set(ignore) if ignore else set()
self.allow_dict_kwargs = allow_dict_kwargs
def _check_dict_call(self, node): # type: (ast.Call) -> bool
return self.allow_dict_kwargs and (getattr(node, 'kwargs', None) or getattr(node, 'keywords', None))
return (
self.allow_dict_kwargs and
(getattr(node, 'kwargs', None) or getattr(node, 'keywords', None))
)
def visit_Call(self, node): # type: (ast.Call) -> None
if not isinstance(node.func, ast.Name):
# Ignore functions that are object attributes (`foo.bar()`).
# Assume that if the user calls `builtins.list()`, they know what
@@ -49,15 +50,15 @@ class BuiltinTypeVisitor(ast.NodeVisitor):
elif node.args:
return
self.builtin_type_calls.append(
BuiltinTypeCall(node.func.id, node.lineno, node.col_offset),
Call(node.func.id, node.lineno, node.col_offset),
)
def check_file_for_builtin_type_constructors(filename, ignore=None, allow_dict_kwargs=True):
# type: (str, Optional[Sequence[str]], bool) -> List[BuiltinTypeCall]
def check_file(filename, ignore=None, allow_dict_kwargs=True):
# type: (str, Optional[Sequence[str]], bool) -> List[Call]
with open(filename, 'rb') as f:
tree = ast.parse(f.read(), filename=filename)
visitor = BuiltinTypeVisitor(ignore=ignore, allow_dict_kwargs=allow_dict_kwargs)
visitor = Visitor(ignore=ignore, allow_dict_kwargs=allow_dict_kwargs)
visitor.visit(tree)
return visitor.builtin_type_calls
@@ -73,14 +74,17 @@ def main(argv=None): # type: (Optional[Sequence[str]]) -> int
mutex = parser.add_mutually_exclusive_group(required=False)
mutex.add_argument('--allow-dict-kwargs', action='store_true')
mutex.add_argument('--no-allow-dict-kwargs', dest='allow_dict_kwargs', action='store_false')
mutex.add_argument(
'--no-allow-dict-kwargs',
dest='allow_dict_kwargs', action='store_false',
)
mutex.set_defaults(allow_dict_kwargs=True)
args = parser.parse_args(argv)
rc = 0
for filename in args.filenames:
calls = check_file_for_builtin_type_constructors(
calls = check_file(
filename,
ignore=args.ignore,
allow_dict_kwargs=args.allow_dict_kwargs,
@@ -89,7 +93,8 @@ def main(argv=None): # type: (Optional[Sequence[str]]) -> int
rc = rc or 1
for call in calls:
print(
'{filename}:{call.line}:{call.column} - Replace {call.name}() with {replacement}'.format(
'{filename}:{call.line}:{call.column}: '
'replace {call.name}() with {replacement}'.format(
filename=filename,
call=call,
replacement=BUILTIN_TYPES[call.name],

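For orientation, a usage sketch of the renamed helpers in the hunks above (Visitor, Call, check_file). The filename and ignore values below are made up for illustration; only the function signature and the Call fields come from this diff.

    from pre_commit_hooks.check_builtin_literals import check_file

    # 'example.py' and the ignore tuple are hypothetical inputs.
    calls = check_file('example.py', ignore=('dict',), allow_dict_kwargs=True)
    for call in calls:
        # Call is a namedtuple of (name, line, column), per the diff above.
        print('example.py:{}:{}: replace {}() with a literal'.format(
            call.line, call.column, call.name,
        ))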
View File

@@ -18,7 +18,8 @@ def check_has_shebang(path): # type: (str) -> int
print(
'{path}: marked executable but has no (or invalid) shebang!\n'
" If it isn't supposed to be executable, try: chmod -x {quoted}\n"
' If it is supposed to be executable, double-check its shebang.'.format(
' If it is supposed to be executable, double-check its shebang.'
.format(
path=path,
quoted=pipes.quote(path),
),

View File

@@ -10,7 +10,7 @@ from typing import Sequence
def main(argv=None): # type: (Optional[Sequence[str]]) -> int
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='JSON filenames to check.')
parser.add_argument('filenames', nargs='*', help='Filenames to check.')
args = parser.parse_args(argv)
retval = 0

View File

@@ -50,7 +50,7 @@ def main(argv=None): # type: (Optional[Sequence[str]]) -> int
'Implies --allow-multiple-documents'
),
)
parser.add_argument('filenames', nargs='*', help='Yaml filenames to check.')
parser.add_argument('filenames', nargs='*', help='Filenames to check.')
args = parser.parse_args(argv)
load_fn = LOAD_FNS[Key(multi=args.multi, unsafe=args.unsafe)]

View File

@@ -12,16 +12,16 @@ from typing import Set
from six.moves import configparser
def get_aws_credential_files_from_env(): # type: () -> Set[str]
def get_aws_cred_files_from_env(): # type: () -> Set[str]
"""Extract credential file paths from environment variables."""
files = set()
for env_var in (
'AWS_CONFIG_FILE', 'AWS_CREDENTIAL_FILE', 'AWS_SHARED_CREDENTIALS_FILE',
'BOTO_CONFIG',
):
if env_var in os.environ:
files.add(os.environ[env_var])
return files
return {
os.environ[env_var]
for env_var in (
'AWS_CONFIG_FILE', 'AWS_CREDENTIAL_FILE',
'AWS_SHARED_CREDENTIALS_FILE', 'BOTO_CONFIG',
)
if env_var in os.environ
}
def get_aws_secrets_from_env(): # type: () -> Set[str]
@@ -115,7 +115,7 @@ def main(argv=None): # type: (Optional[Sequence[str]]) -> int
# Add the credentials files configured via environment variables to the set
# of files to to gather AWS secrets from.
credential_files |= get_aws_credential_files_from_env()
credential_files |= get_aws_cred_files_from_env()
keys = set() # type: Set[str]
for credential_file in credential_files:

View File

@@ -110,7 +110,9 @@ def _to_disp(pragma): # type: (bytes) -> str
def main(argv=None): # type: (Optional[Sequence[str]]) -> int
parser = argparse.ArgumentParser('Fixes the encoding pragma of python files')
parser = argparse.ArgumentParser(
'Fixes the encoding pragma of python files',
)
parser.add_argument('filenames', nargs='*', help='Filenames to fix')
parser.add_argument(
'--pragma', default=DEFAULT_PRAGMA, type=_normalize_pragma,

View File

@@ -15,8 +15,9 @@ from typing import Union
from six import text_type
def _get_pretty_format(contents, indent, ensure_ascii=True, sort_keys=True, top_keys=()):
# type: (str, str, bool, bool, Sequence[str]) -> str
def _get_pretty_format(
contents, indent, ensure_ascii=True, sort_keys=True, top_keys=(),
): # type: (str, str, bool, bool, Sequence[str]) -> str
def pairs_first(pairs):
# type: (Sequence[Tuple[str, str]]) -> Mapping[str, str]
before = [pair for pair in pairs if pair[0] in top_keys]
@@ -29,7 +30,8 @@ def _get_pretty_format(contents, indent, ensure_ascii=True, sort_keys=True, top_
json.loads(contents, object_pairs_hook=pairs_first),
indent=indent,
ensure_ascii=ensure_ascii,
separators=(',', ': '), # Workaround for https://bugs.python.org/issue16333
# Workaround for https://bugs.python.org/issue16333
separators=(',', ': '),
)
# Ensure unicode (Py2) and add the newline that dumps does not end with.
return text_type(json_pretty) + '\n'
@@ -75,7 +77,10 @@ def main(argv=None): # type: (Optional[Sequence[str]]) -> int
action='store_true',
dest='no_ensure_ascii',
default=False,
help='Do NOT convert non-ASCII characters to Unicode escape sequences (\\uXXXX)',
help=(
'Do NOT convert non-ASCII characters to Unicode escape sequences '
'(\\uXXXX)'
),
)
parser.add_argument(
'--no-sort-keys',

View File

@@ -61,7 +61,10 @@ def fix_requirements(f): # type: (IO[bytes]) -> int
# If we see a newline before any requirements, then this is a
# top of file comment.
if len(requirements) == 1 and line.strip() == b'':
if len(requirement.comments) and requirement.comments[0].startswith(b'#'):
if (
len(requirement.comments) and
requirement.comments[0].startswith(b'#')
):
requirement.value = b'\n'
else:
requirement.comments.append(line)

View File

@@ -60,12 +60,12 @@ def main(argv=None): # type: (Optional[Sequence[str]]) -> int
if '' in md_args:
parser.error('--markdown-linebreak-ext requires a non-empty argument')
all_markdown = '*' in md_args
# normalize all extensions; split at ',', lowercase, and force 1 leading '.'
# normalize extensions; split at ',', lowercase, and force 1 leading '.'
md_exts = [
'.' + x.lower().lstrip('.') for x in ','.join(md_args).split(',')
]
# reject probable "eaten" filename as extension (skip leading '.' with [1:])
# reject probable "eaten" filename as extension: skip leading '.' with [1:]
for ext in md_exts:
if any(c in ext[1:] for c in r'./\:'):
parser.error(

View File

@@ -32,34 +32,34 @@ setup(
entry_points={
'console_scripts': [
'autopep8-wrapper = pre_commit_hooks.autopep8_wrapper:main',
'check-added-large-files = pre_commit_hooks.check_added_large_files:main',
'check-added-large-files = pre_commit_hooks.check_added_large_files:main', # noqa: E501
'check-ast = pre_commit_hooks.check_ast:main',
'check-builtin-literals = pre_commit_hooks.check_builtin_literals:main',
'check-byte-order-marker = pre_commit_hooks.check_byte_order_marker:main',
'check-builtin-literals = pre_commit_hooks.check_builtin_literals:main', # noqa: E501
'check-byte-order-marker = pre_commit_hooks.check_byte_order_marker:main', # noqa: E501
'check-case-conflict = pre_commit_hooks.check_case_conflict:main',
'check-docstring-first = pre_commit_hooks.check_docstring_first:main',
'check-executables-have-shebangs = pre_commit_hooks.check_executables_have_shebangs:main',
'check-docstring-first = pre_commit_hooks.check_docstring_first:main', # noqa: E501
'check-executables-have-shebangs = pre_commit_hooks.check_executables_have_shebangs:main', # noqa: E501
'check-json = pre_commit_hooks.check_json:main',
'check-merge-conflict = pre_commit_hooks.check_merge_conflict:main',
'check-merge-conflict = pre_commit_hooks.check_merge_conflict:main', # noqa: E501
'check-symlinks = pre_commit_hooks.check_symlinks:main',
'check-vcs-permalinks = pre_commit_hooks.check_vcs_permalinks:main',
'check-vcs-permalinks = pre_commit_hooks.check_vcs_permalinks:main', # noqa: E501
'check-xml = pre_commit_hooks.check_xml:main',
'check-yaml = pre_commit_hooks.check_yaml:main',
'debug-statement-hook = pre_commit_hooks.debug_statement_hook:main',
'detect-aws-credentials = pre_commit_hooks.detect_aws_credentials:main',
'debug-statement-hook = pre_commit_hooks.debug_statement_hook:main', # noqa: E501
'detect-aws-credentials = pre_commit_hooks.detect_aws_credentials:main', # noqa: E501
'detect-private-key = pre_commit_hooks.detect_private_key:main',
'double-quote-string-fixer = pre_commit_hooks.string_fixer:main',
'end-of-file-fixer = pre_commit_hooks.end_of_file_fixer:main',
'file-contents-sorter = pre_commit_hooks.file_contents_sorter:main',
'file-contents-sorter = pre_commit_hooks.file_contents_sorter:main', # noqa: E501
'fix-encoding-pragma = pre_commit_hooks.fix_encoding_pragma:main',
'forbid-new-submodules = pre_commit_hooks.forbid_new_submodules:main',
'forbid-new-submodules = pre_commit_hooks.forbid_new_submodules:main', # noqa: E501
'mixed-line-ending = pre_commit_hooks.mixed_line_ending:main',
'name-tests-test = pre_commit_hooks.tests_should_end_in_test:main',
'no-commit-to-branch = pre_commit_hooks.no_commit_to_branch:main',
'pretty-format-json = pre_commit_hooks.pretty_format_json:main',
'requirements-txt-fixer = pre_commit_hooks.requirements_txt_fixer:main',
'requirements-txt-fixer = pre_commit_hooks.requirements_txt_fixer:main', # noqa: E501
'sort-simple-yaml = pre_commit_hooks.sort_simple_yaml:main',
'trailing-whitespace-fixer = pre_commit_hooks.trailing_whitespace_fixer:main',
'trailing-whitespace-fixer = pre_commit_hooks.trailing_whitespace_fixer:main', # noqa: E501
],
},
)

View File

@@ -2,9 +2,9 @@ import ast
import pytest
from pre_commit_hooks.check_builtin_literals import BuiltinTypeCall
from pre_commit_hooks.check_builtin_literals import BuiltinTypeVisitor
from pre_commit_hooks.check_builtin_literals import Call
from pre_commit_hooks.check_builtin_literals import main
from pre_commit_hooks.check_builtin_literals import Visitor
BUILTIN_CONSTRUCTORS = '''\
from six.moves import builtins
@@ -38,7 +38,7 @@ t1 = ()
@pytest.fixture
def visitor():
return BuiltinTypeVisitor()
return Visitor()
@pytest.mark.parametrize(
@@ -48,35 +48,35 @@ def visitor():
('x[0]()', []),
# complex
("0j", []),
("complex()", [BuiltinTypeCall('complex', 1, 0)]),
("complex()", [Call('complex', 1, 0)]),
("complex(0, 0)", []),
("complex('0+0j')", []),
('builtins.complex()', []),
# float
("0.0", []),
("float()", [BuiltinTypeCall('float', 1, 0)]),
("float()", [Call('float', 1, 0)]),
("float('0.0')", []),
('builtins.float()', []),
# int
("0", []),
("int()", [BuiltinTypeCall('int', 1, 0)]),
("int()", [Call('int', 1, 0)]),
("int('0')", []),
('builtins.int()', []),
# list
("[]", []),
("list()", [BuiltinTypeCall('list', 1, 0)]),
("list()", [Call('list', 1, 0)]),
("list('abc')", []),
("list([c for c in 'abc'])", []),
("list(c for c in 'abc')", []),
('builtins.list()', []),
# str
("''", []),
("str()", [BuiltinTypeCall('str', 1, 0)]),
("str()", [Call('str', 1, 0)]),
("str('0')", []),
('builtins.str()', []),
# tuple
("()", []),
("tuple()", [BuiltinTypeCall('tuple', 1, 0)]),
("tuple()", [Call('tuple', 1, 0)]),
("tuple('abc')", []),
("tuple([c for c in 'abc'])", []),
("tuple(c for c in 'abc')", []),
@@ -92,7 +92,7 @@ def test_non_dict_exprs(visitor, expression, calls):
('expression', 'calls'),
[
("{}", []),
("dict()", [BuiltinTypeCall('dict', 1, 0)]),
("dict()", [Call('dict', 1, 0)]),
("dict(a=1, b=2, c=3)", []),
("dict(**{'a': 1, 'b': 2, 'c': 3})", []),
("dict([(k, v) for k, v in [('a', 1), ('b', 2), ('c', 3)]])", []),
@@ -108,20 +108,22 @@ def test_dict_allow_kwargs_exprs(visitor, expression, calls):
@pytest.mark.parametrize(
('expression', 'calls'),
[
("dict()", [BuiltinTypeCall('dict', 1, 0)]),
("dict(a=1, b=2, c=3)", [BuiltinTypeCall('dict', 1, 0)]),
("dict(**{'a': 1, 'b': 2, 'c': 3})", [BuiltinTypeCall('dict', 1, 0)]),
("dict()", [Call('dict', 1, 0)]),
("dict(a=1, b=2, c=3)", [Call('dict', 1, 0)]),
("dict(**{'a': 1, 'b': 2, 'c': 3})", [Call('dict', 1, 0)]),
('builtins.dict()', []),
],
)
def test_dict_no_allow_kwargs_exprs(expression, calls):
visitor = BuiltinTypeVisitor(allow_dict_kwargs=False)
visitor = Visitor(allow_dict_kwargs=False)
visitor.visit(ast.parse(expression))
assert visitor.builtin_type_calls == calls
def test_ignore_constructors():
visitor = BuiltinTypeVisitor(ignore=('complex', 'dict', 'float', 'int', 'list', 'str', 'tuple'))
visitor = Visitor(ignore=(
'complex', 'dict', 'float', 'int', 'list', 'str', 'tuple',
))
visitor.visit(ast.parse(BUILTIN_CONSTRUCTORS))
assert visitor.builtin_type_calls == []

View File

@@ -1,7 +1,7 @@
import pytest
from mock import patch
from pre_commit_hooks.detect_aws_credentials import get_aws_credential_files_from_env
from pre_commit_hooks.detect_aws_credentials import get_aws_cred_files_from_env
from pre_commit_hooks.detect_aws_credentials import get_aws_secrets_from_env
from pre_commit_hooks.detect_aws_credentials import get_aws_secrets_from_file
from pre_commit_hooks.detect_aws_credentials import main
@@ -35,9 +35,8 @@ from testing.util import get_resource_path
),
)
def test_get_aws_credentials_file_from_env(env_vars, values):
"""Test that reading credential files names from environment variables works."""
with patch.dict('os.environ', env_vars, clear=True):
assert get_aws_credential_files_from_env() == values
assert get_aws_cred_files_from_env() == values
@pytest.mark.parametrize(
@@ -107,12 +106,11 @@ def test_get_aws_secrets_from_file(filename, expected_keys):
),
)
def test_detect_aws_credentials(filename, expected_retval):
"""Test if getting configured AWS secrets from files to be checked in works."""
# with a valid credentials file
ret = main((
get_resource_path(filename),
"--credentials-file=testing/resources/aws_config_with_multiple_sections.ini",
'--credentials-file',
'testing/resources/aws_config_with_multiple_sections.ini',
))
assert ret == expected_retval
@@ -138,8 +136,9 @@ def test_non_existent_credentials(mock_secrets_env, mock_secrets_file, capsys):
@patch('pre_commit_hooks.detect_aws_credentials.get_aws_secrets_from_file')
@patch('pre_commit_hooks.detect_aws_credentials.get_aws_secrets_from_env')
def test_non_existent_credentials_with_allow_flag(mock_secrets_env, mock_secrets_file):
"""Test behavior with no configured AWS secrets and flag to allow when missing."""
def test_non_existent_credentials_with_allow_flag(
mock_secrets_env, mock_secrets_file,
):
mock_secrets_env.return_value = set()
mock_secrets_file.return_value = set()
ret = main((

View File

@@ -57,7 +57,10 @@ def test_tab_main(filename, expected_retval): # pragma: no cover
def test_non_ascii_main():
ret = main(['--no-ensure-ascii', get_resource_path('non_ascii_pretty_formatted_json.json')])
ret = main((
'--no-ensure-ascii',
get_resource_path('non_ascii_pretty_formatted_json.json'),
))
assert ret == 0
@@ -79,17 +82,23 @@ def test_autofix_main(tmpdir):
def test_orderfile_get_pretty_format():
ret = main(['--top-keys=alist', get_resource_path('pretty_formatted_json.json')])
ret = main((
'--top-keys=alist', get_resource_path('pretty_formatted_json.json'),
))
assert ret == 0
def test_not_orderfile_get_pretty_format():
ret = main(['--top-keys=blah', get_resource_path('pretty_formatted_json.json')])
ret = main((
'--top-keys=blah', get_resource_path('pretty_formatted_json.json'),
))
assert ret == 1
def test_top_sorted_get_pretty_format():
ret = main(['--top-keys=01-alist,alist', get_resource_path('top_sorted_json.json')])
ret = main((
'--top-keys=01-alist,alist', get_resource_path('top_sorted_json.json'),
))
assert ret == 0

View File

@@ -15,13 +15,25 @@ from pre_commit_hooks.requirements_txt_fixer import Requirement
(b'foo\n# comment at end\n', PASS, b'foo\n# comment at end\n'),
(b'foo\nbar\n', FAIL, b'bar\nfoo\n'),
(b'bar\nfoo\n', PASS, b'bar\nfoo\n'),
(b'#comment1\nfoo\n#comment2\nbar\n', FAIL, b'#comment2\nbar\n#comment1\nfoo\n'),
(b'#comment1\nbar\n#comment2\nfoo\n', PASS, b'#comment1\nbar\n#comment2\nfoo\n'),
(
b'#comment1\nfoo\n#comment2\nbar\n',
FAIL,
b'#comment2\nbar\n#comment1\nfoo\n',
),
(
b'#comment1\nbar\n#comment2\nfoo\n',
PASS,
b'#comment1\nbar\n#comment2\nfoo\n',
),
(b'#comment\n\nfoo\nbar\n', FAIL, b'#comment\n\nbar\nfoo\n'),
(b'#comment\n\nbar\nfoo\n', PASS, b'#comment\n\nbar\nfoo\n'),
(b'\nfoo\nbar\n', FAIL, b'bar\n\nfoo\n'),
(b'\nbar\nfoo\n', PASS, b'\nbar\nfoo\n'),
(b'pyramid==1\npyramid-foo==2\n', PASS, b'pyramid==1\npyramid-foo==2\n'),
(
b'pyramid==1\npyramid-foo==2\n',
PASS,
b'pyramid==1\npyramid-foo==2\n',
),
(b'ocflib\nDjango\nPyMySQL\n', FAIL, b'Django\nocflib\nPyMySQL\n'),
(
b'-e git+ssh://git_url@tag#egg=ocflib\nDjango\nPyMySQL\n',

View File

@@ -12,7 +12,10 @@ def test_main_one_fails():
def test_main_django_all_pass():
ret = main(['--django', 'tests.py', 'test_foo.py', 'test_bar.py', 'tests/test_baz.py'])
ret = main((
'--django', 'tests.py', 'test_foo.py', 'test_bar.py',
'tests/test_baz.py',
))
assert ret == 0

View File

@@ -17,8 +17,5 @@ commands =
pre-commit install -f --install-hooks
pre-commit run --all-files
[flake8]
max-line-length=131
[pep8]
ignore=E265,E501,W504
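
Removing the [flake8] and [pep8] sections above means flake8 runs with its built-in defaults (max-line-length of 79, no extra ignored codes), which is what drives the line wrapping and the # noqa: E501 markers throughout this commit. As a rough sketch only, not flake8's real implementation, the default E501 check amounts to:

    # Rough sketch: flag lines longer than flake8's default limit of 79
    # (the custom limit removed above was 131).
    DEFAULT_MAX_LINE_LENGTH = 79


    def overlong_lines(filename):
        # type: (str) -> list
        """Return (lineno, length) pairs for lines over the default limit."""
        offenders = []
        with open(filename, 'rb') as f:
            for lineno, line in enumerate(f, start=1):
                length = len(line.rstrip(b'\r\n'))
                if length > DEFAULT_MAX_LINE_LENGTH:
                    offenders.append((lineno, length))
        return offenders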