Use asottile/add-trailing-comma

Anthony Sottile 2017-07-12 18:35:24 -07:00
parent 286962eeb7
commit 2a902e0a07
19 changed files with 48 additions and 43 deletions
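add-trailing-comma rewrites multi-line calls and literals so that the final element carries a trailing comma, which keeps later diffs to a single changed line when an element is appended. A minimal before/after sketch of the kind of rewrite applied throughout this commit (the function name is made up for illustration):

    # before: last argument has no trailing comma
    result = frobnicate(
        'alpha',
        'beta'
    )

    # after running add-trailing-comma
    result = frobnicate(
        'alpha',
        'beta',
    )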

View File

@@ -23,6 +23,10 @@
   - id: reorder-python-imports
     language_version: python2.7
 - repo: https://github.com/asottile/pyupgrade
-  sha: v1.0.0
+  sha: v1.1.2
   hooks:
   - id: pyupgrade
+- repo: https://github.com/asottile/add-trailing-comma
+  sha: v0.3.0
+  hooks:
+  - id: add-trailing-comma
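With the new entry in the config above, the hook can be exercised against the whole repository using pre-commit's standard command line (ordinary pre-commit usage, not part of this diff):

    pre-commit run add-trailing-comma --all-files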

View File

@@ -51,7 +51,7 @@ def main(argv=None):
     parser = argparse.ArgumentParser()
     parser.add_argument(
         'filenames', nargs='*',
-        help='Filenames pre-commit believes are changed.'
+        help='Filenames pre-commit believes are changed.',
     )
     parser.add_argument(
         '--maxkb', type=int, default=500,

View File

@@ -26,7 +26,7 @@ def check_ast(argv=None):
                 sys.version.partition(' ')[0],
             ))
             print('\n{}'.format(
-                ' ' + traceback.format_exc().replace('\n', '\n ')
+                ' ' + traceback.format_exc().replace('\n', '\n '),
             ))
             retval = 1
     return retval

View File

@@ -45,7 +45,7 @@ def main(argv=None):
     parser = argparse.ArgumentParser()
     parser.add_argument(
         'filenames', nargs='*',
-        help='Filenames pre-commit believes are changed.'
+        help='Filenames pre-commit believes are changed.',
    )
     args = parser.parse_args(argv)

View File

@@ -1,4 +1,5 @@
+from __future__ import absolute_import
 from __future__ import print_function
 from __future__ import unicode_literals
 import argparse
@@ -30,7 +31,7 @@ def check_docstring_first(src, filename='<unknown>'):
                 '{}:{} Multiple module docstrings '
                 '(first docstring on line {}).'.format(
                     filename, sline, found_docstring_line,
-                )
+                ),
             )
             return 1
         elif found_code_line is not None:
@@ -38,7 +39,7 @@ def check_docstring_first(src, filename='<unknown>'):
                 '{}:{} Module docstring appears after code '
                 '(code seen on line {}).'.format(
                     filename, sline, found_code_line,
-                )
+                ),
             )
             return 1
         else:

View File

@@ -7,7 +7,7 @@ CONFLICT_PATTERNS = [
     b'<<<<<<< ',
     b'======= ',
     b'=======\n',
-    b'>>>>>>> '
+    b'>>>>>>> ',
 ]
 WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'

View File

@@ -29,7 +29,7 @@ class ImportStatementParser(ast.NodeVisitor):
     def visit_ImportFrom(self, node):
         if node.module in DEBUG_STATEMENTS:
             self.debug_import_statements.append(
-                DebugStatement(node.module, node.lineno, node.col_offset)
+                DebugStatement(node.module, node.lineno, node.col_offset),
             )
@@ -52,7 +52,7 @@ def check_file_for_debug_statements(filename):
                     debug_statement.line,
                     debug_statement.col,
                     debug_statement.name,
-                )
+                ),
             )
         return 1
     else:

View File

@@ -12,7 +12,7 @@ def get_aws_credential_files_from_env():
     files = set()
     for env_var in (
         'AWS_CONFIG_FILE', 'AWS_CREDENTIAL_FILE', 'AWS_SHARED_CREDENTIALS_FILE',
-        'BOTO_CONFIG'
+        'BOTO_CONFIG',
     ):
         if env_var in os.environ:
             files.add(os.environ[env_var])
@@ -23,7 +23,7 @@ def get_aws_secrets_from_env():
     """Extract AWS secrets from environment variables."""
     keys = set()
     for env_var in (
-        'AWS_SECRET_ACCESS_KEY', 'AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN'
+        'AWS_SECRET_ACCESS_KEY', 'AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN',
    ):
         if env_var in os.environ:
             keys.add(os.environ[env_var])
@@ -50,7 +50,7 @@ def get_aws_secrets_from_file(credentials_file):
     for section in parser.sections():
         for var in (
             'aws_secret_access_key', 'aws_security_token',
-            'aws_session_token'
+            'aws_session_token',
         ):
             try:
                 keys.add(parser.get(section, var))
@@ -93,13 +93,13 @@ def main(argv=None):
         help=(
             'Location of additional AWS credential files from which to get '
             'secret keys from'
-        )
+        ),
     )
     parser.add_argument(
         '--allow-missing-credentials',
         dest='allow_missing_credentials',
         action='store_true',
-        help='Allow hook to pass when no credentials are detected.'
+        help='Allow hook to pass when no credentials are detected.',
     )
     args = parser.parse_args(argv)
@@ -124,7 +124,7 @@ def main(argv=None):
         print(
             'No AWS keys were found in the configured credential files and '
             'environment variables.\nPlease ensure you have the correct '
-            'setting for --credentials-file'
+            'setting for --credentials-file',
         )
         return 2

View File

@@ -120,7 +120,7 @@ def pretty_format_json(argv=None):
         except simplejson.JSONDecodeError:
             print(
                 "Input File {} is not a valid JSON, consider using check-json"
-                .format(json_file)
+                .format(json_file),
             )
             return 1

View File

@@ -11,7 +11,7 @@ def validate_files(argv=None):
     parser.add_argument('filenames', nargs='*')
     parser.add_argument(
         '--django', default=False, action='store_true',
-        help='Use Django-style test naming pattern (test*.py)'
+        help='Use Django-style test naming pattern (test*.py)',
     )
     args = parser.parse_args(argv)
@@ -27,8 +27,8 @@ def validate_files(argv=None):
             retcode = 1
             print(
                 '{} does not match pattern "{}"'.format(
-                    filename, test_name_pattern
-                )
+                    filename, test_name_pattern,
+                ),
             )
     return retcode

View File

@@ -36,7 +36,7 @@ def fix_trailing_whitespace(argv=None):
         const=[],
         default=argparse.SUPPRESS,
         dest='markdown_linebreak_ext',
-        help='Do not preserve linebreak spaces in Markdown'
+        help='Do not preserve linebreak spaces in Markdown',
     )
     parser.add_argument(
         '--markdown-linebreak-ext',
@@ -45,7 +45,7 @@ def fix_trailing_whitespace(argv=None):
         default=['md,markdown'],
         metavar='*|EXT[,EXT,...]',
         nargs='?',
-        help='Markdown extensions (or *) for linebreak spaces'
+        help='Markdown extensions (or *) for linebreak spaces',
     )
     parser.add_argument('filenames', nargs='*', help='Filenames to fix')
     args = parser.parse_args(argv)
@@ -69,7 +69,7 @@ def fix_trailing_whitespace(argv=None):
             parser.error(
                 "bad --markdown-linebreak-ext extension '{}' (has . / \\ :)\n"
                 " (probably filename; use '--markdown-linebreak-ext=EXT')"
-                .format(ext)
+                .format(ext),
             )
     return_code = 0

View File

@@ -19,7 +19,7 @@ TESTS = (
         '"foo"\n',
         1,
         '{filename}:2 Module docstring appears after code '
-        '(code seen on line 1).\n'
+        '(code seen on line 1).\n',
     ),
     # Test double docstring
     (
@@ -28,7 +28,7 @@ TESTS = (
         '"fake docstring"\n',
         1,
         '{filename}:3 Multiple module docstrings '
-        '(first docstring on line 1).\n'
+        '(first docstring on line 1).\n',
     ),
     # Test multiple lines of code above
     (

View File

@@ -45,7 +45,7 @@ def f1_is_a_conflict_file(tmpdir):
             'child\n'
             '=======\n'
             'parent\n'
-            '>>>>>>>'
+            '>>>>>>>',
         ) or f1.startswith(
             '<<<<<<< HEAD\n'
             'child\n'
@@ -53,7 +53,7 @@ def f1_is_a_conflict_file(tmpdir):
             '||||||| merged common ancestors\n'
             '=======\n'
             'parent\n'
-            '>>>>>>>'
+            '>>>>>>>',
         ) or f1.startswith(
             # .gitconfig with [pull] rebase = preserve causes a rebase which
             # flips parent / child
@@ -61,7 +61,7 @@ def f1_is_a_conflict_file(tmpdir):
             'parent\n'
             '=======\n'
             'child\n'
-            '>>>>>>>'
+            '>>>>>>>',
         )
         assert os.path.exists(os.path.join('.git', 'MERGE_MSG'))
         yield

View File

@@ -46,7 +46,7 @@ def test_returns_one_form_1(ast_with_debug_import_form_1):
     visitor = ImportStatementParser()
     visitor.visit(ast_with_debug_import_form_1)
     assert visitor.debug_import_statements == [
-        DebugStatement('ipdb', 3, 0)
+        DebugStatement('ipdb', 3, 0),
     ]
@@ -54,7 +54,7 @@ def test_returns_one_form_2(ast_with_debug_import_form_2):
     visitor = ImportStatementParser()
     visitor.visit(ast_with_debug_import_form_2)
     assert visitor.debug_import_statements == [
-        DebugStatement('pudb', 3, 0)
+        DebugStatement('pudb', 3, 0),
     ]

View File

@@ -21,14 +21,14 @@ from testing.util import get_resource_path
         (
             {
                 'AWS_DUMMY_KEY': '/foo', 'AWS_CONFIG_FILE': '/bar',
-                'AWS_CREDENTIAL_FILE': '/baz'
+                'AWS_CREDENTIAL_FILE': '/baz',
             },
             {'/bar', '/baz'}
         ),
         (
             {
                 'AWS_CONFIG_FILE': '/foo', 'AWS_CREDENTIAL_FILE': '/bar',
-                'AWS_SHARED_CREDENTIALS_FILE': '/baz'
+                'AWS_SHARED_CREDENTIALS_FILE': '/baz',
             },
             {'/foo', '/bar', '/baz'}
         ),
@@ -51,7 +51,7 @@ def test_get_aws_credentials_file_from_env(env_vars, values):
         ({'AWS_DUMMY_KEY': 'foo', 'AWS_SECRET_ACCESS_KEY': 'bar'}, {'bar'}),
         (
             {'AWS_SECRET_ACCESS_KEY': 'foo', 'AWS_SECURITY_TOKEN': 'bar'},
-            {'foo', 'bar'}
+            {'foo', 'bar'},
         ),
     ),
 )
@@ -66,7 +66,7 @@ def test_get_aws_secrets_from_env(env_vars, values):
     (
         (
             'aws_config_with_secret.ini',
-            {'z2rpgs5uit782eapz5l1z0y2lurtsyyk6hcfozlb'}
+            {'z2rpgs5uit782eapz5l1z0y2lurtsyyk6hcfozlb'},
         ),
         ('aws_config_with_session_token.ini', {'foo'}),
         ('aws_config_with_secret_and_session_token.ini',
@@ -77,8 +77,8 @@ def test_get_aws_secrets_from_env(env_vars, values):
             '7xebzorgm5143ouge9gvepxb2z70bsb2rtrh099e',
             'z2rpgs5uit782eapz5l1z0y2lurtsyyk6hcfozlb',
             'ixswosj8gz3wuik405jl9k3vdajsnxfhnpui38ez',
-            'foo'
-        }
+            'foo',
+        },
         ),
         ('aws_config_without_secrets.ini', set()),
         ('nonsense.txt', set()),
@@ -121,7 +121,7 @@ def test_non_existent_credentials(mock_secrets_env, mock_secrets_file, capsys):
     mock_secrets_file.return_value = set()
     ret = main((
         get_resource_path('aws_config_without_secrets.ini'),
-        "--credentials-file=testing/resources/credentailsfilethatdoesntexist"
+        "--credentials-file=testing/resources/credentailsfilethatdoesntexist",
     ))
     assert ret == 2
     out, _ = capsys.readouterr()
@@ -141,6 +141,6 @@ def test_non_existent_credentials_with_allow_flag(mock_secrets_env, mock_secrets
     ret = main((
         get_resource_path('aws_config_without_secrets.ini'),
         "--credentials-file=testing/resources/credentailsfilethatdoesntexist",
-        "--allow-missing-credentials"
+        "--allow-missing-credentials",
     ))
     assert ret == 0

View File

@@ -21,7 +21,7 @@ from pre_commit_hooks.file_contents_sorter import PASS
         (b'@\n-\n_\n#\n', FAIL, b'#\n-\n@\n_\n'),
         (b'extra\n\n\nwhitespace\n', FAIL, b'extra\nwhitespace\n'),
         (b'whitespace\n\n\nextra\n', FAIL, b'extra\nwhitespace\n'),
-    )
+    ),
 )
 def test_integration(input_s, expected_retval, output, tmpdir):
     path = tmpdir.join('file.txt')

View File

@@ -56,7 +56,7 @@ def test_integration_remove_ok(tmpdir):
             b'# -*- coding: utf-8 -*-\n'
             b'foo = "bar"\n'
         ),
-    )
+    ),
 )
 def test_ok_inputs(input_str):
     bytesio = io.BytesIO(input_str)
@@ -100,7 +100,7 @@ def test_ok_inputs(input_str):
         (b'#!/usr/bin/env python\n', b''),
         (b'#!/usr/bin/env python\n#coding: utf8\n', b''),
         (b'#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n', b''),
-    )
+    ),
 )
 def test_not_ok_inputs(input_str, output):
     bytesio = io.BytesIO(input_str)

View File

@@ -24,9 +24,9 @@ from pre_commit_hooks.requirements_txt_fixer import Requirement
         (
             b'-e git+ssh://git_url@tag#egg=ocflib\nDjango\nPyMySQL\n',
             FAIL,
-            b'Django\n-e git+ssh://git_url@tag#egg=ocflib\nPyMySQL\n'
+            b'Django\n-e git+ssh://git_url@tag#egg=ocflib\nPyMySQL\n',
        ),
-    )
+    ),
 )
 def test_integration(input_s, expected_retval, output, tmpdir):
     path = tmpdir.join('file.txt')

View File

@@ -69,7 +69,7 @@ def test_markdown_linebreak_ext_opt(filename, input_s, output, tmpdir):
     path = tmpdir.join(filename)
     path.write(input_s)
     ret = fix_trailing_whitespace((
-        '--markdown-linebreak-ext=TxT', path.strpath
+        '--markdown-linebreak-ext=TxT', path.strpath,
     ))
     assert ret == 1
     assert path.read() == output