Add pyupgrade

parent a11d9314b2
commit 9cee71b5df
@@ -22,3 +22,7 @@
     hooks:
     -   id: reorder-python-imports
         language_version: python2.7
+-   repo: https://github.com/asottile/pyupgrade
+    sha: v1.0.0
+    hooks:
+    -   id: pyupgrade
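For context, the pyupgrade hook configured above rewrites Python source to drop redundant legacy constructs; every hunk below is an instance of its automatic fixes. A minimal before/after sketch of the three rewrite kinds seen in this commit (the snippet is illustrative only and not a file from this repository; the variable names are made up):

    # Illustrative values; these names exist only for this sketch.
    filename, kb, maxkb = 'large.bin', 2048, 500
    names = ['Foo', 'BAR']

    # Before: numbered str.format() placeholders, set() around a generator,
    # and set() wrapped around a list literal.
    print('{0} ({1} KB) exceeds {2} KB.'.format(filename, kb, maxkb))
    lowered = set(x.lower() for x in names)
    DEBUG_STATEMENTS = set(['pdb', 'ipdb', 'pudb'])

    # After pyupgrade: implicit placeholders, a set comprehension,
    # and a set literal -- behaviour is unchanged.
    print('{} ({} KB) exceeds {} KB.'.format(filename, kb, maxkb))
    lowered = {x.lower() for x in names}
    DEBUG_STATEMENTS = {'pdb', 'ipdb', 'pudb'}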
@@ -17,7 +17,7 @@ def main(argv=None):
         original_contents = io.open(filename).read()
         new_contents = autopep8.fix_code(original_contents, args)
         if original_contents != new_contents:
-            print('Fixing {0}'.format(filename))
+            print('Fixing {}'.format(filename))
             retv = 1
             with io.open(filename, 'w') as output_file:
                 output_file.write(new_contents)
@@ -26,10 +26,10 @@ def lfs_files():
         assert mode in ('A', 'R')
         return filepart if mode == 'A' else filepart.split(' -> ')[1]

-    return set(
+    return {
         to_file_part(mode, filepart) for mode, filepart in modes_and_fileparts
         if mode in ('A', 'R')
-    )
+    }


 def find_large_added_files(filenames, maxkb):
@@ -41,7 +41,7 @@ def find_large_added_files(filenames, maxkb):
     for filename in filenames:
         kb = int(math.ceil(os.stat(filename).st_size / 1024))
         if kb > maxkb:
-            print('{0} ({1} KB) exceeds {2} KB.'.format(filename, kb, maxkb))
+            print('{} ({} KB) exceeds {} KB.'.format(filename, kb, maxkb))
             retv = 1

     return retv
@@ -22,10 +22,10 @@ def check_ast(argv=None):
         try:
             ast.parse(open(filename, 'rb').read(), filename=filename)
         except SyntaxError:
-            print('{0}: failed parsing with {1}:'.format(
+            print('{}: failed parsing with {}:'.format(
                 filename, interpreter,
             ))
-            print('\n{0}'.format(
+            print('\n{}'.format(
                 ' ' + traceback.format_exc().replace('\n', '\n ')
             ))
             retval = 1
@@ -16,7 +16,7 @@ def main(argv=None):
         with open(filename, 'rb') as f:
             if f.read(3) == b'\xef\xbb\xbf':
                 retv = 1
-                print('{0}: Has a byte-order marker'.format(filename))
+                print('{}: Has a byte-order marker'.format(filename))

     return retv

@@ -9,7 +9,7 @@ from pre_commit_hooks.util import cmd_output


 def lower_set(iterable):
-    return set(x.lower() for x in iterable)
+    return {x.lower() for x in iterable}


 def find_conflicting_filenames(filenames):
@@ -35,7 +35,7 @@ def find_conflicting_filenames(filenames):
         if x.lower() in conflicts
     ]
     for filename in sorted(conflicting_files):
-        print('Case-insensitivity conflict found: {0}'.format(filename))
+        print('Case-insensitivity conflict found: {}'.format(filename))
         retv = 1

     return retv
@@ -27,16 +27,16 @@ def check_docstring_first(src, filename='<unknown>'):
         if tok_type == tokenize.STRING and scol == 0:
             if found_docstring_line is not None:
                 print(
-                    '{0}:{1} Multiple module docstrings '
-                    '(first docstring on line {2}).'.format(
+                    '{}:{} Multiple module docstrings '
+                    '(first docstring on line {}).'.format(
                         filename, sline, found_docstring_line,
                     )
                 )
                 return 1
             elif found_code_line is not None:
                 print(
-                    '{0}:{1} Module docstring appears after code '
-                    '(code seen on line {2}).'.format(
+                    '{}:{} Module docstring appears after code '
+                    '(code seen on line {}).'.format(
                         filename, sline, found_code_line,
                     )
                 )
@@ -16,7 +16,7 @@ def check_json(argv=None):
         try:
             simplejson.load(open(filename))
         except (simplejson.JSONDecodeError, UnicodeDecodeError) as exc:
-            print('{0}: Failed to json decode ({1})'.format(filename, exc))
+            print('{}: Failed to json decode ({})'.format(filename, exc))
             retval = 1
     return retval

@@ -19,7 +19,7 @@ def check_symlinks(argv=None):
            os.path.islink(filename) and
            not os.path.exists(filename)
        ):  # pragma: no cover (symlink support required)
-            print('{0}: Broken symlink'.format(filename))
+            print('{}: Broken symlink'.format(filename))
            retv = 1

    return retv
@@ -19,7 +19,7 @@ def check_xml(argv=None):
            with io.open(filename, 'rb') as xml_file:
                xml.sax.parse(xml_file, xml.sax.ContentHandler())
        except xml.sax.SAXException as exc:
-            print('{0}: Failed to xml parse ({1})'.format(filename, exc))
+            print('{}: Failed to xml parse ({})'.format(filename, exc))
            retval = 1
    return retval

@@ -7,7 +7,7 @@ import collections
 import traceback


-DEBUG_STATEMENTS = set(['pdb', 'ipdb', 'pudb', 'q', 'rdb'])
+DEBUG_STATEMENTS = {'pdb', 'ipdb', 'pudb', 'q', 'rdb'}


 DebugStatement = collections.namedtuple(
@@ -37,7 +37,7 @@ def check_file_for_debug_statements(filename):
     try:
         ast_obj = ast.parse(open(filename).read(), filename=filename)
     except SyntaxError:
-        print('{0} - Could not parse ast'.format(filename))
+        print('{} - Could not parse ast'.format(filename))
         print()
         print('\t' + traceback.format_exc().replace('\n', '\n\t'))
         print()
@@ -47,7 +47,7 @@ def check_file_for_debug_statements(filename):
     if visitor.debug_import_statements:
         for debug_statement in visitor.debug_import_statements:
             print(
-                '{0}:{1}:{2} - {3} imported'.format(
+                '{}:{}:{} - {} imported'.format(
                     filename,
                     debug_statement.line,
                     debug_statement.col,
@@ -26,7 +26,7 @@ def detect_private_key(argv=None):

     if private_key_files:
         for private_key_file in private_key_files:
-            print('Private key found: {0}'.format(private_key_file))
+            print('Private key found: {}'.format(private_key_file))
         return 1
     else:
         return 0
@@ -58,7 +58,7 @@ def end_of_file_fixer(argv=None):
         with open(filename, 'rb+') as file_obj:
             ret_for_file = fix_file(file_obj)
             if ret_for_file:
-                print('Fixing {0}'.format(filename))
+                print('Fixing {}'.format(filename))
             retv |= ret_for_file

     return retv
@@ -25,7 +25,7 @@ def _get_pretty_format(contents, indent, sort_keys=True, top_keys=[]):


 def _autofix(filename, new_contents):
-    print("Fixing file {0}".format(filename))
+    print("Fixing file {}".format(filename))
     with open(filename, 'w') as f:
         f.write(new_contents)

@@ -100,7 +100,7 @@ def pretty_format_json(argv=None):
            )

            if contents != pretty_contents:
-                print("File {0} is not pretty-formatted".format(json_file))
+                print("File {} is not pretty-formatted".format(json_file))

                if args.autofix:
                    _autofix(json_file, pretty_contents)
@@ -109,7 +109,7 @@ def pretty_format_json(argv=None):

        except simplejson.JSONDecodeError:
            print(
-                "Input File {0} is not a valid JSON, consider using check-json"
+                "Input File {} is not a valid JSON, consider using check-json"
                .format(json_file)
            )
            return 1
@@ -84,7 +84,7 @@ def fix_requirements_txt(argv=None):
            ret_for_file = fix_requirements(file_obj)

            if ret_for_file:
-                print('Sorting {0}'.format(arg))
+                print('Sorting {}'.format(arg))

            retv |= ret_for_file

@@ -69,7 +69,7 @@ def main(argv=None):
    for filename in args.filenames:
        return_value = fix_strings(filename)
        if return_value != 0:
-            print('Fixing strings in {0}'.format(filename))
+            print('Fixing strings in {}'.format(filename))
        retv |= return_value

    return retv
@@ -26,7 +26,7 @@ def validate_files(argv=None):
        ):
            retcode = 1
            print(
-                '{0} does not match pattern "{1}"'.format(
+                '{} does not match pattern "{}"'.format(
                    filename, test_name_pattern
                )
            )
@@ -67,7 +67,7 @@ def fix_trailing_whitespace(argv=None):
    for ext in md_exts:
        if any(c in ext[1:] for c in r'./\:'):
            parser.error(
-                "bad --markdown-linebreak-ext extension '{0}' (has . / \\ :)\n"
+                "bad --markdown-linebreak-ext extension '{}' (has . / \\ :)\n"
                " (probably filename; use '--markdown-linebreak-ext=EXT')"
                .format(ext)
            )
@@ -10,4 +10,4 @@ def test_readme_contains_all_hooks():
    readme_contents = io.open('README.md').read()
    hooks = yaml.load(io.open('hooks.yaml').read())
    for hook in hooks:
-        assert '`{0}`'.format(hook['id']) in readme_contents
+        assert '`{}`'.format(hook['id']) in readme_contents