Report duplicate keys in check_json

Raise ValueError and return 1 if the JSON contains duplicate keys.
Aditya Khursale 2021-02-17 02:08:57 +05:30 committed by Anthony Sottile
parent 11a2fdbab8
commit fe37451719
3 changed files with 22 additions and 1 deletion
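For context: json.load and json.loads accept an object_pairs_hook callable that receives each JSON object as a list of (key, value) pairs before it is collapsed into a dict, so a hook can spot a repeated key that the default decoder would silently drop. A minimal standalone sketch of that mechanism (names here are illustrative, not taken from the commit):

import json


def reject_duplicates(pairs):
    # json passes every object as a list of (key, value) pairs, in document order.
    seen = {}
    for key, value in pairs:
        if key in seen:
            raise ValueError(f'Duplicate key: {key}')
        seen[key] = value
    return seen


try:
    json.loads('{"hello": "world", "hello": "planet"}', object_pairs_hook=reject_duplicates)
except ValueError as exc:
    print(exc)  # Duplicate key: hello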

File: pre_commit_hooks/check_json.py

@@ -1,7 +1,23 @@
 import argparse
 import json
+from typing import Any
+from typing import Dict
+from typing import List
 from typing import Optional
 from typing import Sequence
+from typing import Tuple
+
+
+def raise_duplicate_keys(
+        ordered_pairs: List[Tuple[str, Any]],
+) -> Dict[str, Any]:
+    d = {}
+    for key, val in ordered_pairs:
+        if key in d:
+            raise ValueError(f'Duplicate key: {key}')
+        else:
+            d[key] = val
+    return d
 
 
 def main(argv: Optional[Sequence[str]] = None) -> int:
@@ -13,7 +29,7 @@ def main(argv: Optional[Sequence[str]] = None) -> int:
     for filename in args.filenames:
         with open(filename, 'rb') as f:
             try:
-                json.load(f)
+                json.load(f, object_pairs_hook=raise_duplicate_keys)
             except ValueError as exc:
                 print(f'{filename}: Failed to json decode ({exc})')
                 retval = 1

File: testing/resources/duplicate_key_json.json

@@ -0,0 +1,4 @@
+{
+    "hello": "world",
+    "hello": "planet"
+}
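The file above is accepted by the default decoder; without a hook, the standard library keeps only the last value for a repeated key, which is exactly the silent behaviour this check is meant to surface:

import json

# The later pair wins and no error is reported by default.
assert json.loads('{"hello": "world", "hello": "planet"}') == {'hello': 'planet'}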

File: tests/check_json_test.py

@@ -9,6 +9,7 @@ from testing.util import get_resource_path
         ('bad_json.notjson', 1),
         ('bad_json_latin1.nonjson', 1),
         ('ok_json.json', 0),
+        ('duplicate_key_json.json', 1),
     ),
 )
 def test_main(capsys, filename, expected_retval):
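For a quick manual check of the new return value, main can also be called directly with the new resource file; the module path and file location here are assumptions based on this diff rather than something it shows:

from pre_commit_hooks.check_json import main

# Expected to print:
#   testing/resources/duplicate_key_json.json: Failed to json decode (Duplicate key: hello)
# and return 1, because the ValueError raised by the hook is caught in main.
ret = main(['testing/resources/duplicate_key_json.json'])
assert ret == 1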