import json
from collections import defaultdict

from datasets import load_dataset


def check_jsonl_file(filename):
    """Validate a JSON Lines file: every non-empty line must parse as a JSON
    object, and each key must hold values of a consistent type across lines."""
    valid = True
    errors = []
    column_types = defaultdict(set)  # key -> set of value type names seen so far

    with open(filename, 'r', encoding='utf-8') as f:
        for i, line in enumerate(f):
            line = line.strip()
            if not line:
                continue
            try:
                obj = json.loads(line)
            except json.JSONDecodeError as e:
                valid = False
                errors.append(f"Line {i+1}: Invalid JSON ({e})")
                continue
            if not isinstance(obj, dict):
                valid = False
                errors.append(f"Line {i+1}: Not a JSON object")
                continue
            for k, v in obj.items():
                column_types[k].add(type(v).__name__)

    # A column is inconsistent if it appeared with more than one value type.
    inconsistent = {k: v for k, v in column_types.items() if len(v) > 1}
    if inconsistent:
        valid = False
        for k, types in inconsistent.items():
            errors.append(f"Column '{k}' has inconsistent types: {types}")

    # JSON Lines files should end with a newline; check the final byte,
    # skipping files too small to contain one.
    with open(filename, 'rb') as f:
        f.seek(0, 2)       # seek to the end to learn the file size
        if f.tell() > 0:
            f.seek(-1, 2)  # re-position onto the final byte
            if f.read() != b'\n':
                valid = False
                errors.append("File does not end with a newline (possible trailing data)")

    if valid:
        print(f"{filename} passed all checks.")
    else:
        print(f"{filename} failed checks:")
        for err in errors:
            print(err)


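# A minimal usage sketch (hypothetical helper, not part of the original
# script): "demo.jsonl" is an assumed file name. It writes a tiny JSONL file
# containing a type-inconsistent column, a malformed line, and no trailing
# newline, then runs the checker above so every error path is exercised.
def _demo():
    sample = [
        '{"name": "users", "columns": 4}',     # baseline row: 'columns' is an int
        '{"name": "orders", "columns": "4"}',  # 'columns' becomes a str -> type inconsistency
        '{"name": "items", "columns": 2',      # missing closing brace -> invalid JSON
    ]
    with open("demo.jsonl", "w", encoding="utf-8") as f:
        f.write("\n".join(sample))             # no trailing newline -> also flagged
    check_jsonl_file("demo.jsonl")

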
if __name__ == "__main__":
    check_jsonl_file("data.jsonl")

    # Load the SchemaPile dataset from the Hugging Face Hub and show one record.
    ds = load_dataset("cwolff/schemapile")["full"]
    print(f"Loaded dataset with {len(ds)} records.")
    print(ds[0])