# jam-alt-lines / fix_languages.py
# %%
from pathlib import Path
import json
import logging
import datasets
import lingua
# %%
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
# %%
# Build a detector restricted to English, Spanish, German and French.
language_detector = lingua.LanguageDetectorBuilder.from_languages(
    lingua.Language.ENGLISH,
    lingua.Language.SPANISH,
    lingua.Language.GERMAN,
    lingua.Language.FRENCH,
).build()
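# %%
# Quick sanity check of the detector on a single line (illustrative only;
# the sample text below is made up and not taken from the dataset).
sample_text = "Ceci est une phrase en français."
for confidence in language_detector.compute_language_confidence_values(sample_text):
    # Each entry pairs a lingua.Language with a confidence value in [0, 1].
    print(confidence.language.iso_code_639_1.name.lower(), round(confidence.value, 3))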
# %%
fixes = []
language_cache = {}
# Go through every metadata.jsonl file in the repository and check each
# line-level language annotation.
for path in sorted(Path(".").glob("**/metadata.jsonl")):
    dataset = datasets.load_dataset(
        "json",
        data_files=str(path),
        split="train",
    )
    for record in dataset:
        # Review each (song, line) pair only once; reuse the decision on repeats.
        cache_key = (record["song_name"], record["text"].strip())
        if cache_key in language_cache:
            language = language_cache[cache_key]
        else:
            language = record["language"]
            # Map ISO 639-1 codes (e.g. "en") to the detector's confidence values.
            detected_languages = {
                x.language.iso_code_639_1.name.lower(): x.value
                for x in language_detector.compute_language_confidence_values(
                    record["text"]
                )
            }
            # Keep only languages detected with confidence above 0.4.
            detected_languages = {
                lg: p for lg, p in detected_languages.items() if p > 0.4
            }
            if record["language"] not in detected_languages:
                # The annotated language is not among the confident detections,
                # so ask for a manual decision, defaulting to the current annotation.
                print(
                    f"{record['song_name']}: Detected languages {detected_languages} instead of "
                    f"{repr(language)} for line:\n{record['text'].strip()}"
                )
                language = input(f"Enter language [{language}]: ").strip() or language
                print()
            language_cache[cache_key] = language
        # Record a fix only when the (possibly corrected) line language differs
        # from the song-level language annotation.
        if language != record["song_language"]:
            fixes.append(
                {
                    "file_name": record["file_name"],
                    "text": record["text"],
                    "language": language,
                }
            )
Path("language_fixes.jsonl").write_text(
"\n".join(json.dumps(f) for f in fixes) + "\n", encoding="utf-8"
)
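# %%
# Optional follow-up (a minimal sketch, not part of the original workflow):
# reload the fixes file written above and summarize how many lines were
# reassigned to each language.
from collections import Counter

loaded_fixes = [
    json.loads(line)
    for line in Path("language_fixes.jsonl").read_text(encoding="utf-8").splitlines()
    if line.strip()
]
logger.info(
    "Collected %d fixes: %s",
    len(loaded_fixes),
    dict(Counter(fix["language"] for fix in loaded_fixes)),
)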