launch-computation-example / parse_requests.py
import os

from datasets import load_dataset

# Hugging Face access token, read from the DEBUG environment variable, used to
# authenticate access to the requests dataset.
TOKEN = os.environ.get("DEBUG")

# Load the table of benchmark requests.
requests_dataset = load_dataset("AIEnergyScore/requests_debug", split="test", token=TOKEN)

def normalize_task(task):
    """Normalize a task name to lowercase snake_case, e.g. "Text Generation" -> "text_generation"."""
    # Assumes task names may use spaces and/or hyphens as separators;
    # replace both with underscores.
    task = '_'.join(task.split()).lower()
    return '_'.join(task.split('-')).lower()

requests_dset = requests_dataset.to_pandas()

# Emit one "model,normalized_task" line (CSV-style) per request that is still
# marked PENDING, for downstream launch scripts to consume.
for model, task in requests_dset[['model', 'task']].loc[
        requests_dset['status'] == 'PENDING'].to_dict(
        orient='split', index=False)['data']:
    print("%s,%s" % (model, normalize_task(task)))