# pip install --upgrade huggingface_hub
from huggingface_hub import login, create_repo, upload_folder, HfApi

# === EDIT THESE IF NEEDED ===
REPO_ID = "mdg-nlp/updated-time-ner-bert-base-cased"  # target HF repo
LOCAL_OUTPUTS = r"C:\Users\ASUS_GAMER\Desktop\time-ner\updated-time-ner-bert-base-cased\outputs"  # local folder to upload
USE_DATASETS_SECTION = False  # set True if this repo should be under "Datasets"
# ============================

repo_type = "dataset" if USE_DATASETS_SECTION else "model"

# 1) Auth (or use: login(token="hf_..."))
login()

# 2) Ensure the repo exists
create_repo(repo_id=REPO_ID, repo_type=repo_type, exist_ok=True)

# 3) Force re-upload LOCAL_OUTPUTS -> repo path "outputs".
#    delete_patterns="**" deletes any remote file under /outputs that is not part
#    of this upload, so the remote folder exactly mirrors your local one.
#    (upload_folder has no delete=True flag; delete_patterns is the supported way.)
print("Uploading. This may take a while for large .safetensors/.bin files...")
upload_folder(
    folder_path=LOCAL_OUTPUTS,
    repo_id=REPO_ID,
    repo_type=repo_type,
    path_in_repo="outputs",
    commit_message="Force re-upload of outputs (weights + tokenizer)",
    delete_patterns="**",
    # To limit what gets uploaded, uncomment:
    # allow_patterns=["*.safetensors", "*.bin", "*.json", "*.txt", "*.model"],
    # ignore_patterns=["checkpoint-*/*", "runs/*", "logs/*", "*.tmp"],
)
print("✅ Upload complete.")

# 4) Verify by listing the files now on the repo under /outputs
api = HfApi()
files = api.list_repo_files(REPO_ID, repo_type=repo_type, revision="main")
outputs_files = [f for f in files if f.startswith("outputs/")]
print("\nRemote files under /outputs:")
for f in outputs_files:
    print(" -", f)
if not outputs_files:
    print("⚠️ No files found under /outputs on the remote. Double-check the LOCAL_OUTPUTS path and your permissions.")
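
# 5) (Optional) Round-trip sanity check: a minimal sketch that re-downloads one
#    of the files listed above and prints its local cache path. It reuses
#    REPO_ID/repo_type from above; picking config.json as the sample file is
#    just an illustration — substitute any file you know you uploaded.
from huggingface_hub import hf_hub_download

if outputs_files:
    # Prefer config.json if present, otherwise fall back to the first listed file.
    sample = next((f for f in outputs_files if f.endswith("config.json")), outputs_files[0])
    local_copy = hf_hub_download(repo_id=REPO_ID, repo_type=repo_type, filename=sample)
    print(f"\nRound-trip OK: {sample} -> {local_copy}")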