Spaces · Running on CPU Upgrade

docker-container-test #1047
by alozowski (HF Staff) - opened

Files changed:
- Dockerfile +1 -0
- README.md +0 -6
- backend/README.md +2 -2
- backend/__init__.py +0 -0
- backend/app/api/dependencies.py +1 -1
- backend/app/api/endpoints/leaderboard.py +1 -1
- backend/app/api/endpoints/models.py +2 -15
- backend/app/api/endpoints/votes.py +9 -30
- backend/app/asgi.py +2 -2
- backend/app/config/hf_config.py +5 -1
- backend/app/core/cache.py +4 -4
- backend/app/core/fastapi_cache.py +15 -43
- backend/app/core/formatting.py +0 -104
- backend/app/services/hf_service.py +1 -1
- backend/app/services/leaderboard.py +2 -2
- backend/app/services/models.py +32 -121
- backend/app/services/votes.py +122 -172
- backend/app/utils/logging.py +104 -2
- backend/app/utils/model_validation.py +9 -65
- backend/pyproject.toml +4 -4
- backend/utils/analyze_prod_datasets.py +1 -1
- backend/utils/fix_wrong_model_size.py +0 -110
- backend/utils/sync_datasets_locally.py +1 -1
- backend/uv.lock +0 -0
- frontend/src/App.js +4 -0
- frontend/src/components/Navigation/Navigation.js +30 -0
- frontend/src/components/shared/AuthContainer.js +5 -34
- frontend/src/components/shared/PageHeader.js +1 -1
- frontend/src/pages/AddModelPage/AddModelPage.js +2 -5
- frontend/src/pages/AddModelPage/components/EvaluationQueues/EvaluationQueues.js +104 -173
- frontend/src/pages/AddModelPage/components/ModelSubmissionForm/ModelSubmissionForm.js +5 -5
- frontend/src/pages/AddModelPage/components/SubmissionLimitChecker/SubmissionLimitChecker.js +0 -85
- frontend/src/pages/LeaderboardPage/LeaderboardPage.js +2 -6
- frontend/src/pages/LeaderboardPage/components/Leaderboard/components/Filters/FilteredModelCount.js +5 -5
- frontend/src/pages/LeaderboardPage/components/Leaderboard/components/Filters/Filters.js +6 -6
- frontend/src/pages/LeaderboardPage/components/Leaderboard/components/Filters/QuickFilters.js +1 -1
- frontend/src/pages/LeaderboardPage/components/Leaderboard/components/Filters/hooks/useOfficialProvidersMode.js +3 -3
- frontend/src/pages/LeaderboardPage/components/Leaderboard/constants/defaults.js +3 -3
- frontend/src/pages/LeaderboardPage/components/Leaderboard/constants/quickFilters.js +1 -1
- frontend/src/pages/LeaderboardPage/components/Leaderboard/constants/tooltips.js +1 -1
- frontend/src/pages/LeaderboardPage/components/Leaderboard/context/LeaderboardContext.js +7 -7
- frontend/src/pages/LeaderboardPage/components/Leaderboard/hooks/useDataUtils.js +17 -21
- frontend/src/pages/LeaderboardPage/components/Leaderboard/hooks/useLeaderboardData.js +23 -17
- frontend/src/pages/LeaderboardPage/components/Leaderboard/utils/columnUtils.js +4 -4
- frontend/src/pages/QuotePage/QuotePage.js +1 -1
- frontend/src/pages/VoteModelPage/VoteModelPage.js +167 -350
Dockerfile
CHANGED

@@ -49,6 +49,7 @@ WORKDIR /app
 
 # Environment variables
 ENV HF_HOME=/app/.cache \
+    TRANSFORMERS_CACHE=/app/.cache \
    HF_DATASETS_CACHE=/app/.cache \
    INTERNAL_API_PORT=7861 \
    PORT=7860 \

README.md
CHANGED

@@ -10,12 +10,6 @@ license: apache-2.0
 duplicated_from: open-llm-leaderboard/open_llm_leaderboard
 tags:
 - leaderboard
-- modality:text
-- submission:automatic
-- test:public
-- language:english
-- eval:code
-- eval:math
 short_description: Track, rank and evaluate open LLMs and chatbots
 ---
 

backend/README.md
CHANGED

@@ -70,7 +70,7 @@ The application uses several datasets on the HuggingFace Hub:
 - **Format**: Main dataset containing all scores and metrics
 - **Updates**: Automatic after model evaluations
 
-### 4. […]
+### 4. Maintainers Highlight Dataset (`{HF_ORGANIZATION}/maintainers-highlight`)
 
 - **Operations**:
   - 📥 Read-only access for highlighted models

@@ -203,7 +203,7 @@ Swagger documentation available at http://localhost:7860/docs
     is_merged: boolean,
     is_moe: boolean,
     is_flagged: boolean,
-    […]
+    is_highlighted_by_maintainer: boolean
   },
   metadata: {
     upload_date: string,

backend/__init__.py
ADDED

File without changes

backend/app/api/dependencies.py
CHANGED

@@ -2,7 +2,7 @@ from fastapi import Depends, HTTPException
 import logging
 from app.services.models import ModelService
 from app.services.votes import VoteService
-from app.core.formatting import LogFormatter
+from app.utils.logging import LogFormatter
 
 logger = logging.getLogger(__name__)
 

backend/app/api/endpoints/leaderboard.py
CHANGED

@@ -3,7 +3,7 @@ from typing import List, Dict, Any
 from app.services.leaderboard import LeaderboardService
 from app.core.fastapi_cache import cached, build_cache_key
 import logging
-from app.core.formatting import LogFormatter
+from app.utils.logging import LogFormatter
 
 logger = logging.getLogger(__name__)
 router = APIRouter()

backend/app/api/endpoints/models.py
CHANGED

@@ -1,10 +1,10 @@
-from fastapi import APIRouter, HTTPException, Query, Depends
+from fastapi import APIRouter, HTTPException, Depends
 from typing import Dict, Any, List
 import logging
 from app.services.models import ModelService
 from app.api.dependencies import get_model_service
 from app.core.fastapi_cache import cached
-from app.core.formatting import LogFormatter
+from app.utils.logging import LogFormatter
 
 logger = logging.getLogger(__name__)
 router = APIRouter(tags=["models"])

@@ -80,19 +80,6 @@ async def submit_model(
         logger.error(LogFormatter.error("Submission failed", e))
         raise HTTPException(status_code=500, detail=str(e))
 
-@router.get("/organization/{organization}/submissions")
-async def get_organization_submissions(
-    organization: str,
-    days: int = Query(default=7, ge=1, le=30),
-    model_service: ModelService = Depends(get_model_service)
-) -> List[Dict[str, Any]]:
-    """Get all submissions from an organization in the last n days"""
-    try:
-        submissions = await model_service.get_organization_submissions(organization, days)
-        return submissions
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
 @router.get("/{model_id}/status")
 async def get_model_status(
     model_id: str,

backend/app/api/endpoints/votes.py
CHANGED

@@ -1,17 +1,14 @@
-from fastapi import APIRouter, HTTPException, Query, Depends, Response
+from fastapi import APIRouter, HTTPException, Query, Depends
 from typing import Dict, Any, List
 from app.services.votes import VoteService
 from app.core.fastapi_cache import cached, build_cache_key, invalidate_cache_key
 import logging
-from app.core.formatting import LogFormatter
-from datetime import datetime, timezone
+from app.utils.logging import LogFormatter
 
 logger = logging.getLogger(__name__)
 router = APIRouter()
 vote_service = VoteService()
 
-CACHE_TTL = 30  # 30 seconds cache
-
 def model_votes_key_builder(func, namespace: str = "model_votes", **kwargs):
     """Build cache key for model votes"""
     provider = kwargs.get('provider')

@@ -29,25 +26,22 @@ def user_votes_key_builder(func, namespace: str = "user_votes", **kwargs):
 
 @router.post("/{model_id:path}")
 async def add_vote(
-    response: Response,
     model_id: str,
     vote_type: str = Query(..., description="Type of vote (up/down)"),
-    user_id: str = Query(..., description="HuggingFace username"),
-    vote_data: Dict[str, Any] = None
+    user_id: str = Query(..., description="HuggingFace username")
 ) -> Dict[str, Any]:
     try:
         logger.info(LogFormatter.section("ADDING VOTE"))
         stats = {
             "Model": model_id,
             "User": user_id,
-            "Type": vote_type,
-            "Config": vote_data or {}
+            "Type": vote_type
         }
         for line in LogFormatter.tree(stats, "Vote Details"):
             logger.info(line)
 
         await vote_service.initialize()
-        result = await vote_service.add_vote(model_id, user_id, vote_type, vote_data)
+        result = await vote_service.add_vote(model_id, user_id, vote_type)
 
         # Invalidate affected caches
         try:

@@ -58,8 +52,8 @@
             model_cache_key = build_cache_key("model_votes", provider, model)
             user_cache_key = build_cache_key("user_votes", user_id)
 
-            await invalidate_cache_key(model_cache_key)
-            await invalidate_cache_key(user_cache_key)
+            invalidate_cache_key(model_cache_key)
+            invalidate_cache_key(user_cache_key)
 
             cache_stats = {
                 "Model_Cache": model_cache_key,

@@ -71,18 +65,14 @@
         except Exception as e:
             logger.error(LogFormatter.error("Failed to invalidate cache", e))
 
-        # Add cache control headers
-        response.headers["Cache-Control"] = "no-cache"
-
         return result
     except Exception as e:
         logger.error(LogFormatter.error("Failed to add vote", e))
         raise HTTPException(status_code=400, detail=str(e))
 
 @router.get("/model/{provider}/{model}")
-@cached(expire=CACHE_TTL, key_builder=model_votes_key_builder)
+@cached(expire=60, key_builder=model_votes_key_builder)
 async def get_model_votes(
-    response: Response,
     provider: str,
     model: str
 ) -> Dict[str, Any]:

@@ -92,11 +82,6 @@
         await vote_service.initialize()
         model_id = f"{provider}/{model}"
         result = await vote_service.get_model_votes(model_id)
-
-        # Add cache control headers
-        response.headers["Cache-Control"] = f"max-age={CACHE_TTL}"
-        response.headers["Last-Modified"] = vote_service._last_sync.strftime("%a, %d %b %Y %H:%M:%S GMT")
-
         logger.info(LogFormatter.success(f"Found {result.get('total_votes', 0)} votes"))
         return result
     except Exception as e:

@@ -104,9 +89,8 @@
         raise HTTPException(status_code=400, detail=str(e))
 
 @router.get("/user/{user_id}")
-@cached(expire=CACHE_TTL, key_builder=user_votes_key_builder)
+@cached(expire=60, key_builder=user_votes_key_builder)
 async def get_user_votes(
-    response: Response,
     user_id: str
 ) -> List[Dict[str, Any]]:
     """Get all votes from a specific user"""

@@ -114,11 +98,6 @@
         logger.info(LogFormatter.info(f"Fetching votes for user: {user_id}"))
         await vote_service.initialize()
         votes = await vote_service.get_user_votes(user_id)
-
-        # Add cache control headers
-        response.headers["Cache-Control"] = f"max-age={CACHE_TTL}"
-        response.headers["Last-Modified"] = vote_service._last_sync.strftime("%a, %d %b %Y %H:%M:%S GMT")
-
         logger.info(LogFormatter.success(f"Found {len(votes)} votes"))
         return votes
     except Exception as e:

backend/app/asgi.py
CHANGED

@@ -12,7 +12,7 @@ import sys
 
 from app.api.router import router
 from app.core.fastapi_cache import setup_cache
-from app.core.formatting import LogFormatter
+from app.utils.logging import LogFormatter
 from app.config import hf_config
 
 # Configure logging before anything else

@@ -99,7 +99,7 @@ async def startup_event():
     logger.info(LogFormatter.info(f"  - Queue: {hf_config.QUEUE_REPO}"))
     logger.info(LogFormatter.info(f"  - Aggregated: {hf_config.AGGREGATED_REPO}"))
     logger.info(LogFormatter.info(f"  - Votes: {hf_config.VOTES_REPO}"))
-    logger.info(LogFormatter.info(f"  - […]"))
+    logger.info(LogFormatter.info(f"  - Maintainers Highlight: {hf_config.MAINTAINERS_HIGHLIGHT_REPO}"))
 
     # Setup cache
     setup_cache()

backend/app/config/hf_config.py
CHANGED

@@ -1,9 +1,13 @@
+"""
+Hugging Face configuration module
+"""
 import os
 import logging
 from typing import Optional
 from huggingface_hub import HfApi
 from pathlib import Path
 from app.core.cache import cache_config
+from app.utils.logging import LogFormatter
 
 logger = logging.getLogger(__name__)
 

@@ -22,7 +26,7 @@ API = HfApi(token=HF_TOKEN)
 QUEUE_REPO = f"{HF_ORGANIZATION}/requests"
 AGGREGATED_REPO = f"{HF_ORGANIZATION}/contents"
 VOTES_REPO = f"{HF_ORGANIZATION}/votes"
-[…]
+MAINTAINERS_HIGHLIGHT_REPO = f"{HF_ORGANIZATION}/maintainers-highlight"
 
 # File paths from cache config
 VOTES_PATH = cache_config.votes_file

backend/app/core/cache.py
CHANGED

@@ -3,7 +3,7 @@ import shutil
 from pathlib import Path
 from datetime import timedelta
 import logging
-from app.core.formatting import LogFormatter
+from app.utils.logging import LogFormatter
 from app.config.base import (
     CACHE_ROOT,
     DATASETS_CACHE,

@@ -58,16 +58,16 @@ class CacheConfig:
     def _setup_environment(self):
         """Configure HuggingFace environment variables"""
         logger.info(LogFormatter.subsection("ENVIRONMENT SETUP"))
 
         env_vars = {
             "HF_HOME": str(self.cache_root),
+            "TRANSFORMERS_CACHE": str(self.models_cache),
             "HF_DATASETS_CACHE": str(self.datasets_cache)
         }
 
         for var, value in env_vars.items():
             os.environ[var] = value
             logger.info(LogFormatter.info(f"Set {var}={value}"))
-
 
     def get_cache_path(self, cache_type: str) -> Path:
         """Returns the path for a specific cache type"""

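Both the Dockerfile and `CacheConfig._setup_environment` now set `TRANSFORMERS_CACHE` alongside `HF_HOME` and `HF_DATASETS_CACHE`. The ordering matters: the HF libraries read these variables when they are first imported, so `_setup_environment` has to run before anything pulls in `datasets` or `transformers`. A minimal sketch of that constraint (the paths are illustrative, not the Space's real layout):

import os

# Must happen before the first `import datasets` / `import transformers`
os.environ["HF_HOME"] = "/app/.cache"
os.environ["TRANSFORMERS_CACHE"] = "/app/.cache"   # models cache
os.environ["HF_DATASETS_CACHE"] = "/app/.cache"    # datasets cache

import datasets  # picks up HF_DATASETS_CACHE during import

print(datasets.config.HF_DATASETS_CACHE)  # -> /app/.cache
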
backend/app/core/fastapi_cache.py
CHANGED

@@ -4,64 +4,36 @@ from fastapi_cache.decorator import cache
 from datetime import timedelta
 from app.config import CACHE_TTL
 import logging
-from app.core.formatting import LogFormatter
-from typing import Optional, Any
+from app.utils.logging import LogFormatter
 
 logger = logging.getLogger(__name__)
 
-class CustomInMemoryBackend(InMemoryBackend):
-    def __init__(self):
-        """Initialize the cache backend"""
-        super().__init__()
-        self.cache = {}
-
-    async def delete(self, key: str) -> bool:
-        """Delete a key from the cache"""
-        try:
-            if key in self.cache:
-                del self.cache[key]
-                return True
-            return False
-        except Exception as e:
-            logger.error(LogFormatter.error(f"Failed to delete key {key} from cache", e))
-            return False
-
-    async def get(self, key: str) -> Any:
-        """Get a value from the cache"""
-        return self.cache.get(key)
-
-    async def set(self, key: str, value: Any, expire: Optional[int] = None) -> None:
-        """Set a value in the cache"""
-        self.cache[key] = value
-
 def setup_cache():
     """Initialize FastAPI Cache with in-memory backend"""
-    try:
-        […]
-    except Exception as e:
-        logger.error(LogFormatter.error("Failed to initialize cache", e))
-        raise
+    FastAPICache.init(
+        backend=InMemoryBackend(),
+        prefix="fastapi-cache",
+        expire=CACHE_TTL
+    )
+    logger.info(LogFormatter.success("FastAPI Cache initialized with in-memory backend"))
 
-async def invalidate_cache_key(key: str):
+def invalidate_cache_key(key: str):
     """Invalidate a specific cache key"""
     try:
         backend = FastAPICache.get_backend()
         if hasattr(backend, 'delete'):
-            await backend.delete(key)
+            backend.delete(key)
             logger.info(LogFormatter.success(f"Cache invalidated for key: {key}"))
         else:
             logger.warning(LogFormatter.warning("Cache backend does not support deletion"))
     except Exception as e:
         logger.error(LogFormatter.error(f"Failed to invalidate cache key: {key}", e))
 
-def build_cache_key(*args) -> str:
-    """Build a cache key […]"""
-    […]
+def build_cache_key(namespace: str, *args) -> str:
+    """Build a consistent cache key"""
+    key = f"fastapi-cache:{namespace}:{':'.join(str(arg) for arg in args)}"
+    logger.debug(LogFormatter.info(f"Built cache key: {key}"))
+    return key
 
 def cached(expire: int = CACHE_TTL, key_builder=None):
     """Decorator for caching endpoint responses

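The invalidation in the votes endpoints only works if `build_cache_key` produces exactly the keys the `@cached` decorator stores under, so the `fastapi-cache:{namespace}:…` layout has to stay in lockstep with the endpoint key builders (whose bodies are only partially shown in this view). A standalone sketch of that contract, assuming the key builders emit the same `namespace:provider:model` layout:

# Mirror of build_cache_key above, outside the app package
def build_cache_key(namespace: str, *args) -> str:
    return f"fastapi-cache:{namespace}:{':'.join(str(arg) for arg in args)}"

# The key the POST /votes endpoint invalidates after a vote...
model_cache_key = build_cache_key("model_votes", "openai", "gpt-x")

# ...must equal the key model_votes_key_builder produced when caching:
assert model_cache_key == "fastapi-cache:model_votes:openai:gpt-x"
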
backend/app/core/formatting.py
DELETED

@@ -1,104 +0,0 @@
-import logging
-from typing import Dict, Any, List, Optional
-
-logger = logging.getLogger(__name__)
-
-class LogFormatter:
-    """Utility class for consistent log formatting across the application"""
-
-    @staticmethod
-    def section(title: str) -> str:
-        """Create a section header"""
-        return f"\n{'='*20} {title.upper()} {'='*20}"
-
-    @staticmethod
-    def subsection(title: str) -> str:
-        """Create a subsection header"""
-        return f"\n{'─'*20} {title} {'─'*20}"
-
-    @staticmethod
-    def tree(items: Dict[str, Any], title: str = None) -> List[str]:
-        """Create a tree view of dictionary data"""
-        lines = []
-        if title:
-            lines.append(f"📊 {title}:")
-
-        # Get the maximum length for alignment
-        max_key_length = max(len(str(k)) for k in items.keys())
-
-        # Format each item
-        for i, (key, value) in enumerate(items.items()):
-            prefix = "└──" if i == len(items) - 1 else "├──"
-            if isinstance(value, (int, float)):
-                value = f"{value:,}"  # Add thousand separators
-            lines.append(f"{prefix} {str(key):<{max_key_length}}: {value}")
-
-        return lines
-
-    @staticmethod
-    def stats(stats: Dict[str, int], title: str = None) -> List[str]:
-        """Format statistics with icons"""
-        lines = []
-        if title:
-            lines.append(f"📊 {title}:")
-
-        # Get the maximum length for alignment
-        max_key_length = max(len(str(k)) for k in stats.keys())
-
-        # Format each stat with an appropriate icon
-        icons = {
-            "total": "📌",
-            "success": "✅",
-            "error": "❌",
-            "pending": "⏳",
-            "processing": "⚙️",
-            "finished": "✨",
-            "evaluating": "🔄",
-            "downloads": "⬇️",
-            "files": "📁",
-            "cached": "💾",
-            "size": "📏",
-            "time": "⏱️",
-            "rate": "🚀"
-        }
-
-        # Format each item
-        for i, (key, value) in enumerate(stats.items()):
-            prefix = "└──" if i == len(stats) - 1 else "├──"
-            icon = icons.get(key.lower().split('_')[0], "•")
-            if isinstance(value, (int, float)):
-                value = f"{value:,}"  # Add thousand separators
-            lines.append(f"{prefix} {icon} {str(key):<{max_key_length}}: {value}")
-
-        return lines
-
-    @staticmethod
-    def progress_bar(current: int, total: int, width: int = 20) -> str:
-        """Create a progress bar"""
-        percentage = (current * 100) // total
-        filled = "█" * (percentage * width // 100)
-        empty = "░" * (width - len(filled))
-        return f"{filled}{empty} {percentage:3d}%"
-
-    @staticmethod
-    def error(message: str, error: Optional[Exception] = None) -> str:
-        """Format error message"""
-        error_msg = f"\n❌ Error: {message}"
-        if error:
-            error_msg += f"\n └── Details: {str(error)}"
-        return error_msg
-
-    @staticmethod
-    def success(message: str) -> str:
-        """Format success message"""
-        return f"✅ {message}"
-
-    @staticmethod
-    def warning(message: str) -> str:
-        """Format warning message"""
-        return f"⚠️ {message}"
-
-    @staticmethod
-    def info(message: str) -> str:
-        """Format info message"""
-        return f"ℹ️ {message}"

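The class is deleted here but not dropped: per the file list, the same 104 lines move into backend/app/utils/logging.py (+104 -2), and every importer is rewritten from `app.core.formatting` to `app.utils.logging`. For reference, a call like the one in the vote endpoints produces output of this shape (a sketch, assuming the moved class is importable from its new home):

from app.utils.logging import LogFormatter  # new location of the class above

for line in LogFormatter.tree({"Model": "org/model", "Type": "up"}, "Vote Details"):
    print(line)
# 📊 Vote Details:
# ├── Model: org/model
# └── Type : up
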
backend/app/services/hf_service.py
CHANGED

@@ -2,7 +2,7 @@ from typing import Optional
 from huggingface_hub import HfApi
 from app.config import HF_TOKEN, API
 from app.core.cache import cache_config
-from app.core.formatting import LogFormatter
+from app.utils.logging import LogFormatter
 import logging
 
 logger = logging.getLogger(__name__)

backend/app/services/leaderboard.py
CHANGED

@@ -5,7 +5,7 @@ import datasets
 from fastapi import HTTPException
 import logging
 from app.config.base import HF_ORGANIZATION
-from app.core.formatting import LogFormatter
+from app.utils.logging import LogFormatter
 
 logger = logging.getLogger(__name__)
 

@@ -143,7 +143,7 @@ class LeaderboardService:
             "is_merged": data.get("Merged", False),
             "is_moe": data.get("MoE", False),
             "is_flagged": data.get("Flagged", False),
-            "[…]"
+            "is_highlighted_by_maintainer": data.get("Official Providers", False)
         }
 
         metadata = {

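The new `is_highlighted_by_maintainer` flag is derived the same way as the other booleans: straight from a column of the aggregated dataset row. A small sketch of the mapping, with a made-up row shaped like the `data` dict in the hunk above:

row = {"Merged": False, "MoE": True, "Flagged": False, "Official Providers": True}

features = {
    "is_merged": row.get("Merged", False),
    "is_moe": row.get("MoE", False),
    "is_flagged": row.get("Flagged", False),
    "is_highlighted_by_maintainer": row.get("Official Providers", False),
}
assert features["is_highlighted_by_maintainer"] is True
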
backend/app/services/models.py
CHANGED

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone, timedelta
+from datetime import datetime, timezone
 from typing import Dict, Any, Optional, List
 import json
 import os

@@ -25,7 +25,7 @@ from app.services.hf_service import HuggingFaceService
 from app.utils.model_validation import ModelValidator
 from app.services.votes import VoteService
 from app.core.cache import cache_config
-from app.core.formatting import LogFormatter
+from app.utils.logging import LogFormatter
 
 # Disable datasets progress bars globally
 disable_progress_bar()

@@ -217,25 +217,24 @@ class ModelService(HuggingFaceService):
 
         try:
             logger.info(LogFormatter.subsection("DATASET LOADING"))
-            logger.info(LogFormatter.info("Loading dataset..."))
+            logger.info(LogFormatter.info("Loading dataset files..."))
 
-            # […]
+            # List files in repository
             with suppress_output():
-                […]
+                files = self.hf_api.list_repo_files(
                     repo_id=QUEUE_REPO,
                     repo_type="dataset",
                     token=self.token
                 )
 
-            # […]
-            […]
-            json_files = list(local_path.glob("**/*.json"))
+            # Filter JSON files
+            json_files = [f for f in files if f.endswith('.json')]
             total_files = len(json_files)
 
             # Log repository stats
             stats = {
-                "Total_Files": […],
-                "[…]"
+                "Total_Files": len(files),
+                "JSON_Files": total_files,
             }
             for line in LogFormatter.stats(stats, "Repository Statistics"):
                 logger.info(line)

@@ -246,67 +245,28 @@
             # Initialize progress tracker
             progress = ProgressTracker(total_files, "PROCESSING FILES")
 
-            […]
-                        content = json.load(f)
-
-                        # Get status and determine target status
-                        status = content.get("status", "PENDING").upper()
-                        target_status = None
-                        status_map = {
-                            "PENDING": ["PENDING"],
-                            "EVALUATING": ["RUNNING"],
-                            "FINISHED": ["FINISHED"]
-                        }
-
-                        for […]
-                            […]
-                            continue
-
-                        # Calculate wait time
-                        try:
-                            submit_time = datetime.fromisoformat(content["submitted_time"].replace("Z", "+00:00"))
-                            if submit_time.tzinfo is None:
-                                submit_time = submit_time.replace(tzinfo=timezone.utc)
-                            current_time = datetime.now(timezone.utc)
-                            wait_time = current_time - submit_time
-
-                            model_info = {
-                                "name": content["model"],
-                                "submitter": content.get("sender", "Unknown"),
-                                "revision": content["revision"],
-                                "wait_time": f"{wait_time.total_seconds():.1f}s",
-                                "submission_time": content["submitted_time"],
-                                "status": target_status,
-                                "precision": content.get("precision", "Unknown")
-                            }
-
-                            # […]
-                        except (ValueError, TypeError) as e:
-                            logger.error(LogFormatter.error(f"Failed to process {file_path.name}", e))
-
-                        except Exception as e:
-                            logger.error(LogFormatter.error(f"Failed to load {file_path.name}", e))
-                        finally:
-                            progress.update()
-
-            # Populate models dict with deduplicated submissions
-            for model_info in model_submissions.values():
-                models[model_info["status"].lower()].append(model_info)
+            try:
+                # Create aiohttp session to reuse connections
+                async with aiohttp.ClientSession() as session:
+                    # Process files in chunks
+                    chunk_size = 50
+
+                    for i in range(0, len(json_files), chunk_size):
+                        chunk = json_files[i:i + chunk_size]
+                        chunk_tasks = [
+                            self._download_and_process_file(file, session, progress)
+                            for file in chunk
+                        ]
+                        results = await asyncio.gather(*chunk_tasks)
+
+                        # Process results
+                        for result in results:
+                            if result:
+                                status = result.pop("status")
+                                models[status.lower()].append(result)
+
+            finally:
+                progress.close()
 
             # Final summary with fancy formatting
             logger.info(LogFormatter.section("CACHE SUMMARY"))

@@ -449,14 +409,6 @@
         logger.info(LogFormatter.subsection("CHECKING EXISTING SUBMISSIONS"))
         existing_models = await self.get_models()
 
-        # Call the official provider status check
-        is_valid, error_message = await self.validator.check_official_provider_status(
-            model_data["model_id"],
-            existing_models
-        )
-        if not is_valid:
-            raise ValueError(error_message)
-
         # Check in all statuses (pending, evaluating, finished)
         for status, models in existing_models.items():
             for model in models:

@@ -502,11 +454,11 @@
         if model_size is None:
             logger.error(LogFormatter.error("Model size validation failed", error))
             raise Exception(error)
-        logger.info(LogFormatter.success(f"Model size validation passed: {model_size:.1f}[…]"))
+        logger.info(LogFormatter.success(f"Model size validation passed: {model_size:.1f}GB"))
 
         # Size limits based on precision
         if model_data["precision"] in ["float16", "bfloat16"] and model_size > 100:
-            error_msg = f"Model too large for {model_data['precision']} (limit: […])"
+            error_msg = f"Model too large for {model_data['precision']} (limit: 100GB)"
             logger.error(LogFormatter.error("Size limit exceeded", error_msg))
             raise Exception(error_msg)

@@ -590,11 +542,7 @@
             await self.vote_service.add_vote(
                 model_data["model_id"],
                 user_id,
-                "up",
-                {
-                    "precision": model_data["precision"],
-                    "revision": model_data["revision"]
-                }
+                "up"
             )
             logger.info(LogFormatter.success("Vote recorded successfully"))
         except Exception as e:

@@ -628,41 +576,4 @@
             return status
 
         logger.warning(LogFormatter.warning(f"No status found for model: {model_id}"))
         return {"status": "not_found"}
-
-    async def get_organization_submissions(self, organization: str, days: int = 7) -> List[Dict[str, Any]]:
-        """Get all submissions from a user in the last n days"""
-        try:
-            # Get all models
-            all_models = await self.get_models()
-            current_time = datetime.now(timezone.utc)
-            cutoff_time = current_time - timedelta(days=days)
-
-            # Filter models by submitter and submission time
-            user_submissions = []
-            for status, models in all_models.items():
-                for model in models:
-                    # Check if model was submitted by the user
-                    if model["submitter"] == organization:
-                        # Parse submission time
-                        submit_time = datetime.fromisoformat(
-                            model["submission_time"].replace("Z", "+00:00")
-                        )
-                        # Check if within time window
-                        if submit_time > cutoff_time:
-                            user_submissions.append({
-                                "name": model["name"],
-                                "status": status,
-                                "submission_time": model["submission_time"],
-                                "precision": model["precision"]
-                            })
-
-            return sorted(
-                user_submissions,
-                key=lambda x: x["submission_time"],
-                reverse=True
-            )
-
-        except Exception as e:
-            logger.error(LogFormatter.error(f"Failed to get submissions for {organization}", e))
-            raise

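The rewritten loader replaces a per-file sequential parse with chunked concurrent downloads: `asyncio.gather` runs at most `chunk_size` requests at a time over one shared `aiohttp` session. A self-contained sketch of the same pattern; `fetch` stands in for the PR's `_download_and_process_file`, whose body is not shown in this view:

import asyncio
import aiohttp

async def fetch(session: aiohttp.ClientSession, url: str) -> int:
    """Stand-in for _download_and_process_file: fetch one URL, return its status."""
    async with session.get(url) as resp:
        return resp.status

async def process_in_chunks(urls, chunk_size: int = 50):
    results = []
    async with aiohttp.ClientSession() as session:  # one session, reused connections
        for i in range(0, len(urls), chunk_size):
            chunk = urls[i:i + chunk_size]
            # at most chunk_size requests in flight at once
            results.extend(await asyncio.gather(*(fetch(session, u) for u in chunk)))
    return results

# asyncio.run(process_in_chunks(["https://huggingface.co"] * 3))
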
backend/app/services/votes.py
CHANGED
|
@@ -4,16 +4,16 @@ import json
|
|
| 4 |
import logging
|
| 5 |
import asyncio
|
| 6 |
from pathlib import Path
|
|
|
|
| 7 |
import aiohttp
|
| 8 |
from huggingface_hub import HfApi
|
| 9 |
-
import
|
| 10 |
-
import os
|
| 11 |
|
| 12 |
from app.services.hf_service import HuggingFaceService
|
| 13 |
-
from app.config import HF_TOKEN
|
| 14 |
from app.config.hf_config import HF_ORGANIZATION
|
| 15 |
from app.core.cache import cache_config
|
| 16 |
-
from app.
|
| 17 |
|
| 18 |
logger = logging.getLogger(__name__)
|
| 19 |
|
|
@@ -31,15 +31,17 @@ class VoteService(HuggingFaceService):
|
|
| 31 |
super().__init__()
|
| 32 |
self.votes_file = cache_config.votes_file
|
| 33 |
self.votes_to_upload: List[Dict[str, Any]] = []
|
| 34 |
-
self.vote_check_set: Set[Tuple[str, str, str
|
| 35 |
self._votes_by_model: Dict[str, List[Dict[str, Any]]] = {}
|
| 36 |
self._votes_by_user: Dict[str, List[Dict[str, Any]]] = {}
|
|
|
|
| 37 |
self._last_sync = None
|
| 38 |
self._sync_interval = 300 # 5 minutes
|
| 39 |
self._total_votes = 0
|
| 40 |
self._last_vote_timestamp = None
|
| 41 |
self._max_retries = 3
|
| 42 |
self._retry_delay = 1 # seconds
|
|
|
|
| 43 |
self.hf_api = HfApi(token=HF_TOKEN)
|
| 44 |
self._init_done = True
|
| 45 |
|
|
@@ -55,21 +57,29 @@ class VoteService(HuggingFaceService):
|
|
| 55 |
# Ensure votes directory exists
|
| 56 |
self.votes_file.parent.mkdir(parents=True, exist_ok=True)
|
| 57 |
|
| 58 |
-
# Load
|
| 59 |
-
|
| 60 |
-
if
|
| 61 |
-
logger.info(LogFormatter.info(f"
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
|
| 67 |
-
|
| 68 |
-
|
| 69 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 70 |
await self._load_existing_votes()
|
| 71 |
else:
|
| 72 |
-
logger.
|
|
|
|
|
|
|
|
|
|
|
|
|
| 73 |
|
| 74 |
self._initialized = True
|
| 75 |
self._last_sync = datetime.now(timezone.utc)
|
|
@@ -87,105 +97,80 @@ class VoteService(HuggingFaceService):
|
|
| 87 |
logger.error(LogFormatter.error("Initialization failed", e))
|
| 88 |
raise
|
| 89 |
|
| 90 |
-
async def
|
| 91 |
-
"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 92 |
url = f"https://huggingface.co/datasets/{HF_ORGANIZATION}/votes/raw/main/votes_data.jsonl"
|
| 93 |
-
headers = {"Authorization": f"Bearer {
|
| 94 |
|
| 95 |
try:
|
| 96 |
async with aiohttp.ClientSession() as session:
|
| 97 |
async with session.get(url, headers=headers) as response:
|
| 98 |
if response.status == 200:
|
| 99 |
-
|
| 100 |
async for line in response.content:
|
| 101 |
-
if line.strip():
|
| 102 |
-
|
| 103 |
-
|
| 104 |
-
votes.append(vote)
|
| 105 |
-
except json.JSONDecodeError:
|
| 106 |
-
continue
|
| 107 |
-
return votes
|
| 108 |
else:
|
| 109 |
logger.error(f"Failed to get remote votes: HTTP {response.status}")
|
| 110 |
-
return
|
| 111 |
except Exception as e:
|
| 112 |
-
logger.error(f"Error
|
| 113 |
-
return
|
| 114 |
-
|
| 115 |
-
async def _check_for_new_votes(self):
|
| 116 |
-
"""Check for new votes on the hub and sync if needed"""
|
| 117 |
-
try:
|
| 118 |
-
remote_votes = await self._fetch_remote_votes()
|
| 119 |
-
if len(remote_votes) != self._total_votes:
|
| 120 |
-
logger.info(f"Vote count changed: Local ({self._total_votes}) ≠ Remote ({len(remote_votes)})")
|
| 121 |
-
# Save to local file
|
| 122 |
-
with open(self.votes_file, 'w') as f:
|
| 123 |
-
for vote in remote_votes:
|
| 124 |
-
json.dump(vote, f)
|
| 125 |
-
f.write('\n')
|
| 126 |
-
|
| 127 |
-
# Reload into memory
|
| 128 |
-
await self._load_existing_votes()
|
| 129 |
-
else:
|
| 130 |
-
logger.info("Votes are in sync")
|
| 131 |
-
|
| 132 |
-
except Exception as e:
|
| 133 |
-
logger.error(f"Error checking for new votes: {str(e)}")
|
| 134 |
|
| 135 |
async def _sync_with_hub(self):
|
| 136 |
-
"""Sync votes with HuggingFace hub"""
|
| 137 |
try:
|
| 138 |
logger.info(LogFormatter.section("VOTE SYNC"))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 139 |
|
| 140 |
-
|
| 141 |
-
|
| 142 |
-
logger.info(LogFormatter.info(f"Loaded {len(remote_votes)} votes from hub"))
|
| 143 |
|
| 144 |
-
#
|
| 145 |
-
|
| 146 |
-
|
| 147 |
-
|
| 148 |
-
|
| 149 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 150 |
|
| 151 |
-
#
|
| 152 |
-
with
|
| 153 |
for vote in remote_votes:
|
| 154 |
-
json.
|
| 155 |
-
temp_file.write('\n')
|
| 156 |
-
temp_path = temp_file.name
|
| 157 |
|
| 158 |
-
|
| 159 |
-
|
| 160 |
-
|
| 161 |
-
|
| 162 |
-
|
| 163 |
-
repo_id=f"{HF_ORGANIZATION}/votes",
|
| 164 |
-
repo_type="dataset",
|
| 165 |
-
commit_message=f"Update votes: +{len(self.votes_to_upload)} new votes",
|
| 166 |
-
token=self.token
|
| 167 |
-
)
|
| 168 |
-
|
| 169 |
-
# Clear pending votes only if upload succeeded
|
| 170 |
-
self.votes_to_upload.clear()
|
| 171 |
-
logger.info(LogFormatter.success("Pending votes uploaded successfully"))
|
| 172 |
-
|
| 173 |
-
except Exception as e:
|
| 174 |
-
logger.error(LogFormatter.error("Failed to upload votes to hub", e))
|
| 175 |
-
raise
|
| 176 |
-
finally:
|
| 177 |
-
# Clean up temp file
|
| 178 |
-
os.unlink(temp_path)
|
| 179 |
-
|
| 180 |
-
# Update local state
|
| 181 |
-
with open(self.votes_file, 'w') as f:
|
| 182 |
-
for vote in remote_votes:
|
| 183 |
-
json.dump(vote, f)
|
| 184 |
-
f.write('\n')
|
| 185 |
-
|
| 186 |
-
# Reload votes in memory
|
| 187 |
-
await self._load_existing_votes()
|
| 188 |
-
logger.info(LogFormatter.success("Sync completed successfully"))
|
| 189 |
|
| 190 |
self._last_sync = datetime.now(timezone.utc)
|
| 191 |
|
|
@@ -193,6 +178,23 @@ class VoteService(HuggingFaceService):
|
|
| 193 |
logger.error(LogFormatter.error("Sync failed", e))
|
| 194 |
raise
|
| 195 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 196 |
async def _load_existing_votes(self):
|
| 197 |
"""Load existing votes from file"""
|
| 198 |
if not self.votes_file.exists():
|
|
@@ -260,13 +262,7 @@ class VoteService(HuggingFaceService):
|
|
| 260 |
def _add_vote_to_memory(self, vote: Dict[str, Any]):
|
| 261 |
"""Add vote to memory structures"""
|
| 262 |
try:
|
| 263 |
-
|
| 264 |
-
check_tuple = (
|
| 265 |
-
vote["model"],
|
| 266 |
-
vote.get("revision", "main"),
|
| 267 |
-
vote["username"],
|
| 268 |
-
vote.get("precision", "unknown")
|
| 269 |
-
)
|
| 270 |
|
| 271 |
# Skip if we already have this vote
|
| 272 |
if check_tuple in self.vote_check_set:
|
|
@@ -285,19 +281,13 @@ class VoteService(HuggingFaceService):
|
|
| 285 |
self._votes_by_user[vote["username"]].append(vote)
|
| 286 |
|
| 287 |
except KeyError as e:
|
| 288 |
-
logger.error(
|
| 289 |
except Exception as e:
|
| 290 |
-
logger.error(
|
| 291 |
|
| 292 |
async def get_user_votes(self, user_id: str) -> List[Dict[str, Any]]:
|
| 293 |
"""Get all votes from a specific user"""
|
| 294 |
logger.info(LogFormatter.info(f"Fetching votes for user: {user_id}"))
|
| 295 |
-
|
| 296 |
-
# Check if we need to refresh votes
|
| 297 |
-
if (datetime.now(timezone.utc) - self._last_sync).total_seconds() > self._sync_interval:
|
| 298 |
-
logger.info(LogFormatter.info("Cache expired, refreshing votes..."))
|
| 299 |
-
await self._check_for_new_votes()
|
| 300 |
-
|
| 301 |
votes = self._votes_by_user.get(user_id, [])
|
| 302 |
logger.info(LogFormatter.success(f"Found {len(votes):,} votes"))
|
| 303 |
return votes
|
|
@@ -305,31 +295,19 @@ class VoteService(HuggingFaceService):
|
|
| 305 |
async def get_model_votes(self, model_id: str) -> Dict[str, Any]:
|
| 306 |
"""Get all votes for a specific model"""
|
| 307 |
logger.info(LogFormatter.info(f"Fetching votes for model: {model_id}"))
|
| 308 |
-
|
| 309 |
-
# Check if we need to refresh votes
|
| 310 |
-
if (datetime.now(timezone.utc) - self._last_sync).total_seconds() > self._sync_interval:
|
| 311 |
-
logger.info(LogFormatter.info("Cache expired, refreshing votes..."))
|
| 312 |
-
await self._check_for_new_votes()
|
| 313 |
-
|
| 314 |
votes = self._votes_by_model.get(model_id, [])
|
| 315 |
|
| 316 |
-
# Group votes by revision
|
| 317 |
-
|
| 318 |
for vote in votes:
|
| 319 |
-
revision = vote
|
| 320 |
-
|
| 321 |
-
|
| 322 |
-
|
| 323 |
-
votes_by_config[config_key] = {
|
| 324 |
-
"revision": revision,
|
| 325 |
-
"precision": precision,
|
| 326 |
-
"count": 0
|
| 327 |
-
}
|
| 328 |
-
votes_by_config[config_key]["count"] += 1
|
| 329 |
|
| 330 |
stats = {
|
| 331 |
"Total_Votes": len(votes),
|
| 332 |
-
**{f"
|
| 333 |
}
|
| 334 |
|
| 335 |
logger.info(LogFormatter.section("VOTE STATISTICS"))
|
|
@@ -338,7 +316,7 @@ class VoteService(HuggingFaceService):
|
|
| 338 |
|
| 339 |
return {
|
| 340 |
"total_votes": len(votes),
|
| 341 |
-
"
|
| 342 |
"votes": votes
|
| 343 |
}
|
| 344 |
|
|
@@ -360,7 +338,7 @@ class VoteService(HuggingFaceService):
|
|
| 360 |
logger.warning(f"Using 'main' as fallback revision for {model_id} after {self._max_retries} failed attempts")
|
| 361 |
return "main"
|
| 362 |
|
| 363 |
-
async def add_vote(self, model_id: str, user_id: str, vote_type: str
|
| 364 |
"""Add a vote for a model"""
|
| 365 |
try:
|
| 366 |
self._log_repo_operation("add", f"{HF_ORGANIZATION}/votes", f"Adding {vote_type} vote for {model_id} by {user_id}")
|
|
@@ -368,48 +346,23 @@ class VoteService(HuggingFaceService):
|
|
| 368 |
stats = {
|
| 369 |
"Model": model_id,
|
| 370 |
"User": user_id,
|
| 371 |
-
"Type": vote_type
|
| 372 |
-
"Config": vote_data or {}
|
| 373 |
}
|
| 374 |
for line in LogFormatter.tree(stats, "Vote Details"):
|
| 375 |
logger.info(line)
|
| 376 |
|
| 377 |
-
|
| 378 |
-
|
| 379 |
-
revision = None
|
| 380 |
-
|
| 381 |
-
if vote_data:
|
| 382 |
-
precision = vote_data.get("precision")
|
| 383 |
-
revision = vote_data.get("revision")
|
| 384 |
-
|
| 385 |
-
# If any info is missing, try to get it from model info
|
| 386 |
-
if not all([precision, revision]):
|
| 387 |
-
try:
|
| 388 |
-
model_info = await asyncio.to_thread(self.hf_api.model_info, model_id)
|
| 389 |
-
model_card_data = model_info.cardData if hasattr(model_info, 'cardData') else {}
|
| 390 |
-
|
| 391 |
-
if not precision:
|
| 392 |
-
precision = model_card_data.get("precision", "unknown")
|
| 393 |
-
if not revision:
|
| 394 |
-
revision = model_info.sha
|
| 395 |
-
except Exception as e:
|
| 396 |
-
logger.warning(LogFormatter.warning(f"Failed to get model info: {str(e)}. Using default values."))
|
| 397 |
-
precision = precision or "unknown"
|
| 398 |
-
revision = revision or "main"
|
| 399 |
-
|
| 400 |
-
# Check if vote already exists with this configuration
|
| 401 |
-
check_tuple = (model_id, revision, user_id, precision)
|
| 402 |
|
| 403 |
if check_tuple in self.vote_check_set:
|
| 404 |
-
raise ValueError(
|
| 405 |
|
| 406 |
vote = {
|
| 407 |
"model": model_id,
|
| 408 |
"revision": revision,
|
| 409 |
"username": user_id,
|
| 410 |
"timestamp": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
|
| 411 |
-
"vote_type": vote_type
|
| 412 |
-
"precision": precision
|
| 413 |
}
|
| 414 |
|
| 415 |
# Update local storage
|
|
@@ -421,18 +374,15 @@ class VoteService(HuggingFaceService):
|
|
| 421 |
|
| 422 |
stats = {
|
| 423 |
"Status": "Success",
|
| 424 |
-
"Queue_Size": len(self.votes_to_upload)
|
| 425 |
-
"Model_Config": {
|
| 426 |
-
"Precision": precision,
|
| 427 |
-
"Revision": revision[:7] if revision else "unknown"
|
| 428 |
-
}
|
| 429 |
}
|
| 430 |
for line in LogFormatter.stats(stats):
|
| 431 |
logger.info(line)
|
| 432 |
|
| 433 |
-
#
|
| 434 |
-
|
| 435 |
-
|
|
|
|
| 436 |
|
| 437 |
return {"status": "success", "message": "Vote added successfully"}
...
 import logging
 import asyncio
 from pathlib import Path
+import os
 import aiohttp
 from huggingface_hub import HfApi
+import datasets

 from app.services.hf_service import HuggingFaceService
+from app.config import HF_TOKEN, API
 from app.config.hf_config import HF_ORGANIZATION
 from app.core.cache import cache_config
+from app.utils.logging import LogFormatter

 logger = logging.getLogger(__name__)
...
         super().__init__()
         self.votes_file = cache_config.votes_file
         self.votes_to_upload: List[Dict[str, Any]] = []
+        self.vote_check_set: Set[Tuple[str, str, str]] = set()
         self._votes_by_model: Dict[str, List[Dict[str, Any]]] = {}
         self._votes_by_user: Dict[str, List[Dict[str, Any]]] = {}
+        self._upload_lock = asyncio.Lock()
         self._last_sync = None
         self._sync_interval = 300  # 5 minutes
         self._total_votes = 0
         self._last_vote_timestamp = None
         self._max_retries = 3
         self._retry_delay = 1  # seconds
+        self._upload_batch_size = 10
         self.hf_api = HfApi(token=HF_TOKEN)
         self._init_done = True
...
         # Ensure votes directory exists
         self.votes_file.parent.mkdir(parents=True, exist_ok=True)

+        # Load existing votes if file exists
+        local_vote_count = 0
+        if self.votes_file.exists():
+            logger.info(LogFormatter.info(f"Loading votes from {self.votes_file}"))
+            local_vote_count = await self._count_local_votes()
+            logger.info(LogFormatter.info(f"Found {local_vote_count:,} local votes"))
+
+        # Check remote votes count
+        remote_vote_count = await self._count_remote_votes()
+        logger.info(LogFormatter.info(f"Found {remote_vote_count:,} remote votes"))
+
+        if remote_vote_count > local_vote_count:
+            logger.info(LogFormatter.info(f"Fetching {remote_vote_count - local_vote_count:,} new votes"))
+            await self._sync_with_hub()
+        elif remote_vote_count < local_vote_count:
+            logger.warning(LogFormatter.warning(f"Local votes ({local_vote_count:,}) > Remote votes ({remote_vote_count:,})"))
             await self._load_existing_votes()
         else:
+            logger.info(LogFormatter.success("Local and remote votes are in sync"))
+            if local_vote_count > 0:
+                await self._load_existing_votes()
+            else:
+                logger.info(LogFormatter.info("No votes found"))

         self._initialized = True
         self._last_sync = datetime.now(timezone.utc)
...
         logger.error(LogFormatter.error("Initialization failed", e))
         raise

+    async def _count_local_votes(self) -> int:
+        """Count votes in local file"""
+        if not self.votes_file.exists():
+            return 0
+
+        count = 0
+        try:
+            with open(self.votes_file, 'r') as f:
+                for _ in f:
+                    count += 1
+            return count
+        except Exception as e:
+            logger.error(f"Error counting local votes: {str(e)}")
+            return 0
+
+    async def _count_remote_votes(self) -> int:
+        """Count votes in remote file"""
         url = f"https://huggingface.co/datasets/{HF_ORGANIZATION}/votes/raw/main/votes_data.jsonl"
+        headers = {"Authorization": f"Bearer {HF_TOKEN}"} if HF_TOKEN else {}

         try:
             async with aiohttp.ClientSession() as session:
                 async with session.get(url, headers=headers) as response:
                     if response.status == 200:
+                        count = 0
                         async for line in response.content:
+                            if line.strip():  # Skip empty lines
+                                count += 1
+                        return count
                     else:
                         logger.error(f"Failed to get remote votes: HTTP {response.status}")
+                        return 0
         except Exception as e:
+            logger.error(f"Error counting remote votes: {str(e)}")
+            return 0

     async def _sync_with_hub(self):
+        """Sync votes with HuggingFace hub using datasets"""
         try:
             logger.info(LogFormatter.section("VOTE SYNC"))
+            self._log_repo_operation("sync", f"{HF_ORGANIZATION}/votes", "Syncing local votes with HF hub")
+            logger.info(LogFormatter.info("Syncing with HuggingFace hub..."))
+
+            # Load votes from HF dataset
+            dataset = datasets.load_dataset(
+                f"{HF_ORGANIZATION}/votes",
+                split="train",
+                cache_dir=cache_config.get_cache_path("datasets")
+            )

+            remote_votes = len(dataset)
+            logger.info(LogFormatter.info(f"Dataset loaded with {remote_votes:,} votes"))

+            # Convert to list of dictionaries
+            df = dataset.to_pandas()
+            if 'timestamp' in df.columns:
+                df['timestamp'] = df['timestamp'].dt.strftime('%Y-%m-%dT%H:%M:%SZ')
+            remote_votes = df.to_dict('records')
+
+            # If we have more remote votes than local
+            if len(remote_votes) > self._total_votes:
+                new_votes = len(remote_votes) - self._total_votes
+                logger.info(LogFormatter.info(f"Processing {new_votes:,} new votes..."))

+                # Save votes to local file
+                with open(self.votes_file, 'w') as f:
                     for vote in remote_votes:
+                        f.write(json.dumps(vote) + '\n')

+                # Reload votes in memory
+                await self._load_existing_votes()
+                logger.info(LogFormatter.success("Sync completed successfully"))
+            else:
+                logger.info(LogFormatter.success("Local votes are up to date"))

             self._last_sync = datetime.now(timezone.utc)
...
             logger.error(LogFormatter.error("Sync failed", e))
             raise

+    async def _check_for_new_votes(self):
+        """Check for new votes on the hub"""
+        try:
+            self._log_repo_operation("check", f"{HF_ORGANIZATION}/votes", "Checking for new votes")
+            # Load only dataset metadata
+            dataset_info = datasets.load_dataset(f"{HF_ORGANIZATION}/votes", split="train")
+            remote_vote_count = len(dataset_info)
+
+            if remote_vote_count > self._total_votes:
+                logger.info(f"Found {remote_vote_count - self._total_votes} new votes on hub")
+                await self._sync_with_hub()
+            else:
+                logger.info("No new votes found on hub")
+
+        except Exception as e:
+            logger.error(f"Error checking for new votes: {str(e)}")
+
     async def _load_existing_votes(self):
         """Load existing votes from file"""
         if not self.votes_file.exists():
...
     def _add_vote_to_memory(self, vote: Dict[str, Any]):
         """Add vote to memory structures"""
         try:
+            check_tuple = (vote["model"], vote["revision"], vote["username"])

             # Skip if we already have this vote
             if check_tuple in self.vote_check_set:
...
             self._votes_by_user[vote["username"]].append(vote)

         except KeyError as e:
+            logger.error(f"Malformed vote data, missing key: {str(e)}")
         except Exception as e:
+            logger.error(f"Error adding vote to memory: {str(e)}")

     async def get_user_votes(self, user_id: str) -> List[Dict[str, Any]]:
         """Get all votes from a specific user"""
         logger.info(LogFormatter.info(f"Fetching votes for user: {user_id}"))
         votes = self._votes_by_user.get(user_id, [])
         logger.info(LogFormatter.success(f"Found {len(votes):,} votes"))
         return votes
...
     async def get_model_votes(self, model_id: str) -> Dict[str, Any]:
         """Get all votes for a specific model"""
         logger.info(LogFormatter.info(f"Fetching votes for model: {model_id}"))
         votes = self._votes_by_model.get(model_id, [])

+        # Group votes by revision
+        votes_by_revision = {}
         for vote in votes:
+            revision = vote["revision"]
+            if revision not in votes_by_revision:
+                votes_by_revision[revision] = 0
+            votes_by_revision[revision] += 1

         stats = {
             "Total_Votes": len(votes),
+            **{f"Revision_{k}": v for k, v in votes_by_revision.items()}
         }

         logger.info(LogFormatter.section("VOTE STATISTICS"))
...
         return {
             "total_votes": len(votes),
+            "votes_by_revision": votes_by_revision,
             "votes": votes
         }
...
         logger.warning(f"Using 'main' as fallback revision for {model_id} after {self._max_retries} failed attempts")
         return "main"

+    async def add_vote(self, model_id: str, user_id: str, vote_type: str) -> Dict[str, Any]:
         """Add a vote for a model"""
         try:
             self._log_repo_operation("add", f"{HF_ORGANIZATION}/votes", f"Adding {vote_type} vote for {model_id} by {user_id}")
...
             stats = {
                 "Model": model_id,
                 "User": user_id,
+                "Type": vote_type
             }
             for line in LogFormatter.tree(stats, "Vote Details"):
                 logger.info(line)

+            revision = await self._get_model_revision(model_id)
+            check_tuple = (model_id, revision, user_id)

             if check_tuple in self.vote_check_set:
+                raise ValueError("Vote already recorded for this model")

             vote = {
                 "model": model_id,
                 "revision": revision,
                 "username": user_id,
                 "timestamp": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
+                "vote_type": vote_type
             }

             # Update local storage
...
             stats = {
                 "Status": "Success",
+                "Queue_Size": len(self.votes_to_upload)
             }
             for line in LogFormatter.stats(stats):
                 logger.info(line)

+            # Try to upload if batch size reached
+            if len(self.votes_to_upload) >= self._upload_batch_size:
+                logger.info(LogFormatter.info(f"Upload batch size reached ({self._upload_batch_size}), triggering sync"))
+                await self._sync_with_hub()

             return {"status": "success", "message": "Vote added successfully"}
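The core of `add_vote` above is a dedup-and-batch pattern that is easy to test in isolation. A minimal sketch, assuming only what the diff shows; `VoteBuffer` and its `flush` stub are hypothetical stand-ins for the service and its `_sync_with_hub` upload:

```python
from datetime import datetime, timezone
from typing import Any, Dict, List, Set, Tuple


class VoteBuffer:
    """Hypothetical stand-in for VoteService's dedup-and-batch logic."""

    def __init__(self, batch_size: int = 10):
        self.batch_size = batch_size
        self.vote_check_set: Set[Tuple[str, str, str]] = set()
        self.votes_to_upload: List[Dict[str, Any]] = []

    def add_vote(self, model: str, revision: str, username: str, vote_type: str) -> None:
        # One vote per (model, revision, user), exactly as in the service
        check_tuple = (model, revision, username)
        if check_tuple in self.vote_check_set:
            raise ValueError("Vote already recorded for this model")
        self.vote_check_set.add(check_tuple)
        self.votes_to_upload.append({
            "model": model,
            "revision": revision,
            "username": username,
            "timestamp": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
            "vote_type": vote_type,
        })
        # Flush once the queue reaches the batch size, mirroring _upload_batch_size
        if len(self.votes_to_upload) >= self.batch_size:
            self.flush()

    def flush(self) -> None:
        # The real service syncs the queued votes with the HF hub dataset here
        print(f"Uploading {len(self.votes_to_upload)} votes")
        self.votes_to_upload.clear()


buffer = VoteBuffer(batch_size=2)
buffer.add_vote("org/model-a", "main", "alice", "up")
buffer.add_vote("org/model-b", "main", "alice", "up")  # second vote triggers a flush
```

Keying the dedup set on the resolved revision rather than the bare model id means a returning user can vote again once the repository moves to a new commit.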
backend/app/utils/logging.py
CHANGED
@@ -1,3 +1,105 @@
+import logging
+import sys
+from typing import Dict, Any, List, Optional
+
+logger = logging.getLogger(__name__)
+
+class LogFormatter:
+    """Utility class for consistent log formatting across the application"""
+
+    @staticmethod
+    def section(title: str) -> str:
+        """Create a section header"""
+        return f"\n{'='*20} {title.upper()} {'='*20}"
+
+    @staticmethod
+    def subsection(title: str) -> str:
+        """Create a subsection header"""
+        return f"\n{'─'*20} {title} {'─'*20}"
+
+    @staticmethod
+    def tree(items: Dict[str, Any], title: str = None) -> List[str]:
+        """Create a tree view of dictionary data"""
+        lines = []
+        if title:
+            lines.append(f"📊 {title}:")
+
+        # Get the maximum length for alignment
+        max_key_length = max(len(str(k)) for k in items.keys())
+
+        # Format each item
+        for i, (key, value) in enumerate(items.items()):
+            prefix = "└──" if i == len(items) - 1 else "├──"
+            if isinstance(value, (int, float)):
+                value = f"{value:,}"  # Add thousand separators
+            lines.append(f"{prefix} {str(key):<{max_key_length}}: {value}")
+
+        return lines
+
+    @staticmethod
+    def stats(stats: Dict[str, int], title: str = None) -> List[str]:
+        """Format statistics with icons"""
+        lines = []
+        if title:
+            lines.append(f"📊 {title}:")
+
+        # Get the maximum length for alignment
+        max_key_length = max(len(str(k)) for k in stats.keys())
+
+        # Format each stat with an appropriate icon
+        icons = {
+            "total": "📌",
+            "success": "✅",
+            "error": "❌",
+            "pending": "⏳",
+            "processing": "⚙️",
+            "finished": "✨",
+            "evaluating": "🔄",
+            "downloads": "⬇️",
+            "files": "📁",
+            "cached": "💾",
+            "size": "📏",
+            "time": "⏱️",
+            "rate": "🚀"
+        }
+
+        # Format each item
+        for i, (key, value) in enumerate(stats.items()):
+            prefix = "└──" if i == len(stats) - 1 else "├──"
+            icon = icons.get(key.lower().split('_')[0], "•")
+            if isinstance(value, (int, float)):
+                value = f"{value:,}"  # Add thousand separators
+            lines.append(f"{prefix} {icon} {str(key):<{max_key_length}}: {value}")
+
+        return lines
+
+    @staticmethod
+    def progress_bar(current: int, total: int, width: int = 20) -> str:
+        """Create a progress bar"""
+        percentage = (current * 100) // total
+        filled = "█" * (percentage * width // 100)
+        empty = "░" * (width - len(filled))
+        return f"{filled}{empty} {percentage:3d}%"
+
+    @staticmethod
+    def error(message: str, error: Optional[Exception] = None) -> str:
+        """Format error message"""
+        error_msg = f"\n❌ Error: {message}"
+        if error:
+            error_msg += f"\n  └── Details: {str(error)}"
+        return error_msg
+
+    @staticmethod
+    def success(message: str) -> str:
+        """Format success message"""
+        return f"✅ {message}"
+
+    @staticmethod
+    def warning(message: str) -> str:
+        """Format warning message"""
+        return f"⚠️ {message}"
+
+    @staticmethod
+    def info(message: str) -> str:
+        """Format info message"""
+        return f"ℹ️ {message}"
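Since `LogFormatter` is pure string formatting, its output is easy to preview. A short usage sketch against the class as added above (the `basicConfig` call is illustrative):

```python
import logging

from app.utils.logging import LogFormatter

logging.basicConfig(level=logging.INFO, format="%(message)s")
logger = logging.getLogger(__name__)

logger.info(LogFormatter.section("vote sync"))  # ==================== VOTE SYNC ====================
for line in LogFormatter.tree({"Model": "org/model", "User": "alice"}, "Vote Details"):
    logger.info(line)  # ├── Model: org/model  /  └── User : alice
for line in LogFormatter.stats({"Total_Votes": 12345, "Success_Rate": 98}):
    logger.info(line)  # icons picked from the key prefix: 📌 for "total", ✅ for "success"
logger.info(LogFormatter.progress_bar(30, 120))  # █████░░░░░░░░░░░░░░░  25%
```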
backend/app/utils/model_validation.py
CHANGED
@@ -1,14 +1,15 @@
 import json
 import logging
 import asyncio
+import re
 from typing import Tuple, Optional, Dict, Any
-
+import aiohttp
 from huggingface_hub import HfApi, ModelCard, hf_hub_download
 from huggingface_hub import hf_api
 from transformers import AutoConfig, AutoTokenizer
-from app.config.base import HF_TOKEN
-from app.core.formatting import LogFormatter
-
+from app.config.base import HF_TOKEN, API
+from app.utils.logging import LogFormatter
+

 logger = logging.getLogger(__name__)

@@ -104,12 +105,13 @@ class ModelValidator:
         if adapter_meta and base_meta:
             adapter_size = sum(adapter_meta.parameter_count.values())
             base_size = sum(base_meta.parameter_count.values())
-            model_size = adapter_size + base_size
+            model_size = (adapter_size + base_size) / (2 * 1e9)  # Convert to billions, assuming float16
         else:
             # For regular models, just get the model size
             meta = await self.get_safetensors_metadata(model_info.id, revision=revision)
             if meta:
-                model_size = sum(meta.parameter_count.values())
+                total_params = sum(meta.parameter_count.values())
+                model_size = total_params / (2 * 1e9)  # Convert to billions, assuming float16

         if model_size is None:
             # If model size could not be determined, return an error
@@ -117,7 +119,6 @@ class ModelValidator:

         # Adjust size for GPTQ models
         size_factor = 8 if (precision == "GPTQ" or "gptq" in model_info.id.lower()) else 1
-        model_size = model_size / 1e9  # Convert to billions, assuming float16
         model_size = round(size_factor * model_size, 3)

         logger.info(LogFormatter.success(f"Model size: {model_size}B parameters"))

@@ -206,61 +207,4 @@ class ModelValidator:
         except Exception as e:
             if "You are trying to access a gated repo." in str(e):
                 return True, "The model is gated and requires special access permissions.", None
-            return False, f"The model was not found or is misconfigured on the Hub. Error: {e.args[0]}", None
-
-    async def check_official_provider_status(
-        self,
-        model_id: str,
-        existing_models: Dict[str, list]
-    ) -> Tuple[bool, Optional[str]]:
-        """
-        Check if model is from official provider and has finished submission.
-
-        Args:
-            model_id: The model identifier (org/model-name)
-            existing_models: Dictionary of models by status from get_models()
-
-        Returns:
-            Tuple[bool, Optional[str]]: (is_valid, error_message)
-        """
-        try:
-            logger.info(LogFormatter.info(f"Checking official provider status for {model_id}"))
-
-            # Get model organization
-            model_org = model_id.split('/')[0] if '/' in model_id else None
-
-            if not model_org:
-                return True, None
-
-            # Load official providers dataset
-            dataset = load_dataset(OFFICIAL_PROVIDERS_REPO)
-            official_providers = dataset["train"][0]["CURATED_SET"]
-
-            # Check if model org is in official providers
-            is_official = model_org in official_providers
-
-            if is_official:
-                logger.info(LogFormatter.info(f"Model organization '{model_org}' is an official provider"))
-
-                # Check for finished submissions
-                if "finished" in existing_models:
-                    for model in existing_models["finished"]:
-                        if model["name"] == model_id:
-                            error_msg = (
-                                f"Model {model_id} is an official provider model "
-                                f"with a completed evaluation. "
-                                f"To re-evaluate, please open a discussion."
-                            )
-                            logger.error(LogFormatter.error("Validation failed", error_msg))
-                            return False, error_msg
-
-                logger.info(LogFormatter.success("No finished submission found for this official provider model"))
-            else:
-                logger.info(LogFormatter.info(f"Model organization '{model_org}' is not an official provider"))
-
-            return True, None
-
-        except Exception as e:
-            error_msg = f"Failed to check official provider status: {str(e)}"
-            logger.error(LogFormatter.error(error_msg))
-            return False, error_msg
+            return False, f"The model was not found or is misconfigured on the Hub. Error: {e.args[0]}", None
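The size arithmetic above is compact enough to check by hand. A standalone sketch that mirrors it (the function name and example dict are illustrative; the dict has the shape of `parameter_count` on the safetensors metadata returned by `huggingface_hub`, mapping dtype to parameter count):

```python
from typing import Dict


def estimated_size_billions(parameter_count: Dict[str, int], model_id: str, precision: str) -> float:
    """Mirror of the new get_model_size arithmetic (illustrative only)."""
    total_params = sum(parameter_count.values())
    size = total_params / (2 * 1e9)  # convert to billions, assuming float16 (the PR's divisor)
    # GPTQ checkpoints get scaled by 8, as in the diff above
    size_factor = 8 if (precision == "GPTQ" or "gptq" in model_id.lower()) else 1
    return round(size_factor * size, 3)


# A checkpoint reporting 7e9 float16 parameters:
print(estimated_size_billions({"F16": 7_000_000_000}, "org/example-7b", "float16"))  # 3.5
```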
backend/pyproject.toml
CHANGED
@@ -10,12 +10,12 @@ fastapi = "^0.115.6"
 uvicorn = {extras = ["standard"], version = "^0.34.0"}
 numpy = "^2.2.0"
 pandas = "^2.2.3"
-datasets = "^3.
+datasets = "^3.2.0"
 pyarrow = "^18.1.0"
 python-multipart = "^0.0.20"
-huggingface-hub = "0.
-transformers = "4.
-safetensors = "^0.5
+huggingface-hub = "^0.27.0"
+transformers = "^4.47.0"
+safetensors = "^0.4.5"
 aiofiles = "^24.1.0"
 fastapi-cache2 = "^0.2.1"
 python-dotenv = "^1.0.1"
backend/utils/analyze_prod_datasets.py
CHANGED
@@ -132,7 +132,7 @@ def main():
             "description": "User votes"
         },
         {
-            "id": f"{HF_ORGANIZATION}/
+            "id": f"{HF_ORGANIZATION}/maintainers-highlight",
             "description": "Highlighted models"
         }
     ]
backend/utils/fix_wrong_model_size.py
DELETED
@@ -1,110 +0,0 @@
-import os
-import json
-import pytz
-import logging
-import asyncio
-from datetime import datetime
-from pathlib import Path
-import huggingface_hub
-from huggingface_hub.errors import RepositoryNotFoundError, RevisionNotFoundError
-from dotenv import load_dotenv
-from git import Repo
-from datetime import datetime
-from tqdm.auto import tqdm
-from tqdm.contrib.logging import logging_redirect_tqdm
-
-from app.config.hf_config import HF_TOKEN, API
-
-from app.utils.model_validation import ModelValidator
-
-huggingface_hub.logging.set_verbosity_error()
-huggingface_hub.utils.disable_progress_bars()
-
-logging.basicConfig(
-    level=logging.ERROR,
-    format='%(message)s'
-)
-logger = logging.getLogger(__name__)
-load_dotenv()
-
-validator = ModelValidator()
-
-def get_changed_files(repo_path, start_date, end_date):
-    repo = Repo(repo_path)
-    start = datetime.strptime(start_date, '%Y-%m-%d')
-    end = datetime.strptime(end_date, '%Y-%m-%d')
-
-    changed_files = set()
-    pbar = tqdm(repo.iter_commits(), desc=f"Reading commits from {end_date} to {start_date}")
-    for commit in pbar:
-        commit_date = datetime.fromtimestamp(commit.committed_date)
-        pbar.set_postfix_str(f"Commit date: {commit_date}")
-        if start <= commit_date <= end:
-            changed_files.update(item.a_path for item in commit.diff(commit.parents[0]))
-
-        if commit_date < start:
-            break
-
-    return changed_files
-
-
-def read_json(repo_path, file):
-    with open(f"{repo_path}/{file}") as file:
-        return json.load(file)
-
-
-def write_json(repo_path, file, content):
-    with open(f"{repo_path}/{file}", "w") as file:
-        json.dump(content, file, indent=2)
-
-
-def main():
-    requests_path = "/requests"
-    start_date = "2024-12-09"
-    end_date = "2025-01-07"
-
-    changed_files = get_changed_files(requests_path, start_date, end_date)
-
-    for file in tqdm(changed_files):
-        try:
-            request_data = read_json(requests_path, file)
-        except FileNotFoundError as e:
-            tqdm.write(f"File {file} not found")
-            continue
-
-        try:
-            model_info = API.model_info(
-                repo_id=request_data["model"],
-                revision=request_data["revision"],
-                token=HF_TOKEN
-            )
-        except (RepositoryNotFoundError, RevisionNotFoundError) as e:
-            tqdm.write(f"Model info for {request_data["model"]} not found")
-            continue
-
-        with logging_redirect_tqdm():
-            new_model_size, error = asyncio.run(validator.get_model_size(
-                model_info=model_info,
-                precision=request_data["precision"],
-                base_model=request_data["base_model"],
-                revision=request_data["revision"]
-            ))
-
-        if error:
-            tqdm.write(f"Error getting model size info for {request_data["model"]}, {error}")
-            continue
-
-        old_model_size = request_data["params"]
-        if old_model_size != new_model_size:
-            if new_model_size > 100:
-                tqdm.write(f"Model: {request_data["model"]}, size is more 100B: {new_model_size}")
-
-            tqdm.write(f"Model: {request_data["model"]}, old size: {request_data["params"]} new size: {new_model_size}")
-            tqdm.write(f"Updating request file {file}")
-
-            request_data["params"] = new_model_size
-            write_json(requests_path, file, content=request_data)
-
-
-if __name__ == "__main__":
-    main()
backend/utils/sync_datasets_locally.py
CHANGED
@@ -30,7 +30,7 @@ DATASET_NAMES = [
     "results",
     "requests",
     "contents",
-    "
+    "maintainers-highlight",
 ]

 # Build list of datasets with their source and destination paths
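For reference, mirroring each of these datasets locally can be done with `huggingface_hub.snapshot_download`; this sketch shows the general approach rather than the script's actual implementation (which this hunk does not show), and the organization name is an assumption:

```python
from pathlib import Path

from huggingface_hub import snapshot_download

HF_ORGANIZATION = "open-llm-leaderboard"  # assumption for illustration
DATASET_NAMES = ["results", "requests", "contents", "maintainers-highlight"]


def sync_datasets(dest_root: str = "./local-datasets") -> None:
    """Download each leaderboard dataset into a local mirror (illustrative sketch)."""
    for name in DATASET_NAMES:
        local_dir = Path(dest_root) / name
        snapshot_download(
            repo_id=f"{HF_ORGANIZATION}/{name}",
            repo_type="dataset",
            local_dir=str(local_dir),
        )
        print(f"Synced {name} -> {local_dir}")


if __name__ == "__main__":
    sync_datasets()
```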
backend/uv.lock
ADDED
The diff for this file is too large to render. See raw diff.
frontend/src/App.js
CHANGED
@@ -10,7 +10,9 @@ import { ThemeProvider } from "@mui/material/styles";
 import { Box, CssBaseline } from "@mui/material";
 import Navigation from "./components/Navigation/Navigation";
 import LeaderboardPage from "./pages/LeaderboardPage/LeaderboardPage";
+import AddModelPage from "./pages/AddModelPage/AddModelPage";
 import QuotePage from "./pages/QuotePage/QuotePage";
+import VoteModelPage from "./pages/VoteModelPage/VoteModelPage";
 import Footer from "./components/Footer/Footer";
 import getTheme from "./config/theme";
 import { useThemeMode } from "./hooks/useThemeMode";
@@ -105,7 +107,9 @@ function App() {
         >
           <Routes>
             <Route path="/" element={<LeaderboardPage />} />
+            <Route path="/add" element={<AddModelPage />} />
             <Route path="/quote" element={<QuotePage />} />
+            <Route path="/vote" element={<VoteModelPage />} />
           </Routes>
         </Box>
         <Footer />
frontend/src/components/Navigation/Navigation.js
CHANGED
@@ -249,6 +249,24 @@ const Navigation = ({ onToggleTheme, mode }) => {
         >
           Leaderboard
         </MenuItem>
+        <MenuItem
+          onClick={(e) => {
+            handleNavigation("/add")(e);
+            handleMenuClose();
+          }}
+          selected={location.pathname === "/add"}
+        >
+          Submit model
+        </MenuItem>
+        <MenuItem
+          onClick={(e) => {
+            handleNavigation("/vote")(e);
+            handleMenuClose();
+          }}
+          selected={location.pathname === "/vote"}
+        >
+          Vote for next model
+        </MenuItem>
         <MenuItem
           onClick={(e) => {
             handleNavigation("/quote")(e);
@@ -366,6 +384,18 @@ const Navigation = ({ onToggleTheme, mode }) => {
         >
           Leaderboard
         </Box>
+        <Box
+          onClick={handleNavigation("/add")}
+          sx={linkStyle(location.pathname === "/add")}
+        >
+          Submit model
+        </Box>
+        <Box
+          onClick={handleNavigation("/vote")}
+          sx={linkStyle(location.pathname === "/vote")}
+        >
+          Vote for next model
+        </Box>
         <Box
           onClick={handleNavigation("/quote")}
           sx={linkStyle(location.pathname === "/quote")}
frontend/src/components/shared/AuthContainer.js
CHANGED
@@ -7,8 +7,6 @@ import {
   Stack,
   Paper,
   CircularProgress,
-  useTheme,
-  useMediaQuery,
 } from "@mui/material";
 import HFLogo from "../Logo/HFLogo";
 import { useAuth } from "../../hooks/useAuth";
@@ -18,8 +16,6 @@ import { useNavigate } from "react-router-dom";
 function AuthContainer({ actionText = "DO_ACTION" }) {
   const { isAuthenticated, user, login, logout, loading } = useAuth();
   const navigate = useNavigate();
-  const theme = useTheme();
-  const isMobile = useMediaQuery(theme.breakpoints.down("sm"));

   const handleLogout = () => {
     if (isAuthenticated && logout) {
@@ -67,14 +63,7 @@ function AuthContainer({ actionText = "DO_ACTION" }) {
         <Typography variant="h6" align="center">
           Login to {actionText}
         </Typography>
-        <Typography
-          variant="body2"
-          color="text.secondary"
-          align="center"
-          sx={{
-            px: isMobile ? 2 : 0,
-          }}
-        >
+        <Typography variant="body2" color="text.secondary" align="center">
           You need to be logged in with your Hugging Face account to{" "}
           {actionText.toLowerCase()}
         </Typography>
@@ -98,7 +87,6 @@ function AuthContainer({ actionText = "DO_ACTION" }) {
             fontWeight: 600,
             py: 1,
             px: 2,
-            width: isMobile ? "100%" : "auto",
           }}
         >
           Sign in with Hugging Face
@@ -113,22 +101,13 @@ function AuthContainer({ actionText = "DO_ACTION" }) {
       sx={{ p: 2, border: "1px solid", borderColor: "grey.300", mb: 4 }}
     >
       <Stack
-        direction={isMobile ? "column" : "row"}
+        direction="row"
         spacing={2}
-        alignItems={isMobile ? "stretch" : "center"}
+        alignItems="center"
         justifyContent="space-between"
       >
-        <Stack
-          direction={isMobile ? "column" : "row"}
-          spacing={1}
-          alignItems={isMobile ? "stretch" : "center"}
-          sx={{ width: "100%" }}
-        >
-          <Typography
-            variant="body1"
-            align={isMobile ? "center" : "left"}
-            sx={{ mb: isMobile ? 1 : 0 }}
-          >
+        <Stack direction="row" spacing={1} alignItems="center">
+          <Typography variant="body1">
             Connected as <strong>{user?.username}</strong>
           </Typography>
           <Chip
@@ -136,13 +115,6 @@ function AuthContainer({ actionText = "DO_ACTION" }) {
             color="success"
             size="small"
             variant="outlined"
-            sx={{
-              width: isMobile ? "100%" : "auto",
-              height: isMobile ? 32 : 24,
-              "& .MuiChip-label": {
-                px: isMobile ? 2 : 1,
-              },
-            }}
           />
         </Stack>
         <Button
@@ -155,7 +127,6 @@ function AuthContainer({ actionText = "DO_ACTION" }) {
             height: 36,
             textTransform: "none",
             fontSize: "0.9375rem",
-            width: isMobile ? "100%" : "auto",
           }}
         >
           Logout
frontend/src/components/shared/PageHeader.js
CHANGED
@@ -14,7 +14,7 @@ const PageHeader = ({ title, subtitle }) => {
         gap: 2,
       }}
     >
-      <Typography fontWeight="bold" variant="
+      <Typography fontWeight="bold" variant="h3" component="h1">
         {title}
       </Typography>
       {subtitle && (
frontend/src/pages/AddModelPage/AddModelPage.js
CHANGED
@@ -5,7 +5,6 @@ import PageHeader from "../../components/shared/PageHeader";
 import EvaluationQueues from "./components/EvaluationQueues/EvaluationQueues";
 import ModelSubmissionForm from "./components/ModelSubmissionForm/ModelSubmissionForm";
 import SubmissionGuide from "./components/SubmissionGuide/SubmissionGuide";
-import SubmissionLimitChecker from "./components/SubmissionLimitChecker/SubmissionLimitChecker";

 function AddModelPage() {
   const { isAuthenticated, loading, user } = useAuth();
@@ -26,7 +25,7 @@ function AddModelPage() {
   }

   return (
-    <Box sx={{ width: "100%", maxWidth: 1200, margin: "0 auto",
+    <Box sx={{ width: "100%", maxWidth: 1200, margin: "0 auto", padding: 4 }}>
       <PageHeader
         title="Submit a Model for Evaluation"
         subtitle={
@@ -39,9 +38,7 @@ function AddModelPage() {

       <SubmissionGuide />

-      <SubmissionLimitChecker user={user}>
-        <ModelSubmissionForm user={user} isAuthenticated={isAuthenticated} />
-      </SubmissionLimitChecker>
+      <ModelSubmissionForm user={user} isAuthenticated={isAuthenticated} />

       <EvaluationQueues defaultExpanded={false} />
     </Box>
frontend/src/pages/AddModelPage/components/EvaluationQueues/EvaluationQueues.js
CHANGED
@@ -17,8 +17,6 @@ import {
   AccordionDetails,
   Stack,
   Tooltip,
-  useTheme,
-  useMediaQuery,
 } from "@mui/material";
 import AccessTimeIcon from "@mui/icons-material/AccessTime";
 import CheckCircleIcon from "@mui/icons-material/CheckCircle";
@@ -194,13 +192,16 @@ const ModelTable = ({ models, emptyMessage, status }) => {
           style={{
             height: `${rowVirtualizer.getTotalSize()}px`,
             padding: 0,
-            position: "relative",
-            width: "100%",
-            height: `${rowVirtualizer.getTotalSize()}px`,
           }}
           colSpan={columns.length}
         >
-
+          <div
+            style={{
+              position: "relative",
+              width: "100%",
+              height: `${rowVirtualizer.getTotalSize()}px`,
+            }}
+          >
           {rowVirtualizer.getVirtualItems().map((virtualRow) => {
             const model = models[virtualRow.index];
             const waitTime = formatWaitTime(model.wait_time);
@@ -221,6 +222,7 @@ const ModelTable = ({ models, emptyMessage, status }) => {
               hover
             >
               <TableCell
+                component="div"
                 sx={{
                   flex: `0 0 ${columns[0].width}`,
                   padding: "12px 16px",
@@ -256,6 +258,7 @@ const ModelTable = ({ models, emptyMessage, status }) => {
                 </Link>
               </TableCell>
               <TableCell
+                component="div"
                 sx={{
                   flex: `0 0 ${columns[1].width}`,
                   padding: "12px 16px",
@@ -271,6 +274,7 @@ const ModelTable = ({ models, emptyMessage, status }) => {
                 {model.submitter}
               </TableCell>
               <TableCell
+                component="div"
                 align={columns[2].align}
                 sx={{
                   flex: `0 0 ${columns[2].width}`,
@@ -299,6 +303,7 @@ const ModelTable = ({ models, emptyMessage, status }) => {
                 </Tooltip>
               </TableCell>
               <TableCell
+                component="div"
                 align={columns[3].align}
                 sx={{
                   flex: `0 0 ${columns[3].width}`,
@@ -315,6 +320,7 @@ const ModelTable = ({ models, emptyMessage, status }) => {
                 </Typography>
               </TableCell>
               <TableCell
+                component="div"
                 align={columns[4].align}
                 sx={{
                   flex: `0 0 ${columns[4].width}`,
@@ -330,6 +336,7 @@ const ModelTable = ({ models, emptyMessage, status }) => {
                 {model.revision.substring(0, 7)}
               </TableCell>
               <TableCell
+                component="div"
                 align={columns[5].align}
                 sx={{
                   flex: `0 0 ${columns[5].width}`,
@@ -344,7 +351,7 @@ const ModelTable = ({ models, emptyMessage, status }) => {
               </TableRow>
             );
           })}
-
+          </div>
         </TableCell>
       </TableRow>
     </TableBody>
@@ -361,133 +368,90 @@ const QueueAccordion = ({
   expanded,
   onChange,
   loading,
-}) =>
-          }}
-        >
-          {title}
-        </Typography>
-        <Stack
-          direction={{ xs: "column", sm: "row" }}
-          spacing={1}
-          alignItems={{ xs: "stretch", sm: "center" }}
-          sx={{
-            ml: { xs: 0, sm: "auto" },
-            width: { xs: "100%", sm: "auto" },
-          }}
-        >
-          <Chip
-            label={models.length}
-            size={isMobile ? "small" : "medium"}
-            color={
-              status === "finished"
-                ?
-                : status === "evaluating"
-                ?
-                :
-              bgcolor:
-                status === "finished"
-                  ? theme.palette.success[
-                  : status === "evaluating"
-                  ? theme.palette.warning[100]
-                  : theme.palette.info[100],
-              borderColor:
-                status === "finished"
-                  ? theme.palette.success[400]
-                  : status === "evaluating"
-                  ? theme.palette.warning[
-                  : theme.palette.info[
-                  ? theme.palette.success[700]
-                  : status === "evaluating"
-                  ? theme.palette.warning[700]
-                  : theme.palette.info[700],
-              "& .MuiChip-label": {
-                px: { xs: 1, sm: 1.2 },
-                width: "100%",
-              },
-              "&:hover": {
-                bgcolor:
-                  status === "finished"
-                    ? theme.palette.success[200]
-                    : status === "evaluating"
-                    ? theme.palette.warning[200]
-                    : theme.palette.info[200],
-              },
-            })}
-          />
-          {loading && (
-            <CircularProgress
-              size={isMobile ? 14 : 16}
-              color="inherit"
-              sx={{ opacity: 0.5 }}
-            />
-          )}
-        </Stack>
-      </Stack>
-    </AccordionSummary>
-    <AccordionDetails sx={{ p: { xs: 1, sm: 2 } }}>
-      <Box
-        sx={{
-          border: "1px solid",
-          borderColor: "grey.200",
-          borderRadius: 1,
-          overflow: "hidden",
-        }}
-      >
-        <ModelTable
-          models={models}
-          emptyMessage={emptyMessage}
-          status={status}
-        />
+}) => (
+  <Accordion
+    expanded={expanded}
+    onChange={onChange}
+    disabled={loading}
+    sx={{
+      "&:before": { display: "none" },
+      boxShadow: "none",
+      border: "none",
+    }}
+  >
+    <AccordionSummary expandIcon={<ExpandMoreIcon />}>
+      <Stack direction="row" spacing={2} alignItems="center">
+        <Typography>{title}</Typography>
+        <Stack direction="row" spacing={1} alignItems="center">
+          <Chip
+            label={models.length}
+            size="small"
+            color={
+              status === "finished"
+                ? "success"
+                : status === "evaluating"
+                ? "warning"
+                : "info"
+            }
+            variant="outlined"
+            sx={(theme) => ({
+              borderWidth: 2,
+              fontWeight: 600,
+              bgcolor:
+                status === "finished"
+                  ? theme.palette.success[100]
+                  : status === "evaluating"
+                  ? theme.palette.warning[100]
+                  : theme.palette.info[100],
+              borderColor:
+                status === "finished"
+                  ? theme.palette.success[400]
+                  : status === "evaluating"
+                  ? theme.palette.warning[400]
+                  : theme.palette.info[400],
+              color:
+                status === "finished"
+                  ? theme.palette.success[700]
+                  : status === "evaluating"
+                  ? theme.palette.warning[700]
+                  : theme.palette.info[700],
+              "& .MuiChip-label": {
+                px: 1.2,
+              },
+              "&:hover": {
+                bgcolor:
+                  status === "finished"
+                    ? theme.palette.success[200]
+                    : status === "evaluating"
+                    ? theme.palette.warning[200]
+                    : theme.palette.info[200],
+              },
+            })}
+          />
+          {loading && (
+            <CircularProgress size={16} color="inherit" sx={{ opacity: 0.5 }} />
+          )}
+        </Stack>
+      </Stack>
+    </AccordionSummary>
+    <AccordionDetails sx={{ p: 2 }}>
+      <Box
+        sx={{
+          border: "1px solid",
+          borderColor: "grey.200",
+          borderRadius: 1,
+          overflow: "hidden",
+        }}
+      >
+        <ModelTable
+          models={models}
+          emptyMessage={emptyMessage}
+          status={status}
+        />
+      </Box>
+    </AccordionDetails>
+  </Accordion>
+);

 const EvaluationQueues = ({ defaultExpanded = true }) => {
   const [expanded, setExpanded] = useState(defaultExpanded);
@@ -499,8 +463,6 @@ const EvaluationQueues = ({ defaultExpanded = true }) => {
   });
   const [loading, setLoading] = useState(true);
   const [error, setError] = useState(null);
-  const theme = useTheme();
-  const isMobile = useMediaQuery(theme.breakpoints.down("sm"));

   useEffect(() => {
     const fetchModels = async () => {
@@ -581,12 +543,11 @@ const EvaluationQueues = ({ defaultExpanded = true }) => {
           opacity: 0.9,
         },
         "& .MuiAccordionSummary-root": {
-          minHeight:
+          minHeight: 64,
           bgcolor: "background.paper",
           borderRadius: "8px",
-          alignItems: { xs: "flex-start", sm: "center" },
           "&.Mui-expanded": {
-            minHeight:
+            minHeight: 64,
             borderRadius: "8px 8px 0 0",
           },
         },
@@ -601,70 +562,52 @@ const EvaluationQueues = ({ defaultExpanded = true }) => {
         <AccordionSummary
           expandIcon={<ExpandMoreIcon />}
           sx={{
-            px:
+            px: 3,
             "& .MuiAccordionSummary-expandIconWrapper": {
               color: "text.secondary",
               transform: "rotate(0deg)",
               transition: "transform 150ms",
-              marginTop: { xs: "4px", sm: 0 },
               "&.Mui-expanded": {
                 transform: "rotate(180deg)",
               },
             },
           }}
         >
-          <Stack
-            direction={{ xs: "column", sm: "row" }}
-            spacing={{ xs: 1, sm: 2 }}
-            alignItems={{ xs: "flex-start", sm: "center" }}
-            sx={{ width: "100%" }}
-          >
+          <Stack direction="row" spacing={2} alignItems="center">
            <Typography
              variant="h6"
              sx={{
                fontWeight: 600,
                color: "text.primary",
                letterSpacing: "-0.01em",
-                fontSize: { xs: "1.1rem", sm: "1.25rem" },
              }}
            >
              Evaluation Status
            </Typography>
            {!loading && (
              <Stack
-                direction=
+                direction="row"
                 spacing={1}
                 sx={{
                   transition: "opacity 0.2s",
                   ".Mui-expanded &": {
                     opacity: 0,
-                    height: 0,
-                    m: 0,
-                    overflow: "hidden",
                   },
-                  width: { xs: "100%", sm: "auto" },
-                  alignItems: { xs: "stretch", sm: "center" },
                 }}
               >
                 <Chip
                   label={`${models.pending.length} In Queue`}
-                  size=
+                  size="small"
                   color="info"
                   variant="outlined"
                   sx={{
                     borderWidth: 2,
                     fontWeight: 600,
-                    fontSize: { xs: "0.75rem", sm: "0.875rem" },
-                    height: { xs: "24px", sm: "32px" },
                     bgcolor: "info.100",
                     borderColor: "info.400",
                     color: "info.700",
-                    width: { xs: "100%", sm: "auto" },
                     "& .MuiChip-label": {
-                      px:
-                      width: "100%",
-                      display: "flex",
-                      justifyContent: "center",
+                      px: 1.2,
                     },
                     "&:hover": {
                       bgcolor: "info.200",
@@ -673,23 +616,17 @@ const EvaluationQueues = ({ defaultExpanded = true }) => {
                 />
                 <Chip
                   label={`${models.evaluating.length} Evaluating`}
-                  size=
+                  size="small"
                   color="warning"
                   variant="outlined"
                   sx={{
                     borderWidth: 2,
                     fontWeight: 600,
-                    fontSize: { xs: "0.75rem", sm: "0.875rem" },
-                    height: { xs: "24px", sm: "32px" },
                     bgcolor: "warning.100",
                     borderColor: "warning.400",
                     color: "warning.700",
-                    width: { xs: "100%", sm: "auto" },
                     "& .MuiChip-label": {
-                      px:
-                      width: "100%",
-                      display: "flex",
-                      justifyContent: "center",
+                      px: 1.2,
                     },
                     "&:hover": {
                       bgcolor: "warning.200",
@@ -698,23 +635,17 @@ const EvaluationQueues = ({ defaultExpanded = true }) => {
                 />
                 <Chip
                   label={`${models.finished.length} Evaluated`}
-                  size=
+                  size="small"
                   color="success"
                   variant="outlined"
                   sx={{
                     borderWidth: 2,
                     fontWeight: 600,
-                    fontSize: { xs: "0.75rem", sm: "0.875rem" },
-                    height: { xs: "24px", sm: "32px" },
                     bgcolor: "success.100",
                     borderColor: "success.400",
                     color: "success.700",
-                    width: { xs: "100%", sm: "auto" },
                     "& .MuiChip-label": {
-                      px:
-                      width: "100%",
-                      display: "flex",
-                      justifyContent: "center",
+                      px: 1.2,
                     },
                     "&:hover": {
                       bgcolor: "success.200",
@@ -725,7 +656,7 @@ const EvaluationQueues = ({ defaultExpanded = true }) => {
         )}
         {loading && (
           <CircularProgress
-            size={
+            size={20}
             sx={{
               color: "primary.main",
             }}
frontend/src/pages/AddModelPage/components/ModelSubmissionForm/ModelSubmissionForm.js
CHANGED
@@ -18,6 +18,7 @@ import {
 } from "@mui/material";
 import RocketLaunchIcon from "@mui/icons-material/RocketLaunch";
 import CheckCircleOutlineIcon from "@mui/icons-material/CheckCircleOutline";
+import ThumbUpIcon from "@mui/icons-material/ThumbUp";
 import { alpha } from "@mui/material/styles";
 import InfoIconWithTooltip from "../../../../components/shared/InfoIconWithTooltip";
 import { MODEL_TYPES } from "../../../../pages/LeaderboardPage/components/Leaderboard/constants/modelTypes";
@@ -112,10 +113,9 @@ const HELP_TEXTS = {
         Chat Template Support
       </Typography>
       <Typography variant="body2" sx={{ opacity: 0.9, lineHeight: 1.4 }}>
-        Activates automatically for chat models. It uses the standardized
-        fine-tuning.
+        Activates automatically for chat models. It uses the standardized Hugging
+        Face chat template for consistent prompt formatting during evaluation.
+        Required for models using RLHF, DPO, or instruction fine-tuning.
       </Typography>
     </Box>
   ),
@@ -411,7 +411,7 @@ function ModelSubmissionForm({ user, isAuthenticated }) {
                 placeholder="organization/model-name"
                 value={formData.modelName}
                 onChange={handleChange}
-                helperText="Example: meta-llama/Llama-
+                helperText="Example: meta-llama/Llama-2-7b-hf"
                 InputProps={{
                   endAdornment: (
                     <InfoIconWithTooltip tooltip={HELP_TEXTS.modelName} />
frontend/src/pages/AddModelPage/components/SubmissionLimitChecker/SubmissionLimitChecker.js
DELETED

@@ -1,85 +0,0 @@
-import React, { useState, useEffect } from "react";
-import { Alert, Box, CircularProgress } from "@mui/material";
-
-const MAX_SUBMISSIONS_PER_WEEK = 10;
-
-function SubmissionLimitChecker({ user, children }) {
-  const [loading, setLoading] = useState(true);
-  const [reachedLimit, setReachedLimit] = useState(false);
-  const [error, setError] = useState(false);
-
-  useEffect(() => {
-    const checkSubmissionLimit = async () => {
-      if (!user?.username) {
-        setLoading(false);
-        return;
-      }
-
-      try {
-        const response = await fetch(
-          `/api/models/organization/${user.username}/submissions?days=7`
-        );
-        if (!response.ok) {
-          throw new Error("Failed to fetch submission data");
-        }
-
-        const submissions = await response.json();
-        console.log(`Recent submissions for ${user.username}:`, submissions);
-        setReachedLimit(submissions.length >= MAX_SUBMISSIONS_PER_WEEK);
-        setError(false);
-      } catch (error) {
-        console.error("Error checking submission limit:", error);
-        setError(true);
-      } finally {
-        setLoading(false);
-      }
-    };
-
-    checkSubmissionLimit();
-  }, [user?.username]);
-
-  if (loading) {
-    return (
-      <Box sx={{ display: "flex", justifyContent: "center", py: 4 }}>
-        <CircularProgress />
-      </Box>
-    );
-  }
-
-  if (error) {
-    return (
-      <Alert
-        severity="error"
-        sx={{
-          mb: 3,
-          "& .MuiAlert-message": {
-            fontSize: "1rem",
-          },
-        }}
-      >
-        Unable to verify submission limits. Please try again in a few minutes.
-      </Alert>
-    );
-  }
-
-  if (reachedLimit) {
-    return (
-      <Alert
-        severity="warning"
-        sx={{
-          mb: 3,
-          "& .MuiAlert-message": {
-            fontSize: "1rem",
-          },
-        }}
-      >
-        For fairness reasons, you cannot submit more than{" "}
-        {MAX_SUBMISSIONS_PER_WEEK} models per week. Please try again later.
-      </Alert>
-    );
-  }
-
-  return children;
-}
-
-export default SubmissionLimitChecker;
frontend/src/pages/LeaderboardPage/LeaderboardPage.js
CHANGED

@@ -30,14 +30,10 @@ function LeaderboardPage() {
       <Box
         sx={{ display: "flex", justifyContent: "center", pt: 6, mb: -4, pb: 0 }}
       >
-        <Logo height="
+        <Logo height="80px" />
       </Box>
       <PageHeader
-        title={
-          <>
-            Open LLM Leaderboard {" "}<span style={{ backgroundColor: "rgb(0, 140, 255)" }}>Archived</span>
-          </>
-        }
+        title="Open LLM Leaderboard"
         subtitle={
           <>
             Comparing Large Language Models in an{" "}
frontend/src/pages/LeaderboardPage/components/Leaderboard/components/Filters/FilteredModelCount.js
CHANGED

@@ -19,7 +19,7 @@ const useModelCount = ({ totalCount, filteredCount, data, table, loading }) => {
     };
   }
   const displayCount = isOfficialProviderActive
-    ? officialOnlyCounts.
+    ? officialOnlyCounts.maintainersHighlight
    : totalCount;

  // Calculate total number of pinned models

@@ -46,8 +46,8 @@ const useModelCount = ({ totalCount, filteredCount, data, table, loading }) => {
      // Filter by official providers
      if (filterConfig.isOfficialProviderActive) {
        if (
-          !model.features?.
-          !model.metadata?.
+          !model.features?.is_highlighted_by_maintainer &&
+          !model.metadata?.is_highlighted_by_maintainer
        ) {
          return false;
        }

@@ -100,7 +100,7 @@ const useModelCount = ({ totalCount, filteredCount, data, table, loading }) => {
          typeof filter === "object" ? filter.value : filter;

        // Maintainer's Highlight keeps positive logic
-        if (filterValue === "
+        if (filterValue === "is_highlighted_by_maintainer") {
          return model.features[filterValue];
        }

@@ -134,7 +134,7 @@ const useModelCount = ({ totalCount, filteredCount, data, table, loading }) => {
    data,
    state.filters,
    isOfficialProviderActive,
-    officialOnlyCounts.
+    officialOnlyCounts.maintainersHighlight,
  ]);
 };
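Several of the renames above hinge on the same predicate: a model counts as an official-provider highlight when the flag is set on either its features or its metadata object. A minimal standalone sketch of that check, assuming the { features, metadata } model shape used throughout these diffs (the helper names are illustrative, not part of the PR):

// Sketch only: mirrors the is_highlighted_by_maintainer checks in the diffs above.
// isOfficialProvider and countHighlighted are hypothetical helper names.
const isOfficialProvider = (model) =>
  Boolean(
    model.features?.is_highlighted_by_maintainer ||
    model.metadata?.is_highlighted_by_maintainer
  );

// Example: counting highlighted models the way calculateModelCounts does.
const countHighlighted = (models) => models.filter(isOfficialProvider).length;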
frontend/src/pages/LeaderboardPage/components/Leaderboard/components/Filters/Filters.js
CHANGED

@@ -781,18 +781,18 @@ const LeaderboardFilters = ({
              <FilterTag
                label={filter.label}
                checked={
-                  filter.value === "
+                  filter.value === "is_highlighted_by_maintainer"
                    ? isOfficialProviderActive
                    : selectedBooleanFilters.includes(filter.value)
                }
                onChange={
-                  filter.value === "
+                  filter.value === "is_highlighted_by_maintainer"
                    ? handleOfficialProviderToggle
                    : () => handleBooleanFilterToggle(filter.value)
                }
                count={
-                  filter.value === "
-                    ? currentCounts.
+                  filter.value === "is_highlighted_by_maintainer"
+                    ? currentCounts.maintainersHighlight
                    : 0
                }
                showCheckbox={true}

@@ -815,7 +815,7 @@ const LeaderboardFilters = ({
                  borderRadius: "50%",
                  backgroundColor: (
                    filter.value ===
-                      "
+                      "is_highlighted_by_maintainer"
                      ? isOfficialProviderActive
                      : selectedBooleanFilters.includes(
                          filter.value

@@ -826,7 +826,7 @@ const LeaderboardFilters = ({
                }}
              />
              {(
-                filter.value === "
+                filter.value === "is_highlighted_by_maintainer"
                  ? isOfficialProviderActive
                  : selectedBooleanFilters.includes(filter.value)
              )
frontend/src/pages/LeaderboardPage/components/Leaderboard/components/Filters/QuickFilters.js
CHANGED

@@ -206,7 +206,7 @@ const QuickFilters = ({ totalCount = 0, loading = false }) => {
          label={officialProvidersPreset.label}
          checked={isOfficialProviderActive}
          onChange={handleOfficialProviderToggle}
-          count={currentCounts.
+          count={currentCounts.maintainersHighlight}
          totalCount={totalCount}
          showCheckbox={true}
          variant="secondary"
frontend/src/pages/LeaderboardPage/components/Leaderboard/components/Filters/hooks/useOfficialProvidersMode.js
CHANGED

@@ -23,7 +23,7 @@ export const useOfficialProvidersMode = () => {

    const filters = searchParams.get("filters");
    const isHighlighted =
-      filters?.includes("
+      filters?.includes("is_highlighted_by_maintainer") || false;

    // On initial load
    if (isInitialLoadRef.current) {

@@ -33,7 +33,7 @@ export const useOfficialProvidersMode = () => {
      if (isHighlighted && filters) {
        const initialNormalFilters = filters
          .split(",")
-          .filter((f) => f !== "
+          .filter((f) => f !== "is_highlighted_by_maintainer" && f !== "")
          .filter(Boolean);
        if (initialNormalFilters.length > 0) {
          normalFiltersRef.current = initialNormalFilters.join(",");

@@ -70,7 +70,7 @@ export const useOfficialProvidersMode = () => {
      const currentFiltersStr = searchParams.get("filters");
      const currentFilters =
        currentFiltersStr?.split(",").filter(Boolean) || [];
-      const highlightFilter = "
+      const highlightFilter = "is_highlighted_by_maintainer";
      const newSearchParams = new URLSearchParams(searchParams);

      if (currentFilters.includes(highlightFilter)) {
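The hook above keeps the highlight flag inside a comma-separated "filters" query parameter. A self-contained sketch of the toggle logic using the browser URLSearchParams API, with the React Router wiring omitted (the toggleHighlight function name is hypothetical):

// Sketch only: toggling "is_highlighted_by_maintainer" inside a
// comma-separated "filters" query parameter, as the hook above does.
const HIGHLIGHT = "is_highlighted_by_maintainer";

function toggleHighlight(search) {
  const params = new URLSearchParams(search);
  const filters = (params.get("filters") || "").split(",").filter(Boolean);
  const next = filters.includes(HIGHLIGHT)
    ? filters.filter((f) => f !== HIGHLIGHT)   // turn the highlight off
    : [...filters, HIGHLIGHT];                 // turn the highlight on
  if (next.length > 0) params.set("filters", next.join(","));
  else params.delete("filters");
  return params.toString();
}

// toggleHighlight("filters=is_moe")
//   -> "filters=is_moe%2Cis_highlighted_by_maintainer"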
frontend/src/pages/LeaderboardPage/components/Leaderboard/constants/defaults.js
CHANGED

@@ -59,14 +59,14 @@ const FILTERS = {
      hide: true,
    },
    {
-      value: "
+      value: "is_highlighted_by_maintainer",
      label: "Only Official Providers",
      hide: false,
    },
  ],
  HIGHLIGHT_OPTIONS: [
    {
-      value: "
+      value: "is_highlighted_by_maintainer",
      label: "Only Official Providers",
    },
  ],

@@ -237,7 +237,7 @@ const COLUMNS = {
    defaultVisible: false,
    label: "Hub Availability",
  },
-  "features.
+  "features.is_highlighted_by_maintainer": {
    group: "additional_info",
    size: COLUMN_SIZES.OFFICIAL_PROVIDER,
    defaultVisible: false,
frontend/src/pages/LeaderboardPage/components/Leaderboard/constants/quickFilters.js
CHANGED

@@ -45,7 +45,7 @@ export const QUICK_FILTER_PRESETS = [
    shortDescription: 'Officially provided models',
    description: 'Models that are officially provided and maintained by official creators or organizations.',
    filters: {
-      selectedBooleanFilters: ['
+      selectedBooleanFilters: ['is_highlighted_by_maintainer']
    }
  }
 ];
frontend/src/pages/LeaderboardPage/components/Leaderboard/constants/tooltips.js
CHANGED

@@ -258,7 +258,7 @@ export const COLUMN_TOOLTIPS = {
      "For detailed information about our CO₂ calculation methodology, visit:",
    subItems: [
      <a
-        href="https://huggingface.co/docs/
+        href="https://huggingface.co/docs/hub/carbon-emissions"
        target="_blank"
        rel="noopener noreferrer"
        style={{ color: "#90caf9" }}
frontend/src/pages/LeaderboardPage/components/Leaderboard/context/LeaderboardContext.js
CHANGED

@@ -47,7 +47,7 @@ const createInitialCounts = () => {
  return {
    modelTypes,
    precisions,
-
+    maintainersHighlight: 0,
    mixtureOfExperts: 0,
    flagged: 0,
    merged: 0,

@@ -129,7 +129,7 @@ const modelMatchesFilters = (model, filters) => {
    const filterValue = typeof filter === "object" ? filter.value : filter;

    // Maintainer's Highlight keeps positive logic
-    if (filterValue === "
+    if (filterValue === "is_highlighted_by_maintainer") {
      return model.features[filterValue];
    }

@@ -187,8 +187,8 @@ const calculateModelCounts = (models) => {

  models.forEach((model) => {
    const isOfficial =
-      model.features?.
-      model.metadata?.
+      model.features?.is_highlighted_by_maintainer ||
+      model.metadata?.is_highlighted_by_maintainer;
    const countsToUpdate = [normalCounts];

    if (isOfficial) {

@@ -214,10 +214,10 @@ const calculateModelCounts = (models) => {

    // Boolean filters
    if (
-      model.features?.
-      model.metadata?.
+      model.features?.is_highlighted_by_maintainer ||
+      model.metadata?.is_highlighted_by_maintainer
    )
-      counts.
+      counts.maintainersHighlight++;
    if (model.features?.is_moe || model.metadata?.is_moe)
      counts.mixtureOfExperts++;
    if (model.features?.is_flagged || model.metadata?.is_flagged)
frontend/src/pages/LeaderboardPage/components/Leaderboard/hooks/useDataUtils.js
CHANGED

@@ -4,7 +4,6 @@ import {
  parseSearchQuery,
  getValueByPath,
 } from "../utils/searchUtils";
-import { MODEL_TYPE_ORDER } from "../constants/modelTypes";

 // Calculate min/max averages
 export const useAverageRange = (data) => {

@@ -59,7 +58,9 @@ export const useProcessedData = (data, averageMode, visibleColumns) => {
      ...item.features,
      is_moe: Boolean(item.features.is_moe),
      is_flagged: Boolean(item.features.is_flagged),
-
+      is_highlighted_by_maintainer: Boolean(
+        item.features.is_highlighted_by_maintainer
+      ),
      is_merged: Boolean(item.features.is_merged),
      is_not_available_on_hub: Boolean(item.features.is_not_available_on_hub),
    };

@@ -83,12 +84,10 @@ export const useProcessedData = (data, averageMode, visibleColumns) => {
      return b.model.average_score - a.model.average_score;
    });

-
+    return processed.map((item, index) => ({
      ...item,
      static_rank: index + 1,
    }));
-
-    return result;
  }, [data, averageMode, visibleColumns]);
 };

@@ -108,7 +107,6 @@ export const useFilteredData = (
  const pinnedData = processedData.filter((row) => {
    return pinnedModels.includes(row.id);
  });
-
  const unpinnedData = processedData.filter((row) => {
    return !pinnedModels.includes(row.id);
  });

@@ -119,8 +117,8 @@ export const useFilteredData = (
  if (isOfficialProviderActive) {
    filteredUnpinned = filteredUnpinned.filter(
      (row) =>
-        row.features?.
-        row.metadata?.
+        row.features?.is_highlighted_by_maintainer ||
+        row.metadata?.is_highlighted_by_maintainer
    );
  }

@@ -132,10 +130,7 @@ export const useFilteredData = (
  }

  // Filter by type
-  if (
-    selectedTypes.length > 0 &&
-    selectedTypes.length < MODEL_TYPE_ORDER.length
-  ) {
+  if (selectedTypes.length > 0) {
    filteredUnpinned = filteredUnpinned.filter((row) => {
      const modelType = row.model.type?.toLowerCase().trim();
      return selectedTypes.some((type) => modelType?.includes(type));

@@ -143,14 +138,15 @@ export const useFilteredData = (
  }

  // Filter by parameters
-
-
-
-
-
-
-
-
+  filteredUnpinned = filteredUnpinned.filter((row) => {
+    // Skip parameter filtering if no filter is active
+    if (paramsRange[0] === -1 && paramsRange[1] === 140) return true;
+
+    const params =
+      row.metadata?.params_billions || row.features?.params_billions;
+    if (params === undefined || params === null) return false;
+    return params >= paramsRange[0] && params < paramsRange[1];
+  });

  // Filter by search
  if (searchValue) {

@@ -201,7 +197,7 @@ export const useFilteredData = (
      typeof filter === "object" ? filter.value : filter;

    // Maintainer's Highlight keeps positive logic
-    if (filterValue === "
+    if (filterValue === "is_highlighted_by_maintainer") {
      return row.features[filterValue];
    }
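Note the sentinel in the new parameter filter: a range of exactly [-1, 140] is treated as "no filter active", and once a real range is set, models without a recoverable parameter count are excluded. A standalone sketch of that predicate (the [-1, 140] default and the metadata/features fallback come from the diff; matchesParamsRange is a hypothetical helper name):

// Sketch only: the parameter-range predicate from useFilteredData above.
const matchesParamsRange = (row, [min, max]) => {
  if (min === -1 && max === 140) return true; // sentinel: no filter active
  // Mirrors the diff: metadata takes precedence, features is the fallback.
  const params = row.metadata?.params_billions || row.features?.params_billions;
  if (params === undefined || params === null) return false;
  return params >= min && params < max;
};

// Example: keep only models between 7B and 70B parameters.
// rows.filter((row) => matchesParamsRange(row, [7, 70]));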
frontend/src/pages/LeaderboardPage/components/Leaderboard/hooks/useLeaderboardData.js
CHANGED

@@ -4,6 +4,9 @@ import { useSearchParams } from "react-router-dom";
 import { useLeaderboard } from "../context/LeaderboardContext";
 import { useDataProcessing } from "../components/Table/hooks/useDataProcessing";

+const CACHE_KEY = "leaderboardData";
+const CACHE_DURATION = 5 * 60 * 1000; // 5 minutes
+
 export const useLeaderboardData = () => {
  const queryClient = useQueryClient();
  const [searchParams] = useSearchParams();

@@ -12,41 +15,44 @@ export const useLeaderboardData = () => {
  const { data, isLoading, error } = useQuery({
    queryKey: ["leaderboard"],
    queryFn: async () => {
-      console.log("🔄 Starting API fetch attempt...");
      try {
-
-
-
+        const cachedData = localStorage.getItem(CACHE_KEY);
+        if (cachedData) {
+          const { data: cached, timestamp } = JSON.parse(cachedData);
+          const age = Date.now() - timestamp;
+          if (age < CACHE_DURATION) {
+            return cached;
+          }
+        }

+        const response = await fetch("/api/leaderboard/formatted");
        if (!response.ok) {
-          const errorText = await response.text();
-          console.error("🚨 API Error:", {
-            status: response.status,
-            statusText: response.statusText,
-            body: errorText,
-          });
          throw new Error(`HTTP error! status: ${response.status}`);
        }

        const newData = await response.json();
-
+        localStorage.setItem(
+          CACHE_KEY,
+          JSON.stringify({
+            data: newData,
+            timestamp: Date.now(),
+          })
+        );
+
        return newData;
      } catch (error) {
-        console.error("
-          name: error.name,
-          message: error.message,
-          stack: error.stack,
-        });
+        console.error("Detailed error:", error);
        throw error;
      }
    },
+    staleTime: CACHE_DURATION,
+    cacheTime: CACHE_DURATION * 2,
    refetchOnWindowFocus: false,
    enabled: isInitialLoadRef.current || !!searchParams.toString(),
  });

  useMemo(() => {
    if (data && isInitialLoadRef.current) {
-      console.log("🎯 Initial load complete");
      isInitialLoadRef.current = false;
    }
  }, [data]);
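The new hook pairs React Query's staleTime with a hand-rolled localStorage cache, so a reload within five minutes can skip the network entirely. The cache logic in isolation, as a sketch: CACHE_KEY and the five-minute TTL come from the diff; readCache/writeCache are hypothetical helper names, and the try/catch around JSON.parse is an added safety net, not part of the original change.

const CACHE_KEY = "leaderboardData";
const CACHE_DURATION = 5 * 60 * 1000; // 5 minutes

function readCache() {
  try {
    const raw = localStorage.getItem(CACHE_KEY);
    if (!raw) return null;
    const { data, timestamp } = JSON.parse(raw);
    // Entries older than the TTL count as a miss.
    return Date.now() - timestamp < CACHE_DURATION ? data : null;
  } catch {
    return null; // corrupt cache entries are treated as a miss
  }
}

function writeCache(data) {
  localStorage.setItem(CACHE_KEY, JSON.stringify({ data, timestamp: Date.now() }));
}

// Usage inside a query function, mirroring the diff above:
// const cached = readCache();
// if (cached) return cached;
// const fresh = await (await fetch("/api/leaderboard/formatted")).json();
// writeCache(fresh);
// return fresh;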
frontend/src/pages/LeaderboardPage/components/Leaderboard/utils/columnUtils.js
CHANGED

@@ -1003,18 +1003,18 @@ export const createColumns = (
    ],
  },
  {
-    accessorKey: "features.
+    accessorKey: "features.is_highlighted_by_maintainer",
    header: createHeaderCell(
      "Official Providers",
      "Models that are officially provided and maintained by their original creators or organizations"
    ),
    cell: ({ row }) => (
      <BooleanValue
-        value={row.original.features.
+        value={row.original.features.is_highlighted_by_maintainer}
      />
    ),
    size: TABLE_DEFAULTS.COLUMNS.COLUMN_SIZES[
-      "features.
+      "features.is_highlighted_by_maintainer"
    ],
    enableSorting: true,
  },

@@ -1061,7 +1061,7 @@ export const createColumns = (
  "metadata.base_model": 10,
  "model.has_chat_template": 11,
  "features.is_not_available_on_hub": 12,
-  "features.
+  "features.is_highlighted_by_maintainer": 13,
  "features.is_moe": 14,
  "features.is_flagged": 15,
 };
frontend/src/pages/QuotePage/QuotePage.js
CHANGED

@@ -226,7 +226,7 @@ const CitationBlock = ({ citation, title, authors, url, type }) => {

 function QuotePage() {
  return (
-    <Box sx={{ width: "100%", maxWidth: 1200, margin: "0 auto",
+    <Box sx={{ width: "100%", maxWidth: 1200, margin: "0 auto", padding: 4 }}>
      <PageHeader
        title="Citation Information"
        subtitle="How to cite the Open LLM Leaderboard in your work"
frontend/src/pages/VoteModelPage/VoteModelPage.js
CHANGED

@@ -13,8 +13,6 @@ import {
  IconButton,
  Stack,
  Link,
-  useTheme,
-  useMediaQuery,
 } from "@mui/material";
 import AccessTimeIcon from "@mui/icons-material/AccessTime";
 import PersonIcon from "@mui/icons-material/Person";

@@ -70,23 +68,12 @@ const NoModelsToVote = () => (
  </Box>
 );

-const LOCAL_STORAGE_KEY = "pending_votes";
-
 function VoteModelPage() {
-  const { isAuthenticated, user, loading
+  const { isAuthenticated, user, loading } = useAuth();
  const [pendingModels, setPendingModels] = useState([]);
  const [loadingModels, setLoadingModels] = useState(true);
  const [error, setError] = useState(null);
  const [userVotes, setUserVotes] = useState(new Set());
-  const [loadingVotes, setLoadingVotes] = useState({});
-  const [localVotes, setLocalVotes] = useState(new Set());
-  const theme = useTheme();
-  const isMobile = useMediaQuery(theme.breakpoints.down("sm"));
-
-  // Create a unique identifier for a model
-  const getModelUniqueId = (model) => {
-    return `${model.name}_${model.precision}_${model.revision}`;
-  };

  const formatWaitTime = (submissionTime) => {
    if (!submissionTime) return "N/A";

@@ -111,263 +98,177 @@ function VoteModelPage() {
    return `${diffInWeeks}w`;
  };

-
-
-  const
-
-    // Count the votes from the server
-    let serverVotes = 0;
-    for (const [key, config] of Object.entries(votesData.votes_by_config)) {
-      if (
-        config.precision === model.precision &&
-        config.revision === model.revision
-      ) {
-        serverVotes = config.count;
-        break;
-      }
-    }
-
-    // Add the pending votes from localStorage
-    const pendingVote = localVotes.has(modelUniqueId) ? 1 : 0;
-
-    return serverVotes + pendingVote;
-  };
-
-
-
-
-
-
-  }
-
-
-
-
-
-
+  // Fetch user's votes
+  useEffect(() => {
+    const fetchUserVotes = async () => {
+      if (!isAuthenticated || !user) return;
+
+      try {
+        // Retrieve the votes from localStorage
+        const localVotes = JSON.parse(
+          localStorage.getItem(`votes_${user.username}`) || "[]"
+        );
+        const localVotesSet = new Set(localVotes);
+
+        // Retrieve the votes from the server
+        const response = await fetch(`/api/votes/user/${user.username}`);
+        if (!response.ok) {
+          throw new Error("Failed to fetch user votes");
+        }
+        const data = await response.json();
+
+        // Merge the server votes with the local votes
+        const votedModels = new Set([
+          ...data.map((vote) => vote.model),
+          ...localVotesSet,
+        ]);
+        setUserVotes(votedModels);
+      } catch (err) {
+        console.error("Error fetching user votes:", err);
      }
+    };

-
-    });
-  };
-
-  // Add this function to handle localStorage
-  const updateLocalVotes = (modelUniqueId, action = "add") => {
-    const storedVotes = JSON.parse(
-      localStorage.getItem(LOCAL_STORAGE_KEY) || "[]"
-    );
-    if (action === "add") {
-      if (!storedVotes.includes(modelUniqueId)) {
-        storedVotes.push(modelUniqueId);
-      }
-    } else {
-      const index = storedVotes.indexOf(modelUniqueId);
-      if (index > -1) {
-        storedVotes.splice(index, 1);
-      }
-    }
-    localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(storedVotes));
-    setLocalVotes(new Set(storedVotes));
-  };
+    fetchUserVotes();
+  }, [isAuthenticated, user]);

  useEffect(() => {
-    const
+    const fetchModels = async () => {
      try {
-
-        if (
-          setLoadingModels(true);
-        }
-        setError(null);
-
-        // First load the pending votes from localStorage
-        const storedVotes = JSON.parse(
-          localStorage.getItem(LOCAL_STORAGE_KEY) || "[]"
-        );
-        const localVotesSet = new Set(storedVotes);
-
-        // Prepare all the requests in parallel
-        const [pendingModelsResponse, userVotesResponse] = await Promise.all([
-          fetch("/api/models/pending"),
-          isAuthenticated && user
-            ? fetch(`/api/votes/user/${user.username}`)
-            : Promise.resolve(null),
-        ]);
-
-        if (!pendingModelsResponse.ok) {
+        const response = await fetch("/api/models/pending");
+        if (!response.ok) {
          throw new Error("Failed to fetch pending models");
        }
+        const data = await response.json();

-
-        const
-
-        // Process the user's votes if logged in
-        if (userVotesResponse && userVotesResponse.ok) {
-          const votesData = await userVotesResponse.json();
-          const userVotes = Array.isArray(votesData) ? votesData : [];
-
-          userVotes.forEach((vote) => {
-            const uniqueId = `${vote.model}_${vote.precision || "unknown"}_${
-              vote.revision || "main"
-            }`;
-            votedModels.add(uniqueId);
-            if (localVotesSet.has(uniqueId)) {
-              localVotesSet.delete(uniqueId);
-              updateLocalVotes(uniqueId, "remove");
-            }
-          });
-        }
-
-        // Prepare and run all the vote requests in one go
-        const modelVotesResponses = await Promise.all(
-          modelsData.map((model) => {
+        // Fetch votes for each model
+        const modelsWithVotes = await Promise.all(
+          data.map(async (model) => {
            const [provider, modelName] = model.name.split("/");
-
-
-
-
-
-
-
+            const votesResponse = await fetch(
+              `/api/votes/model/${provider}/${modelName}`
+            );
+            const votesData = await votesResponse.json();
+
+            // Calculate total vote score from votes_by_revision
+            const totalScore = Object.values(
+              votesData.votes_by_revision || {}
+            ).reduce((a, b) => a + b, 0);
+
+            // Calculate wait time based on submission_time from model data
+            const waitTimeDisplay = formatWaitTime(model.submission_time);
+
+            return {
+              ...model,
+              votes: totalScore,
+              votes_by_revision: votesData.votes_by_revision,
+              wait_time: waitTimeDisplay,
+              hasVoted: userVotes.has(model.name),
+            };
          })
        );

-        //
-        const
-          const votesData = modelVotesResponses[index];
-          const modelUniqueId = getModelUniqueId(model);
-          const isVotedByUser =
-            votedModels.has(modelUniqueId) || localVotesSet.has(modelUniqueId);
-
-          return {
-            ...model,
-            votes: getConfigVotes(
-              {
-                ...votesData,
-                votes_by_config: votesData.votes_by_config || {},
-              },
-              model
-            ),
-            votes_by_config: votesData.votes_by_config || {},
-            wait_time: formatWaitTime(model.submission_time),
-            hasVoted: isVotedByUser,
-          };
-        });
-
-        // Update all the state in one go
-        const sortedModels = sortModels(modelsWithVotes);
-
-        // Batch updates
-        const updates = () => {
-          setPendingModels(sortedModels);
-          setUserVotes(votedModels);
-          setLocalVotes(localVotesSet);
-          setLoadingModels(false);
-        };
+        // Sort models by vote score in descending order
+        const sortedModels = modelsWithVotes.sort((a, b) => b.votes - a.votes);

-
+        setPendingModels(sortedModels);
      } catch (err) {
-        console.error("Error fetching data:", err);
        setError(err.message);
+      } finally {
        setLoadingModels(false);
      }
    };

-
-    }, [
+    fetchModels();
+  }, [userVotes]);

-
-  const handleVote = async (model) => {
+  const handleVote = async (modelName) => {
    if (!isAuthenticated) return;

-    const modelUniqueId = getModelUniqueId(model);
-
    try {
-
-
-
-
-
-      //
-      const
+      // Disable the button immediately by adding the model to userVotes
+      setUserVotes((prev) => {
+        const newSet = new Set([...prev, modelName]);
+        // Save it to localStorage
+        if (user) {
+          const localVotes = JSON.parse(
+            localStorage.getItem(`votes_${user.username}`) || "[]"
+          );
+          if (!localVotes.includes(modelName)) {
+            localVotes.push(modelName);
+            localStorage.setItem(
+              `votes_${user.username}`,
+              JSON.stringify(localVotes)
+            );
+          }
+        }
+        return newSet;
+      });
+
+      // Split modelName into provider and model
+      const [provider, model] = modelName.split("/");

      const response = await fetch(
-        `/api/votes/${
+        `/api/votes/${modelName}?vote_type=up&user_id=${user.username}`,
        {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
          },
-          body: JSON.stringify({
-            precision: model.precision,
-            revision: model.revision,
-          }),
        }
      );

      if (!response.ok) {
-        //
-
+        // If the vote fails, remove it from localStorage and from the state
+        setUserVotes((prev) => {
+          const newSet = new Set(prev);
+          newSet.delete(modelName);
+          if (user) {
+            const localVotes = JSON.parse(
+              localStorage.getItem(`votes_${user.username}`) || "[]"
+            );
+            const updatedVotes = localVotes.filter(
+              (vote) => vote !== modelName
+            );
+            localStorage.setItem(
+              `votes_${user.username}`,
+              JSON.stringify(updatedVotes)
+            );
+          }
+          return newSet;
+        });
        throw new Error("Failed to submit vote");
      }

-      // Refresh votes for this model
-      const [provider, modelName] = model.name.split("/");
-      const timestamp = Date.now();
+      // Refresh votes for this model
      const votesResponse = await fetch(
-        `/api/votes/model/${provider}/${
+        `/api/votes/model/${provider}/${model}`
      );
-
-      if (!votesResponse.ok) {
-        throw new Error("Failed to fetch updated votes");
-      }
-
      const votesData = await votesResponse.json();
-
+
+      // Calculate total vote score from votes_by_revision
+      const totalScore = Object.values(
+        votesData.votes_by_revision || {}
+      ).reduce((a, b) => a + b, 0);

      // Update model and resort the list
      setPendingModels((models) => {
-        const updatedModels = models.map((
-
+        const updatedModels = models.map((model) =>
+          model.name === modelName
            ? {
-                ...
-                votes:
-
-                hasVoted: true,
+                ...model,
+                votes: totalScore,
+                votes_by_revision: votesData.votes_by_revision,
              }
-            :
+            : model
        );
-
-        console.log("Updated and sorted models:", sortedModels); // Debug log
-        return sortedModels;
+        return updatedModels.sort((a, b) => b.votes - a.votes);
      });
-
-      // Update user votes with unique ID
-      setUserVotes((prev) => new Set([...prev, getModelUniqueId(model)]));
    } catch (err) {
-      console.error("Error voting:", err);
      setError(err.message);
-    } finally {
-      // Clear loading state for this model
-      setLoadingVotes((prev) => ({
-        ...prev,
-        [modelUniqueId]: false,
-      }));
    }
  };

-
-  // Inside the map function where you render models
-  const isVoted = (model) => {
-    const modelUniqueId = getModelUniqueId(model);
-    return userVotes.has(modelUniqueId) || localVotes.has(modelUniqueId);
-  };
-
-  if (authLoading || (loadingModels && pendingModels.length === 0)) {
+  if (loading) {
    return (
      <Box
        sx={{

@@ -383,15 +284,7 @@ function VoteModelPage() {
  }

  return (
-    <Box
-      sx={{
-        width: "100%",
-        maxWidth: 1200,
-        margin: "0 auto",
-        py: 4,
-        px: 0,
-      }}
-    >
+    <Box sx={{ width: "100%", maxWidth: 1200, margin: "0 auto", padding: 4 }}>
      <PageHeader
        title="Vote for the Next Models"
        subtitle={

@@ -504,7 +397,7 @@ function VoteModelPage() {
            borderBottom: "1px solid",
            borderColor: "divider",
            bgcolor: "background.paper",
-            display:
+            display: "grid",
            gridTemplateColumns: "1fr 200px 160px",
            gap: 3,
            alignItems: "center",

@@ -548,16 +441,16 @@ function VoteModelPage() {
        {pendingModels.map((model, index) => {
          const isTopThree = index < 3;
          return (
-            <React.Fragment key={
+            <React.Fragment key={model.name}>
              {index > 0 && <Divider />}
              <ListItem
                sx={{
                  py: 2.5,
                  px: 3,
                  display: "grid",
-                  gridTemplateColumns:
-                  gap:
-                  alignItems: "
+                  gridTemplateColumns: "1fr 200px 160px",
+                  gap: 3,
+                  alignItems: "center",
                  position: "relative",
                  "&:hover": {
                    bgcolor: "action.hover",

@@ -568,100 +461,41 @@ function VoteModelPage() {
                <Box>
                  <Stack spacing={1}>
                    {/* Model name and link */}
-                    <Stack
-
-
-
-
-
-
-
-
-                    >
-
-
-
-
-
-
-
-                          fontWeight: 500,
-                          "&:hover": {
-                            textDecoration: "underline",
-                          },
-                          fontSize: { xs: "0.9rem", sm: "inherit" },
-                          wordBreak: "break-word",
-                        }}
-                      >
-                        {model.name}
-                      </Link>
-                      <IconButton
-                        size="small"
-                        href={`https://huggingface.co/${model.name}`}
-                        target="_blank"
-                        rel="noopener noreferrer"
-                        sx={{
-                          ml: 0.5,
-                          p: 0.5,
-                          color: "action.active",
-                          "&:hover": {
-                            color: "primary.main",
-                          },
-                        }}
-                      >
-                        <OpenInNewIcon sx={{ fontSize: "1rem" }} />
-                      </IconButton>
-                    </Stack>
-                    <Stack
-                      direction="row"
-                      spacing={1}
-                      sx={{
-
-
-
-
-                        },
-                        flexWrap: "wrap",
-                        gap: 1,
-                      }}
-                    >
-                      <
-
-                        size="small"
-                        variant="outlined"
-                        sx={{
-                          borderColor: "grey.300",
-                          bgcolor: "grey.50",
-                          "& .MuiChip-label": {
-                            fontSize: "0.75rem",
-                            fontWeight: 600,
-                            color: "text.secondary",
-                          },
-                        }}
-                      />
-                      <Chip
-                        label={`rev: ${model.revision.slice(0, 7)}`}
-                        size="small"
-                        variant="outlined"
-                        sx={{
-                          borderColor: "grey.300",
-                          bgcolor: "grey.50",
-                          "& .MuiChip-label": {
-                            fontSize: "0.75rem",
-                            fontWeight: 600,
-                            color: "text.secondary",
-                          },
-                        }}
-                      />
-                    </Stack>
+                    <Stack direction="row" spacing={1} alignItems="center">
+                      <Link
+                        href={`https://huggingface.co/${model.name}`}
+                        target="_blank"
+                        rel="noopener noreferrer"
+                        sx={{
+                          textDecoration: "none",
+                          color: "primary.main",
+                          fontWeight: 500,
+                          "&:hover": {
+                            textDecoration: "underline",
+                          },
+                        }}
+                      >
+                        {model.name}
+                      </Link>
+                      <IconButton
+                        size="small"
+                        href={`https://huggingface.co/${model.name}`}
+                        target="_blank"
+                        rel="noopener noreferrer"
+                        sx={{
+                          ml: 0.5,
+                          p: 0.5,
+                          color: "action.active",
+                          "&:hover": {
+                            color: "primary.main",
+                          },
+                        }}
+                      >
+                        <OpenInNewIcon sx={{ fontSize: "1rem" }} />
+                      </IconButton>
+                    </Stack>
                  </Stack>
                  {/* Metadata row */}
-                  <Stack
-                    direction={{ xs: "column", sm: "row" }}
-                    spacing={{ xs: 1, sm: 2 }}
-                    alignItems={{ xs: "flex-start", sm: "center" }}
-                  >
+                  <Stack direction="row" spacing={2} alignItems="center">
                    <Stack
                      direction="row"
                      spacing={0.5}

@@ -697,22 +531,17 @@ function VoteModelPage() {
                </Box>

                {/* Vote Column */}
-                <Box
-                  sx={{
-                    textAlign: { xs: "left", sm: "right" },
-                    mt: { xs: 2, sm: 0 },
-                  }}
-                >
+                <Box sx={{ textAlign: "right" }}>
                  <Stack
-                    direction=
+                    direction="row"
                    spacing={2.5}
-                    justifyContent=
+                    justifyContent="flex-end"
                    alignItems="center"
                  >
                    <Stack
-                      alignItems=
+                      alignItems="center"
                      sx={{
-                        minWidth:
+                        minWidth: "90px",
                      }}
                    >
                      <Typography

@@ -721,7 +550,7 @@ function VoteModelPage() {
                        sx={{
                          fontWeight: 700,
                          lineHeight: 1,
-                          fontSize:
+                          fontSize: "2rem",
                          display: "flex",
                          alignItems: "center",
                          justifyContent: "center",

@@ -730,7 +559,7 @@ function VoteModelPage() {
                      <Typography
                        component="span"
                        sx={{
-                          fontSize:
+                          fontSize: "1.5rem",
                          fontWeight: 600,
                          color: "primary.main",
                          lineHeight: 1,

@@ -769,22 +598,18 @@ function VoteModelPage() {
                      </Typography>
                    </Stack>
                    <Button
-                      variant={
-                      size=
-                      onClick={() => handleVote(model)}
-                      disabled={
-                        !isAuthenticated ||
-                        isVoted(model) ||
-                        loadingVotes[getModelUniqueId(model)]
-                      }
+                      variant={model.hasVoted ? "contained" : "outlined"}
+                      size="large"
+                      onClick={() => handleVote(model.name)}
+                      disabled={!isAuthenticated || model.hasVoted}
                      color="primary"
                      sx={{
-                        minWidth:
-                        height:
+                        minWidth: "100px",
+                        height: "40px",
                        textTransform: "none",
                        fontWeight: 600,
-                        fontSize:
-                        ...(
+                        fontSize: "0.95rem",
+                        ...(model.hasVoted
                          ? {
                              bgcolor: "primary.main",
                              "&:hover": {

@@ -804,9 +629,7 @@ function VoteModelPage() {
                          }),
                      }}
                    >
-                      {
-                        <CircularProgress size={20} color="inherit" />
-                      ) : isVoted(model) ? (
+                      {model.hasVoted ? (
                        <Stack
                          direction="row"
                          spacing={0.5}

@@ -823,13 +646,7 @@ function VoteModelPage() {
                </Box>

                {/* Priority Column */}
-                <Box
-                  sx={{
-                    textAlign: { xs: "left", sm: "right" },
-                    mt: { xs: 2, sm: 0 },
-                    display: { xs: "none", sm: "block" },
-                  }}
-                >
+                <Box sx={{ textAlign: "right" }}>
                  <Chip
                    label={
                      <Stack
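Two patterns in the rewritten vote page are worth isolating: a model's displayed score is the sum over its votes_by_revision object, and a vote is recorded optimistically, with state and localStorage updated before the request and rolled back if the POST fails. A compact sketch, assuming the response shapes shown in the diff; totalScore, voteOptimistically, markVoted, and unmarkVoted are hypothetical names, with the latter two standing in for the setUserVotes/localStorage updates above.

// Sketch only: assumes /api/votes/model/... responses carry a
// votes_by_revision object, as the diff above suggests.
const totalScore = (votesData) =>
  Object.values(votesData.votes_by_revision || {}).reduce((a, b) => a + b, 0);

// Optimistic vote: mark locally first, roll back if the server rejects it.
async function voteOptimistically(modelName, username, markVoted, unmarkVoted) {
  markVoted(modelName); // update state + localStorage before the request
  const res = await fetch(
    `/api/votes/${modelName}?vote_type=up&user_id=${username}`,
    { method: "POST", headers: { "Content-Type": "application/json" } }
  );
  if (!res.ok) {
    unmarkVoted(modelName); // rollback on failure
    throw new Error("Failed to submit vote");
  }
}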