Move Space CI config to README.md #520
by Wauplin (HF staff) - opened
- README.md +5 -0
- app.py +4 -11
- requirements.txt +1 -1
README.md
CHANGED
@@ -10,6 +10,11 @@ pinned: true
 license: apache-2.0
 duplicated_from: HuggingFaceH4/open_llm_leaderboard
 fullWidth: true
+space_ci: # See https://huggingface.co/spaces/Wauplin/gradio-space-ci
+  private: true
+  secrets:
+    - HF_TOKEN
+    - H4_TOKEN
 ---
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
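The `space_ci` block added above is plain YAML inside the Space's front matter. As a hedged illustration (using PyYAML directly, not gradio-space-ci's own loading code), the sketch below parses the same excerpt and prints the structure it encodes; `FRONT_MATTER_EXCERPT` is an illustrative name, not part of the Space.

# Hypothetical sketch: parse the space_ci excerpt from the README front matter
# with PyYAML to show the data it encodes. Not gradio-space-ci internals.
import yaml

FRONT_MATTER_EXCERPT = """
space_ci:  # See https://huggingface.co/spaces/Wauplin/gradio-space-ci
  private: true
  secrets:
    - HF_TOKEN
    - H4_TOKEN
"""

config = yaml.safe_load(FRONT_MATTER_EXCERPT)["space_ci"]
print(config["private"])  # True
print(config["secrets"])  # ['HF_TOKEN', 'H4_TOKEN']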
app.py
CHANGED
@@ -2,7 +2,7 @@ import gradio as gr
 import pandas as pd
 from apscheduler.schedulers.background import BackgroundScheduler
 from huggingface_hub import snapshot_download
-from gradio_space_ci import configure_space_ci
+from gradio_space_ci import enable_space_ci
 
 from src.display.about import (
     CITATION_BUTTON_LABEL,
@@ -37,6 +37,8 @@ from src.tools.plots import (
     create_scores_df,
 )
 
+# Start ephemeral Spaces on PRs (see config in README.md)
+enable_space_ci()
 
 def restart_space():
     API.restart_space(repo_id=REPO_ID, token=H4_TOKEN)
@@ -420,13 +422,4 @@ scheduler = BackgroundScheduler()
 scheduler.add_job(restart_space, "interval", seconds=10800)
 scheduler.start()
 
-
-configure_space_ci(
-    demo.queue(default_concurrency_limit=40),
-    trusted_authors=[],  # add manually trusted authors
-    private="True",  # ephemeral spaces will have the same visibility as the main space. Otherwise, set to `True` or `False` explicitly.
-    variables={},  # We overwrite HF_HOME as tmp CI spaces will have no cache
-    secrets=["HF_TOKEN", "H4_TOKEN"],  # which secrets do I want to copy from the main space? Can be a `List[str]`.
-    hardware=None,  # "cpu-basic" by default. Otherwise set to "auto" to have the same hardware as the main space or any valid string value.
-    storage=None,  # no storage by default. Otherwise set to "auto" to have the same storage as the main space or any valid string value.
-).launch()
+demo.queue(default_concurrency_limit=40).launch()
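Taken together, the app.py changes replace the configure_space_ci(...) wrapper around launch() with a single enable_space_ci() call at import time, with all CI options now read from the README front matter. Below is a minimal, self-contained sketch of the resulting startup flow, assuming gradio-space-ci 0.2.0 as pinned in requirements.txt below; the `demo` Blocks app and the restart_space body are placeholders standing in for the real leaderboard code.

# Minimal sketch of the post-change startup flow (placeholder UI, not the leaderboard).
import gradio as gr
from apscheduler.schedulers.background import BackgroundScheduler
from gradio_space_ci import enable_space_ci

# Start ephemeral Spaces on PRs; CI options now live in the README.md front matter.
enable_space_ci()

with gr.Blocks() as demo:
    gr.Markdown("placeholder UI")

def restart_space():
    ...  # the real app calls API.restart_space(repo_id=REPO_ID, token=H4_TOKEN)

scheduler = BackgroundScheduler()
scheduler.add_job(restart_space, "interval", seconds=10800)  # restart every 3 hours
scheduler.start()

demo.queue(default_concurrency_limit=40).launch()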
requirements.txt
CHANGED
@@ -15,4 +15,4 @@ sentencepiece
 tqdm==4.65.0
 transformers==4.36.0
 tokenizers>=0.15.0
-gradio-space-ci @ git+https://huggingface.co/spaces/Wauplin/gradio-space-ci@0.
+gradio-space-ci @ git+https://huggingface.co/spaces/Wauplin/gradio-space-ci@0.2.0 # CI !!!