Zai committed
Commit fac5833 · 1 Parent(s): 6936ef7

Edit gitignore and huggingface space.py

Files changed (3):
  1. .github/workflows/huggingface.yml +42 -0
  2. .gitignore +3 -1
  3. space.py +9 -0
.github/workflows/huggingface.yml ADDED
@@ -0,0 +1,42 @@
+name: Uploading on Huggingface
+on:
+  push:
+    branches: [main]
+  workflow_dispatch:
+
+jobs:
+  sync-to-hub:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+          lfs: true
+      - name: Set Git identity
+        run: |
+          git config --global user.email "[email protected]"
+          git config --global user.name "GitHub Actions"
+
+      - name: Update README.md
+        run: |
+          tmp_file=$(mktemp)
+          echo "---" >> $tmp_file
+          echo "title: Burmese GPT" >> $tmp_file
+          echo "emoji: 💫️" >> $tmp_file
+          echo "colorFrom: yellow" >> $tmp_file
+          echo "colorTo: blue" >> $tmp_file
+          echo "sdk: streamlit" >> $tmp_file
+          echo "sdk_version: 1.29.0" >> $tmp_file
+          echo "app_file: space.py" >> $tmp_file
+          echo "pinned: false" >> $tmp_file
+          echo "license: openrail" >> $tmp_file
+          echo "---" >> $tmp_file
+          echo "" >> $tmp_file
+          cat README.md >> $tmp_file
+          mv $tmp_file README.md
+          git add README.md
+          git commit -m "Updated README.md"
+      - name: Push to hub
+        env:
+          HF_TOKEN: ${{ secrets.HF_TOKEN }}
+        run: git push https://zaibutcooler:[email protected]/spaces/zaibutcooler/burmese-gpt --force main
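
For context, the sync step above force-pushes the git tree to the Space with a token embedded in the remote URL. A rough equivalent using the huggingface_hub client is sketched below; the Space id comes from the push URL in the workflow, while the client calls and commit message are illustrative, not part of this commit.

import os
from huggingface_hub import HfApi

# Sketch of an alternative to the git force-push step above (not part of this commit).
# Assumes HF_TOKEN is available in the environment, as in the workflow.
api = HfApi(token=os.environ["HF_TOKEN"])
api.upload_folder(
    folder_path=".",                      # repository working tree
    repo_id="zaibutcooler/burmese-gpt",   # Space id from the push URL above
    repo_type="space",
    commit_message="Sync from GitHub",
)

upload_folder skips git entirely, so the token never appears in a remote URL.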
.gitignore CHANGED
@@ -139,4 +139,6 @@ cython_debug/
 # Logs
 *.log
 
-.idea
+.idea
+
+checkpoints/
space.py CHANGED
@@ -3,6 +3,8 @@ from transformers import AutoTokenizer
 import streamlit as st
 from burmese_gpt.config import ModelConfig
 from burmese_gpt.models import BurmeseGPT
+from .scripts.download import download_pretrained_model
+import os
 
 # Model configuration
 VOCAB_SIZE = 119547
@@ -12,6 +14,13 @@ CHECKPOINT_PATH = "checkpoints/best_model.pth"
 # Load model function (cached to avoid reloading on every interaction)
 @st.cache_resource
 def load_model():
+    if os.path.exists(CHECKPOINT_PATH):
+        st.warning("Model already exists, skipping download.")
+    else:
+        st.info("Downloading model...")
+        download_pretrained_model()
+        st.success("Model downloaded successfully.")
+
     model_config = ModelConfig()
 
     tokenizer = AutoTokenizer.from_pretrained("bert-base-multilingual-cased")
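
The download_pretrained_model helper imported above is not included in this diff. A minimal sketch of what it could look like, assuming the checkpoint is hosted in a Hub repo, follows; the repo id and filename are placeholders, not taken from this commit. Note also that space.py is the Space's app_file at the repository root, so the relative import from .scripts.download may fail when the file runs as a top-level script; an absolute import (from scripts.download import download_pretrained_model) is the usual fix.

import os
from huggingface_hub import hf_hub_download

def download_pretrained_model() -> str:
    """Hypothetical helper (not part of this commit): fetch the checkpoint
    into the local checkpoints/ directory that .gitignore now excludes."""
    os.makedirs("checkpoints", exist_ok=True)
    return hf_hub_download(
        repo_id="zaibutcooler/burmese-gpt",  # placeholder repo id
        filename="best_model.pth",           # placeholder filename
        local_dir="checkpoints",             # lands at checkpoints/best_model.pth
    )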