Spaces:
Running on Zero

hexgrad committed on
Commit
2af00d7
·
verified ·
1 Parent(s): 4558382

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -4
app.py CHANGED
@@ -1,6 +1,4 @@
1
  import spaces
2
- import misaki
3
- print(misaki.__version__)
4
  from kokoro import KModel, KPipeline
5
  import gradio as gr
6
  import os
@@ -8,9 +6,12 @@ import random
8
  import torch
9
 
10
  IS_DUPLICATE = not os.getenv('SPACE_ID', '').startswith('hexgrad/')
11
- CHAR_LIMIT = None if IS_DUPLICATE else 5000
12
-
13
  CUDA_AVAILABLE = torch.cuda.is_available()
 
 
 
 
 
14
  models = {gpu: KModel().to('cuda' if gpu else 'cpu').eval() for gpu in [False] + ([True] if CUDA_AVAILABLE else [])}
15
  pipelines = {lang_code: KPipeline(lang_code=lang_code, model=False) for lang_code in 'ab'}
16
  pipelines['a'].g2p.lexicon.golds['kokoro'] = 'kˈOkəɹO'
 
1
  import spaces
 
 
2
  from kokoro import KModel, KPipeline
3
  import gradio as gr
4
  import os
 
6
  import torch
7
 
8
  IS_DUPLICATE = not os.getenv('SPACE_ID', '').startswith('hexgrad/')
 
 
9
  CUDA_AVAILABLE = torch.cuda.is_available()
10
+ if not IS_DUPLICATE:
11
+ import misaki
12
+ print('DEBUG', CUDA_AVAILABLE, misaki.__version__)
13
+
14
+ CHAR_LIMIT = None if IS_DUPLICATE else 5000
15
  models = {gpu: KModel().to('cuda' if gpu else 'cpu').eval() for gpu in [False] + ([True] if CUDA_AVAILABLE else [])}
16
  pipelines = {lang_code: KPipeline(lang_code=lang_code, model=False) for lang_code in 'ab'}
17
  pipelines['a'].g2p.lexicon.golds['kokoro'] = 'kˈOkəɹO'