Spaces: Running on Zero
Update inference.py
inference.py CHANGED (+3 -1)
@@ -13,6 +13,7 @@ import soundfile as sf
 import torch.nn as nn
 import numpy as np
 from assets.i18n.i18n import I18nAuto
+import spaces
 
 # Colab kontrolü
 try:
@@ -138,6 +139,7 @@ def run_folder(model, args, config, device, verbose: bool = False):
 
     print(i18n("elapsed_time").format(time.time() - start_time))
 
+@spaces.GPU
 def proc_folder(args):
     parser = argparse.ArgumentParser(description=i18n("proc_folder_description"))
     parser.add_argument("--model_type", type=str, default='mdx23c', help=i18n("model_type_help"))
@@ -171,7 +173,7 @@ def proc_folder(args):
         print(i18n("cuda_available"))
         device = f'cuda:{args.device_ids[0]}' if type(args.device_ids) == list else f'cuda:{args.device_ids}'
     elif torch.backends.mps.is_available():
-
+        device = "mps"
 
     print(i18n("using_device").format(device))
 
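Note on the change: on ZeroGPU ("Running on Zero") Spaces, a GPU is attached only while a function decorated with @spaces.GPU is executing, which is why the commit imports the spaces package and decorates proc_folder. A minimal sketch of that pattern is below; the function name and body are illustrative, not part of this commit.

    import spaces
    import torch

    @spaces.GPU  # ZeroGPU attaches a CUDA device only for the duration of this call
    def separate(input_path):
        # GPU-dependent work must happen inside the decorated function;
        # outside it, torch.cuda.is_available() is False on a ZeroGPU Space.
        device = "cuda" if torch.cuda.is_available() else "cpu"
        ...
        return input_path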