print available memory
app.py
CHANGED
@@ -92,6 +92,8 @@ def inference(video):
 
     model = torch.hub.load("PeterL1n/RobustVideoMatting", "mobilenetv3")
     if torch.cuda.is_available():
+        free_memory = get_free_memory_gb()
+        print(f"Available video memory: {free_memory} GB")
         model = model.cuda()
 
     convert_video(
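The helper get_free_memory_gb() is presumably defined elsewhere in app.py; its implementation is not part of this diff. A minimal sketch of one way such a helper could work, assuming torch.cuda.mem_get_info is available (PyTorch 1.10+):

import torch

def get_free_memory_gb():
    # Hypothetical helper (not shown in this diff): report free memory
    # on the current CUDA device in gigabytes.
    free_bytes, _total_bytes = torch.cuda.mem_get_info()
    return free_bytes / (1024 ** 3)

Calling it only inside the if torch.cuda.is_available(): branch, as the diff does, avoids errors when no GPU is present.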