Spaces: Running on Zero

Commit 98dd72d
Parent(s): 453a650

code update for duration of ZeroGPU

Files changed: tts/gradio_api.py (+1 -2)

tts/gradio_api.py CHANGED
@@ -20,6 +20,7 @@ import gradio as gr
 import traceback
 from tts.infer_cli import MegaTTS3DiTInfer, convert_to_wav, cut_wav
 
+os.system('huggingface-cli download ByteDance/MegaTTS3 --local-dir ./checkpoints --repo-type model')
 CUDA_AVAILABLE = torch.cuda.is_available()
 infer_pipe = MegaTTS3DiTInfer(device='cuda' if CUDA_AVAILABLE else 'cpu')
 
@@ -59,8 +60,6 @@ def main(inp_audio, inp_npy, inp_text, infer_timestep, p_w, t_w, processes, inpu
 
 
 if __name__ == '__main__':
-    os.system('huggingface-cli download ByteDance/MegaTTS3 --local-dir ./checkpoints --repo-type model')
-
     mp.set_start_method('spawn', force=True)
     mp_manager = mp.Manager()
 
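In effect, the commit moves the one-time checkpoint download out of the __main__ guard and up to module import time, so the checkpoints are already on disk before MegaTTS3DiTInfer is constructed and, presumably (given the commit message), the slow download happens during Space startup rather than counting toward the ZeroGPU duration of a GPU call. A minimal sketch of the resulting top of tts/gradio_api.py after this commit; the os, torch, and multiprocessing imports are assumed to exist earlier in the file and are shown here only to keep the sketch self-contained:

# Sketch of the module layout after this commit (imports assumed from the
# original script; MegaTTS3DiTInfer comes from the MegaTTS3 repository).
import os
import traceback
import multiprocessing as mp

import torch
from tts.infer_cli import MegaTTS3DiTInfer, convert_to_wav, cut_wav

# Download the checkpoints once, at import time, before the inference
# pipeline is built. On a ZeroGPU Space this runs while the app starts up,
# not inside a GPU-allocated request.
os.system('huggingface-cli download ByteDance/MegaTTS3 --local-dir ./checkpoints --repo-type model')

CUDA_AVAILABLE = torch.cuda.is_available()
infer_pipe = MegaTTS3DiTInfer(device='cuda' if CUDA_AVAILABLE else 'cpu')

if __name__ == '__main__':
    # The download call was removed from this block by the commit above;
    # only the multiprocessing setup remains here.
    mp.set_start_method('spawn', force=True)
    mp_manager = mp.Manager()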