numpy==1.26.4
scipy
lxml
pydub
fastapi
soundfile
# omegaconf: pinned below as omegaconf~=2.3.0 (duplicate unpinned entry removed)
pypinyin
vocos
pandas
vector_quantize_pytorch
einops
transformers~=4.41.1
omegaconf~=2.3.0
tqdm
# torch
# torchvision
# torchaudio
gradio
emojiswitch
python-dotenv
zhon
mistune==3.0.2
cn2an
# audio_denoiser
python-box
ftfy
librosa
pyrubberband
https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu118torch1.12cxx11abiFALSE-cp310-cp310-linux_x86_64.whl