# requirements.txt
# NOTE(review): removed non-requirement residue (file-size banner, git short
# hashes, and a copied line-number gutter) left over from a web-page scrape —
# those lines are not valid pip requirement specifiers.
omegaconf
# torch
# torchvision
transformers==4.44.0
sentencepiece==0.1.99
accelerate==0.33.0
einops==0.6.1
einops-exts==0.0.4
timm==0.6.13
# flash_attn
https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
scipy
gradio

monotonic_align
librosa==0.8.0
phonemizer
unidecode
hydra-core==1.3.2
pytorch_lightning==1.1.0
wget
wrapt
onnx
frozendict
inflect
braceexpand
webdataset
torch_stft
sox
editdistance
numpy==1.23.5