# requirements.txt
# Pinned dependencies. Note: torch==2.6.0 must stay in sync with the
# flash-attn wheel below (built for torch 2.6 / CUDA 12 / CPython 3.10).
torch==2.6.0
transformers==4.46.3
tokenizers==0.20.3
einops
addict
easydict
gradio>=4.0.0
spaces>=0.20.0
Pillow>=10.0.0
safetensors>=0.4.0
accelerate>=0.24.0
sentencepiece>=0.1.99
protobuf>=3.20.0
torchvision
flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.3/flash_attn-2.7.3+cu12torch2.6cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
hf_transfer
PyMuPDF