# Pinned Python dependencies for this project.
# Note: the flash-attn wheel below is built for CUDA 12.3, torch 2.3, and CPython 3.10 (linux x86_64).
torch==2.3.1
torchvision==0.18.1
transformers==4.42.3
xtuner[deepspeed]==0.1.23
timm==1.0.9
mmdet==3.3.0
hydra-core==1.3.2
ninja==1.11.1
decord==0.6.0
peft==0.12.0
https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.3cxx11abiTRUE-cp310-cp310-linux_x86_64.whl