torch==2.1.2
torchvision
transformers
xtuner[deepspeed]==0.1.23
timm==1.0.9
mmdet==3.3.0
hydra-core==1.3.2
ninja==1.11.1
decord==0.6.0
peft==0.12.0
numpy<2
# Prebuilt flash-attn wheel; per its filename it targets CUDA 12.3, torch 2.1,
# Python 3.10 (cp310), cxx11abi=TRUE, linux x86_64 -- swap the wheel if your
# environment differs.
https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.1cxx11abiTRUE-cp310-cp310-linux_x86_64.whl
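
# Typical installation (a sketch; assumes a Python 3.10 environment with a
# CUDA 12.x toolkit matching the flash-attn wheel above):
#   pip install -r requirements.txt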