# latest stable version (as of June 26th 2025)
torch==2.7.1
torchvision==0.22.1
# Previously we used an older version of flash-attention which didn't have a binary yet for PyTorch 2.7,
# so we used PyTorch 2.6:
# torch==2.6.0
# torch>=2.4.0
# torchvision==0.21.0
# torchvision>=0.19.0
torchdata==0.10.1
torchao==0.9.0
#numpy>=1.26.4
numpy==1.24.4
# this version doesn't support PyTorch 2.7:
#flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.6cxx11abiFALSE-cp311-cp311-linux_x86_64.whl
# this version supports PyTorch 2.7; for some reason install works but the module cannot be found (I love Python)
flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.8.0.post2/flash_attn-2.8.0.post2+cu12torch2.7cxx11abiFALSE-cp311-cp311-linux_x86_64.whl
# this doesn't work because ", --no-build-isolation" isn't recognized:
# flash_attn==2.8.0.post2, --no-build-isolation
# this doesn't work because I think "--no-build-isolation" is mandatory:
# flash_attn==2.8.0.post2
opencv-python>=4.9.0.80
diffusers==0.31.0
transformers>=4.49.0
tokenizers>=0.20.3
accelerate>=1.1.1
tqdm
imageio
easydict
ftfy
dashscope
imageio-ffmpeg
wandb
omegaconf
einops
av==13.1.0
git+https://github.com/openai/CLIP.git
open_clip_torch
starlette
pycocotools
lmdb
matplotlib
sentencepiece
pydantic==2.10.6
scikit-image
huggingface_hub[cli]
dominate
nvidia-tensorrt
onnx
onnxruntime
onnxscript
onnxconverter_common
flask
flask-socketio