jbilcke-hf (HF Staff) committed
Commit b631815 · verified · Parent: bd6c364

Update requirements.txt

Files changed (1): requirements.txt (+8 -4)
requirements.txt CHANGED
@@ -19,11 +19,15 @@ numpy==1.24.4
 # this version doesn't support Pytorch 2.7
 #flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.6cxx11abiFALSE-cp311-cp311-linux_x86_64.whl
 
-# but this one does
-#flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.8.0.post2/flash_attn-2.8.0.post2+cu12torch2.7cxx11abiTRUE-cp311-cp311-linux_x86_64.whl
+# this version supports Pytorch 2.7; for some reason the install works but the module cannot be found (I love Python)
+flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.8.0.post2/flash_attn-2.8.0.post2+cu12torch2.7cxx11abiFALSE-cp311-cp311-linux_x86_64.whl
+
+# this doesn't work because ", --no-build-isolation" isn't recognized
+# flash_attn==2.8.0.post2, --no-build-isolation
 
-# actually, let's just try this for now:
-flash_attn==2.8.0.post2, --no-build-isolation
+# this doesn't work because I think "--no-build-isolation" is mandatory
+# flash_attn==2.8.0.post2
+
 
 opencv-python>=4.9.0.80
 diffusers==0.31.0
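
Context on the commented-out attempts above: pip does not accept per-requirement options such as --no-build-isolation inside a requirements file, which is why the "flash_attn==2.8.0.post2, --no-build-isolation" line is rejected. And flash-attn generally does need that flag, because its build script imports torch, which is not visible inside pip's isolated build environment. A minimal sketch of the usual workaround, assuming the version pinned above and that torch is already installed, is to keep flash-attn out of requirements.txt and install it in a separate step:

pip install -r requirements.txt
pip install flash-attn==2.8.0.post2 --no-build-isolation

Installing from a prebuilt wheel URL, as this commit does, sidesteps the build entirely; the trade-off is that the wheel's tags (CUDA version, torch version, cxx11abi flag, CPython version) must exactly match the runtime environment.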