multimodalart (HF Staff) committed on
Commit b9186cf · verified · 1 Parent(s): bec3822

Update app.py

Files changed (1)
  1. app.py: +3 −3
app.py CHANGED
@@ -1,3 +1,6 @@
+import sys
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+
 import subprocess
 subprocess.run('pip install flash-attn==2.7.4.post1 --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
 
@@ -5,14 +8,11 @@ subprocess.run('pip install flash-attn==2.7.4.post1 --no-build-isolation', env={
 import gradio as gr
 import torch
 import os
-import sys
 from huggingface_hub import snapshot_download
 from PIL import Image
 import random
 import numpy as np
 import spaces
-# Add project root to sys.path to allow importing 'wan'
-sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
 
 import wan
 from wan.configs import WAN_CONFIGS, SIZE_CONFIGS, MAX_AREA_CONFIGS, SUPPORTED_SIZES
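After this commit, the sys.path insertion runs at the very top of app.py, before the flash-attn install and before the local wan package is imported. A self-contained sketch of that prologue (my reading of the change, not the committed text: the committed version references os.path before os is imported further down, so the sketch pulls that import up) would be:

import os
import sys

# Put the Space's project root on sys.path first so the local 'wan' package
# is importable by everything below.
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

import subprocess

# Install a prebuilt flash-attn wheel, skipping the CUDA build step.
subprocess.run(
    'pip install flash-attn==2.7.4.post1 --no-build-isolation',
    env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"},
    shell=True,
)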