Commit 6924769 · Parent(s): 631a83a
xinjie.wang committed: update
embodied_gen/models/sr_model.py CHANGED
@@ -53,7 +53,7 @@ class ImageStableSR:
             torch_dtype=torch.float16,
         ).to(device)
         self.up_pipeline_x4.set_progress_bar_config(disable=True)
-        self.up_pipeline_x4.enable_model_cpu_offload()
+        # self.up_pipeline_x4.enable_model_cpu_offload()
 
     @spaces.GPU
     def __call__(
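For reference, a minimal sketch of the constructor pattern this hunk ends up with: the x4 upscaler is loaded once, moved to the accelerator, and kept resident instead of being offloaded between calls. The pipeline class, checkpoint name, and __call__ body below are assumptions, since the hunk only shows the tail of __init__.

import torch
import spaces
from diffusers import StableDiffusionUpscalePipeline
from PIL import Image


class ImageStableSR:
    def __init__(self, device: str = "cuda") -> None:
        # Assumed pipeline class and checkpoint; the diff only shows the lines
        # below the load call.
        self.up_pipeline_x4 = StableDiffusionUpscalePipeline.from_pretrained(
            "stabilityai/stable-diffusion-x4-upscaler",
            torch_dtype=torch.float16,
        ).to(device)
        self.up_pipeline_x4.set_progress_bar_config(disable=True)
        # Left commented out, as in the commit: the pipeline stays on `device`,
        # and the GPU is attached only inside @spaces.GPU-decorated calls.
        # self.up_pipeline_x4.enable_model_cpu_offload()

    @spaces.GPU
    def __call__(self, image: Image.Image, prompt: str = "") -> Image.Image:
        # Standard diffusers upscale call; returns the first upscaled image.
        return self.up_pipeline_x4(image=image, prompt=prompt).images[0]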
embodied_gen/models/text_model.py CHANGED
@@ -135,7 +135,7 @@ def build_text2img_ip_pipeline(
 
     pipe = pipe.to(device)
     pipe.image_encoder = pipe.image_encoder.to(device)
-    pipe.enable_model_cpu_offload()
+    # pipe.enable_model_cpu_offload()
     # pipe.enable_xformers_memory_efficient_attention()
     # pipe.enable_vae_slicing()
 
@@ -168,8 +168,8 @@ def build_text2img_pipeline(
         force_zeros_for_empty_prompt=False,
     )
     pipe = pipe.to(device)
-    pipe.enable_model_cpu_offload()
-    pipe.enable_xformers_memory_efficient_attention()
+    # pipe.enable_model_cpu_offload()
+    # pipe.enable_xformers_memory_efficient_attention()
 
     return pipe
 
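Both builders in this file now rely on pipe.to(device) alone. As a hedged illustration of why the two placement strategies are not combined, the sketch below contrasts a resident pipeline with an offloaded one; the SDXL checkpoint name is an assumption, and recent diffusers releases refuse to move a pipeline to CUDA once offload hooks are installed.

import torch
from diffusers import StableDiffusionXLPipeline

MODEL_ID = "stabilityai/stable-diffusion-xl-base-1.0"  # assumed checkpoint


def build_resident(device: str = "cuda") -> StableDiffusionXLPipeline:
    # Every submodule lives on the GPU: fastest per call, highest steady-state VRAM.
    pipe = StableDiffusionXLPipeline.from_pretrained(
        MODEL_ID, torch_dtype=torch.float16
    )
    return pipe.to(device)


def build_offloaded() -> StableDiffusionXLPipeline:
    # Accelerate hooks move each submodule to the GPU only while it runs:
    # lower VRAM, slower, and not to be combined with a manual .to("cuda").
    pipe = StableDiffusionXLPipeline.from_pretrained(
        MODEL_ID, torch_dtype=torch.float16
    )
    pipe.enable_model_cpu_offload()
    return pipe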
embodied_gen/models/texture_model.py CHANGED
@@ -106,6 +106,6 @@ def build_texture_gen_pipe(
     pipe.set_ip_adapter_scale([ip_adapt_scale])
 
     pipe = pipe.to(device)
-    pipe.enable_model_cpu_offload()
+    # pipe.enable_model_cpu_offload()
 
     return pipe
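The texture pipeline gets the same treatment. If a low-memory local run still wants the offload path, one option, purely a sketch and not part of the repository, is to gate it behind an environment flag rather than deleting the call; LOW_VRAM and the checkpoint name below are hypothetical.

import os

import torch
from diffusers import StableDiffusionXLPipeline


def build_pipe(device: str = "cuda") -> StableDiffusionXLPipeline:
    pipe = StableDiffusionXLPipeline.from_pretrained(
        "stabilityai/stable-diffusion-xl-base-1.0",  # assumed checkpoint
        torch_dtype=torch.float16,
    )
    if os.getenv("LOW_VRAM", "0") == "1":
        # Offload manages device placement itself, so skip the manual .to(device).
        pipe.enable_model_cpu_offload()
    else:
        pipe = pipe.to(device)
    return pipe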