Update app.py
app.py CHANGED
@@ -99,25 +99,25 @@ def load_and_prepare_model():
         # low_cpu_mem_usage = False,
         add_watermarker=False,
     )
-    pipe.vae = vaeXL
+    pipe.vae = vaeXL #.to(torch.bfloat16)
     pipe.scheduler = sched
     #pipe.vae.do_resize=False
     #pipe.vae.vae_scale_factor=8
-    #pipe.to(device
-    pipe.to(
-    pipe.to(torch.bfloat16)
+    #pipe.to(device)
+    #pipe.to(torch.bfloat16)
     pipe.vae.set_default_attn_processor()
     print(f'init noise scale: {pipe.scheduler.init_noise_sigma}')
     pipe.watermark=None
     pipe.safety_checker=None
     pipe.unet = pipe.unet.to(memory_format=torch.contiguous_format)
     pipe.unet = torch.compile(pipe.unet, backend="hidet")
+    pipe.to(device=device, dtype=torch.bfloat16)
 
     return pipe
 
 # for compile
 hidet.option.parallel_build(True)
-hidet.option.parallel_tune(-1,
+hidet.option.parallel_tune(-1,8.0)
 torch._dynamo.config.suppress_errors = True
 torch._dynamo.disallow_in_graph(diffusers.models.attention.BasicTransformerBlock)
 # more search