Update app.py
app.py CHANGED
@@ -134,12 +134,13 @@ def load_and_prepare_model(model_id):
     #pipe.to(dtype=torch.bfloat16)
     #pipe.unet = pipeX.unet
     pipe.scheduler=EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config, beta_schedule="scaled_linear", beta_start=0.00085, beta_end=0.012, steps_offset=1)
-    pipe.unet.to(torch.bfloat16)
+    #pipe.unet.to(torch.bfloat16)
     #pipe.to(device)
-    pipe.to(torch.device("cuda:0"))
-    #pipe.vae.to(torch.bfloat16)
     #pipe.to(torch.bfloat16)
-
+
+    #pipe.to(torch.device("cuda:0"))
+    #pipe.vae.to(torch.bfloat16)
+    pipe.to(device, torch.bfloat16)
     #del pipeX
     #sched = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config, beta_schedule="scaled_linear", algorithm_type="dpmsolver++")
     #sched = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config, beta_schedule="linear", algorithm_type="dpmsolver++")
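Net effect of the hunk: the explicit `pipe.unet.to(torch.bfloat16)` cast and the hard-coded `pipe.to(torch.device("cuda:0"))` placement are commented out and replaced by a single `pipe.to(device, torch.bfloat16)` call, which moves every pipeline component to the target device and casts it to bfloat16 in one step. The sketch below is a minimal reconstruction of that load path under stated assumptions: the hunk does not show how `pipe`, `device`, or `model_id` are defined, so the `DiffusionPipeline.from_pretrained` load and the `device` definition are illustrative guesses, not the Space's actual code.

import torch
from diffusers import DiffusionPipeline, EulerAncestralDiscreteScheduler

# Assumption: the surrounding module defines a device like this.
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

def load_and_prepare_model(model_id):
    # Assumption: the checkpoint is loaded via the generic diffusers pipeline loader.
    pipe = DiffusionPipeline.from_pretrained(model_id)
    # Same scheduler swap as in the diff: Euler-ancestral with scaled_linear betas.
    pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(
        pipe.scheduler.config,
        beta_schedule="scaled_linear",
        beta_start=0.00085,
        beta_end=0.012,
        steps_offset=1,
    )
    # One call replaces the earlier unet.to(bfloat16) / to("cuda:0") pair:
    # device placement and bfloat16 casting for all components at once.
    pipe.to(device, torch.bfloat16)
    return pipe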