Update app.py
app.py CHANGED
@@ -15,8 +15,8 @@ from PIL import Image
 import torch
 from diffusers import AutoencoderKL, StableDiffusionXLPipeline
 from diffusers import EulerAncestralDiscreteScheduler
-
-
+from diffusers import DPMSolverMultistepScheduler
+
 from typing import Tuple
 import paramiko
 import gc
@@ -112,7 +112,7 @@ def load_and_prepare_model(model_id):
 vae = AutoencoderKL.from_pretrained('ford442/Juggernaut-XI-v11-fp32',subfolder='vae')
 # vae = AutoencoderKL.from_pretrained("BeastHF/MyBack_SDXL_Juggernaut_XL_VAE/MyBack_SDXL_Juggernaut_XL_VAE_V10(version_X).safetensors",safety_checker=None).to(torch.bfloat16)
 #sched = EulerAncestralDiscreteScheduler.from_pretrained("SG161222/RealVisXL_V5.0", subfolder='scheduler',beta_schedule="scaled_linear", steps_offset=1,timestep_spacing="trailing"))
-sched = EulerAncestralDiscreteScheduler.from_pretrained("SG161222/RealVisXL_V5.0", subfolder='scheduler', steps_offset=1,timestep_spacing="trailing")
+#sched = EulerAncestralDiscreteScheduler.from_pretrained("SG161222/RealVisXL_V5.0", subfolder='scheduler', steps_offset=1,timestep_spacing="trailing")
 #sched = EulerAncestralDiscreteScheduler.from_pretrained('ford442/RealVisXL_V5.0_BF16', subfolder='scheduler',beta_schedule="scaled_linear", beta_start=0.00085, beta_end=0.012, steps_offset=1,use_karras_sigmas=True)
 # sched = EulerAncestralDiscreteScheduler.from_config('ford442/RealVisXL_V5.0_BF16', beta_schedule="scaled_linear", beta_start=0.00085, beta_end=0.012, steps_offset=1)
 pipeX = StableDiffusionXLPipeline.from_pretrained("SG161222/RealVisXL_V5.0")
@@ -139,9 +139,9 @@ def load_and_prepare_model(model_id):
 
 #sched = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config, beta_schedule="scaled_linear",use_karras_sigmas=True, algorithm_type="dpmsolver++")
 #pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config, beta_schedule="scaled_linear", beta_start=0.00085, beta_end=0.012, steps_offset=1)
-
+pipeline.scheduler = DPMSolverMultistepScheduler.from_pretrained('SG161222/RealVisXL_V5.0', algorithm_type='sde-dpmsolver++')
 
-pipe.scheduler = sched
+#pipe.scheduler = sched
 pipe.vae=vae.to(torch.bfloat16)
 pipe.unet=pipeX.unet.to(torch.bfloat16)
 #pipe.scheduler=EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config, beta_schedule="scaled_linear", beta_start=0.00085, beta_end=0.012, steps_offset=1)
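For context, a minimal self-contained sketch of the usual diffusers pattern for this kind of scheduler swap. The variable names and the use of from_config below are illustrative only, not the committed code (which assigns to `pipeline` and loads from the repo root):

# Hedged sketch, not the committed app.py: attach an SDE-DPM-Solver++ scheduler
# to an SDXL pipeline by reusing the checkpoint's existing scheduler config.
import torch
from diffusers import StableDiffusionXLPipeline, DPMSolverMultistepScheduler

pipe = StableDiffusionXLPipeline.from_pretrained(
    "SG161222/RealVisXL_V5.0", torch_dtype=torch.bfloat16
)
pipe.scheduler = DPMSolverMultistepScheduler.from_config(
    pipe.scheduler.config, algorithm_type="sde-dpmsolver++"
)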
@@ -229,7 +229,7 @@ def generate_30(
 "guidance_scale": guidance_scale,
 "num_inference_steps": num_inference_steps,
 "generator": generator,
-
+"timesteps": sampling_schedule,
 "output_type": "pil",
 }
 if use_resolution_binning:
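The generate_30/60/90 calls now forward an explicit `timesteps` list. `sampling_schedule` itself is defined elsewhere in app.py and is not shown in this diff; below is a hedged sketch of how such a schedule is typically passed to the pipeline call, with purely illustrative values:

# Hedged sketch, not the committed code: supply an explicit timestep schedule
# to the SDXL pipeline call. The real `sampling_schedule` lives elsewhere in app.py.
import torch

sampling_schedule = [999, 845, 730, 587, 443, 310, 193, 116, 53, 13]  # example values
options = {
    "prompt": "a scenic mountain lake at sunrise",  # example prompt
    "guidance_scale": 5.0,
    "num_inference_steps": len(sampling_schedule),
    "generator": torch.Generator(device="cpu").manual_seed(0),
    # Requires a scheduler whose set_timesteps accepts custom timesteps;
    # the step count is then derived from the length of this list.
    "timesteps": sampling_schedule,
    "output_type": "pil",
}
image = pipe(**options).images[0]  # `pipe` as prepared by load_and_prepare_model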
@@ -296,7 +296,7 @@ def generate_60(
 "guidance_scale": guidance_scale,
 "num_inference_steps": num_inference_steps,
 "generator": generator,
-
+"timesteps": sampling_schedule,
 "output_type": "pil",
 }
 if use_resolution_binning:
@@ -363,7 +363,7 @@ def generate_90(
 "guidance_scale": guidance_scale,
 "num_inference_steps": num_inference_steps,
 "generator": generator,
-
+"timesteps": sampling_schedule,
 "output_type": "pil",
 }
 if use_resolution_binning: