ford442 committed on
Commit
20b52c9
·
1 Parent(s): f34d30a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -14,9 +14,9 @@ import numpy as np
14
  from PIL import Image
15
  import torch
16
  from diffusers import AutoencoderKL, StableDiffusionXLPipeline
17
- #from diffusers import EulerAncestralDiscreteScheduler
18
  #from diffusers import DPMSolverMultistepScheduler
19
- from diffusers import DDIMScheduler
20
  #from diffusers import AutoencoderKL
21
  from typing import Tuple
22
  #from transformers import AutoTokenizer, AutoModelForCausalLM
@@ -120,9 +120,9 @@ def load_and_prepare_model(model_id):
120
  use_safetensors=True,
121
  vae=vae,
122
  )
123
- #sched = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config, beta_schedule="squaredcos_cap_v2",use_karras_sigmas=True, algorithm_type="dpmsolver++")
124
  #sched = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config, beta_schedule="linear", algorithm_type="dpmsolver++")
125
- sched = DDIMScheduler.from_config(pipe.scheduler.config)
126
  pipe.scheduler=sched
127
  #pipe.to(device=device, dtype=torch.bfloat16)
128
  pipe.to(device=device)
 
14
  from PIL import Image
15
  import torch
16
  from diffusers import AutoencoderKL, StableDiffusionXLPipeline
17
+ from diffusers import EulerAncestralDiscreteScheduler
18
  #from diffusers import DPMSolverMultistepScheduler
19
+ #from diffusers import DDIMScheduler
20
  #from diffusers import AutoencoderKL
21
  from typing import Tuple
22
  #from transformers import AutoTokenizer, AutoModelForCausalLM
 
120
  use_safetensors=True,
121
  vae=vae,
122
  )
123
+ sched = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config, beta_schedule="squaredcos_cap_v2",use_karras_sigmas=True, algorithm_type="dpmsolver++")
124
  #sched = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config, beta_schedule="linear", algorithm_type="dpmsolver++")
125
+ #sched = DDIMScheduler.from_config(pipe.scheduler.config)
126
  pipe.scheduler=sched
127
  #pipe.to(device=device, dtype=torch.bfloat16)
128
  pipe.to(device=device)