RohitGandikota committed on
Commit b47ddec · 1 Parent(s): a476ea0

fixing multiple model inference

Files changed (1): app.py (+6 -0)
app.py CHANGED
@@ -67,6 +67,12 @@ class Demo:
         self.device = 'cuda'
         self.weight_dtype = torch.bfloat16
         model_id = "stabilityai/sdxl-turbo"
+        model_id = 'stabilityai/stable-diffusion-xl-base-1.0'
+        pipe = StableDiffusionXLPipeline.from_pretrained(model_id, torch_dtype=self.weight_dtype).to(self.device)
+        pipe = None
+        del pipe
+        torch.cuda.empty_cache()
+
         self.current_model = 'SDXL Turbo'
         euler_anc = EulerAncestralDiscreteScheduler.from_pretrained(model_id, subfolder="scheduler")
         self.pipe = StableDiffusionXLPipeline.from_pretrained(model_id, scheduler=euler_anc, torch_dtype=self.weight_dtype).to(self.device)
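
For context, a minimal standalone sketch of the load-release-reload pattern the hunk introduces, assuming the intent is to download and cache the SDXL base weights up front and then free the GPU before loading the pipeline actually used for inference (variable names below are illustrative, not taken from app.py):

import torch
from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler

device = 'cuda'
weight_dtype = torch.bfloat16

# Warm-up load of the second model so its weights are cached locally
# (assumed intent of the added lines), then drop it from GPU memory.
warmup_id = 'stabilityai/stable-diffusion-xl-base-1.0'
pipe = StableDiffusionXLPipeline.from_pretrained(warmup_id, torch_dtype=weight_dtype).to(device)
del pipe
torch.cuda.empty_cache()

# Load the pipeline that is actually served.
model_id = 'stabilityai/sdxl-turbo'
scheduler = EulerAncestralDiscreteScheduler.from_pretrained(model_id, subfolder='scheduler')
pipe = StableDiffusionXLPipeline.from_pretrained(model_id, scheduler=scheduler, torch_dtype=weight_dtype).to(device)

Dropping the reference and then calling torch.cuda.empty_cache() lets PyTorch return the warm-up pipeline's VRAM to the device before the second pipeline is moved onto the GPU.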