ford442 committed on
Commit
2c86017
·
1 Parent(s): 42336a6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -3
app.py CHANGED
@@ -63,7 +63,7 @@ MODEL_OPTIONS = {
63
 
64
  MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "4096"))
65
  USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE", "0") == "1"
66
- ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD", "0") == "1"
67
  BATCH_SIZE = int(os.getenv("BATCH_SIZE", "1"))
68
 
69
  device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
@@ -114,11 +114,14 @@ def load_and_prepare_model(model_id):
114
  model_id,
115
  torch_dtype=torch.bfloat16,
116
  add_watermarker=False,
 
117
  vae=vae,
118
  )
119
  pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
120
- #pipe.to(torch.bfloat16)
121
  pipe.to('cuda')
 
 
122
  return pipe
123
 
124
  # Preload and compile both models
@@ -182,7 +185,7 @@ def generate(
182
  global models
183
  pipe = models[model_choice]
184
  seed = int(randomize_seed_fn(seed, randomize_seed))
185
- generator = torch.Generator(device='cpu').manual_seed(seed)
186
 
187
  prompt, negative_prompt = apply_style(style_selection, prompt, negative_prompt)
188
 
 
63
 
64
  MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "4096"))
65
  USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE", "0") == "1"
66
+ ENABLE_CPU_OFFLOAD = 0
67
  BATCH_SIZE = int(os.getenv("BATCH_SIZE", "1"))
68
 
69
  device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 
114
  model_id,
115
  torch_dtype=torch.bfloat16,
116
  add_watermarker=False,
117
+ use_safetensors=True,
118
  vae=vae,
119
  )
120
  pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
121
+ pipe.to(torch.bfloat16)
122
  pipe.to('cuda')
123
+ if ENABLE_CPU_OFFLOAD:
124
+ pipe.enable_model_cpu_offload()
125
  return pipe
126
 
127
  # Preload and compile both models
 
185
  global models
186
  pipe = models[model_choice]
187
  seed = int(randomize_seed_fn(seed, randomize_seed))
188
+ generator = torch.Generator(device='cuda').manual_seed(seed)
189
 
190
  prompt, negative_prompt = apply_style(style_selection, prompt, negative_prompt)
191