gokaygokay committed on
Commit 4348c83 · verified · 1 Parent(s): 1ebaa97

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -22,7 +22,7 @@ pipe.fuse_lora()
 MAX_SEED = 2**32-1
 
 @spaces.GPU(duration=75)
-def generate_image(prompt, steps=28, seed=None, cfg_scale=2.8, width=1024, height=1024, lora_scale=0.5):
+def generate_image(prompt, steps=28, seed=None, cfg_scale=3.5, width=1024, height=1024, lora_scale=0.5):
     if seed is None:
         seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device="cuda").manual_seed(seed)
@@ -38,10 +38,10 @@ def generate_image(prompt, steps=28, seed=None, cfg_scale=2.8, width=1024, heigh
     ).images[0]
     return image
 
-def run_lora(prompt, cfg_scale=2.8, steps=28, randomize_seed=True, seed=None, width=1024, height=1024, lora_scale=0.5):
+def run_lora(prompt, cfg_scale=3.5, steps=28, randomize_seed=True, seed=None, width=1024, height=1024, lora_scale=0.5):
     # Handle the case when only prompt is provided (for Examples)
     if isinstance(prompt, str) and all(param is None for param in [cfg_scale, steps, randomize_seed, seed, width, height, lora_scale]):
-        cfg_scale = 2.8
+        cfg_scale = 3.5
         steps = 28
         randomize_seed = True
         seed = None
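
For reference, a minimal, self-contained sketch (not part of the commit) of what the updated fallback branch in run_lora does: when only a prompt is supplied and every other argument arrives as None (the Examples path), the defaults are restored with the new cfg_scale of 3.5 instead of 2.8. The stub below stands in for the real function; the image-generation call and the lines after "seed = None" are outside this hunk, so the remaining restored values are assumptions.

# Hypothetical stand-in for run_lora; only the default-restoring branch is shown.
def run_lora(prompt, cfg_scale=3.5, steps=28, randomize_seed=True,
             seed=None, width=1024, height=1024, lora_scale=0.5):
    # If every non-prompt argument is explicitly None, fall back to the
    # defaults introduced by this commit (cfg_scale 3.5, previously 2.8).
    if isinstance(prompt, str) and all(
        param is None
        for param in [cfg_scale, steps, randomize_seed, seed, width, height, lora_scale]
    ):
        cfg_scale = 3.5
        steps = 28
        randomize_seed = True
        seed = None
        width, height, lora_scale = 1024, 1024, 0.5  # assumed; these lines are not shown in the hunk
    return cfg_scale, steps, randomize_seed, width, height, lora_scale

# Called the way the Examples path would call it, with everything but the prompt None:
print(run_lora("an astronaut riding a horse", None, None, None, None, None, None, None))
# -> (3.5, 28, True, 1024, 1024, 0.5)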