Eyalgut committed
Commit dceaee5 · verified · 1 Parent(s): 63f4ca3

Update app.py

Files changed (1)
  1. app.py +4 -2
app.py CHANGED
@@ -23,6 +23,7 @@ del vae
 
 
 pipe.force_zeros_for_empty_prompt = False
+negative_prompt= "Logo,Watermark,Text,Ugly,Morbid,Extra fingers,Poorly drawn hands,Mutation,Blurry,Extra limbs,Gross proportions,Missing arms,Mutated hands,Long neck,Duplicate,Mutilated,Mutilated hands,Poorly drawn face,Deformed,Bad anatomy,Cloned face,Malformed limbs,Missing legs,Too many fingers"
 
 print("Optimizing BRIA 2.2 HR - this could take a while")
 t=time.time()
@@ -34,7 +35,8 @@ with torch.no_grad():
         prompt="an apple",
         num_inference_steps=30,
         width=1536,
-        height=1536
+        height=1536,
+        negative_prompt=negative_prompt
     )
 
 # This will avoid future compilations on different shapes
@@ -68,7 +70,7 @@ def infer(prompt,seed,resolution):
 
     w,h = resolution.split()
    w,h = int(w),int(h)
-    image = pipe(prompt,num_inference_steps=30,generator=generator,width=w,height=h).images[0]
+    image = pipe(prompt,num_inference_steps=30,generator=generator,width=w,height=h,negative_prompt=negative_prompt).images[0]
     print(f'gen time is {time.time()-t} secs')
 
     # Future
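
For readers reproducing the change outside this Space: the commit defines a fixed negative prompt once and passes it both to the warm-up generation (per the existing comment, run so that later calls avoid recompilation on different shapes) and to the generation call inside infer(). Below is a minimal, self-contained sketch of that call pattern with a diffusers text-to-image pipeline; the AutoPipelineForText2Image loading, the repo id, the seed handling, and the abridged negative prompt are illustrative assumptions, not code from this diff.

# Minimal sketch of the call pattern introduced by this commit, assuming a
# diffusers text-to-image pipeline. The repo id, the AutoPipelineForText2Image
# loading, and the seed handling are illustrative assumptions; app.py has its
# own loading and optimization code that this diff does not show.
import time
import torch
from diffusers import AutoPipelineForText2Image

pipe = AutoPipelineForText2Image.from_pretrained(
    "briaai/BRIA-2.2-HR",        # placeholder repo id (assumption)
    torch_dtype=torch.float16,
).to("cuda")
pipe.force_zeros_for_empty_prompt = False

# Abridged version of the fixed negative prompt added in this commit.
negative_prompt = "Logo,Watermark,Text,Ugly,Blurry,Extra limbs,Bad anatomy"

def infer(prompt, seed, resolution):
    generator = torch.Generator("cuda").manual_seed(seed)  # assumed seed handling
    t = time.time()
    w, h = resolution.split()
    w, h = int(w), int(h)
    image = pipe(
        prompt,
        num_inference_steps=30,
        generator=generator,
        width=w,
        height=h,
        negative_prompt=negative_prompt,  # the argument this commit threads through
    ).images[0]
    print(f"gen time is {time.time() - t} secs")
    return image

image = infer("an apple", seed=0, resolution="1536 1536")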