shichen1231 committed on
Commit 0b96e40 · 1 Parent(s): c74fa4c

Update app.py

Files changed (1)
  1. app.py +20 -2
app.py CHANGED
@@ -33,6 +33,11 @@ def infer(
 
    conditioning_image_raw = Image.fromarray(conditioning_image)
    conditioning_image = conditioning_image_raw.convert('L')
+   w = conditioning_image.width
+   h = conditioning_image.height
+   ratio = 768 / max(w, h)
+   w = int(w * ratio)
+   h = int(h * ratio)
    g_cpu = torch.Generator()
 
    if seed == -1:
@@ -43,8 +48,8 @@
    output_image = pipe(
        prompt,
        conditioning_image,
-       height=size,
-       width=size,
+       height=h,
+       width=w,
        num_inference_steps=num_inference_steps,
        generator=generator,
        negative_prompt=negative_prompt,
@@ -131,6 +136,19 @@ with gr.Blocks() as demo:
        ],
        outputs=output
    )
+   gr.Examples(
+       examples=[
+           ["masterpiece, best quality, High contrast,A bamboo forest, a stream,The rising sun, colorful,", "((nsfw)),(blush),(bare),(worst quality:2, low quality:2),(zombie, sketch, interlocked fingers), greyscale", "./conditioning_images/ty1.jpg", "./conditioning_images/tyt1.jpg"],
+           ["masterpiece, best quality, High contrast,A bamboo forest, a stream,The rising sun, colorful,", "((nsfw)),(blush),(bare),(worst quality:2, low quality:2),(zombie, sketch, interlocked fingers), greyscale", "./conditioning_images/ty2.jpg", "./conditioning_images/tyt2.jpg"],
+
+       ],
+       inputs=[
+           prompt, negative_prompt, conditioning_image
+       ],
+       outputs=output,
+       fn=infer,
+       cache_examples=True,
+   )
    gr.Markdown(
        """
        * [Dataset](https://huggingface.co/datasets/ioclab/grayscale_image_aesthetic_3M) Note that this was handled extra, and a preview version of the processing is here
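For reference, the sizing change scales the conditioning image so its longer side is 768 px while keeping the aspect ratio, and passes the resulting width/height to the pipeline. Below is a minimal standalone sketch of that computation; the helper name fit_to_768, the placeholder file path, and the rounding down to a multiple of 8 are illustrative assumptions, not part of this commit.

    from PIL import Image

    def fit_to_768(path: str) -> tuple[Image.Image, int, int]:
        """Load an image, convert to grayscale, and compute a target size
        whose longer side is 768 px (same idea as the change in infer())."""
        img = Image.open(path).convert("L")        # same 'L' conversion as in app.py
        ratio = 768 / max(img.width, img.height)   # scale factor for the longer side
        w = int(img.width * ratio)
        h = int(img.height * ratio)
        # Assumption: diffusion pipelines generally expect dimensions divisible by 8,
        # so round down here; the commit itself passes w/h to the pipe unrounded.
        w, h = (w // 8) * 8, (h // 8) * 8
        return img, w, h

    if __name__ == "__main__":
        # "example.jpg" is a placeholder path, not a file from this repo.
        image, width, height = fit_to_768("example.jpg")
        print(width, height)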