hsuwill000 committed on
Commit
ee70940
·
verified ·
1 Parent(s): 5c31923

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -6
app.py CHANGED
@@ -7,25 +7,26 @@ model_id = "hsuwill000/Fluently-v4-LCM-openvino"
7
  HIGH = 1024
8
  WIDTH = 512
9
 
10
- batch_size = -1
11
 
12
  pipe = OVStableDiffusionPipeline.from_pretrained(
13
  model_id,
14
  compile=False,
15
  ov_config={"CACHE_DIR": ""},
16
- torch_dtype=torch.int8, # fast
17
  safety_checker=None,
18
  use_safetensors=False,
19
  )
20
  print(pipe.scheduler.compatibles)
21
 
22
- pipe.reshape(batch_size=-1, height=HIGH, width=WIDTH, num_images_per_prompt=1)
23
 
24
  pipe.compile()
25
 
26
  prompt = ""
27
  negative_prompt = "EasyNegative, "
28
  num_inference_steps = 4
 
29
  def infer(prompt, negative_prompt, num_inference_steps):
30
  image = pipe(
31
  prompt=prompt,
@@ -71,20 +72,20 @@ with gr.Blocks(css=css) as demo:
71
  placeholder="Enter your prompt",
72
  container=False,
73
  )
74
- run_button = gr.Button("Run", scale=0)
75
 
76
  result = gr.Image(label="Result", show_label=False)
77
 
78
  gr.Examples(
79
  examples=examples,
80
  fn=infer,
81
- inputs=[prompt],
82
  outputs=[result]
83
  )
84
 
85
  run_button.click(
86
  fn=infer,
87
- inputs=[prompt],
88
  outputs=[result]
89
  )
90
 
 
7
  HIGH = 1024
8
  WIDTH = 512
9
 
10
+ batch_size = None # Or set it to a specific positive integer if needed
11
 
12
  pipe = OVStableDiffusionPipeline.from_pretrained(
13
  model_id,
14
  compile=False,
15
  ov_config={"CACHE_DIR": ""},
16
+ torch_dtype=torch.float16, # More standard dtype for speed
17
  safety_checker=None,
18
  use_safetensors=False,
19
  )
20
  print(pipe.scheduler.compatibles)
21
 
22
+ pipe.reshape(batch_size=batch_size, height=HIGH, width=WIDTH, num_images_per_prompt=1)
23
 
24
  pipe.compile()
25
 
26
  prompt = ""
27
  negative_prompt = "EasyNegative, "
28
  num_inference_steps = 4
29
+
30
  def infer(prompt, negative_prompt, num_inference_steps):
31
  image = pipe(
32
  prompt=prompt,
 
72
  placeholder="Enter your prompt",
73
  container=False,
74
  )
75
+ run_button = gr.Button("Run", scale=1)
76
 
77
  result = gr.Image(label="Result", show_label=False)
78
 
79
  gr.Examples(
80
  examples=examples,
81
  fn=infer,
82
+ inputs=[prompt, negative_prompt, num_inference_steps],
83
  outputs=[result]
84
  )
85
 
86
  run_button.click(
87
  fn=infer,
88
+ inputs=[prompt, negative_prompt, num_inference_steps],
89
  outputs=[result]
90
  )
91