Update app.py
app.py CHANGED
@@ -248,12 +248,29 @@ def query(prompt, model, custom_lora, is_negative=False, steps=35, cfg_scale=7,
             }
         }
 
-
-
-
-
-
-
+        # Increase the timeout and add retry logic
+        max_retries = 3
+        current_retry = 0
+
+        while current_retry < max_retries:
+            try:
+                response = requests.post(API_URL, headers=headers, json=payload, timeout=180)  # timeout increased to 180 seconds
+                response.raise_for_status()
+
+                image = Image.open(io.BytesIO(response.content))
+                print(f'Generation {key} completed successfully')
+                return image
+
+            except requests.exceptions.Timeout:
+                current_retry += 1
+                if current_retry < max_retries:
+                    print(f"Timeout occurred. Retrying... (Attempt {current_retry + 1}/{max_retries})")
+                    continue
+                else:
+                    raise gr.Error(f"Request timed out after {max_retries} attempts. The model might be busy, please try again later.")
+
+            except requests.exceptions.RequestException as e:
+                raise gr.Error(f"Request failed: {str(e)}")
 
     except requests.exceptions.RequestException as e:
         error_message = f"Request failed: {str(e)}"
@@ -269,8 +286,6 @@ def query(prompt, model, custom_lora, is_negative=False, steps=35, cfg_scale=7,
         raise gr.Error(f"Unexpected error: {str(e)}")
 
 
-
-
 def generate_grid(prompt, selected_models, custom_lora, negative_prompt, steps, cfg_scale, seed, strength, width, height, progress=gr.Progress()):
     if len(selected_models) > 4:
         raise gr.Error("Please select up to 4 models")