feat: Fix selected space loading in app.py
The code changes fix the loading of the selected space in the `infer` function of `app.py`. Previously, the code did not properly handle the case where the selected space failed to load. The updated code now tries the next available space from the list until one loads successfully, ensuring a working client is available before inference runs.
This commit builds on the recent commits that updated the selected space and integrated the FLUX.1 [schnell] model with Gradio. It addresses the failed space loading and improves the reliability of the model integration.
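For reference, the failover pattern the description refers to can be sketched on its own, outside of `app.py`. This is a minimal sketch, not the Space's code: it assumes only that the `gradio_client` package is installed, and the `load_first_available_client` helper is a hypothetical name introduced here for illustration. The space list mirrors the one in the diff below.

```python
# Minimal sketch (illustrative, not the Space's code) of the failover pattern:
# try each FLUX.1 [schnell] space in round-robin order until one loads.
from gradio_client import Client

flux_1_schell_spaces = [
    "https://black-forest-labs-flux-1-schnell.hf.space",
    "ChristianHappy/FLUX.1-schnell",
    "innoai/FLUX.1-schnell",
    "tuan2308/FLUX.1-schnell",
    "FiditeNemini/FLUX.1-schnell",
]

def load_first_available_client(spaces, start_index=0):
    """Return (client, index) for the first space that loads, starting at
    start_index and wrapping around; raise if every attempt fails."""
    for attempt in range(len(spaces)):
        index = (start_index + attempt) % len(spaces)
        space = spaces[index]
        try:
            client = Client(space)
            print(f"Loaded custom model from {space}")
            return client, index
        except Exception as e:  # the exact exception depends on why the space is unavailable
            print(f"Failed to load custom model from {space}: {e}")
    raise RuntimeError("Failed to load a client after trying all spaces.")

# Example: start from the last known-good index so a working space is reused.
# client, selected_space_index = load_first_available_client(flux_1_schell_spaces, 0)
```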
app.py
CHANGED
@@ -7,24 +7,34 @@ MAX_IMAGE_SIZE = 2048
 
 
 flux_1_schell_spaces = ["https://black-forest-labs-flux-1-schnell.hf.space", "ChristianHappy/FLUX.1-schnell", "innoai/FLUX.1-schnell", "tuan2308/FLUX.1-schnell", "FiditeNemini/FLUX.1-schnell"]
-flux_1_schnell_space = "https://black-forest-labs-flux-1-schnell.hf.space"
+# flux_1_schnell_space = "https://black-forest-labs-flux-1-schnell.hf.space"
 
 client = None
 job = None
 
-
+selected_space_index = gr.State(0);
 
-def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_inference_steps=4, progress=gr.Progress(track_tqdm=True)):
+def infer(selected_space_index, prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_inference_steps=4, progress=gr.Progress(track_tqdm=True)):
     global job
     global client
-
+
+    # Make sure selected_space_index is initialized before this block of code
+    max_attempts = len(flux_1_schell_spaces)
+    attempts = 0
+
+    while client is None and attempts < max_attempts:
         try:
-
-
+            selected_space = flux_1_schell_spaces[selected_space_index]
+            client = Client(selected_space)
+            print(f"Loaded custom model from {selected_space}")
         except ValueError as e:
-        print(f"Failed to load custom model: {e}")
+            print(f"Failed to load custom model from {selected_space}: {e}")
+            selected_space_index = (selected_space_index + 1) % len(flux_1_schell_spaces)
             client = None
-
+            attempts += 1
+
+    if client is None:
+        raise gr.Error("Failed to load client after trying all spaces.")
 
     try:
         job = client.submit(
@@ -40,7 +50,7 @@ def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_in
     except ValueError as e:
         raise gr.Error(e)
 
-    return result
+    return (selected_space_index, ) + result
 
 examples = [
     "a tiny astronaut hatching from an egg on the moon",
@@ -130,8 +140,8 @@ with gr.Blocks(css=css) as demo:
     gr.on(
         triggers=[run_button.click, prompt.submit],
         fn = infer,
-        inputs = [prompt, seed, randomize_seed, width, height, num_inference_steps],
-        outputs = [result, seed]
+        inputs = [selected_space_index, prompt, seed, randomize_seed, width, height, num_inference_steps],
+        outputs = [selected_space_index, result, seed]
     )
 
 demo.launch()
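The wiring change at the end of the diff is what lets the round-robin index persist between runs: `selected_space_index` is passed through `gr.on` as both an input and an output of `infer`, so the handler can return the index of the space that actually loaded. The snippet below is a trimmed, self-contained sketch of that pattern, not the Space's full `app.py`: the handler body and the component definitions are stand-ins, and the `gr.State` is created inside the `gr.Blocks` context, which is the usual placement.

```python
# Trimmed sketch of the gr.State round-trip used in the diff above; the
# handler below is a stand-in for the real infer, which submits the prompt
# to the selected FLUX.1 [schnell] space and returns the generated image.
import gradio as gr

def infer(selected_space_index, prompt, seed=42):
    image = None  # placeholder for the image returned by the remote space
    return selected_space_index, image, seed

with gr.Blocks() as demo:
    selected_space_index = gr.State(0)  # index into flux_1_schell_spaces
    prompt = gr.Textbox(label="Prompt")
    run_button = gr.Button("Run")
    result = gr.Image(label="Result")
    seed = gr.Number(value=42, label="Seed")

    gr.on(
        triggers=[run_button.click, prompt.submit],
        fn=infer,
        # The state goes in as an input and comes back as an output, so the
        # index of the last working space is remembered for the next run.
        inputs=[selected_space_index, prompt, seed],
        outputs=[selected_space_index, result, seed],
    )

demo.launch()
```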