Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -13,11 +13,19 @@ import gradio as gr
 import numpy as np
 from PIL import Image
 import torch
-from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler
+from diffusers import AutoencoderKL, StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler
 from typing import Tuple
 from transformers import AutoTokenizer, AutoModelForCausalLM
 import paramiko
 
+torch.backends.cuda.matmul.allow_tf32 = True
+torch.backends.cuda.matmul.allow_bf16_reduced_precision_reduction = True
+torch.backends.cuda.matmul.allow_fp16_reduced_precision_reduction = True
+torch.backends.cudnn.allow_tf32 = True
+torch.backends.cudnn.deterministic = False
+torch.backends.cudnn.benchmark = True
+torch.set_float32_matmul_precision("highest")
+
 FTP_HOST = "1ink.us"
 FTP_USER = "ford442"
 FTP_PASS = "GoogleBez12!"
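The changed import line suggests the SDXL pipeline is now assembled with a separately loaded VAE, although the loading code itself sits outside this hunk. A minimal sketch of that pattern, assuming illustrative model ids (the Space's actual checkpoints are not shown in this diff):

import torch
from diffusers import AutoencoderKL, StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler

# Assumption: both model ids below are placeholders for whatever app.py actually loads.
vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",
    vae=vae,                    # hand the separately loaded VAE to the pipeline
    torch_dtype=torch.float16,
)
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
pipe.to("cuda")

The added torch.backends block opts into TF32 and reduced-precision reductions and enables cuDNN autotuning for speed. Note that torch.set_float32_matmul_precision("highest") keeps float32 matmuls at full precision, so it effectively overrides the matmul allow_tf32 flag set a few lines earlier, while the cuDNN TF32 and benchmark settings still apply.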
@@ -141,7 +149,7 @@ def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
     seed = random.randint(0, MAX_SEED)
     return seed
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=60)
 def generate(
     model_choice: str,
     prompt: str,
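On ZeroGPU hardware, the spaces.GPU decorator attaches a GPU only while the decorated function runs; duration=60 sets the per-call allocation window to roughly 60 seconds. A minimal sketch of the pattern, with a hypothetical body standing in for the Space's much larger generate():

import spaces  # provided on Hugging Face ZeroGPU Spaces

@spaces.GPU(duration=60)  # request a GPU for up to ~60 s per call, released afterwards
def generate(prompt: str):
    # Hypothetical body: the real generate() in app.py takes many more arguments,
    # including model_choice. `pipe` is assumed to be the pipeline from the sketch above.
    return pipe(prompt, num_inference_steps=30).images[0]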
@@ -356,7 +364,7 @@ with gr.Blocks(css=css, theme="bethecloud/storj_theme") as demo:
 minimum=10,
 maximum=1000,
 step=10,
-value=
+value=275,
 )
 
 gr.Examples(
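The last hunk only changes a Gradio slider default to 275 within the existing 10-1000 range (step 10). For reference, a minimal sketch of such a component; the label and the surrounding layout are assumptions, since they fall outside the hunk:

import gradio as gr

with gr.Blocks() as demo:
    # Assumption: the real slider's label and its placement inside the Blocks layout are not shown in this diff.
    slider = gr.Slider(
        label="value",   # hypothetical label
        minimum=10,
        maximum=1000,
        step=10,
        value=275,       # new default introduced by this commit
    )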