Update
app.py CHANGED
@@ -86,11 +86,11 @@ class HFEmbedder(nnx.Module):
 
 def load_t5(device: str | torch.device = "cuda", max_length: int = 512) -> HFEmbedder:
     # max length 64, 128, 256 and 512 should work (if your sequence is short enough)
-    return HFEmbedder("lnyan/t5-v1_1-xxl-encoder", max_length=max_length,
+    return HFEmbedder("lnyan/t5-v1_1-xxl-encoder", max_length=max_length, dtype=jnp.bfloat16)
 
 
 def load_clip(device: str | torch.device = "cuda") -> HFEmbedder:
-    return HFEmbedder("openai/clip-vit-large-patch14", max_length=77,
+    return HFEmbedder("openai/clip-vit-large-patch14", max_length=77, dtype=jnp.bfloat16)
 
 @spaces.GPU(duration=30)
 def load_encoders():
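For reference, a minimal usage sketch of the two loaders touched by this change (hypothetical, not part of the diff). It assumes jax.numpy is imported as jnp elsewhere in app.py, which the new dtype=jnp.bfloat16 arguments require, and that HFEmbedder passes dtype through to the underlying encoder.

    import jax.numpy as jnp  # assumed import; needed for the new dtype arguments

    # Build both text encoders; with this change they are requested in bfloat16,
    # which should roughly halve their memory footprint versus float32 if the
    # weights are actually stored in that dtype.
    t5 = load_t5(max_length=512)   # lnyan/t5-v1_1-xxl-encoder
    clip = load_clip()             # openai/clip-vit-large-patch14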