Spaces:
Sleeping
Sleeping
Leonardo Oliva
committed on
Commit
·
076ac9e
1
Parent(s):
005937f
app.py
CHANGED
@@ -1,10 +1,18 @@
|
|
1 |
import gradio as gr
|
2 |
from huggingface_hub import InferenceClient
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
3 |
|
4 |
"""
|
5 |
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
|
6 |
"""
|
7 |
-
client = InferenceClient("
|
8 |
|
9 |
|
10 |
def respond(
|
|
|
1 |
import gradio as gr
|
2 |
from huggingface_hub import InferenceClient
|
3 |
+
from utils.dl_utils import dl_guff_model
|
4 |
+
import os
|
5 |
+
|
6 |
+
model_filename = "gorilla-openfunctions-v2-q6_K.gguf"
|
7 |
+
model_path = os.path.join("models", model_filename)
|
8 |
+
|
9 |
+
if not os.path.exists(model_path):
|
10 |
+
dl_guff_model("models", f"https://huggingface.co/gorilla-llm/gorilla-openfunctions-v2-gguf/blob/main/{model_filename}")
|
11 |
|
12 |
"""
|
13 |
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
|
14 |
"""
|
15 |
+
client = InferenceClient("models/gorilla-openfunctions-v2-q6_K.gguf")
|
16 |
|
17 |
|
18 |
def respond(
|