YaTharThShaRma999 committed: Update app.py
app.py
CHANGED
@@ -25,9 +25,9 @@ if __name__ == "__main__":
     print(f"Number of threads available to the current process: {num_threads}")
     #url = 'https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/resolve/main/llama-2-7b-chat.ggmlv3.q2_K.bin'
     #filename = wget.download(url)
-    model_path= hf_hub_download(repo_id="
+    model_path= hf_hub_download(repo_id="brittlewis12/Octopus-v2-GGUF", filename="octopus-v2.Q4_K_S.gguf
 
-    llm2 = Llama(model_path=model_path,
+    llm2 = Llama(model_path=model_path, use_mlock=False)
     theme = gr.themes.Soft(
         primary_hue=gr.themes.Color("#ededed", "#fee2e2", "#fecaca", "#fca5a5", "#f87171", "#ef4444", "#dc2626", "#b91c1c", "#991b1b", "#7f1d1d", "#6c1e1e"),
         neutral_hue="red",
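
For reference, below is a minimal, self-contained sketch of what the new loading lines plausibly expand to. The diff viewer clips both long lines, so the closing of the hf_hub_download(...) call and the absence of any Llama(...) arguments beyond use_mlock=False are assumptions here, not confirmed by the commit.

# Hedged sketch: reconstructs the two truncated lines from the hunk above.
# The closing of hf_hub_download(...) and the lack of extra Llama(...)
# arguments are assumptions, not taken from the commit itself.
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

# Download the GGUF file from the Hub (cached locally after the first run).
model_path = hf_hub_download(
    repo_id="brittlewis12/Octopus-v2-GGUF",
    filename="octopus-v2.Q4_K_S.gguf",
)

# use_mlock=False leaves the memory-mapped weights unpinned, so the OS may
# page them out; use_mlock=True would lock the model into RAM instead.
llm2 = Llama(model_path=model_path, use_mlock=False)

Leaving use_mlock off is the cautious choice on memory-constrained Spaces hardware, since locking a large GGUF file into RAM can fail or starve the rest of the process.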