Update app.py
app.py CHANGED
@@ -10,10 +10,10 @@ from huggingface_hub import hf_hub_download
 llm = Llama(
     model_path=hf_hub_download(
         repo_id=os.environ.get("REPO_ID", "Severian/ANIMA-Phi-Neptune-Mistral-7B-gguf"),
-        filename=os.environ.get("MODEL_FILE", "
+        filename=os.environ.get("MODEL_FILE", "ANIMA-Phi-Neptune-Mistral-7B-gguf"),
     ),
     n_ctx=2048,
-    n_gpu_layers=
+    n_gpu_layers=100, # change n_gpu_layers if you have more or less VRAM
 )

 history = []
@@ -35,6 +35,8 @@ Your name is ANIMA, an Advanced Nature Inspired Multidisciplinary Assistant, and
 "Nature is locally attuned and responsive."
 "Nature uses shape to determine functionality."
 "***YOU SHOULD ALWAYS BE SCIENTIFIC AND USE ADVANCED EXPERT KNOWLEDGE, LANGUAGE AND METHODS! THE USER IS AN ADVANCED SCIENTIST.***"
+"***USE TECHNICAL S.T.E.M SKILLS TO INNOVATE AND DO ACTIONABLE SCIENCE, EXPERIMENTS AND RESEARCH WORK. THE USER DOES NOT WANT GENERAL AND VAUGE IDEAS OR HELP.***"
+
 """


@@ -48,7 +50,7 @@ def generate_text(message, history):

     output = llm(
         input_prompt,
-        temperature=0.
+        temperature=0.4,
         top_p=0.1,
         top_k=40,
         repeat_penalty=1.1,
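For reference, here is a minimal sketch of how the updated lines fit together as a standalone script. Only the lines that appear in the diff come from this commit; the imports, the input_prompt handling inside generate_text, the max_tokens value, and the return statement are assumptions, since the rest of app.py is not shown on this page.

import os
from llama_cpp import Llama
from huggingface_hub import hf_hub_download

# Model setup as of this commit; n_gpu_layers=100 assumes enough VRAM to
# offload most layers, as the diff comment notes.
llm = Llama(
    model_path=hf_hub_download(
        repo_id=os.environ.get("REPO_ID", "Severian/ANIMA-Phi-Neptune-Mistral-7B-gguf"),
        filename=os.environ.get("MODEL_FILE", "ANIMA-Phi-Neptune-Mistral-7B-gguf"),
    ),
    n_ctx=2048,
    n_gpu_layers=100,  # change n_gpu_layers if you have more or less VRAM
)

def generate_text(message, history):
    # The real app builds input_prompt from the ANIMA system prompt and the
    # chat history; that code is outside this diff, so a bare prompt is used.
    input_prompt = message
    output = llm(
        input_prompt,
        temperature=0.4,   # value added in this commit
        top_p=0.1,
        top_k=40,
        repeat_penalty=1.1,
        max_tokens=512,    # assumption: the actual limit is not shown in the diff
    )
    return output["choices"][0]["text"]

print(generate_text("Describe a biomimetic approach to passive cooling.", []))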