Update app.py
app.py CHANGED

@@ -18,11 +18,14 @@ llm_models = [
     "mistralai/Mistral-7B-Instruct-v0.2",
     "tiiuae/falcon-7b-instruct",
     "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
-    "deepseek-ai/deepseek-vl2",
-    "deepseek-ai/deepseek-vl2-small",
-    "deepseek-ai/deepseek-vl2-tiny",
+    # "deepseek-ai/deepseek-vl2", ## 54GB > 10GB
+    # "deepseek-ai/deepseek-vl2-small", ## 32GB > 10GB
+    # "deepseek-ai/deepseek-vl2-tiny", ## high response time
     "deepseek-ai/deepseek-llm-7b-chat",
     "deepseek-ai/deepseek-math-7b-instruct",
+    "deepseek-ai/deepseek-coder-33b-instruct"
+    "deepseek-ai/deepseek-vl-1.3b-base",
+    ""
     # "deepseek-ai/DeepSeek-R1-Zero", ## 688GB > 10GB
     # "mistralai/Mixtral-8x22B-Instruct-v0.1", ## 281GB>10GB
     # "NousResearch/Yarn-Mistral-7b-64k", ## 14GB>10GB
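For context, below is a minimal sketch of how an `llm_models` list like this is typically consumed in a Gradio Space. The dropdown and `InferenceClient` wiring are assumptions for illustration, since this commit only touches the model list and the rest of app.py is not shown. Note also that the added line `"deepseek-ai/deepseek-coder-33b-instruct"` has no trailing comma, so Python concatenates it with the following string literal; the sketch includes the comma.

import gradio as gr
from huggingface_hub import InferenceClient

# Assumed shape of the list after this commit (commented-out entries omitted).
llm_models = [
    "mistralai/Mistral-7B-Instruct-v0.2",
    "tiiuae/falcon-7b-instruct",
    "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
    "deepseek-ai/deepseek-llm-7b-chat",
    "deepseek-ai/deepseek-math-7b-instruct",
    "deepseek-ai/deepseek-coder-33b-instruct",  # added in this commit
    "deepseek-ai/deepseek-vl-1.3b-base",        # added in this commit
]

def respond(message: str, model_name: str) -> str:
    # Send a single-turn chat request to the selected model via the
    # Hugging Face Inference API.
    client = InferenceClient(model=model_name)
    result = client.chat_completion(
        messages=[{"role": "user", "content": message}],
        max_tokens=512,
    )
    return result.choices[0].message.content

demo = gr.Interface(
    fn=respond,
    inputs=[
        gr.Textbox(label="Prompt"),
        gr.Dropdown(choices=llm_models, value=llm_models[0], label="Model"),
    ],
    outputs=gr.Textbox(label="Response"),
)

if __name__ == "__main__":
    demo.launch()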