{
  "model_type": "llama",
  "_name_or_path": "legionarius/watt-tool-8B-GGUF",
  "quantization": {
    "method": "gguf",
    "bits": 4,
    "format": "GGUF"
  },
  "default_model_file": "watt-tool-8B-GGUF-Q4_K_M.gguf",
  "architectures": ["LlamaForCausalLM"]
}