# mergekit model_stock merge configuration
# (source snapshot: 1,634 bytes, commit b74f188)
models:
- model: mergekit-community/mergekit-model_stock-anvdilz+Azazelle/ANJIR-ADAPTER-128
- model: mergekit-community/mergekit-model_stock-anvdilz+Azazelle/Nimue-8B
- model: mergekit-community/mergekit-model_stock-anvdilz+surya-narayanan/formal_logic
- model: mergekit-community/mergekit-model_stock-anvdilz+surya-narayanan/sociology
- model: mergekit-community/mergekit-model_stock-anvdilz+surya-narayanan/health
- model: mergekit-community/mergekit-model_stock-anvdilz+surya-narayanan/professional_medicine
- model: mergekit-community/mergekit-model_stock-anvdilz+BeastGokul/Bio-Medical-MultiModal-Llama-3-8B-Finetuned
- model: mergekit-community/mergekit-model_stock-anvdilz+surya-narayanan/biology
- model: mergekit-community/mergekit-model_stock-anvdilz+surya-narayanan/psychology
- model: mergekit-community/mergekit-model_stock-anvdilz+surya-narayanan/professional_psychology
- model: mergekit-community/mergekit-model_stock-anvdilz+ResplendentAI/Smarts_Llama3
- model: mergekit-community/mergekit-model_stock-anvdilz+Azazelle/Llama-3-8B-Abomination-LORA
- model: mergekit-community/mergekit-model_stock-anvdilz+kik41/lora-type-descriptive-llama-3-8b-v2
- model: mergekit-community/mergekit-model_stock-anvdilz+kik41/lora-length-long-llama-3-8b-v2
- model: mergekit-community/mergekit-model_stock-anvdilz+surya-narayanan/anatomy
- model: mergekit-community/mergekit-model_stock-anvdilz+surya-narayanan/human_sexuality
merge_method: model_stock
base_model: mergekit-community/mergekit-model_stock-anvdilz+grimjim/Llama-3-Instruct-abliteration-LoRA-8B
dtype: bfloat16