Upload PhiRotForCausalLM
- config.json +1 -1
- generation_config.json +4 -0
- model.safetensors +1 -1
config.json
CHANGED
@@ -24,7 +24,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.
+  "transformers_version": "4.47.0",
   "use_cache": true,
   "vocab_size": 51200
 }
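This change pins the serialized transformers_version to 4.47.0. As a minimal sketch of reading the updated fields back (the repo id "user/phi-rot" is a placeholder, and trust_remote_code is an assumption that applies only if the repository ships custom modeling code for PhiRotForCausalLM):

    # Minimal sketch: read the updated config fields back from the Hub.
    # "user/phi-rot" is a placeholder repo id, not the actual repository.
    from transformers import AutoConfig

    # trust_remote_code=True is assumed here, needed only if the repo
    # defines the PhiRotForCausalLM architecture in custom code.
    config = AutoConfig.from_pretrained("user/phi-rot", trust_remote_code=True)

    print(config.torch_dtype)           # bfloat16, as set in this commit
    print(config.vocab_size)            # 51200
    print(config.transformers_version)  # "4.47.0" after this change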
generation_config.json
ADDED
@@ -0,0 +1,4 @@
+{
+  "_from_model_config": true,
+  "transformers_version": "4.47.0"
+}
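The new generation_config.json carries only provenance fields; "_from_model_config": true indicates it was derived from the model's config.json rather than hand-tuned. A minimal sketch of loading it, again with a placeholder repo id:

    # Minimal sketch: load the new generation_config.json from the Hub.
    # "user/phi-rot" is a placeholder repo id.
    from transformers import GenerationConfig

    gen_config = GenerationConfig.from_pretrained("user/phi-rot")
    print(gen_config._from_model_config)    # True: derived from config.json
    print(gen_config.transformers_version)  # "4.47.0"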
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:68e3ac01da8acfe6fd18becb4ab640bbb036aa9ccd8fd6c56821540a11b1b536
 size 2836579040
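Only the Git LFS pointer changes here: the payload stays 2,836,579,040 bytes, but the content hash is new. A minimal sketch for verifying a downloaded weights file against the sha256 recorded in the pointer above (the local path is assumed):

    # Minimal sketch: verify a downloaded model.safetensors against the
    # sha256 from the Git LFS pointer. "model.safetensors" is assumed to
    # be the local path of the downloaded file.
    import hashlib

    EXPECTED = "68e3ac01da8acfe6fd18becb4ab640bbb036aa9ccd8fd6c56821540a11b1b536"

    h = hashlib.sha256()
    with open("model.safetensors", "rb") as f:
        # Hash in 1 MiB chunks to avoid loading the ~2.8 GB file at once.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)

    assert h.hexdigest() == EXPECTED, "checksum mismatch"
    print("ok:", h.hexdigest())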