kaizen9 committed
Commit 12811e0 · verified · 1 Parent(s): 444d7f4

Upload PhiRotForCausalLM

Files changed (3)
  1. config.json +1 -1
  2. generation_config.json +4 -0
  3. model.safetensors +1 -1
config.json CHANGED
@@ -24,7 +24,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.44.2",
+  "transformers_version": "4.47.0",
   "use_cache": true,
   "vocab_size": 51200
 }
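The only change to config.json is the transformers_version stamp; re-saving the checkpoint under a newer library rewrites that field. A minimal sketch of how such a re-save is typically done (the repo id below is a placeholder, not taken from the diff):

from transformers import AutoModelForCausalLM

repo_id = "kaizen9/PhiRot"  # hypothetical repo id, substitute the real one

# Loading the custom PhiRotForCausalLM architecture requires trust_remote_code.
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# save_pretrained rewrites config.json (stamping the current transformers
# version) and, on recent releases, also writes generation_config.json and
# model.safetensors, i.e. the same three files touched by this commit.
model.save_pretrained("phirot-resaved")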
generation_config.json ADDED
@@ -0,0 +1,4 @@
+{
+  "_from_model_config": true,
+  "transformers_version": "4.47.0"
+}
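generation_config.json is the file recent transformers releases write next to config.json when a model is saved; "_from_model_config": true means its values were derived from the model config rather than set by hand. A small sketch of reading it back from the Hub (the repo id is again a placeholder):

from transformers import GenerationConfig

repo_id = "kaizen9/PhiRot"  # hypothetical repo id

# Downloads and parses generation_config.json from the repository.
gen_config = GenerationConfig.from_pretrained(repo_id)
print(gen_config)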
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:42b5244b6beb83238054a99b620918718f780067eab4906b46c484dd7028ba15
+oid sha256:68e3ac01da8acfe6fd18becb4ab640bbb036aa9ccd8fd6c56821540a11b1b536
 size 2836579040
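model.safetensors is stored with Git LFS, so the diff only shows the pointer file; the oid line is the SHA-256 of the actual weights file. A quick way to verify a downloaded copy against the new pointer (the local path is an assumption):

import hashlib

path = "model.safetensors"  # assumed local path to the downloaded weights

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

# Expected to match the oid in the new LFS pointer:
# 68e3ac01da8acfe6fd18becb4ab640bbb036aa9ccd8fd6c56821540a11b1b536
print(sha.hexdigest())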