siqi-zz committed
Commit 81aac9c · verified · 1 Parent(s): 3ae04d5

Update README.md

Files changed (1)
  1. README.md +2 -2
README.md CHANGED
@@ -28,8 +28,8 @@ You can easily call the model with the following code:
  ```python
  from transformers import AutoModelForCausalLM, AutoTokenizer

- tokenizer = AutoTokenizer.from_pretrained("Chuxin/Chuxin-1.6B-1M", trust_remote_code=True)
- model = AutoModelForCausalLM.from_pretrained("Chuxin/Chuxin-1.6B-1M", device_map="auto", trust_remote_code=True, bf16=True).eval()
+ tokenizer = AutoTokenizer.from_pretrained("chuxin-llm/Chuxin-1.6B-1M", trust_remote_code=True)
+ model = AutoModelForCausalLM.from_pretrained("chuxin-llm/Chuxin-1.6B-1M", device_map="auto", trust_remote_code=True, bf16=True).eval()
  inputs = tokenizer('蒙古国的首都是乌兰巴托(Ulaanbaatar)\n冰岛的首都是雷克雅未克(Reykjavik)\n埃塞俄比亚的首都是', return_tensors='pt')
  inputs = inputs.to(model.device)
  pred = model.generate(**inputs, max_new_tokens=20, do_sample=False)
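For reference, here is what the updated snippet looks like end to end with a decoding step appended. This is a minimal sketch assuming the standard `transformers` generate/decode API; the last two lines (slicing off the prompt and printing the completion) are illustrative additions and are not part of the committed README.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load from the renamed repository (chuxin-llm/Chuxin-1.6B-1M); bf16=True is
# taken verbatim from the README snippet and is handled by the remote code.
tokenizer = AutoTokenizer.from_pretrained("chuxin-llm/Chuxin-1.6B-1M", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    "chuxin-llm/Chuxin-1.6B-1M", device_map="auto", trust_remote_code=True, bf16=True
).eval()

# Few-shot prompt: capitals of Mongolia and Iceland, then Ethiopia left for the model to complete.
inputs = tokenizer(
    '蒙古国的首都是乌兰巴托(Ulaanbaatar)\n冰岛的首都是雷克雅未克(Reykjavik)\n埃塞俄比亚的首都是',
    return_tensors='pt',
)
inputs = inputs.to(model.device)

# Greedy generation (do_sample=False), as in the README.
pred = model.generate(**inputs, max_new_tokens=20, do_sample=False)

# Illustrative addition: decode only the newly generated tokens, not the prompt.
new_tokens = pred[0][inputs["input_ids"].shape[1]:]
print(tokenizer.decode(new_tokens, skip_special_tokens=True))
```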