Update model
README.md CHANGED
@@ -31,11 +31,11 @@ Here provides a code snippet with `apply_chat_template` to show you how to load
 from transformers import AutoModelForCausalLM, AutoTokenizer
 device = "cuda" # the device to load the model onto
 model = AutoModelForCausalLM.from_pretrained(
-    "Uni-SMART/
+    "Uni-SMART/SciLitLLM1.5-14B",
     torch_dtype="auto",
     device_map="auto"
 )
-tokenizer = AutoTokenizer.from_pretrained("Uni-SMART/
+tokenizer = AutoTokenizer.from_pretrained("Uni-SMART/SciLitLLM1.5-14B")
 prompt = "Can you summarize this article for me?\n <ARTICLE>"
 messages = [
     {"role": "system", "content": "You are a helpful assistant."},
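For reference, the hunk ends at the `messages` list, so the rest of the snippet is not visible in this commit. Below is a minimal sketch of how such a snippet typically continues with the standard `transformers` chat-template flow; the user-message wiring, `max_new_tokens` value, and decoding steps are assumptions for illustration, not content taken from this diff.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

device = "cuda"  # the device to load the model onto

model = AutoModelForCausalLM.from_pretrained(
    "Uni-SMART/SciLitLLM1.5-14B",
    torch_dtype="auto",
    device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained("Uni-SMART/SciLitLLM1.5-14B")

prompt = "Can you summarize this article for me?\n <ARTICLE>"
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": prompt}  # assumed continuation of the messages list
]

# Render the chat messages into a single prompt string using the model's chat template.
text = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True
)
model_inputs = tokenizer([text], return_tensors="pt").to(device)

# Generate a continuation, then drop the prompt tokens so only the reply remains.
generated_ids = model.generate(**model_inputs, max_new_tokens=512)
generated_ids = [
    output_ids[len(input_ids):]
    for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
]
response = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
print(response)
```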