oleksandrfluxon
committed on
Commit
·
bc15f8d
1
Parent(s):
28fbd6c
Update pipeline.py
Browse files- pipeline.py +2 -2
pipeline.py
CHANGED
@@ -7,12 +7,12 @@ class PreTrainedPipeline():
|
|
7 |
path = "oleksandrfluxon/mpt-7b-instruct-2"
|
8 |
print("===> path", path)
|
9 |
|
10 |
-
config = transformers.AutoConfig.from_pretrained(
|
11 |
config.max_seq_len = 4096 # (input + output) tokens can now be up to 4096
|
12 |
|
13 |
print("===> loading model")
|
14 |
model = transformers.AutoModelForCausalLM.from_pretrained(
|
15 |
-
|
16 |
config=config,
|
17 |
torch_dtype=torch.bfloat16, # Load model weights in bfloat16
|
18 |
trust_remote_code=True,
|
|
|
7 |
path = "oleksandrfluxon/mpt-7b-instruct-2"
|
8 |
print("===> path", path)
|
9 |
|
10 |
+
config = transformers.AutoConfig.from_pretrained(path, trust_remote_code=True)
|
11 |
config.max_seq_len = 4096 # (input + output) tokens can now be up to 4096
|
12 |
|
13 |
print("===> loading model")
|
14 |
model = transformers.AutoModelForCausalLM.from_pretrained(
|
15 |
+
path,
|
16 |
config=config,
|
17 |
torch_dtype=torch.bfloat16, # Load model weights in bfloat16
|
18 |
trust_remote_code=True,
|