cicdatopea
committed on
the model now supports CUDA
README.md CHANGED
@@ -182,7 +182,7 @@ from auto_round import AutoRoundConfig ##must import for autoround format
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 
-quantized_model_dir = "OPEA/DeepSeek-V3-int4-sym-inc"
+quantized_model_dir = "OPEA/DeepSeek-V3-int4-sym-gptq-inc"
 quantization_config = AutoRoundConfig(
     backend="cpu"
 )
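For context, a minimal loading sketch showing how the renamed repository id from this change could be used end to end. It follows the snippet in the README hunk above; the `from_pretrained` arguments, the prompt, and the generation settings are illustrative assumptions and not part of the commit, and `backend="cpu"` is kept exactly as in the README (AutoRound documents other backend values for GPU inference).

```python
# Minimal sketch, assuming the renamed repo id from this commit and the
# transformers/auto_round loading path shown in the README hunk above.
from auto_round import AutoRoundConfig  # must import for the AutoRound format
from transformers import AutoModelForCausalLM, AutoTokenizer

quantized_model_dir = "OPEA/DeepSeek-V3-int4-sym-gptq-inc"
quantization_config = AutoRoundConfig(
    backend="cpu"  # value taken from the README; other backends exist for GPU use
)

# Load the int4 checkpoint; torch_dtype/device_map here are illustrative choices.
model = AutoModelForCausalLM.from_pretrained(
    quantized_model_dir,
    torch_dtype="auto",
    device_map="auto",
    quantization_config=quantization_config,
)
tokenizer = AutoTokenizer.from_pretrained(quantized_model_dir)

# Quick smoke test: encode a prompt, generate a short continuation, decode it.
inputs = tokenizer("Hello, how are you?", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=50)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```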