cicdatopea committed
Commit 88165c5 · verified · 1 Parent(s): af0b9cd

the model now supports CUDA

Files changed (1)
  1. README.md +1 -1
README.md CHANGED
@@ -182,7 +182,7 @@ from auto_round import AutoRoundConfig ##must import for autoround format
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 
-quantized_model_dir = "OPEA/DeepSeek-V3-int4-sym-inc-cpu"
+quantized_model_dir = "OPEA/DeepSeek-V3-int4-sym-gptq-inc"
 quantization_config = AutoRoundConfig(
     backend="cpu"
 )
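
For reference, a minimal sketch of how the updated snippet is typically completed to load the repointed checkpoint; the `from_pretrained`/`generate` calls, dtype, device placement, and prompt below are assumptions for illustration and are not part of this diff:

```python
# Sketch only: load the AutoRound GPTQ-format checkpoint referenced in the diff on CPU.
# Everything after the quantization_config line is assumed, not taken from this commit.
from auto_round import AutoRoundConfig  # must import to register the auto-round format
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

quantized_model_dir = "OPEA/DeepSeek-V3-int4-sym-gptq-inc"
quantization_config = AutoRoundConfig(
    backend="cpu"
)

# Assumed loading and generation steps for illustration.
model = AutoModelForCausalLM.from_pretrained(
    quantized_model_dir,
    torch_dtype=torch.bfloat16,
    device_map="cpu",
    quantization_config=quantization_config,
)
tokenizer = AutoTokenizer.from_pretrained(quantized_model_dir)

prompt = "Explain weight-only int4 quantization in one sentence."
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```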