DanielHesslow committed on
Commit 7b1bccf · 1 Parent(s): 5e172a3

Update README.md

Files changed (1)
  1. README.md +2 -3
README.md CHANGED
@@ -26,8 +26,8 @@ Model | #Params | d_model | layers | lm loss uniref-100
 Instantiate a model like so:
 
 from transformers import AutoModel, AutoModelForCausalLM
-model = AutoModelForCausalLM.from_pretrained("Seledorn/RITA_m, trust_remote_code=True")
-tokenizer = AutoTokenizer.from_pretrained("Seledorn/RITA_m")
+model = AutoModelForCausalLM.from_pretrained("lightonai/RITA_m, trust_remote_code=True")
+tokenizer = AutoTokenizer.from_pretrained("lightonai/RITA_m")
 
 for generation use we support pipelines:
 
@@ -36,4 +36,3 @@ for generation use we support pipelines:
 sequences = rita_gen("MAB", max_length=20, do_sample=True, top_k=950, repetition_penalty=1.2, num_return_sequences=2, eos_token_id=2)
 for seq in sequences:
 print(f"seq: {seq['generated_text'].replace(' ', '')}")
-
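For reference, a runnable sketch of the usage this diff touches, from an editor and not part of the commit: the committed line keeps a misplaced closing quote (the string literal swallows , trust_remote_code=True) and AutoTokenizer is used without being imported, both fixed below. The construction of rita_gen as a text-generation pipeline is an assumption, since the second hunk starts past the point where it would be defined.

# Editor's sketch, not the committed README content.
# Assumptions: corrected quoting, AutoTokenizer import added, and rita_gen
# built as a text-generation pipeline (not shown in the diff hunks).
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

model = AutoModelForCausalLM.from_pretrained("lightonai/RITA_m", trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained("lightonai/RITA_m")

# Assumed pipeline construction; only the rita_gen(...) call appears in the diff.
rita_gen = pipeline("text-generation", model=model, tokenizer=tokenizer)

sequences = rita_gen(
    "MAB",
    max_length=20,
    do_sample=True,
    top_k=950,
    repetition_penalty=1.2,
    num_return_sequences=2,
    eos_token_id=2,
)
for seq in sequences:
    # Strip the spaces the tokenizer inserts between amino-acid tokens.
    print(f"seq: {seq['generated_text'].replace(' ', '')}")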