timpal0l committed · Commit edc8f54 (verified) · Parent(s): eff2613

Update README.md

Files changed (1): README.md (+7 -1)
README.md CHANGED

````diff
@@ -22,6 +22,7 @@ from transformers import pipeline, StoppingCriteriaList, StoppingCriteria
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 
+
 # (Optional) - define a stopping criteria
 # We ideally want the model to stop generate once the response from the Bot is generated
 class StopOnTokenCriteria(StoppingCriteria):
@@ -43,7 +44,12 @@ pipe = pipeline(
 text = "I like to eat ice cream in the summer."
 prompt = f"<|endoftext|><s>User: Översätt till Svenska från Engelska\n{text}<s>Bot:"
 
-response = pipe(prompt, max_length=768, stopping_criteria=StoppingCriteriaList([stop_on_token_criteria])))
+response = pipe(
+    prompt,
+    max_length=768,
+    stopping_criteria=StoppingCriteriaList([stop_on_token_criteria])
+)
+
 print(response[0]["generated_text"].split("<s>Bot: ")[-1])
 ```
 ```python
````
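
For context, the hunks above show only fragments of the README's usage example: the commit removes a stray extra closing parenthesis from the old single-line `pipe(...)` call (which would raise a `SyntaxError`) and reflows the call across several lines. Below is a minimal end-to-end sketch assembled from the snippets visible in this diff. The model id and the body of `StopOnTokenCriteria` are assumptions for illustration, not part of the commit.

```python
# Minimal sketch reassembled from the diff context above.
# Assumptions (not in the commit): the placeholder model id and the
# StopOnTokenCriteria implementation, which stops on a single token id.
import torch
from transformers import pipeline, StoppingCriteria, StoppingCriteriaList

device = "cuda" if torch.cuda.is_available() else "cpu"


# (Optional) - define a stopping criteria
# We ideally want the model to stop generating once the Bot's response is complete
class StopOnTokenCriteria(StoppingCriteria):
    def __init__(self, stop_token_id):
        self.stop_token_id = stop_token_id

    def __call__(self, input_ids, scores, **kwargs):
        # Stop as soon as the most recently generated token is the stop token
        return input_ids[0, -1] == self.stop_token_id


pipe = pipeline(
    "text-generation",
    model="your-org/your-model",  # placeholder: substitute the model this README documents
    device=device,
)

# Assumption: the Bot's reply ends at the next "<s>" turn marker, so we stop there;
# the README defines the actual stopping token.
stop_on_token_criteria = StopOnTokenCriteria(
    stop_token_id=pipe.tokenizer.convert_tokens_to_ids("<s>")
)

text = "I like to eat ice cream in the summer."
# The prompt asks the model (in Swedish) to translate the text from English to Swedish.
prompt = f"<|endoftext|><s>User: Översätt till Svenska från Engelska\n{text}<s>Bot:"

response = pipe(
    prompt,
    max_length=768,
    stopping_criteria=StoppingCriteriaList([stop_on_token_criteria]),
)
print(response[0]["generated_text"].split("<s>Bot: ")[-1])
```

The multi-line `pipe(...)` call is exactly what the new hunk introduces; everything else is reconstructed from the context lines and should be checked against the full README.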