LukasStankevicius committed on
Commit
b328056
·
1 Parent(s): 7d0f683

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +7 -4
README.md CHANGED
@@ -13,15 +13,17 @@ news articles using a transformer model**.
13
  ## Usage
14
  ```python
15
  from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
16
- tokenizer = AutoTokenizer.from_pretrained("LukasStankevicius/t5-base-lithuanian-news-summaries-175")
17
- model = AutoModelForSeq2SeqLM.from_pretrained("LukasStankevicius/t5-base-lithuanian-news-summaries-175")
 
18
 
19
  def decode(x):
20
  return tokenizer.decode(x, skip_special_tokens=True)
21
 
22
  def summarize(text_, **g_kwargs):
23
  text_ = ' '.join(text_.strip().split())
24
- input_dict = tokenizer(text_, padding=True, return_tensors="pt", return_attention_mask=True)
 
25
  output = model.generate(**input_dict, **g_kwargs)
26
  predicted = list(map(decode, output.tolist()))[0]
27
  return predicted
@@ -39,7 +41,8 @@ Tarp žaidėjų, kurie sužaidė bent po 50 oficialių rungtynių Lietuvos rinkt
39
  ```
40
  The summary can be obtained by:
41
  ```
42
- g_kwargs = dict(max_length=512, num_beams=10, no_repeat_ngram_size=2, early_stopping=True)
 
43
  summarize(text, **g_kwargs)
44
  ```
45
  Output from above would be:
 
13
  ## Usage
14
  ```python
15
  from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
16
+ name = "LukasStankevicius/t5-base-lithuanian-news-summaries-175"
17
+ tokenizer = AutoTokenizer.from_pretrained(name)
18
+ model = AutoModelForSeq2SeqLM.from_pretrained(name)
19
 
20
  def decode(x):
21
  return tokenizer.decode(x, skip_special_tokens=True)
22
 
23
  def summarize(text_, **g_kwargs):
24
  text_ = ' '.join(text_.strip().split())
25
+ input_dict = tokenizer(text_, padding=True, return_tensors="pt",
26
+ return_attention_mask=True)
27
  output = model.generate(**input_dict, **g_kwargs)
28
  predicted = list(map(decode, output.tolist()))[0]
29
  return predicted
 
41
  ```
42
  The summary can be obtained by:
43
  ```
44
+ g_kwargs = dict(max_length=512, num_beams=10, no_repeat_ngram_size=2,
45
+ early_stopping=True)
46
  summarize(text, **g_kwargs)
47
  ```
48
  Output from above would be: