Hugging Face Space: abstractive text summarization demo using Pegasus (google/pegasus-xsum) with a Gradio interface.
import gradio as grad
from transformers import PegasusForConditionalGeneration, PegasusTokenizer
# Pegasus checkpoint fine-tuned for extreme summarization (XSum).
model_name = "google/pegasus-xsum"

# Load the tokenizer and model once at import time so every Gradio
# request reuses the same instances instead of reloading weights.
pega_tokenizer = PegasusTokenizer.from_pretrained(model_name)
model = PegasusForConditionalGeneration.from_pretrained(model_name)
def summarize(text):
    """Generate candidate summaries of *text* with Pegasus.

    Parameters
    ----------
    text : str
        English input text to summarize.

    Returns
    -------
    list[str]
        Five decoded candidate summaries (``num_return_sequences=5``);
        Gradio renders the list in the output textbox.
    """
    # Truncate over-long inputs to the model's max length and pad to the
    # longest sequence in the (single-item) batch; return PyTorch tensors.
    tokens = pega_tokenizer(
        text, truncation=True, padding="longest", return_tensors="pt"
    )
    # NOTE(review): with pure beam search (num_beams=10 and no
    # do_sample=True), `temperature` has no effect on the output —
    # kept as-is to preserve behavior, but confirm intent.
    summary_ids = model.generate(
        **tokens,
        num_return_sequences=5,
        max_length=200,
        temperature=1.5,
        num_beams=10,
    )
    return pega_tokenizer.batch_decode(summary_ids, skip_special_tokens=True)
# Gradio UI wiring: a 10-line input box feeding `summarize`, whose list of
# candidate summaries is shown in the output box. `launch()` starts the app.
in_text = grad.Textbox(lines=10, label="English", placeholder="English text here")
out_text = grad.Textbox(lines=10, label="Summary")
grad.Interface(summarize, inputs=in_text, outputs=out_text).launch()