# text-generation / app.py
import pickle

import numpy as np
import gradio as gr
import tensorflow as tf
from tensorflow.keras.preprocessing.sequence import pad_sequences
from huggingface_hub import from_pretrained_keras
# Download the miniature GPT model from the Hugging Face Hub.
model = from_pretrained_keras("keras-io/text-generation-miniature-gpt")

# Load the Keras Tokenizer used to encode prompts and decode predictions.
with open("tokenizer.pickle", "rb") as handle:
    tokenizer = pickle.load(handle)
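# Note: 'tokenizer.pickle' is expected to sit next to app.py and to hold the same
# word-index mapping the model was trained with; otherwise the predicted token ids
# will not decode back to sensible text.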
# Unused helper, left commented out; it targets a Transformers-style tokenizer
# (attention masks, PyTorch tensors) rather than the Keras Tokenizer used below.
# def tokenize_data(text):
#     # Tokenize the review body
#     input_ = str(text) + ' </s>'
#     max_len = 80
#     # tokenize inputs
#     tokenized_inputs = tokenizer(input_, padding='max_length', truncation=True, max_length=max_len,
#                                  return_attention_mask=True, return_tensors='pt')
#     inputs = {"input_ids": tokenized_inputs['input_ids'],
#               "attention_mask": tokenized_inputs['attention_mask']}
#     return inputs
def generate_answers(text):
    # Encode the prompt with the tokenizer, then pad to the model's input length of 80 tokens.
    sequence_test = tokenizer.texts_to_sequences([text])
    padded_test = pad_sequences(sequence_test, maxlen=80, padding='post')
    # The model returns two outputs; keep the first (per-position token logits).
    predictions, _ = model.predict(padded_test)
    # Greedily pick the most likely token id at each position (the vocabulary is the last axis).
    results = np.argmax(predictions, axis=-1)[0]
    # Decode the token ids back into words.
    answer = tokenizer.sequences_to_texts([results])
    return ' '.join(str(elem) for elem in answer)
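# Illustrative only (actual output depends on the downloaded weights):
# generate_answers("The movie was nice, ") returns the model's greedy per-position
# predictions for the padded prompt, joined into a single string.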
examples = [["The movie was nice, "], ["It was showing nothing special to "]]
title = "Text Generation with Miniature GPT"
description = "Gradio demo for text generation with a miniature GPT. To use it, simply type your text, or click one of the examples to load it."
iface = gr.Interface(fn=generate_answers, title=title, description=description, inputs=["text"], outputs=["text"], examples=examples)
iface.launch(inline=False, share=True)