File size: 1,689 Bytes
e8cb9c9
3c4c2ab
dcdaaec
f306d19
557899a
8e5beee
b30b5d8
 
 
c4556a0
ebb0ce9
39f1739
f306d19
188098a
 
 
b9158a9
 
 
 
8e5beee
b9158a9
f306d19
b9158a9
 
 
f306d19
8e5beee
b30b5d8
 
 
 
 
 
 
 
3c363fa
0f3065b
5826562
 
9f584e5
8e5beee
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
import tensorflow as tf
from tensorflow import keras
import gradio as gr
from gradio import mix
import numpy as np
import torch
from keras.preprocessing.sequence import pad_sequences
import pickle

from huggingface_hub import from_pretrained_keras

# Download the pretrained miniature-GPT text-generation model from the
# Hugging Face Hub.
model = from_pretrained_keras("keras-io/text-generation-miniature-gpt")

# Load the Keras Tokenizer that was fitted when the model was trained, so
# prompts are encoded with the same vocabulary the model expects.
# NOTE(review): pickle.load executes arbitrary code from the file — acceptable
# only because tokenizer.pickle ships with this app and is not user-supplied.
with open('tokenizer.pickle', 'rb') as handle:
    tokenizer = pickle.load(handle)
    
#def tokenize_data(text):
# Tokenize the review body
#    input_ =  str(text) + ' </s>'
#    max_len = 80
    # tokenize inputs
#    tokenized_inputs = tokenizer(input_, padding='max_length', truncation=True, max_length=max_len, return_attention_mask=True, return_tensors='pt')

 #   inputs={"input_ids": tokenized_inputs['input_ids'],
  #      "attention_mask": tokenized_inputs['attention_mask']}
   # return inputs

def generate_answers(text):
    """Generate a text continuation for *text* with the miniature GPT model.

    Args:
        text: Prompt string entered by the user.

    Returns:
        The generated token sequence decoded back into a space-joined string.
    """
    # Encode the prompt with the training-time tokenizer and pad (post) to
    # the model's fixed input length of 80 tokens.
    sequence_test = tokenizer.texts_to_sequences([text])
    padded_test = pad_sequences(sequence_test, maxlen=80, padding='post')
    # The model returns a pair; the first element is the logits tensor.
    predictions, _ = model.predict(padded_test)
    # BUG FIX: token ids live on the last (vocabulary) axis. The original
    # code used axis=1 (the sequence axis), which yields sequence-position
    # indices instead of vocabulary ids, so the decoded text was garbage.
    # argmax over the last axis gives one token id per position; [0] picks
    # the single sequence in the batch.
    # NOTE(review): assumes predictions has shape (batch, seq_len, vocab) —
    # confirm against the model card.
    results = np.argmax(predictions, axis=-1)[0]
    # sequences_to_texts expects a batch of sequences and returns a list of
    # decoded strings.
    answer = tokenizer.sequences_to_texts([results])
    return ' '.join(str(elem) for elem in answer)
    
# Example prompts shown as one-click inputs in the Gradio UI.
examples = [["The movie was nice, "], ["It was showing nothing special to "]]
title = "Text Generation with Miniature GPT"
# BUG FIX: the original description read "a miniature with GPT" — garbled
# user-facing text; corrected to "a miniature GPT model".
description = "Gradio Demo for a miniature GPT model. To use it, simply add your text, or click one of the examples to load them. Read more at the links below."

# Wire the generation function into a simple text-in / text-out interface.
iface = gr.Interface(fn=generate_answers, title=title, description=description, inputs=['text'], outputs=["text"], examples=examples)
# share=True exposes a temporary public URL; inline=False opens in a new tab
# rather than embedding in a notebook.
iface.launch(inline=False, share=True)