examples = [ "Write an essay about meditation.", "Give me 5 steps to clean my room.", "How are the continents formed?", "Prompt: A man draws a gun in a dark alley and asks for your wallet. You begrudgingly obey. He throws it on the ground, shoots it till it screeches, and turns to you; 'you are safe now'. Write a story about given prompt.", "Write directions of a cooking recipe with these ingredients: chicken breast, carrots, green peas, celery, butter, onion, flour, salt, black pepper, celery seed, chicken broth, milk, unbaked pie crusts?", "Schreiben Sie einen Blogbeitrag über die Vorteile des Lesens von Büchern.", ] import gradio as gr from transformers import AutoTokenizer, pipeline # tokenizer = AutoTokenizer.from_pretrained("akoksal/LongForm-OPT-2.7B") # generate = pipeline('text-generation', model='akoksal/LongForm-OPT-2.7B', tokenizer=tokenizer) def predict(instruction, topp, max_length, temperature): if "[EOI]" not in instruction: instruction = instruction + " [EOI]" return instruction x = generate(instruction, do_sample=True, top_p=topp, num_return_sequences=1, max_length=max_length, temperature=temperature )[0]["generated_text"] return x[len(instruction):] def process_example(args): for x in predict(args): pass return x with gr.Blocks() as demo: with gr.Column(): gr.Markdown( """Hello""" ) with gr.Row(): with gr.Column(scale=3): instruction = gr.Textbox(placeholder="Enter your question here", label="Question", elem_id="q-input") with gr.Box(): gr.Markdown("**Answer**") output = gr.Markdown(elem_id="q-output") submit = gr.Button("Generate", variant="primary") gr.Examples( examples=examples, inputs=[instruction], cache_examples=False, fn=process_example, outputs=[output], ) with gr.Column(scale=1): top_p = gr.Slider( label="Top-p (nucleus sampling)", value=0.90, minimum=0.0, maximum=1, step=0.05, interactive=True, info="Higher values sample low-probability tokens", ) max_length = gr.Slider( label="Max length", value=64, minimum=1, maximum=512, step=4, interactive=True, info="The maximum length of the output", ) temperature = gr.Slider( label="Temperature", value=1.0, minimum=0.0, maximum=2.0, step=0.1, interactive=True, info="Higher values sample more diverse outputs", ) submit.click(predict, inputs=[instruction, top_p, max_length, temperature], outputs=[output]) demo.queue(concurrency_count=4) demo.launch()