# Hugging Face Space file-page artifact (kept as a comment so the file parses):
# oreonmayo — "create app.py" — commit 9937839 (verified)
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
# Hugging Face model id for the code-oriented Codestral 22B checkpoint.
model_name = "mistralai/Codestral-22B-v0.1"
# NOTE(review): both loads happen at import time and download weights on
# first run; a 22B-parameter model needs very large RAM/VRAM — presumably
# this is deployed on appropriately sized hardware. Confirm before reuse.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
def generate_text(prompt, max_length=1000, min_length=50, temperature=0.1):
    """Generate a text completion for *prompt* with the loaded causal LM.

    Args:
        prompt: Input text to complete.
        max_length: Upper bound on total token count (prompt + generation).
        min_length: Lower bound on total token count.
        temperature: Sampling temperature; lower values sharpen the
            distribution. Sampling is enabled so this actually applies.

    Returns:
        The decoded model output (prompt included) with special tokens
        stripped.
    """
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(
        inputs["input_ids"],
        # Bug fix: forward the attention mask the tokenizer produced
        # instead of letting generate() infer one (avoids warnings and
        # wrong masking if padding is ever involved).
        attention_mask=inputs["attention_mask"],
        max_length=max_length,
        min_length=min_length,
        # Bug fix: temperature has no effect under the default greedy
        # decoding; do_sample=True is required for it to apply.
        do_sample=True,
        temperature=temperature,
        # Mistral tokenizers define no pad token; fall back to EOS so
        # generate() does not warn / error when it needs one.
        pad_token_id=tokenizer.eos_token_id,
    )
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return response
# Gradio UI wiring: one text input plus three generation knobs, mapped
# positionally onto generate_text(prompt, max_length, min_length, temperature).
# Bug fix: the gr.inputs.* namespace and the `default=` kwarg are Gradio 2.x
# API, removed in Gradio 3+ — current Gradio raises AttributeError on them.
# Components now live at the top level and use `value=` for the initial value.
interface = gr.Interface(
    fn=generate_text,
    inputs=[
        gr.Textbox(lines=2, placeholder="Enter your prompt here..."),
        gr.Slider(minimum=10, maximum=2000, value=1000, label="Max Length"),
        gr.Slider(minimum=10, maximum=100, value=50, label="Min Length"),
        gr.Slider(minimum=0.1, maximum=1, value=0.1, label="Temperature"),
    ],
    outputs="text",
    title="Text Generation with Mistralai",
    description="Generate text using the mistralai/Codestral-22B-v0.1 model.",
)
def _run_app():
    """Start the Gradio server for this demo (blocks until shut down)."""
    interface.launch()


# Only launch when executed as a script, not when imported as a module.
if __name__ == "__main__":
    _run_app()