# starcoder Space: app.py
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
# Load the model and tokenizer
model_name = "bigcode/starcoder2-15b-instruct-v0.1"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,  # half precision to reduce memory usage
    device_map="auto",          # let Accelerate place weights on available devices
)
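# Rough sizing note (not in the original file): the 15B checkpoint in float16 takes
# on the order of 30 GB of weights, so this Space needs a large GPU, or several GPUs
# that device_map="auto" can shard the model across.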
# Generate text from a prompt
def generate_text(prompt):
    # Tokenize the prompt and move it to the same device as the model weights
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    outputs = model.generate(inputs["input_ids"], max_length=200)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
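# Illustrative usage (assumed prompt, not part of the original app):
#   generate_text("Write a Python function that reverses a string")
# returns the decoded completion, which includes the prompt and is capped at 200
# tokens in total because generate() is called with max_length, not max_new_tokens.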
# Gradio user interface
interface = gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(label="Enter your instruction"),
    outputs=gr.Textbox(label="Generated output"),
    title="StarCoder2-15B-Instruct",
)
# Launch the app
interface.launch()