---
base_model:
- google/flan-t5-large
---
## Usage
```python
from transformers import T5Tokenizer, T5ForConditionalGeneration

model_path = "KameronB/sitcc-t5-large-v3"

# Load the model
model = T5ForConditionalGeneration.from_pretrained(model_path, use_safetensors=True)

# Load the tokenizer
tokenizer = T5Tokenizer.from_pretrained(model_path)


def summarize_ticket(ticket_text):
    # Tokenize the input text with the summarization prompt
    input_ids = tokenizer.encode("Summarize: " + ticket_text, return_tensors="pt")

    # Generate the summary
    summary_ids = model.generate(input_ids, min_length=10, max_length=100)

    # Decode and return the summary
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summary
```
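
For a quick sanity check, the helper can be called directly. This is a minimal sketch; the ticket text below is a made-up illustration, not taken from the model's training data.

```python
# Hypothetical ticket text used only to illustrate the call.
ticket = (
    "User reports that Outlook crashes on startup after the latest update. "
    "Cleared the local cache and recreated the mail profile, which resolved the issue."
)

print(summarize_ticket(ticket))
```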