# mynewmodel/test.py
# Author: yopzey — commit 9190d78 ("Committing all changes before LFS migration")
# (Provenance header retained from the Hugging Face file viewer; 510 bytes.)
"""Minimal text-generation smoke test for a merged causal-LM checkpoint."""
import os

from transformers import AutoModelForCausalLM, AutoTokenizer

# Checkpoint directory. Overridable via MODEL_PATH so the script is reusable
# outside this one machine; the default preserves the original behavior.
model_path = os.environ.get("MODEL_PATH", '/home/energyxadmin/UI2/merge')

model = AutoModelForCausalLM.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path)

# Example text generation.
# tokenizer(...) returns input_ids AND attention_mask; passing the mask plus an
# explicit pad_token_id silences generate()'s "attention mask not set" warning
# and removes ambiguity when the model's pad and eos tokens coincide.
inputs = tokenizer("What song did Eric Pask write or was a part of", return_tensors="pt")
generated_text_samples = model.generate(
    **inputs,
    max_length=1000,
    pad_token_id=tokenizer.eos_token_id,
)
print("Generated text:", tokenizer.decode(generated_text_samples[0], skip_special_tokens=True))