from ctransformers import AutoModelForCausalLM
from fastapi import FastAPI
from pydantic import BaseModel

# Load the GGUF model on CPU with ctransformers
llm = AutoModelForCausalLM.from_pretrained(
    "alvinfadli/llama2-fine-tuned-gguf",
    model_type="llama",
    max_new_tokens=1096,
    threads=3,
)

# Pydantic model used to validate the request body
class validation(BaseModel):
    prompt: str

# FastAPI application
app = FastAPI()

@app.post("/llm_on_cpu")
async def stream(item: validation):
    system_prompt = (
        "Below is an instruction that describes a task. "
        "Write a response that appropriately completes the request."
    )
    # Llama 2 chat template: the system prompt is wrapped in <<SYS>> tags inside [INST]
    prompt = f"""[INST] <<SYS>>
{system_prompt}
<</SYS>>

{item.prompt} [/INST]"""
    # Run generation and return the completion as the response body
    return llm(prompt)
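
To try the endpoint locally, the app can be served with uvicorn and called over HTTP. The sketch below assumes the snippet above is saved as main.py and uses a made-up example prompt; both are assumptions, not part of the original code.

# Assumed usage sketch: first start the server with
#   uvicorn main:app --host 0.0.0.0 --port 8000
# then POST a JSON body containing "prompt" to the /llm_on_cpu endpoint.
import requests

response = requests.post(
    "http://localhost:8000/llm_on_cpu",
    json={"prompt": "Explain what FastAPI is in one sentence."},  # hypothetical example prompt
)
# The endpoint returns the raw completion string, serialized as JSON
print(response.json())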