mttrz committed
Commit 246156e · verified · 1 Parent(s): e6fb692

Update main.py

Files changed (1)
  1. main.py  +2 -2
main.py CHANGED
@@ -6,7 +6,7 @@ import uvicorn
 
 app = FastAPI()
 
-client = InferenceClient("meta-llama/Meta-Llama-3.1-405B-Instruct")
+client = InferenceClient("mistralai/Mistral-Large-Instruct-2407")
 
 class Item(BaseModel):
     prompt: str
@@ -35,7 +35,7 @@ def generate(item: Item):
         seed=42,
     )
     formatted_prompt = format_prompt(f"{item.system_prompt}, {item.prompt}", item.history)
-    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
+    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False, token='')
     output = ""
 
     for response in stream:
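
For reference, a minimal, self-contained sketch of how the updated client could be exercised against the new model. The environment-variable token handling, the placeholder prompt, and the max_new_tokens value are illustrative assumptions; only the model id and the stream/details/return_full_text parameters come from this commit.

# Sketch only: assumes huggingface_hub is installed and an HF_TOKEN env var holds the API token.
import os
from huggingface_hub import InferenceClient

# Pass the token to the client constructor instead of hardcoding it in the call (assumption).
client = InferenceClient("mistralai/Mistral-Large-Instruct-2407", token=os.environ.get("HF_TOKEN"))

# Stream a response for a placeholder prompt; stream/details/return_full_text mirror the call in main.py.
stream = client.text_generation(
    "Hello, how are you?",   # placeholder prompt, not from the commit
    max_new_tokens=64,       # illustrative value
    stream=True,
    details=True,
    return_full_text=False,
)

output = ""
for chunk in stream:
    output += chunk.token.text  # each streamed chunk carries the generated token text
print(output)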