NCTCMumbai committed on
Commit
b11281d
·
1 Parent(s): ab9cd93

Update backend/query_llm.py

Browse files
Files changed (1) hide show
  1. backend/query_llm.py +1 -1
backend/query_llm.py CHANGED
@@ -50,7 +50,7 @@ def format_prompt(message: str, api_kind: str):
50
  raise ValueError("API is not supported")
51
 
52
 
53
- def generate_hf(prompt: str, history: str, temperature: float = 0.9, max_new_tokens: int = 256,
54
  top_p: float = 0.95, repetition_penalty: float = 1.0) -> Generator[str, None, str]:
55
  """
56
  Generate a sequence of tokens based on a given prompt and history using Mistral client.
 
50
  raise ValueError("API is not supported")
51
 
52
 
53
+ def generate_hf(prompt: str, history: str, temperature: float = 0.9, max_new_tokens: int = 3000,
54
  top_p: float = 0.95, repetition_penalty: float = 1.0) -> Generator[str, None, str]:
55
  """
56
  Generate a sequence of tokens based on a given prompt and history using Mistral client.