Update app.py
Browse files
app.py
CHANGED
@@ -434,7 +434,8 @@ def generate_text (prompt, chatbot, history, rag_option, model_option, openai_api
         else:
             #oder an Hugging Face --------------------------
             print("HF Anfrage.......................")
-
+            model_kwargs={"temperature": 0.5, "max_length": 128, "num_return_sequences": 1}
+            llm = HuggingFaceHub(repo_id=repo_id, model_kwargs=model_kwargs)
             #llm = HuggingFaceChain(model=MODEL_NAME_HF, model_kwargs={"temperature": 0.5, "max_length": 128})
             #llm = HuggingFaceHub(url_??? = "https://wdgsjd6zf201mufn.us-east-1.aws.endpoints.huggingface.cloud", model_kwargs={"temperature": 0.5, "max_length": 64})
             #llm = HuggingFaceTextGenInference( inference_server_url="http://localhost:8010/", max_new_tokens=max_new_tokens,top_k=10,top_p=top_p,typical_p=0.95,temperature=temperature,repetition_penalty=repetition_penalty,)