Update app.py
app.py (CHANGED)
@@ -251,17 +251,21 @@ def generate_text (prompt, chatbot, history, vektordatenbank, retriever, top_p=0
     try:
         # or to Hugging Face --------------------------
         print("HF Anfrage.......................")
-        model_kwargs={"temperature": 0.5, "max_length": 512, "num_return_sequences": 1, "top_k": top_k, "top_p": top_p, "repetition_penalty": repetition_penalty}
+        #model_kwargs={"temperature": 0.5, "max_length": 512, "num_return_sequences": 1, "top_k": top_k, "top_p": top_p, "repetition_penalty": repetition_penalty}
         #llm = HuggingFaceHub(repo_id=repo_id, model_kwargs=model_kwargs)
 
         # Create a pipeline with the desired parameters
         #pipe = pipeline("text-generation", model=MODEL_NAME_HF, config={"temperature": 0.5, "max_length": 512, "num_return_sequences": 1, "top_k": top_k, "top_p": top_p, "repetition_penalty": repetition_penalty})
 
         # Create a HuggingFaceEndPoints instance with the corresponding endpoint parameters
-        llm =
+        llm = HuggingFaceEndPoints(
             endpoint_url=f"https://api-inference.huggingface.co/models/{MODEL_NAME_HF}",
             api_key=hf_token,
-
+            temperature=0.5,
+            max_length=512,
+            top_k=top_k,
+            top_p=top_p,
+            repetition_penalty=repetition_penalty
         )
 
         # Append the prompt to the history and turn it into a single text
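For reference, the endpoint wrapper that LangChain ships for this purpose is spelled HuggingFaceEndpoint (in the langchain_huggingface package) and expects huggingfacehub_api_token and max_new_tokens rather than api_key and max_length. Below is a minimal sketch of the equivalent construction, not the commit's code: it assumes that package is installed, and the model id, token source, and sampling values are placeholders standing in for whatever app.py defines elsewhere.

import os
from langchain_huggingface import HuggingFaceEndpoint

MODEL_NAME_HF = "mistralai/Mistral-7B-Instruct-v0.2"  # placeholder model id (assumption)
hf_token = os.getenv("HF_READ_TOKEN")                 # placeholder env variable (assumption)

# Build the endpoint-backed LLM; parameter names follow HuggingFaceEndpoint's API,
# so the token goes in huggingfacehub_api_token and length in max_new_tokens.
llm = HuggingFaceEndpoint(
    endpoint_url=f"https://api-inference.huggingface.co/models/{MODEL_NAME_HF}",
    huggingfacehub_api_token=hf_token,
    temperature=0.5,
    max_new_tokens=512,
    top_k=50,                # placeholder sampling values (assumptions)
    top_p=0.95,
    repetition_penalty=1.1,
)

# The assembled prompt text can then be sent through the standard Runnable interface:
# answer = llm.invoke(prompt_text)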