alexkueck committed
Commit 91de823 · verified · 1 Parent(s): f1bd90c

Update app.py

Files changed (1)
  1. app.py (+2 -2)
app.py CHANGED
@@ -16,7 +16,7 @@ from langchain_community.document_loaders import PyPDFLoader, UnstructuredWordD
 from langchain_community.document_loaders.blob_loaders.youtube_audio import YoutubeAudioLoader
 #from langchain.document_loaders import GenericLoader
 from langchain.schema import AIMessage, HumanMessage
-#from langchain_community.llms import HuggingFaceHub
+from langchain_community.llms import HuggingFaceHub
 from langchain_huggingface import HuggingFaceEndpoint
 from langchain_huggingface import HuggingFaceEmbeddings
 from langchain_community.llms import HuggingFaceTextGenInference
@@ -206,7 +206,7 @@ def generate_text (prompt, chatbot, history, vektordatenbank, retriever, top_p=0
 #oder an Hugging Face --------------------------
 print("HF Anfrage.......................")
 model_kwargs={"temperature": 0.5, "max_length": 512, "num_return_sequences": 1, "top_k": top_k, "top_p": top_p, "repetition_penalty": repetition_penalty}
-llm = HuggingFaceEndpoint(repo_id=repo_id, model_kwargs=model_kwargs)
+llm = HuggingFaceHub(repo_id=repo_id, model_kwargs=model_kwargs)
 #llm = HuggingFaceChain(model=MODEL_NAME_HF, model_kwargs={"temperature": 0.5, "max_length": 128})
 # Erstelle eine Pipeline mit den gewünschten Parametern
 #pipe = pipeline("text-generation", model=MODEL_NAME_HF , model_kwargs=model_kwargs)
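For context, a minimal sketch of what the changed line does after this commit. The repo_id value and the concrete top_k/top_p/repetition_penalty numbers below are placeholders (app.py resolves them elsewhere), and a HUGGINGFACEHUB_API_TOKEN is assumed to be set in the environment:

from langchain_community.llms import HuggingFaceHub

# Placeholder model id; not taken from the diff.
repo_id = "HuggingFaceH4/zephyr-7b-beta"

# Same parameter names as in the diff, with illustrative values.
model_kwargs = {
    "temperature": 0.5,
    "max_length": 512,
    "num_return_sequences": 1,
    "top_k": 50,
    "top_p": 0.95,
    "repetition_penalty": 1.1,
}

# HuggingFaceHub reads HUGGINGFACEHUB_API_TOKEN from the environment
# if no huggingfacehub_api_token argument is passed.
llm = HuggingFaceHub(repo_id=repo_id, model_kwargs=model_kwargs)

# LangChain LLMs implement the Runnable interface, so invoke() returns the completion string.
print(llm.invoke("Summarize what LangChain is in one sentence."))

Note that HuggingFaceHub is the older langchain_community wrapper, while HuggingFaceEndpoint (still imported above from langchain_huggingface) is its newer replacement; this commit swaps the instantiation back to the community class without removing the other import.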