alexkueck committed on
Commit
74d8fa9
·
verified ·
1 Parent(s): 0fc5a38

Update utils.py

Browse files
Files changed (1) hide show
  1. utils.py +5 -5
utils.py CHANGED
@@ -400,17 +400,17 @@ def rag_chain(llm, prompt, retriever):
400
  outputs = modell_rag.generate(inputs['input_ids'], max_length=150, num_beams=2, early_stopping=True)
401
  answer = tokenizer_rag.decode(outputs[0], skip_special_tokens=True)
402
  """
403
- llm_chain = LLMChain(llm = llm, prompt = RAG_CHAIN_PROMPT)
404
- answer = llm_chain.run({"context": combined_content, "question": prompt})
405
-
406
- #answer = query(llm, {"inputs": input_text,})
 
407
 
408
  # Erstelle das Ergebnis-Dictionary
409
  result = {
410
  "answer": answer,
411
  "relevant_docs": most_relevant_docs
412
  }
413
-
414
  else:
415
  # keine relevanten Dokumente gefunden
416
  result = {
 
400
  outputs = modell_rag.generate(inputs['input_ids'], max_length=150, num_beams=2, early_stopping=True)
401
  answer = tokenizer_rag.decode(outputs[0], skip_special_tokens=True)
402
  """
403
+ #Alternative, wenn llm direkt übergeben....................................
404
+ #llm_chain = LLMChain(llm = llm, prompt = RAG_CHAIN_PROMPT)
405
+ #answer = llm_chain.run({"context": combined_content, "question": prompt})
406
+ #Alternative, wenn mit API_URL ...........................................
407
+ answer = query(llm, {"inputs": input_text,})
408
 
409
  # Erstelle das Ergebnis-Dictionary
410
  result = {
411
  "answer": answer,
412
  "relevant_docs": most_relevant_docs
413
  }
 
414
  else:
415
  # keine relevanten Dokumente gefunden
416
  result = {