Update utils.py
Browse files
utils.py
CHANGED
@@ -376,10 +376,12 @@ def query(api_llm, payload):
|
|
376 |
|
377 |
def llm_chain2(prompt, context):
|
378 |
full_prompt = RAG_CHAIN_PROMPT.format(context=context, question=prompt)
|
379 |
-
inputs =
|
|
|
380 |
#Generiere die Antwort
|
381 |
outputs = modell_rag.generate(inputs['input_ids'], max_length=1024, num_beams=2, early_stopping=True)
|
382 |
answer = tokenizer_rag.decode(outputs[0], skip_special_tokens=True)
|
|
|
383 |
return answer
|
384 |
|
385 |
|
|
|
def llm_chain2(prompt, context):
    """Answer a question with the RAG model, grounded in the given context.

    Formats the module-level RAG_CHAIN_PROMPT with the retrieved context and
    the user question, runs beam-search generation on ``modell_rag``, and
    decodes the result back to text.

    Parameters:
        prompt: The user question inserted into the prompt template.
        context: Retrieved document text inserted into the prompt template.

    Returns:
        The decoded answer string (special tokens stripped).
    """
    full_prompt = RAG_CHAIN_PROMPT.format(context=context, question=prompt)
    # Encode with the RAG model's own tokenizer so the ids match the model's
    # vocabulary. NOTE(review): the original encoded with
    # tokenizer_summarization but decoded with tokenizer_rag — a tokenizer
    # mismatch that garbles generation unless both tokenizers are identical.
    # Inputs longer than the 1024-token limit are truncated.
    inputs = tokenizer_rag(full_prompt, return_tensors="pt", max_length=1024, truncation=True)
    # Generate the answer (beam search, width 2, stop when all beams finish).
    outputs = modell_rag.generate(inputs['input_ids'], max_length=1024, num_beams=2, early_stopping=True)
    answer = tokenizer_rag.decode(outputs[0], skip_special_tokens=True)
    return answer