Update app.py
app.py CHANGED
@@ -393,6 +393,11 @@ def invoke (prompt, file, history, rag_option, model_option, openai_api_key, k=3
                 #print("OpenAI zeichnen.......................")
                 #llm = ChatOpenAI(model_name = MODEL_NAME_OAI_ZEICHNEN, openai_api_key = openai_api_key, temperature=temperature)#, top_p = top_p)
                 data = {"inputs": prompt}
+                response = requests.post(API_URL, headers=HEADERS, json=data)
+                result = response.content
+                image = Image.open(io.BytesIO(result))
+                history[-1][1] = image
+                return history, "Stop: Success"
             else:
                 print("OpenAI normal.......................")
                 llm = ChatOpenAI(model_name = MODEL_NAME, openai_api_key = openai_api_key, temperature=temperature)#, top_p = top_p)
@@ -421,35 +426,26 @@ def invoke (prompt, file, history, rag_option, model_option, openai_api_key, k=3
             result = rag_chain(llm, history_text_und_prompt, db)
         else:
             print("LLM aufrufen ohne RAG: ...........")
-
-            response = requests.post(API_URL, headers=HEADERS, json=data)
-            result = response.content
-            image = Image.open(io.BytesIO(result))
-        else:
-            result = llm_chain(llm, history_text_und_prompt)
+            result = llm_chain(llm, history_text_und_prompt)


     except Exception as e:
         raise gr.Error(e)


-
-
-
-
-
-    history
-
-
-
-
-
-
-
-        yield history, "Stop: Success"
-        return
-    except:
-        pass
+    #Antwort als Stream ausgeben... wenn Textantwort gefordert
+    history[-1][1] = ""
+    for character in result:
+        history[-1][1] += character
+        time.sleep(0.03)
+        yield history, "Generating"
+        if shared_state.interrupted:
+            shared_state.recover()
+            try:
+                yield history, "Stop: Success"
+                return
+            except:
+                pass

################################################
#GUI
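
The first hunk gives the previously stubbed "zeichnen" (drawing) branch a working body: the prompt is POSTed as-is to an image-generation inference endpoint, and the raw bytes that come back are decoded into a PIL image stored in the last chat turn. A minimal standalone sketch of that branch, assuming API_URL and HEADERS point at a Hugging Face style text-to-image endpoint with a bearer token (both values are placeholders here; the real ones are defined elsewhere in app.py):

import io

import requests
from PIL import Image

# Placeholders: the real values live elsewhere in app.py.
API_URL = "https://api-inference.huggingface.co/models/<model-id>"
HEADERS = {"Authorization": "Bearer <hf-token>"}

def draw(prompt):
    # Same shape as the diff: the prompt goes out as the "inputs" field ...
    data = {"inputs": prompt}
    response = requests.post(API_URL, headers=HEADERS, json=data)
    response.raise_for_status()  # not in the diff; surfaces error responses early
    # ... and the response body is treated as raw image bytes.
    return Image.open(io.BytesIO(response.content))

One caveat: because invoke also contains yield statements further down, Python treats the whole function as a generator, so the new return history, "Stop: Success" at line 400 ends the iteration without delivering that tuple to the caller; in a generator, yielding the tuple and then returning would be the safer pattern.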
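
In the second hunk, the image-handling lines that had leaked into the no-RAG text branch are removed (old lines 424-429) so that branch simply calls llm_chain, and a typewriter-style output loop is added after the try/except (new lines 436-441): the last chat turn is reset to an empty string and rebuilt one character at a time, yielding the updated history to the UI on every step. A minimal sketch of that loop in isolation (the wrapper name stream_answer is illustrative, not from the diff):

import time

def stream_answer(history, result):
    # history is a Gradio-style chat list of [user, bot] pairs;
    # result is the complete answer text from llm_chain or rag_chain.
    history[-1][1] = ""
    for character in result:
        history[-1][1] += character
        time.sleep(0.03)  # purely cosmetic pacing
        yield history, "Generating"
    yield history, "Stop: Success"

Note that the full answer already exists in result before the loop starts, so this is simulated streaming for the UI rather than token-by-token generation; the 0.03 s sleep only sets the animation speed.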
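
The tail of the new block (lines 442-448) makes the animation interruptible: each iteration checks shared_state.interrupted, and if a stop was requested it calls shared_state.recover() to reset the flag, yields a final "Stop: Success" status, and returns. The diff does not show how shared_state is defined; a minimal object that would support this pattern (an assumption, not the app's actual code) looks like:

class State:
    # Shared flag: set by a Stop button callback in the UI,
    # polled by the streaming loop in invoke.
    interrupted = False

    def interrupt(self):
        self.interrupted = True

    def recover(self):
        self.interrupted = False

shared_state = State()

The try/except: pass around the final yield mirrors the diff and presumably guards against the caller having already closed the generator by the time that last status is emitted.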