# ChatExample / app.py
import os
import pandas as pd
import gradio as gr
from langchain.vectorstores import Chroma
from langchain.embeddings import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA
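# NOTE: These imports follow the legacy LangChain (pre-0.1) module layout that this
# script was written against. On newer releases the same classes are expected to live
# in langchain_community / langchain_openai instead (an assumption about the installed
# version -- pin langchain accordingly if reproducing this Space).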
def Loading():
    # Interim status message shown while the vector store is being prepared.
    return "๋ฐ์ดํ„ฐ ๋กœ๋”ฉ ์ค‘..."
def LoadData(openai_key):
    # An empty textbox arrives as "", not None, so require a non-blank key.
    if openai_key is not None and openai_key.strip():
        os.environ["OPENAI_API_KEY"] = openai_key
        # Load the pre-built Chroma store persisted under 'realdb_LLM'.
        persist_directory = 'realdb_LLM'
        embedding = OpenAIEmbeddings()
        vectordb = Chroma(
            persist_directory=persist_directory,
            embedding_function=embedding
        )
        global retriever
        retriever = vectordb.as_retriever(search_kwargs={"k": 1})
        return "์ค€๋น„ ์™„๋ฃŒ"
    else:
        return "์‚ฌ์šฉํ•˜์‹œ๋Š” API Key๋ฅผ ์ž…๋ ฅํ•˜์—ฌ ์ฃผ์‹œ๊ธฐ ๋ฐ”๋ž๋‹ˆ๋‹ค."
# ์ฑ—๋ด‡์˜ ๋‹ต๋ณ€์„ ์ฒ˜๋ฆฌํ•˜๋Š” ํ•จ์ˆ˜
def respond(message, chat_history, temperature, top_p):
try:
print(temperature)
qa_chain = RetrievalQA.from_chain_type(
llm=OpenAI(temperature=temperature, top_p=top_p),
# llm=OpenAI(temperature=0.4),
# llm=ChatOpenAI(temperature=0),
chain_type="stuff",
retriever=retriever
)
result = qa_chain(message)
bot_message = result['result']
# ์ฑ„ํŒ… ๊ธฐ๋ก์— ์‚ฌ์šฉ์ž์˜ ๋ฉ”์‹œ์ง€์™€ ๋ด‡์˜ ์‘๋‹ต์„ ์ถ”๊ฐ€.
chat_history.append((message, bot_message))
return "", chat_history
except:
chat_history.append(("", "API Key ์ž…๋ ฅ ์š”๋ง"))
return " ", chat_history
# Chatbot description (header HTML)
title = """
<div style="text-align: center; max-width: 500px; margin: 0 auto;">
    <div>
        <h1>Pretraining Chatbot V2 Real</h1>
    </div>
    <p style="margin-bottom: 10px; font-size: 94%">
        OpenAI LLM์„ ์ด์šฉํ•œ Chatbot (Similarity)
    </p>
</div>
"""
# Styling
css = """
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""
with gr.Blocks(css=css) as UnivChatbot:
    with gr.Column(elem_id="col-container"):
        gr.HTML(title)

        # API key input and status display.
        with gr.Row():
            with gr.Column(scale=3):
                openai_key = gr.Textbox(label="Your OpenAI API key", type="password", placeholder="OpenAI Key Type", elem_id="InputKey", show_label=False, container=False)
            with gr.Column(scale=1):
                langchain_status = gr.Textbox(placeholder="Status", interactive=False, show_label=False, container=False)

        # Sampling controls passed to the LLM on every request.
        with gr.Row():
            with gr.Column(scale=4):
                temperature = gr.Slider(
                    label="Temperature",
                    minimum=0,
                    maximum=2.0,
                    step=0.01,
                    value=0.7,
                )
            with gr.Column(scale=4):
                top_p = gr.Slider(
                    label="Top_p",
                    minimum=0,
                    maximum=1,
                    step=0.01,
                    value=0.5,
                )
            with gr.Column(scale=1):
                chk_key = gr.Button("ํ™•์ธ", variant="primary")

        chatbot = gr.Chatbot(label="๋Œ€ํ•™ ์ฑ—๋ด‡์‹œ์Šคํ…œ(OpenAI LLM)", elem_id="chatbot")  # top left

        with gr.Row():
            with gr.Column(scale=9):
                msg = gr.Textbox(label="์ž…๋ ฅ", placeholder="๊ถ๊ธˆํ•˜์‹  ๋‚ด์—ญ์„ ์ž…๋ ฅํ•˜์—ฌ ์ฃผ์„ธ์š”.", elem_id="InputQuery", show_label=False, container=False)

        with gr.Row():
            with gr.Column(scale=1):
                submit = gr.Button("์ „์†ก", variant="primary")
            with gr.Column(scale=1):
                clear = gr.Button("์ดˆ๊ธฐํ™”", variant="stop")
    # chk_key.click(Loading, None, langchain_status, queue=False)
    chk_key.click(
        fn=LoadData,
        inputs=[openai_key],
        outputs=[langchain_status],
        queue=False
    )

    # Submitting the user's input calls respond.
    msg.submit(
        fn=respond,
        inputs=[msg, chatbot, temperature, top_p],
        outputs=[msg, chatbot]
    )
    submit.click(respond, [msg, chatbot, temperature, top_p], [msg, chatbot])

    # Clicking the '์ดˆ๊ธฐํ™”' (reset) button clears the chat history.
    clear.click(lambda: None, None, chatbot, queue=False)
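
# Hedged aside: launch() below uses the Gradio defaults. Outside of a Space you could
# pass UnivChatbot.launch(share=True) for a temporary public link, or call
# UnivChatbot.queue() first if concurrent users are expected -- standard Gradio
# options, not something the original script enables.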
UnivChatbot.launch()