# kipwise-llm / gradio-app.py
# Uploaded via huggingface_hub by cowcow02 (commit 1ab26fb, verified)
import gradio as gr
import llama_index
import openai
import phoenix as px
from llama_index import ServiceContext, VectorStoreIndex
from llama_index import set_global_service_context
from llama_index.agent import OpenAIAgent
from llama_index.chat_engine.types import ChatMode
from llama_index.ingestion import IngestionPipeline
from llama_index.tools import QueryEngineTool
from llama_index.vector_stores.qdrant import QdrantVectorStore
from chatbot import Chatbot, IndexBuilder
from constants import CHAT_TEXT_QA_PROMPT, TEXT_QA_SYSTEM_PROMPT, CHUNK_SIZE, DEFAULT_MODEL, \
IS_LOAD_FROM_VECTOR_STORE
from environments import OPENAI_API_KEY, QDRANT_COLLECTION_NAME
from qdrant import client as qdrant_client
from service_provider_config import get_service_provider_config
# Start the Arize Phoenix observability UI and register it as the global
# LlamaIndex trace handler so every LLM/retrieval call below is traced.
px.launch_app()
llama_index.set_global_handler("arize_phoenix")
openai.api_key = OPENAI_API_KEY
# Resolve the LLM + embedding model for the configured default model, then
# install a global ServiceContext so all index/chat-engine construction in
# this module shares the same chunk size and models.
llm, embedding_model = get_service_provider_config(model_name=DEFAULT_MODEL)
service_context = ServiceContext.from_defaults(
    chunk_size=CHUNK_SIZE,
    llm=llm,
    embed_model=embedding_model,
)
set_global_service_context(service_context)
class KipIndexBuilder(IndexBuilder):
    """Index builder backed by a Qdrant collection.

    Either reuses embeddings already stored in the collection
    (``is_load_from_vector_store``) or runs an ingestion pipeline to embed
    and persist freshly loaded documents.
    """

    def _load_documents(self):
        # TODO: implement logic to import documents into qdrant - API feeding logic to consider
        pass

    def _setup_service_context(self):
        super()._setup_service_context()

    def _setup_vector_store(self):
        # Point the store at the shared Qdrant client and this builder's collection.
        self.vector_store = QdrantVectorStore(
            client=qdrant_client,
            collection_name=self.vdb_collection_name,
        )
        super()._setup_vector_store()

    def _setup_index(self):
        super()._setup_index()
        if self.is_load_from_vector_store:
            # Embeddings already live in Qdrant — just wrap the store.
            self.index = VectorStoreIndex.from_vector_store(self.vector_store)
            print("set up index from vector store")
        else:
            # Fresh build: embed the loaded documents and push them into the
            # vector store, then expose the store as an index.
            ingestion = IngestionPipeline(
                transformations=[self.embed_model],
                vector_store=self.vector_store,
            )
            ingestion.run(documents=self.documents, show_progress=True)
            self.index = VectorStoreIndex.from_vector_store(self.vector_store)
class KipToolChatbot(Chatbot):
    """Chatbot that answers through an OpenAI agent holding a retrieval tool."""

    DENIED_ANSWER_PROMPT = ""
    SYSTEM_PROMPT = ""
    CHAT_EXAMPLES = []

    def _setup_observer(self):
        # Observability is configured globally (Phoenix handler); nothing per-bot.
        pass

    def _setup_index(self):
        super()._setup_index()

    def _setup_query_engine(self):
        super()._setup_query_engine()
        # Query engine that answers with the shared QA prompt template.
        engine = self.index.as_query_engine(text_qa_template=CHAT_TEXT_QA_PROMPT)
        self.query_engine = engine

    def _setup_tools(self):
        super()._setup_tools()
        # Wrap the query engine so the agent can invoke retrieval as a tool.
        self.tools = QueryEngineTool.from_defaults(query_engine=self.query_engine)

    def _setup_chat_engine(self):
        super()._setup_chat_engine()
        self.chat_engine = OpenAIAgent.from_tools(
            tools=[self.tools],
            llm=llm,
            similarity_top_k=1,
            verbose=True,
        )
class KipContextChatbot(KipToolChatbot):
    """Variant that injects retrieved context into the system prompt
    (CONTEXT chat mode) instead of routing retrieval through a tool call."""

    def _setup_chat_engine(self):
        self.chat_engine = self.index.as_chat_engine(
            chat_mode=ChatMode.CONTEXT,
            similarity_top_k=5,
            system_prompt=TEXT_QA_SYSTEM_PROMPT.content,
            text_qa_template=CHAT_TEXT_QA_PROMPT,
        )
class KipSimpleChatbot(KipToolChatbot):
    """Baseline variant: plain LLM chat with no retrieval at all."""

    def _setup_chat_engine(self):
        self.chat_engine = self.index.as_chat_engine(chat_mode=ChatMode.SIMPLE)
# One shared Qdrant-backed index builder reused by all three chatbots below.
index_builder = KipIndexBuilder(vdb_collection_name=QDRANT_COLLECTION_NAME,
                                embed_model=embedding_model,
                                is_load_from_vector_store=IS_LOAD_FROM_VECTOR_STORE)
# Three chat strategies over the same index: agent+tool retrieval,
# context-stuffing, and a vanilla no-retrieval baseline.
kip_chatbot = KipToolChatbot(model_name=DEFAULT_MODEL, index_builder=index_builder)
kip_chatbot_context = KipContextChatbot(model_name=DEFAULT_MODEL, index_builder=index_builder)
kip_chatbot_simple = KipSimpleChatbot(model_name=DEFAULT_MODEL, index_builder=index_builder)
def vote(data: gr.LikeData):
    """Show a toast acknowledging the user's up/down vote on a response."""
    message = (
        "You up-voted this response: " if data.liked
        else "You down-voted this response: "
    )
    gr.Info(message + data.value)
# Build the demo UI: one tab per chat strategy.
#
# Fix: the original created a standalone `gr.Chatbot()` outside the Blocks
# layout and attached `.like(vote, ...)` to it — that component was never
# rendered, so the vote callback could never fire.  Attach the callback to
# each ChatInterface's own chatbot component instead.
with gr.Blocks() as demo:
    gr.Markdown("# Kipwise LLM demo")
    with gr.Tab("Using relevant context sent to system prompt"):
        context_interface = gr.ChatInterface(
            kip_chatbot_context.stream_chat,
            examples=kip_chatbot.CHAT_EXAMPLES,
        )
        context_interface.chatbot.like(vote, None, None)
    with gr.Tab("Using function calling as tool to retrieve"):
        function_call_interface = gr.ChatInterface(
            kip_chatbot.stream_chat,
            examples=kip_chatbot.CHAT_EXAMPLES,
        )
        function_call_interface.chatbot.like(vote, None, None)
    with gr.Tab("Vanilla ChatGPT without modification"):
        vanilla_interface = gr.ChatInterface(
            kip_chatbot_simple.stream_chat,
            examples=kip_chatbot.CHAT_EXAMPLES,
        )

# Bind on all interfaces; queue disabled so responses stream directly.
demo.queue(False).launch(server_name='0.0.0.0', share=False)