"""Configuration constants and prompt templates for the Kipwise Knowledge Base chatbot."""

from enum import Enum

# Import paths below match llama_index 0.9.x; in llama_index >= 0.10 the same
# classes are exposed under llama_index.core.llms and llama_index.core.prompts.
from llama_index.core.llms.types import MessageRole
from llama_index.llms.base import ChatMessage
from llama_index.prompts.base import ChatPromptTemplate


class ChatbotVersion(str, Enum):
    """OpenAI chat model identifiers the chatbot can run against."""

    CHATGPT_35 = "gpt-3.5-turbo-1106"
    CHATGPT_4 = "gpt-4-1106-preview"


class ServiceProvider(str, Enum):
    """Supported LLM service providers."""

    OPENAI = "openai"
    AZURE = "azure"


# Maximum chunk size in tokens (8191 is the input limit of OpenAI's embedding models).
CHUNK_SIZE = 8191
# Whether to load the index from a persisted vector store rather than rebuilding it.
IS_LOAD_FROM_VECTOR_STORE = True
DEFAULT_MODEL = ChatbotVersion.CHATGPT_35

# System message that frames the assistant's role.
TEXT_QA_SYSTEM_PROMPT = ChatMessage(
    content="You are an AI Assistant for Kipwise Knowledge Base.",
    role=MessageRole.SYSTEM,
)

# Question-answering template: the system message followed by a user message that
# injects the retrieved context ({context_str}) and the user's query ({query_str}).
TEXT_QA_PROMPT_TMPL_MSGS = [
    TEXT_QA_SYSTEM_PROMPT,
    ChatMessage(
        content=(
            "Context information is below.\n"
            "---------------------\n"
            "{context_str}\n"
            "---------------------\n"
            "Given the context information and not prior knowledge, "
            "answer the query in a warm, approachable manner, ensuring clarity and precision.\n"
            "Query: {query_str}\n"
            "Answer: "
        ),
        role=MessageRole.USER,
    ),
]

CHAT_TEXT_QA_PROMPT = ChatPromptTemplate(message_templates=TEXT_QA_PROMPT_TMPL_MSGS)
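

# --- Usage sketch (illustrative, not part of the original module) -------------
# CHAT_TEXT_QA_PROMPT can be rendered into a list of ChatMessage objects by
# filling the {context_str} and {query_str} placeholders via format_messages().
# The context and query strings below are hypothetical example values.
if __name__ == "__main__":
    example_messages = CHAT_TEXT_QA_PROMPT.format_messages(
        context_str="Kipwise stores team knowledge as searchable pages.",  # hypothetical context
        query_str="How does Kipwise store team knowledge?",  # hypothetical query
    )
    for message in example_messages:
        print(f"[{message.role}] {message.content}")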