ShawnAI committed
Commit 9da0ae1 · 1 Parent(s): 723f467

Update app.py

Files changed (1)
  1. app.py +88 -88
app.py CHANGED
@@ -2,10 +2,10 @@ import gradio as gr
  import random
  import time
 
- from langchain.llms import OpenAI, OpenAIChat
  from langchain.chat_models import ChatOpenAI
  from langchain.embeddings import HuggingFaceEmbeddings
  from langchain.vectorstores import Pinecone
+ from langchain.chains import LLMChain
  from langchain.chains.retrieval_qa.base import RetrievalQA
  from langchain.chains.question_answering import load_qa_chain
  import pinecone
@@ -13,7 +13,8 @@ import pinecone
  import os
  os.environ["TOKENIZERS_PARALLELISM"] = "false"
 
- OPENAI_KEY = ""
+ #OPENAI_API_KEY = ""
+ OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
  OPENAI_TEMP = 0
  PINECONE_KEY = os.environ["PINECONE_KEY"]
  PINECONE_ENV = "asia-northeast1-gcp"
@@ -28,11 +29,8 @@ LLM_HISTORY_LEN = 3
 
  BUTTON_MIN_WIDTH = 150
 
- MODEL_STATUS = "Wait for API Key to Initialize."
-
- MODEL_LOADED = "Model Loaded"
-
- MODEL_WARNING = "Please paste your OpenAI API Key from openai.com to initialize this application!"
+ MODEL_WARNING = "Please paste your OpenAI API Key from openai.com and press 'Enter' to initialize this application!"
 
 
  webui_title = """
@@ -46,35 +44,31 @@ Please insert your question and click 'Submit'
  """
 
 
- def init_model(openai_key):
+ def init_model(api_key):
      try:
-         os.environ["OPENAI_API_KEY"] = openai_key
-
-         embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")
+         if api_key and api_key.startswith("sk-") and len(api_key) > 50:
+
+             embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")
 
-         pinecone.init(api_key = PINECONE_KEY,
-                       environment = PINECONE_ENV)
+             pinecone.init(api_key = PINECONE_KEY,
+                           environment = PINECONE_ENV)
 
-         llm = OpenAI(temperature=OPENAI_TEMP,
-                      model_name="gpt-3.5-turbo-0301")
-
-         # ChatOpenAI(temperature = OPENAI_TEMP, openai_api_key = openai_key)
+             #llm = OpenAI(temperature=OPENAI_TEMP, model_name="gpt-3.5-turbo-0301")
 
-         global db
-         db = Pinecone.from_existing_index(index_name = PINECONE_INDEX,
-                                           embedding = embeddings)
-         global chain
-         chain = load_qa_chain(llm, chain_type="stuff")
-
-         global MODEL_STATUS
-         MODEL_STATUS = MODEL_LOADED
+             llm = ChatOpenAI(temperature = OPENAI_TEMP,
+                              openai_api_key = api_key)
+
+             chain = load_qa_chain(llm, chain_type="stuff")
+
+             db = Pinecone.from_existing_index(index_name = PINECONE_INDEX,
+                                               embedding = embeddings)
 
-         return openai_key, ""
+             return api_key, chain, db, None
+         else:
+             return None,None,None,None
      except Exception as e:
          print(e)
-         return "",""
+         return None,None,None,None
 
  def get_chat_history(inputs) -> str:
      res = []
@@ -82,29 +76,64 @@ def get_chat_history(inputs) -> str:
          res.append(f"Human: {human}\nAI: {ai}")
      return "\n".join(res)
 
- css = """.bigbox {
- min-height:200px;
- }"""
+ def user(user_message, history):
+     return "", history+[[user_message, None]]
 
- with gr.Blocks(css=css) as demo:
+ def bot(box_message, ref_message, chain, db, top_k):
+
+     # bot_message = random.choice(["Yes", "No"])
+     # 0 is user question, 1 is bot response
+     question = box_message[-1][0]
+     history = box_message[:-1]
+
+     if (not chain) or (not db):
+         box_message[-1][1] = MODEL_WARNING
+         return box_message, "", ""
+
+     if not ref_message:
+         ref_message = question
+         details = f"Q: {question}"
+     else:
+         details = f"Q: {question}\nR: {ref_message}"
+
+     docsearch = db.as_retriever(search_kwargs={'k':top_k})
+     docs = docsearch.get_relevant_documents(ref_message)
+
+     all_output = chain({"input_documents": docs,
+                         "question": question,
+                         "chat_history": get_chat_history(history)})
+
+     bot_message = all_output['output_text']
+
+     source = "".join([f"""<details> <summary>{doc.metadata["source"]}</summary>
+ {doc.page_content}
+
+ </details>""" for i, doc in enumerate(docs)])
+
+     #print(source)
+
+     box_message[-1][1] = bot_message
+     return box_message, "", [[details, source]]
+
+
+ with gr.Blocks(css=""".bigbox {
+ min-height:200px;
+ }""") as demo:
+     llm_chain = gr.State()
+     vector_db = gr.State()
      gr.Markdown(webui_title)
      gr.Markdown(init_message)
 
-     if OPENAI_KEY and OPENAI_KEY.startswith("sk-") and len(OPENAI_KEY) > 50:
-         api_textbox_ph = "API Founded in Environment Variable: sk-..." + OPENAI_KEY[-4:]
-         api_textbox_edit = False
-         init_model(OPENAI_KEY)
-     else:
-         api_textbox_ph = "Paste Your OpenAI API Key (sk-...) and Hit ENTER"
-         api_textbox_edit = True
-
-     api_textbox = gr.Textbox(placeholder = api_textbox_ph,
-                              interactive = api_textbox_edit,
-                              show_label=False, lines=1, type='password')
-
-     with gr.Tab("Chatbot"):
+     with gr.Row():
+         api_textbox = gr.Textbox(
+             value = OPENAI_API_KEY,
+             placeholder = "Paste Your OpenAI API Key (sk-...) and Hit ENTER",
+             show_label=False, lines=1, type='password')
+         init = gr.Button("Initialize Model").style(full_width=False)
+
+     with gr.Tab("3GPP-Chatbot"):
          with gr.Row():
              with gr.Column(scale=10):
                  chatbot = gr.Chatbot(elem_classes="bigbox")
@@ -138,51 +167,22 @@ with gr.Blocks(css=css) as demo:
          detail_panel = gr.Chatbot(label="Related Docs")
 
 
-     def user(user_message, history):
-         return "", history+[[user_message, None]]
-
-     def bot(box_message, ref_message, top_k):
-         if MODEL_STATUS != MODEL_LOADED:
-             box_message[-1][1] = MODEL_WARNING
-             return box_message, "", ""
-
-         # bot_message = random.choice(["Yes", "No"])
-         # 0 is user question, 1 is bot response
-         question = box_message[-1][0]
-         history = box_message[:-1]
-
-         if not ref_message:
-             ref_message = question
-             details = f"Q: {question}"
-         else:
-             details = f"Q: {question}\nR: {ref_message}"
-
-         #print(question, ref_message)
-         #print(history)
-         #print(get_chat_history(history))
-
-         docsearch = db.as_retriever(search_kwargs={'k':top_k})
-         docs = docsearch.get_relevant_documents(ref_message)
-         all_output = chain({"input_documents": docs,
-                             "question": question,
-                             "chat_history": get_chat_history(history)})
-         bot_message = all_output['output_text']
-         #print(docs)
-
-         source = "".join([f"""<details> <summary>{doc.metadata["source"]}</summary>
- {doc.page_content}
-
- </details>""" for i, doc in enumerate(docs)])
-
-         #print(source)
-
-         box_message[-1][1] = bot_message
-         return box_message, "", [[details, source]]
+     api_textbox.submit(init_model,
+                        api_textbox,
+                        [api_textbox, llm_chain, vector_db, chatbot])
+     init.click(init_model,
+                api_textbox,
+                [api_textbox, llm_chain, vector_db, chatbot])
 
-     submit.click(user, [query, chatbot], [query, chatbot], queue=False).then(
-         bot, [chatbot, ref, top_k], [chatbot, ref, detail_panel]
+     submit.click(user,
+                  [query, chatbot],
+                  [query, chatbot],
+                  queue=False).then(
+                  bot,
+                  [chatbot, ref, llm_chain, vector_db, top_k],
+                  [chatbot, ref, detail_panel]
      )
-     api_textbox.submit(init_model, api_textbox, [api_textbox, chatbot])
+
      clear.click(lambda: (None,None,None), None, [query, ref, chatbot], queue=False)
 
  if __name__ == "__main__":
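For orientation, the pattern this commit adopts, carrying the per-session chain and vector store through gr.State values that are passed into and returned from event handlers instead of module-level globals, boils down to the sketch below. It is a minimal illustration assuming Gradio 3.x; init_session, respond, resource_state and the echo logic are illustrative placeholders, not code from app.py.

import gradio as gr

def init_session(api_key):
    # Build whatever per-session resources are needed and hand them back
    # to Gradio; returning None leaves the State value unset.
    if api_key and api_key.startswith("sk-"):
        resource = {"key": api_key}   # stand-in for the chain / vector db objects
        return api_key, resource
    return None, None

def respond(message, history, resource):
    # The stored State value arrives here as an ordinary argument.
    if not resource:
        return history + [[message, "Please initialize first."]]
    return history + [[message, f"Echo via {resource['key'][:6]}..."]]

with gr.Blocks() as demo:
    resource_state = gr.State()       # persists across events, per browser session
    api_box = gr.Textbox(type="password", show_label=False)
    init_btn = gr.Button("Initialize")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(show_label=False)

    # The same handler is wired to both ENTER on the textbox and the button,
    # mirroring the api_textbox.submit / init.click pair in app.py.
    api_box.submit(init_session, api_box, [api_box, resource_state])
    init_btn.click(init_session, api_box, [api_box, resource_state])
    msg.submit(respond, [msg, chatbot, resource_state], chatbot)

if __name__ == "__main__":
    demo.launch()

Because gr.State is scoped to a single session, each visitor gets their own chain and vector store handles, whereas the previous global-based approach shared one set of objects (and one API key) across all users.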