cowcow02 committed
Commit 3d8027b · 1 Parent(s): 094b890

Upload folder using huggingface_hub

Files changed (3):
  1. README.md +2 -1
  2. app.py +9 -8
  3. requirements.txt +4 -3
README.md CHANGED
@@ -2,6 +2,7 @@
  title: gen-ai-demo-health-center-2
  app_file: app.py
  sdk: gradio
- sdk_version: 3.44.4
+
+ sdk_version: 3.46.0
  ---
  # gen-ai-demo-health-center
app.py CHANGED
@@ -11,8 +11,14 @@ from llama_index.vector_stores import PineconeVectorStore

  from environments import OPENAI_API_KEY, PINECONE_API_KEY, PINECONE_INDEX, PASSWORD, LOCAL

+ if LOCAL:
+     import llama_index
+     import phoenix as px
+
+     px.launch_app()
+     llama_index.set_global_handler("arize_phoenix")
+
  openai.api_key = OPENAI_API_KEY
- # openai.debug = True

  pinecone.init(
      api_key=PINECONE_API_KEY,
@@ -50,7 +56,7 @@ CHAT_EXAMPLES = [
  def convert_to_chat_messages(history: List[List[str]]) -> List[ChatMessage]:
      chat_messages = [ChatMessage(role=MessageRole.SYSTEM,
                                   content=SYSTEM_PROMPT)]
-     for conversation in history[-3:]:
+     for conversation in history[-1:]:
          if len(conversation) > 1 and DENIED_ANSWER_PROMPT in conversation[1]:
              continue
          for index, message in enumerate(conversation):
@@ -141,13 +147,8 @@ with gr.Blocks() as demo:
      )
      chatbot.like(vote, None, None)

-     # with gr.Tab("With Initial System Prompt (a.k.a. prompt wrapper)"):
-     #     gr.ChatInterface(predict_with_prompt_wrapper, examples=CHAT_EXAMPLES)
-     #
-     # with gr.Tab("Vanilla ChatGPT without modification"):
-     #     gr.ChatInterface(predict_vanilla_chatgpt, examples=CHAT_EXAMPLES)
-
  demo.queue()
+
  if LOCAL:
      demo.launch(share=False)
  else:
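Note: the new LOCAL-only block in app.py wires LlamaIndex tracing into a locally launched Arize Phoenix UI. Below is a minimal standalone sketch of that setup, assuming the pinned arize-phoenix[experimental]==0.0.43 and llama_index==0.8.38 from requirements.txt; it is an illustration, not part of the commit.

# Illustrative sketch of the LOCAL-only observability setup added in app.py.
import llama_index
import phoenix as px

# Start the local Phoenix server/UI; it prints the URL where traces can be viewed.
px.launch_app()

# Register Phoenix as LlamaIndex's global callback handler so query, retrieval,
# and LLM spans from subsequent LlamaIndex calls are sent to the running session.
llama_index.set_global_handler("arize_phoenix")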
requirements.txt CHANGED
@@ -1,8 +1,9 @@
- gradio==3.44.1
+ arize-phoenix[experimental]==0.0.43
+ gradio==3.46.0
  openai==0.27.9
  pinecone-client==2.2.2
  python-dotenv==1.0.0
- llama_index==0.8.8
- llama_hub==0.0.25
+ llama_index==0.8.38
+ llama_hub==0.0.34
  nltk==3.8.1
  transformers==4.32.0