fastx committed
Commit bb3257b · 1 Parent(s): ec81e9c

Upload 3 files

Files changed (4)
  1. .gitattributes +1 -0
  2. app.py +56 -0
  3. index.json +3 -0
  4. requirements.txt +5 -0
.gitattributes CHANGED
@@ -32,3 +32,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+ index.json filter=lfs diff=lfs merge=lfs -text
app.py ADDED
@@ -0,0 +1,56 @@
+ # Import packages
+ import openai
+ from llama_index import SimpleDirectoryReader, GPTListIndex, GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext
+ from langchain.chat_models import ChatOpenAI
+ import gradio as gr
+ import sys
+ import os
+ import PyPDF2
+
+ #os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY
+ # OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
+
+ '''
+ def construct_index(directory_path):
+     max_input_size = 4096
+     num_outputs = 512
+     max_chunk_overlap = 20
+     chunk_size_limit = 600
+     prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
+     llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.7, model_name="gpt-3.5-turbo", max_tokens=num_outputs))
+     documents = SimpleDirectoryReader(directory_path).load_data()
+     service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)
+     index = GPTSimpleVectorIndex.from_documents(documents, service_context=service_context)
+     index.save_to_disk('index.json')
+     return index
+ '''
+
+ def chatbot(input_text, openai_api_key):
+     os.environ["OPENAI_API_KEY"] = openai_api_key
+     index = GPTSimpleVectorIndex.load_from_disk('index.json')
+     response = index.query(input_text, response_mode="compact")
+     return response.response
+
+ # chat = gr.Interface(fn=chatbot,
+ #                     inputs=gr.components.Textbox(lines=7, label="Ask your question to ChatGPT"),
+ #                     outputs="text",
+ #                     title="Custom-trained AI Chatbot for employee tax assessment 2022")
+
+ # Documentation how to make gradio interfaces: https://gradio.app/quickstart/
+
+ with gr.Blocks() as chat:
+
+     with gr.Column(elem_id="col-container"):
+         gr.Markdown("""## Trained with custom data""",
+                     elem_id="header")
+
+     with gr.Column():
+         gr.Markdown("Enter your OpenAI API Key.")
+         openai_api_key = gr.Textbox(value='', placeholder="OpenAI API Key", type="password", label="Enter OpenAI API Key")
+
+         text_input = gr.Textbox(lines=7, label="Enter your question")
+         output = gr.Textbox(label="Response")
+         greet_btn = gr.Button("Generate Response")
+         greet_btn.click(fn=chatbot, inputs=[text_input, openai_api_key], outputs=output)
+
+ chat.launch()
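For reference, the index.json that chatbot() loads can be regenerated with the construct_index helper that is left commented out above. Below is a minimal sketch of doing that outside the app, assuming the same pre-0.6 llama_index API that app.py imports; the "docs" directory name and the API key value are placeholders, not part of the commit.

# Sketch: rebuild index.json from local documents, mirroring the commented-out
# construct_index above. Uses the legacy (pre-0.6) llama_index API; "docs" and
# the API key value are placeholders.
import os
from llama_index import SimpleDirectoryReader, GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext
from langchain.chat_models import ChatOpenAI

os.environ["OPENAI_API_KEY"] = "sk-..."  # replace with a real key

prompt_helper = PromptHelper(4096, 512, 20, chunk_size_limit=600)
llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.7, model_name="gpt-3.5-turbo", max_tokens=512))
documents = SimpleDirectoryReader("docs").load_data()
service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)
index = GPTSimpleVectorIndex.from_documents(documents, service_context=service_context)
index.save_to_disk("index.json")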
index.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:961ab7beda74080c295444752b998849120fee441604786fc7736ab560210cd9
+ size 14998715
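These three lines are a Git LFS pointer rather than the index itself; the roughly 15 MB index.json is fetched by Git LFS on checkout or with git lfs pull. A minimal sketch of a guard that could run before launching the app, in case the pointer is checked out without LFS (the guard is an illustrative assumption, not part of the commit):

# Sketch: fail fast if index.json is still an un-fetched Git LFS pointer.
with open("index.json", "r", errors="ignore") as f:
    head = f.read(64)
if head.startswith("version https://git-lfs.github.com/spec/v1"):
    raise RuntimeError("index.json is an LFS pointer; run `git lfs pull` to fetch the real index.")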
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ openai
+ llama-index
+ gradio
+ PyPDF2
+ PyCryptodome
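One caveat: app.py relies on the pre-0.6 llama_index interface (GPTSimpleVectorIndex, LLMPredictor, ServiceContext, save_to_disk/load_from_disk), which later releases renamed or removed, and it also imports langchain, which older llama-index versions pulled in transitively. A hedged, pinned variant of the file (the exact constraints are assumptions, not part of the commit):

openai
llama-index<0.6
langchain
gradio
PyPDF2
PyCryptodome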