Update app.py
app.py CHANGED
@@ -90,10 +90,7 @@ def get_conversational_chain():
     Begin your response now.
     """
     model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
-    prompt = PromptTemplate(
-        template=prompt_template,
-        input_variables=["industry_name", "waste_interest", "main_goals"]
-    )
+    prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
     chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
 
     return chain
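Note: the rewritten PromptTemplate declares only "context" and "question", which are the variables load_qa_chain(..., chain_type="stuff") fills in by default: the retrieved documents are stuffed into {context} and the user query into {question}. A minimal sketch of how the surrounding function could look after this change; the import paths and the shortened prompt text are assumptions, since the full prompt_template and the app's imports sit outside this hunk.

# Hypothetical sketch, not the full app.py: the prompt text is abbreviated and
# the import paths depend on the installed LangChain version.
from langchain.prompts import PromptTemplate
from langchain.chains.question_answering import load_qa_chain
from langchain_google_genai import ChatGoogleGenerativeAI

def get_conversational_chain():
    prompt_template = """
    Answer using only the information in the context below.

    Context:
    {context}

    Question:
    {question}

    Begin your response now.
    """
    model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
    prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
    chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)

    return chain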
@@ -109,19 +106,9 @@ def user_input(user_question):
     new_db = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
     docs = new_db.similarity_search(user_question)
 
-    # Extract or set defaults for required variables
-    industry_name = "cement"  # Example default; replace or parse from user_question if needed
-    waste_interest = "general waste"  # Example default; replace or parse from user_question if needed
-    main_goals = "cost savings and sustainability"  # Example default; replace or parse from user_question if needed
-
     chain = get_conversational_chain()
 
-    response = chain({
-        "input_documents": docs,
-        "industry_name": industry_name,
-        "waste_interest": waste_interest,
-        "main_goals": main_goals
-    }, return_only_outputs=True)
+    response = chain({"input_documents": docs, "question": user_question}, return_only_outputs=True)
 
     return {"response": response["output_text"]}
 
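With the prompt no longer referencing industry_name, waste_interest, or main_goals, the hard-coded defaults are dropped and the chain is called with the two keys the "stuff" QA chain expects: "input_documents" for the retrieved chunks and "question" for the raw query. A sketch of the resulting user_input, assuming the FAISS index was built with Google's embedding model (the embeddings setup is outside this hunk):

# Sketch of the updated retrieval step; embedding model name and imports are assumptions.
from langchain_community.vectorstores import FAISS
from langchain_google_genai import GoogleGenerativeAIEmbeddings

def user_input(user_question):
    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")  # assumed model
    new_db = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
    docs = new_db.similarity_search(user_question)

    chain = get_conversational_chain()
    # "input_documents" feeds {context}; "question" feeds {question} in the prompt above.
    response = chain({"input_documents": docs, "question": user_question}, return_only_outputs=True)
    return {"response": response["output_text"]}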
@@ -149,7 +136,7 @@ def main():
     st.caption("Example: 'Which waste product can I use for my cement industry?'")
 
     if conc:
-        user_question = f"{conc}. And Please give me suggestions, in this context.Act Like Ai advisor who can give
+        user_question = f"{conc}. And Please give me suggestions, in this context.Act Like Ai advisor who can give suggestions on Which Waste Can be Useful for their Industry or assist them to lear about waste management? Include how much they can save using the waste instead of the raw material and the benefits of using it. Give in bangladesh context and Give only the suggestions in the response ."
         result = user_input(user_question)
         if "error" in result:
             st.error(result["error"])
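This change keeps the advisor instruction appended to the user's text on every request, so the instruction travels with the question instead of living in the prompt template. A minimal Streamlit sketch of the surrounding handler; the widget that produces conc and the else branch are assumptions (they are outside this hunk), and the instruction string is paraphrased from the diff.

# Hypothetical Streamlit handler around this change.
import streamlit as st

conc = st.text_input("Tell us about your industry and the waste you are interested in")  # assumed widget
st.caption("Example: 'Which waste product can I use for my cement industry?'")

if conc:
    user_question = (
        f"{conc}. Act like an AI advisor: suggest which waste could be useful for this "
        "industry or help the user learn about waste management, estimate how much they "
        "could save by replacing raw material with that waste, and list the benefits, "
        "in a Bangladesh context. Give only the suggestions."
    )
    result = user_input(user_question)
    if "error" in result:
        st.error(result["error"])
    else:
        st.write(result["response"])  # assumed display of the answer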