param changed
ashishanand committed · Commit f3f65fe · 1 Parent(s): 85eda94
app.py CHANGED
@@ -53,8 +53,8 @@ def call_Llama_api(query, context):
                 "content": "User Question: " + query + "\n\nRelevant Excerpt(s):\n\n" + context,
             }
         ],
-        temperature=0.
-        max_tokens=
+        temperature=0.6,
+        max_tokens=200,
         top_p=1,
         stream=False,
         stop=None,
@@ -128,12 +128,15 @@ def colbert_rerank(query=None, chunks=None):
     # ... (same as your original function)
     d = ranker.rank(query=query, docs=chunks)
     reranked_chunks = [d[i].text for i in range(len(chunks))]
-    return reranked_chunks
+    return reranked_chunks
 
 def process_query(query):
     # Use global variables
     global available_car_models, collection
 
+    print("Input Query:",query)
+    print(type(query))
+
     car_model = is_car_model_available(query, available_car_models)
     if not car_model:
         return "The manual for the specified car model is not present."
@@ -158,12 +161,19 @@ def process_query(query):
 
     answer = call_Llama_api(query, final_context)
 
+    last_complete = answer.rfind('.')
+    # last_newline = answer.rfind('\n')
+    # last_complete = max(last_period, last_newline)
+
+    if last_complete != -1:
+        answer = answer[:last_complete + 1].strip()
+
     # Prepare citations
     citations = [
         f"Page {meta.get('page_number', 'N/A')}" for meta in metadatas[:5]
     ]
 
-    citations_text = "
+    citations_text = "Pages cited from:\n" + "\n".join(citations)
 
     return f"{answer}\n\n{citations_text}"
 
@@ -192,7 +202,7 @@ def initialize():
     # name=collection_name,
     # embedding_function=embedding_function
     # )
-    available_car_models = ['
+    available_car_models = ['TIAGO', 'Astor']
 
     # else:
     collection = client.get_collection(
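For reference, the two tuned decoding parameters sit inside the chat-completion call in call_Llama_api. The sketch below is a minimal reconstruction assuming an OpenAI-compatible Python client; the client object, model id, and system prompt are not visible in this commit and are placeholders, while temperature=0.6, max_tokens=200, top_p, stream, and stop come from the diff.

def call_Llama_api(query, context):
    # Sketch only: `client` and the model id are assumptions, not taken from this commit.
    completion = client.chat.completions.create(
        model="llama-3-8b-instruct",  # placeholder model id
        messages=[
            {
                "role": "system",
                "content": "Answer the question using only the provided excerpts.",  # placeholder prompt
            },
            {
                "role": "user",
                "content": "User Question: " + query + "\n\nRelevant Excerpt(s):\n\n" + context,
            }
        ],
        temperature=0.6,   # new value in this commit
        max_tokens=200,    # new value in this commit; can cut answers mid-sentence
        top_p=1,
        stream=False,
        stop=None,
    )
    return completion.choices[0].message.content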
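Because max_tokens=200 can cut the model off mid-sentence, the commit also trims the answer back to the last full stop before appending page citations. A self-contained illustration of exactly that logic, using a made-up answer string and metadata list:

# Stand-alone demo of the post-processing added to process_query in this commit.
answer = "Refer to the tyre-pressure table in the owner's manual. Check pressures monthly. Also make su"
metadatas = [{"page_number": 42}, {"page_number": 43}, {}]  # illustrative metadata

last_complete = answer.rfind('.')                    # index of the last completed sentence
if last_complete != -1:
    answer = answer[:last_complete + 1].strip()      # drops the dangling fragment "Also make su"

citations = [
    f"Page {meta.get('page_number', 'N/A')}" for meta in metadatas[:5]
]
citations_text = "Pages cited from:\n" + "\n".join(citations)

print(f"{answer}\n\n{citations_text}")
# Refer to the tyre-pressure table in the owner's manual. Check pressures monthly.
#
# Pages cited from:
# Page 42
# Page 43
# Page N/A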
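process_query gates everything on is_car_model_available, and this commit hard-codes available_car_models = ['TIAGO', 'Astor'] in initialize(). The helper's body is not part of the diff; a plausible minimal version, offered only as a guess, would be a case-insensitive substring match:

# Hypothetical implementation: the real is_car_model_available is not shown in this commit.
def is_car_model_available(query, available_car_models):
    query_upper = query.upper()
    for model in available_car_models:        # e.g. ['TIAGO', 'Astor']
        if model.upper() in query_upper:      # case-insensitive substring match
            return model
    return None                               # falsy -> "manual ... is not present" message

# Usage with the hard-coded list from this commit:
# is_car_model_available("What is the boot space of the Astor?", ['TIAGO', 'Astor']) -> 'Astor'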