AlbertoFH98 committed
Commit d3f801e · 1 Parent(s): 144cc2d
Update app.py
app.py CHANGED
@@ -190,10 +190,31 @@ RESPUESTA: """
     st.session_state.messages.append({"role": "user", "content": prompt})
     with st.chat_message("assistant"):
         if 'GPT' not in genre:
-
+            if prompt.lower() == 'resume':
+                llm_response = utils.summarise_doc(TRANSCRIPTION_PATH, model='llama')
+                st.markdown(llm_response)
+            else:
+                llm_response = qa_chain(prompt)['result']
+                llm_response = utils.process_llm_response(llm_response)
+                st.markdown(llm_response)
+
+                start_time_str_list = []; start_time_seconds_list = []; end_time_seconds_list = []
+                for response in llm_response.split('\n'):
+                    if re.search(r'(\d{2}:\d{2}:\d{2}(.\d{6})?)', response) != None:
+                        start_time_str, start_time_seconds, _, end_time_seconds = utils.add_hyperlink_and_convert_to_seconds(response)
+                        start_time_str_list.append(start_time_str)
+                        start_time_seconds_list.append(start_time_seconds)
+                        end_time_seconds_list.append(end_time_seconds)
+
+                if start_time_str_list:
+                    for start_time_seconds, start_time_str, end_time_seconds in zip(start_time_seconds_list, start_time_str_list, end_time_seconds_list):
+                        st.markdown("__Fragmento: " + start_time_str + "__")
+                        _, container, _ = st.columns([SIDE, WIDTH, SIDE])
+                        with container:
+                            st_player(youtube_video_url.replace("?enablejsapi=1", "") + f'?start={start_time_seconds}&end={end_time_seconds}')
         else:
             if prompt.lower() == 'resume':
-                llm_response = utils.summarise_doc(TRANSCRIPTION_PATH)
+                llm_response = utils.summarise_doc(TRANSCRIPTION_PATH, model='gpt')
                 st.markdown(llm_response)
             else:
                 llm_response = utils.get_gpt_response(TRANSCRIPTION_PATH, prompt, logger)
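The added branch answers through qa_chain, then scans the answer for HH:MM:SS timestamps and embeds the matching YouTube fragments with st_player. Below is a minimal sketch of just that timestamp-scanning step, assuming the same regex as in the diff; parse_timestamps is a hypothetical stand-in for the repo's utils.add_hyperlink_and_convert_to_seconds, which additionally builds the hyperlink text and end times.

# Minimal, self-contained sketch of the timestamp-parsing step shown in the diff.
# parse_timestamps is a hypothetical helper used only for illustration; the Space
# itself relies on utils.add_hyperlink_and_convert_to_seconds, not shown here.
import re

def parse_timestamps(llm_response):
    """Return the start times, in seconds, of every HH:MM:SS(.ffffff) match."""
    pattern = re.compile(r'\d{2}:\d{2}:\d{2}(?:\.\d{6})?')
    seconds = []
    for line in llm_response.split('\n'):
        for match in pattern.findall(line):
            h, m, s = match.split(':')
            seconds.append(int(h) * 3600 + int(m) * 60 + int(float(s)))
    return seconds

# Example: an answer citing two fragments of the transcription.
print(parse_timestamps("Fragmento 00:01:23.000000\nFragmento 00:10:05"))
# -> [83, 605]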