Llama.ui / app.py
# -*- coding: utf-8 -*-
import streamlit as st
import subprocess
def run_llama(user_input):
    # Build the dialog prompt and run the llama.cpp binary (./main) with the quantized model.
    # The prompt keeps the original Spanish role labels "Usuario:" / "Asistente:".
    prompt = ("Transcript of a dialog, where the User interacts with an Assistant named Bob. "
              "Bob is helpful, kind, honest, good at writing, and never fails to answer the "
              "User's requests immediately and with precision. Usuario: " + user_input + ". Asistente:")
    output_str = subprocess.check_output(['./main', '-m', 'qunt4_0.bin', '-p', prompt]).decode('utf-8')
    # Keep only the text generated after the final "Asistente:" marker.
    response = output_str.split("Asistente:")[-1].strip()
    return response
st.title("Llama Model")
input_text = st.text_input("Input Text", "")
if st.button("Run"):
    output = run_llama(input_text)
    st.write(output)
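# Usage note (a minimal sketch, assuming the llama.cpp binary `./main` and the
# quantized model file `qunt4_0.bin` sit next to this script): launch the UI with
#   streamlit run app.py
# and open the local URL that Streamlit prints in a browser.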