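"""Tonic's Tulu Plant Doctor (Hugging Face Space).

An OpenAI Assistants-API agent ("Bulbi") diagnoses plant problems; its answer
is then summarized by a hosted Tulu 2 DPO Space before being shown to the user.
"""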
import gradio as gr
import openai
from dotenv import load_dotenv
import os
import time
from gradio_client import Client
title = "# Welcome to 🙋🏻♂️Tonic's🕵🏻♂️Tulu🪴Plant👩🏻⚕️Doctor!"
description = """Here you can use Bulbi - an OpenAI agent that helps you save your plants with [Allen-AI](https://huggingface.co/allenai/tulu-2-dpo-70b) [allenai/tulu-2-dpo-13b](https://huggingface.co/allenai/tulu-2-dpo-13b)
Use [Tulu](https://huggingface.co/allenai/tulu-2-dpo-7b) to fix your plants!
### How to use:
- Introduce your🌵plant below.
- Be as🌿descriptive as possible.
- **Respond with additional🗣️information when prompted.**
- Save your plants with👨🏻⚕️Bulbi Plant Doctor!
### Join us:
[Join my active builders' server on discord](https://discord.gg/VqTxc76K3u). Let's build together!
Big thanks to 🤗Huggingface Organisation for the🫂Community Grant"""
examples = [
    ["My Eucalyptus tree is struggling outside in the cold weather in Europe", True, None]
]
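
# Secrets are read from a local .env file via python-dotenv: OPENAI_API_KEY
# authenticates the OpenAI client, ASSISTANT_ID selects the pre-configured
# "Bulbi" assistant.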
load_dotenv()
openai.api_key = os.getenv('OPENAI_API_KEY')
assistant_id = os.getenv('ASSISTANT_ID')
client = openai.OpenAI(api_key=openai.api_key)
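
# Conversation bookkeeping: every thread created in this session is recorded
# so the "Select previous thread" dropdown can offer it for reuse.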
thread_ids = {}
current_thread_id = None
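
# Downstream summarizer: a hosted Tulu 2 Space reached via gradio_client.
# The hard-coded /--replicas/... URL pins one replica and may go stale.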
gradio_client = Client("https://tonic1-tulu.hf.space/--replicas/tjvh5/")

def ask_openai(question, start_new_thread=True, selected_thread_id=None):
    global thread_ids
    try:
        # Reuse the selected thread when possible; otherwise create a new one.
        if start_new_thread or selected_thread_id not in thread_ids:
            thread = client.beta.threads.create()
            current_thread_id = thread.id
            thread_ids[current_thread_id] = thread.id
        else:
            current_thread_id = thread_ids[selected_thread_id]
        # Post the user's question and start an assistant run on the thread.
        client.beta.threads.messages.create(
            thread_id=current_thread_id,
            role="user",
            content=question,
        )
        run = client.beta.threads.runs.create(
            thread_id=current_thread_id,
            assistant_id=assistant_id,
        )
        # Poll the run until it completes or the timeout elapses.
        response_received = False
        timeout = 150
        start_time = time.time()
        while not response_received and time.time() - start_time < timeout:
            run_status = client.beta.threads.runs.retrieve(
                thread_id=current_thread_id,
                run_id=run.id,
            )
            if run_status.status == 'completed':
                response_received = True
            else:
                time.sleep(4)
        if not response_received:
            return "Response timed out."
        # Extract the assistant's reply from the run's message-creation step.
        steps = client.beta.threads.runs.steps.list(
            thread_id=current_thread_id,
            run_id=run.id,
        )
        if not steps.data:
            return "No response."
        last_step = steps.data[-1]
        if last_step.type != 'message_creation':
            return "No response."
        message_id = last_step.step_details.message_creation.message_id
        message = client.beta.threads.messages.retrieve(
            thread_id=current_thread_id,
            message_id=message_id,
        )
        if not (message.content and message.content[0].type == 'text'):
            return "No response."
        response_text = message.content[0].text.value
        # Pass the diagnosis through the hosted Tulu Space for a final summary.
        final_result = gradio_client.predict(
            response_text,
            "I am Tulu, an Expert Plant Doctor, I will exactly summarize the information you provide to me.",
            450, 0.4, 0.9, 0.9, False,  # generation settings expected positionally by the Space
            fn_index=0,
        )
        return final_result
    except Exception as e:
        return f"An error occurred: {str(e)}"

iface = gr.Interface(
    title=title,
    description=description,
    fn=ask_openai,
    inputs=[
        gr.Textbox(lines=5, placeholder="Hi there, I have a plant that's..."),
        gr.Checkbox(label="Start a new conversation thread"),
        # Choices are captured once at startup, so threads created in this
        # session only appear in the dropdown after a restart.
        gr.Dropdown(label="Select previous thread", choices=list(thread_ids.keys())),
    ],
    outputs=gr.Markdown(),
    examples=examples,
)
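
# On Hugging Face Spaces this file runs as app.py; locally, `python app.py`
# serves the UI at http://127.0.0.1:7860, Gradio's default address.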
iface.launch()