import gradio as gr
import openai
from dotenv import load_dotenv
import os
import time
from gradio_client import Client
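
# Markdown shown as the title and description of the Gradio interface.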
title = "# Welcome to 🙋🏻♂️Tonic's🕵🏻♂️Tulu🪴Plant👩🏻⚕️Doctor!"
description = """Here you can use Bulbi, an OpenAI agent that helps you save your plants, powered by [Allen-AI](https://huggingface.co/allenai/tulu-2-dpo-70b)'s [allenai/tulu-2-dpo-13b](https://huggingface.co/allenai/tulu-2-dpo-13b).
Use [Tulu](https://huggingface.co/allenai/tulu-2-dpo-7b) to fix your plants!
### How to use:
- Introduce your🌵plant below.
- Be as🌿descriptive as possible.
- **Respond with additional🗣️information when prompted.**
- Save your plants with👨🏻⚕️Bulbi Plant Doctor!
### Join us:
[Join my active builders' server on Discord](https://discord.gg/VqTxc76K3u). Let's build together!
Big thanks to the 🤗Hugging Face organisation for the🫂Community Grant."""
examples = [
    ["My Eucalyptus tree is struggling outside in the cold weather in Europe", True, None]
]
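
# Load the OpenAI API key and Assistant ID from the environment (a .env file is supported via python-dotenv).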
load_dotenv()
openai.api_key = os.getenv('OPENAI_API_KEY')
assistant_id = os.getenv('ASSISTANT_ID')
client = openai.OpenAI(api_key=openai.api_key)

thread_ids = {}
current_thread_id = None

gradio_client = Client("https://tonic1-tulu.hf.space/--replicas/tjvh5/")
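
# Send the user's question to the OpenAI Assistant, wait for the run to finish,
# then hand the assistant's answer to the hosted Tulu Space for a final summary.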
def ask_openai(question, start_new_thread=True, selected_thread_id=None):
    global thread_ids
    try:
        # Reuse the selected thread when possible; otherwise create a new one.
        if start_new_thread or selected_thread_id not in thread_ids:
            thread = client.beta.threads.create()
            current_thread_id = thread.id
            thread_ids[current_thread_id] = thread.id
        else:
            current_thread_id = thread_ids[selected_thread_id]
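
        # Post the question to the thread and kick off an assistant run.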
        client.beta.threads.messages.create(
            thread_id=current_thread_id,
            role="user",
            content=question,
        )
        run = client.beta.threads.runs.create(
            thread_id=current_thread_id,
            assistant_id=assistant_id,
        )
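
        # Poll the run status until it completes or the timeout elapses.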
        response_received = False
        timeout = 150
        start_time = time.time()
        while not response_received and time.time() - start_time < timeout:
            run_status = client.beta.threads.runs.retrieve(
                thread_id=current_thread_id,
                run_id=run.id,
            )
            if run_status.status == 'completed':
                response_received = True
            else:
                time.sleep(4)

        if not response_received:
            return "Response timed out."
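
        # Walk the run steps to find the assistant's reply message.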
        steps = client.beta.threads.runs.steps.list(
            thread_id=current_thread_id,
            run_id=run.id,
        )
        if steps.data:
            last_step = steps.data[-1]
            if last_step.type == 'message_creation':
                message_id = last_step.step_details.message_creation.message_id
                message = client.beta.threads.messages.retrieve(
                    thread_id=current_thread_id,
                    message_id=message_id,
                )
                if message.content and message.content[0].type == 'text':
                    response_text = message.content[0].text.value
                else:
                    return "No response."
            else:
                return "No response."
        else:
            return "No response."
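
        # Forward the assistant's answer to the hosted Tulu Space for a summary.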
        final_result = gradio_client.predict(
            response_text,
            "I am Tulu, an Expert Plant Doctor, I will exactly summarize the information you provide to me.",
            450, 0.4, 0.9, 0.9, False, fn_index=0
        )
        return final_result
    except Exception as e:
        return f"An error occurred: {str(e)}"
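
# Wire the handler into a simple Gradio interface.
# Note: the thread dropdown choices are captured once at construction time, so it starts empty.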
iface = gr.Interface(
    title=title,
    description=description,
    fn=ask_openai,
    inputs=[
        gr.Textbox(lines=5, placeholder="Hi there, I have a plant that's..."),
        gr.Checkbox(label="Start a new conversation thread"),
        gr.Dropdown(label="Select previous thread", choices=list(thread_ids.keys())),
    ],
    outputs=gr.Markdown(),
    examples=examples,
)

iface.launch()
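
# To run this Space locally (assuming the file is saved as app.py), provide the two
# secrets in a .env file next to it:
#   OPENAI_API_KEY=...
#   ASSISTANT_ID=...
# then start the app with `python app.py`.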