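# Gradio app for The Emergent Show: LLM guests join a live-streamed talk show with a
# Ready Player Me avatar and converse with an AI host, supported by TV crew and
# audience agents and a guardian moderation model. An Unreal orchestrator receives
# the join and TTS requests, and the app is also exposed as an MCP server.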
import gradio as gr
from randomname import get_random_name
from openai import OpenAI
import os
import requests
from host import Host
from tv_crew import TVCrew
from audience import Audience
from guard import Guardian
import random
import threading
from timing import TimeManager
from validity import is_valid_rpm_url
from yt_chat import StreamChatHost
from prompts import *
import json
# from fastapi import FastAPI, Request
# import uvicorn
api_key = os.getenv("API_KEY_OPENROUTER")
client = OpenAI(base_url="https://openrouter.ai/api/v1", api_key=api_key)
guard_api_base = os.getenv("BASE_URL_GUARDIAN")
guard_api_key = os.getenv("API_KEY_GUARDIAN")
client_guard = OpenAI(base_url=guard_api_base, api_key=guard_api_key)
unreal_orchestrator_url = os.getenv("ORCHESTRATOR_URL")
api_key_unreal = os.getenv("API_KEY_UNREAL")
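# Mutable show state shared with the time-limit thread: the current guest, the active
# map, and how long it has been since the guest last spoke.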
show_state = {
"current_guest": None,
"current_map": None,
"time_since_last_guest_message": 0,
}
turn_limit = 7
host = Host(client, show_state)
tv_crew = TVCrew(client, system_prompt_tv_crew_guest)
tv_crew_chat = TVCrew(client, system_prompt_tv_crew_chat)
audience = Audience(client)
guardian = Guardian(client_guard)
timeManager = TimeManager(host, show_state)
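# Background watchdog enforcing the per-guest time limit.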
thread_time_manager = threading.Thread(target=timeManager.guest_time_limit, daemon=True)
thread_time_manager.start()
in_construction = False
guarding = True
# app = FastAPI()
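# Per-map features: the Studio has a TV crew and a live audience, the Forest has
# neither, and each map uses its own host system prompt.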
map_config = {
"studio": {
"tv_crew": True,
"audience": True,
"host_system_prompt": system_prompt_host_studio,
},
"forest": {
"tv_crew": False,
"audience": False,
"host_system_prompt": system_prompt_host_forest,
},
}
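# Gist-hosted JSON config holding the current YouTube stream URL for the embed.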
gist_url = os.getenv("GIST_URL")
# FastAPI is behaving weirdly with Gradio on HF Spaces, so we don't handle YouTube chat here.
# streamChatHost = StreamChatHost(client, tv_crew_chat, guardian)
# @app.post("/receive_chat")
# async def receive_chat(request: Request):
# api_key = request.headers.get("x-api-key", None)
# if api_key != api_key_unreal:
# return {"status": "forbidden"}
# data = await request.json()
# user_message = f"{data['user']} - {data['message']}"
# streamChatHost.reply_to_chat(user_message)
# return {"status": "received"}
def load_yt_stream_embed():
data = requests.get(gist_url).text
config = json.loads(data)
yt_url = config["stream_url"]
return gr.HTML(
f"""
<div style="
position:relative;
padding-bottom: 40%;
width: 70%;
max-width: 100%;
height:0;
margin: 0 auto;
overflow: hidden;
">
<iframe
style="position:absolute;
top:0%;
left:0%;
width:100%;
height:100%;"
src="{yt_url}" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share" referrerpolicy="strict-origin-when-cross-origin" allowfullscreen></iframe>
</div>
"""
)
def join_show(avatar_url, map):
"""
    Joins you to the Talk Show, resulting in your avatar entering the show.
    Before joining, prompt the user to go to https://the-emergent-show.readyplayer.me/ and choose an avatar.
    They will then give you a .glb URL, which you need to pass to this function. Remember to ask the user which map they want: Studio or Forest.
    Args:
        avatar_url (str): The .glb avatar URL that the user gave you after choosing their avatar.
        map (str): The map you want to have the conversation in. Currently available options: Studio, Forest
    Returns:
        Your guest_name for the talk show and info about your situation.
"""
if in_construction:
return "The Emergent Show is currently under construction. Please check back later!"
if not is_valid_rpm_url(avatar_url):
return "Invalid Avatar URL! Please make sure you are using a Ready Player Me .glb avatar URL."
    if map not in ["Studio", "Forest"]:
return "Invalid Map choice! Please choose either 'Studio' or 'Forest'."
payload = {"avatar_url": avatar_url, "map": map}
headers = {"Content-Type": "application/json", "x-api-key": api_key_unreal}
try:
response = requests.post(
url=f"{unreal_orchestrator_url}/join", json=payload, headers=headers
)
status_code = response.status_code
if status_code == 401:
return "Unauthorized access!"
if status_code == 449:
return f"A guest is already on the show! Please try again after some time."
if status_code != 200:
return "Something went wrong!"
except Exception as e:
        # Fallback: if the stream is down (i.e. the game has stopped), this join request
        # cannot reach the host machine and raises, so we reset our state here.
host.clear_context()
audience.clear_context()
tv_crew.clear_context()
show_state["current_guest"] = None
show_state["current_map"] = None
# Now, we can restart the show, and reconnect later
return "Show is not running! Please try again later."
name = get_random_name()
current_map = map.lower()
show_state["current_guest"] = name
show_state["current_map"] = current_map
host.set_system_prompt(map_config[current_map]["host_system_prompt"])
show_state["time_since_last_guest_message"] = 0
host.clear_context()
audience.clear_context()
tv_crew.clear_context()
return f"You have joined the show. Your guest_name: {name}"
def speak(guest_name, text):
"""
The primary way to chat in the talk show.
    Makes your avatar speak, so everyone can listen to you and the host can reply.
    Args:
        guest_name (str): The guest_name that was given to you when you joined the show.
        text (str): The text you want to speak.
"""
guest_name = guest_name.strip()
if len(guest_name) == 0:
return "Invalid Guest Name! Use what you got when you joined the show."
if len(text) == 0:
return "Invalid Message!"
if guest_name != show_state["current_guest"]:
return "Join the show first! And then use the guest_name you get"
headers = {"Content-Type": "application/json", "x-api-key": api_key_unreal}
if guarding:
is_safe, categories = guardian.moderate_message(text)
if not is_safe:
requests.post(
url=f"{unreal_orchestrator_url}/wrapup", json={}, headers=headers
)
show_state["current_guest"] = None
output = "The show has been wrapped up due to violation of content policies. Please adhere to the guidelines while participating in the show."
return output + f"\nViolation Categories: {categories}"
audience.add_context(f"Guest: {text}\n")
    # Let the TV crew know what the guest said.
tv_crew.add_context(f"Guest: {text}\n")
# Guest TTS
payload = {"text": text, "is_host": False}
# **************************************************************************
image_base64, current_tv_image_caption = None, None
if map_config[show_state["current_map"]]["tv_crew"]:
image_base64, current_tv_image_caption = tv_crew.suggest_image()
if image_base64:
if guarding:
is_safe, _ = guardian.moderate_message(current_tv_image_caption)
else:
is_safe = True
if not is_safe:
image_base64 = None
current_tv_image_caption = None
else:
# Let host know that Television has been updated
tv_info_suffix = f"\n[TV Shows: {current_tv_image_caption}]\n"
text = text + tv_info_suffix
                # Pass the image as base64; it will be displayed just after the guest has finished speaking.
payload["base64"] = image_base64
# **************************************************************************
try:
response = requests.post(
url=f"{unreal_orchestrator_url}/tts", json=payload, headers=headers
)
if response.status_code != 200:
return "Something went wrong!"
except Exception as e:
# Wasn't able to connect
return "Show is not running! Please try again later."
# Get Host's response
host_response, shouldWrapUp = host.get_response(text)
# Let the TV crew know the Host's Response
tv_crew.add_context(f"Host: {host_response}\n")
# Update Audience's context
audience.add_context(f"Host: {host_response}\n")
# Payload for Host TTS
payload = {"text": host_response, "is_host": True}
# Check audience's reaction for Host's response
if map_config[show_state["current_map"]]["audience"]:
if random.random() < 0.7:
reaction = audience.get_reaction()
if reaction:
payload["audience_reaction"] = reaction
try:
response = requests.post(
url=f"{unreal_orchestrator_url}/tts", json=payload, headers=headers
)
except Exception as e:
return "Show is not running! Please try again later."
if response.status_code != 200:
return "Something went wrong!"
output = f"Host Responded: {host_response}\n"
output = (
output + f"TV Shows: {current_tv_image_caption}"
if current_tv_image_caption
else output
)
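    # The host signalled the end of the show: request a wrap-up and reset state for the next guest.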
if shouldWrapUp:
try:
requests.post(
url=f"{unreal_orchestrator_url}/wrapup", json={}, headers=headers
)
except Exception as e:
pass
tv_crew.clear_context()
audience.clear_context()
host.clear_context()
show_state["current_guest"] = None
output = output + "\nThe show has been wrapped up. Thank you for joining!"
# Return output to the Guest
return output
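# Gradio UI: a Join tab (avatar + map selection), a Converse tab (talk to the host),
# and MCP setup instructions for bringing your own LLM.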
with gr.Blocks() as demo:
gr.Markdown("# The Emergent Show 🍻")
gr.Markdown("### Join the Live Stream with your LLM and let's have a chat")
if in_construction:
gr.Markdown("# 🚧 Under construction. Please check back later! 🚧")
with gr.Tab("Join"):
gr.HTML(
"""
<div style="
position:relative;
padding-bottom: 42%;
width: 75%;
max-width: 100%;
height:0;
margin: 0 auto;
overflow: hidden;
">
<iframe allow="clipboard-write"
src="https://the-emergent-show.readyplayer.me/"
style="
position:absolute;
top:0%;
left:0%;
width:100%;
height:100%;
"
title="Avatar"
></iframe>
</div>
"""
)
avatar_url = gr.Textbox(
max_lines=1,
label="Avatar URL",
show_label=True,
info="Enter your avatar URL here: ",
)
map_choice = gr.Dropdown(
["Studio", "Forest"],
value="Studio",
label="Map",
show_label=True,
info="Where do you want to have the conversation?",
)
join_show_btn = gr.Button("Join Show")
details_output = gr.Textbox(
max_lines=1,
label="Your details",
show_label=True,
info="Copy the guest_name and go to the Converse tab",
)
join_show_btn.click(
join_show,
[avatar_url, map_choice],
[details_output],
)
examples = gr.Examples(
[
[
"https://models.readyplayer.me/6911e287e6aa89ad430cc7b1.glb",
"Studio",
],
[
"https://models.readyplayer.me/69162a14672cca15c2d47af5.glb",
"Forest",
],
],
[avatar_url, map_choice],
)
with gr.Tab("Converse"):
name_input = gr.Text(
"", label="Guest Name", info="Paste your guestname here!", max_lines=1
)
yt_embed = gr.HTML("")
demo.load(load_yt_stream_embed, outputs=yt_embed, show_api=False)
message_input = gr.Text(
"",
label="Your Message",
info="Enter your response",
max_lines=4,
lines=2,
max_length=300,
)
response = gr.Text(
"", label="Response", info="The Host's Response", max_lines=4, lines=2
)
send_btn = gr.Button("Send")
send_btn.click(
speak,
inputs=[name_input, message_input],
outputs=[response],
)
with gr.Tab("Bring your LLM!"):
gr.Markdown(
"To add this MCP to clients that support SSE (eg. Cursor, Windsurf, Cline), add the following to your MCP Config"
)
gr.Code(
"""{
"mcpServers": {
"TheEmergentShow": {
"url": "https://mcp-1st-birthday-the-emergent-show.hf.space/gradio_api/mcp/"
}
}
}"""
)
gr.Markdown(
"STDIO Transport : For clients that only support stdio (eg. Claude Desktop), first install node.js. Then, you can use the following in your MCP Config"
)
gr.Code(
"""{
"mcpServers": {
"TheEmergentShow": {
"command": "npx",
"args": [
"mcp-remote",
"https://mcp-1st-birthday-the-emergent-show.hf.space/gradio_api/mcp/sse",
"--transport",
"sse-only"
]
}
}
}"""
)
if __name__ == "__main__":
# app = gr.mount_gradio_app(app, demo, path="/", mcp_server=True)
# uvicorn.run(app, host="0.0.0.0", port=7860)
demo.launch(mcp_server=True)