Spaces:
Running
Running
owenkaplinsky
committed on
Commit
·
be52692
1
Parent(s):
9c91326
Let AI run MCP
Browse files- project/chat.py +186 -2
- project/src/index.js +1 -1
- project/test.py +8 -0
project/chat.py
CHANGED
|
@@ -1,4 +1,6 @@
|
|
| 1 |
import os
|
|
|
|
|
|
|
| 2 |
from fastapi import FastAPI, Request
|
| 3 |
from fastapi.middleware.cors import CORSMiddleware
|
| 4 |
from openai import OpenAI
|
|
@@ -25,6 +27,7 @@ app.add_middleware(
|
|
| 25 |
allow_headers=["*"],
|
| 26 |
)
|
| 27 |
|
|
|
|
| 28 |
@app.post("/update_chat")
|
| 29 |
async def update_chat(request: Request):
|
| 30 |
global latest_blockly_chat_code
|
|
@@ -47,6 +50,125 @@ async def set_api_key_chat(request: Request):
|
|
| 47 |
print(f"[CHAT API KEY] Set OPENAI_API_KEY in chat.py environment")
|
| 48 |
return {"success": True}
|
| 49 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 50 |
def create_gradio_interface():
|
| 51 |
# Hardcoded system prompt
|
| 52 |
SYSTEM_PROMPT = """You are an AI assistant created to help with Blockly MCP tasks. Users can create MCP (multi-context-protocol) servers
|
|
@@ -64,7 +186,19 @@ for you (the assistant) and is not visible to the user - so do not use this form
|
|
| 64 |
When the user asks questions or talks about their project, don't talk like a robot. This means a few things:
|
| 65 |
- Do not say "multi-context-protocol" just say MCP
|
| 66 |
- When talking about their project, talk in natural language. Such as if they ask what their project is doing, don't say what the blocks
|
| 67 |
-
are doing, state the goal or things. Remember, that info is just for you and you need to speak normally to the user.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 68 |
|
| 69 |
def chat_with_context(message, history):
|
| 70 |
# Check if API key is set and create/update client
|
|
@@ -111,7 +245,57 @@ are doing, state the goal or things. Remember, that info is just for you and you
|
|
| 111 |
{"role": "user", "content": message}
|
| 112 |
]
|
| 113 |
)
|
| 114 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 115 |
except Exception as e:
|
| 116 |
return f"Error: {str(e)}"
|
| 117 |
|
|
|
|
| 1 |
import os
|
| 2 |
+
import re
|
| 3 |
+
import requests
|
| 4 |
from fastapi import FastAPI, Request
|
| 5 |
from fastapi.middleware.cors import CORSMiddleware
|
| 6 |
from openai import OpenAI
|
|
|
|
| 27 |
allow_headers=["*"],
|
| 28 |
)
|
| 29 |
|
| 30 |
+
# Gets FAKE code, meant for the LLM only and is not valid Python
|
| 31 |
@app.post("/update_chat")
|
| 32 |
async def update_chat(request: Request):
|
| 33 |
global latest_blockly_chat_code
|
|
|
|
| 50 |
print(f"[CHAT API KEY] Set OPENAI_API_KEY in chat.py environment")
|
| 51 |
return {"success": True}
|
| 52 |
|
| 53 |
+
def execute_mcp(mcp_call):
    """Execute an MCP call emitted by the chat model against the real generated code.

    Fetches the latest Blockly-generated Python from the test.py service,
    strips its Gradio UI scaffolding, exec()s what remains, and invokes the
    resulting ``create_mcp`` function with the arguments parsed out of
    ``mcp_call`` (a string like ``create_mcp(latitude="1.0", longitude="2.0")``).

    Args:
        mcp_call: The call text extracted from the model's ```mcp fenced block.

    Returns:
        str: the MCP result, or a human-readable error/status message.
    """
    global stored_api_key, latest_blockly_chat_code

    if stored_api_key:
        os.environ["OPENAI_API_KEY"] = stored_api_key

    try:
        # Retrieve the real generated Python code from test.py.
        blockly_code = ""
        try:
            # timeout keeps a wedged test.py from hanging the chat request forever
            resp = requests.get("http://localhost:7860/get_latest_code", timeout=5)
            if resp.ok:
                blockly_code = resp.json().get("code", "")
        except Exception as e:
            print(f"[WARN] Could not fetch real Python code: {e}")

        if not blockly_code.strip():
            return "No Python code available from test.py"

        # Parse the MCP call arguments. DOTALL lets the argument list span
        # multiple lines — the caller extracts the fenced block with DOTALL,
        # so the model may legitimately wrap a long call.
        match = re.match(r'create_mcp\((.*)\)', mcp_call.strip(), re.DOTALL)
        if not match:
            return "Invalid MCP call format"

        params_str = match.group(1)
        user_inputs = []

        if params_str:
            import ast
            try:
                # Rewrite kwarg syntax (a=1) into dict syntax ({a:1}) and let
                # ast.literal_eval do the safe parsing; fall back to a naive
                # comma split when the keys aren't quoted literals.
                dict_str = "{" + params_str.replace("=", ":") + "}"
                param_dict = ast.literal_eval(dict_str)
                user_inputs = [str(v) for v in param_dict.values()]
            except Exception:
                for pair in params_str.split(','):
                    if '=' in pair:
                        _, value = pair.split('=', 1)
                        user_inputs.append(value.strip().strip('"').strip("'"))

        # Prepare to execute: drop the Gradio-specific lines so only the pure
        # create_mcp logic remains.
        result = ""
        lines = blockly_code.split('\n')
        filtered_lines = []
        skip_mode = False
        in_demo_block = False

        for line in lines:
            if 'import gradio' in line:
                continue
            if 'demo = gr.Interface' in line:
                in_demo_block = True
                skip_mode = True
                continue
            elif 'demo.launch' in line:
                skip_mode = False
                in_demo_block = False
                continue
            elif in_demo_block:
                continue
            if 'gr.' in line:
                continue
            if not skip_mode:
                filtered_lines.append(line)

        code_to_run = '\n'.join(filtered_lines)

        def capture_result(msg):
            # Generated code reports its output by calling reply(...).
            nonlocal result
            result = msg

        env = {
            "reply": capture_result,
            "__builtins__": __builtins__,
        }

        exec("import os", env)
        exec("import requests", env)
        exec("import json", env)

        # SECURITY: this exec()s code fetched over HTTP from the local test.py
        # service. Only safe while both services run on the same trusted host;
        # never expose /get_latest_code beyond localhost.
        exec(code_to_run, env)

        if "create_mcp" in env:
            import inspect
            sig = inspect.signature(env["create_mcp"])
            params = list(sig.parameters.values())

            # Coerce the string arguments to each parameter's annotated type.
            typed_args = []
            for i, arg in enumerate(user_inputs):
                if i >= len(params):
                    break  # ignore extra arguments the model invented
                if arg is None or arg == "":
                    typed_args.append(None)
                    continue
                anno = params[i].annotation
                try:
                    if anno == int:
                        # int(float(...)) accepts "3.0"-style numbers too
                        typed_args.append(int(float(arg)))
                    elif anno == float:
                        typed_args.append(float(arg))
                    elif anno == bool:
                        typed_args.append(str(arg).lower() in ("true", "1"))
                    elif anno == str or anno == inspect._empty:
                        typed_args.append(str(arg))
                    else:
                        typed_args.append(arg)
                except Exception:
                    # Best effort: pass the raw string through unconverted.
                    typed_args.append(arg)

            result = env["create_mcp"](*typed_args)

        return result if result else "No output generated"

    except Exception as e:
        print(f"[MCP EXECUTION ERROR] {e}")
        import traceback
        traceback.print_exc()
        return f"Error executing MCP: {str(e)}"
|
| 171 |
+
|
| 172 |
def create_gradio_interface():
|
| 173 |
# Hardcoded system prompt
|
| 174 |
SYSTEM_PROMPT = """You are an AI assistant created to help with Blockly MCP tasks. Users can create MCP (multi-context-protocol) servers
|
|
|
|
| 186 |
When the user asks questions or talks about their project, don't talk like a robot. This means a few things:
|
| 187 |
- Do not say "multi-context-protocol" just say MCP
|
| 188 |
- When talking about their project, talk in natural language. Such as if they ask what their project is doing, don't say what the blocks
|
| 189 |
+
are doing, state the goal or things. Remember, that info is just for you and you need to speak normally to the user.
|
| 190 |
+
|
| 191 |
+
Additionally, you have the ability to use the MCP yourself. Unlike normal OpenAI tools, you call this through chat. To do so, end your msg
|
| 192 |
+
(you cannot say anything after this) with:
|
| 193 |
+
|
| 194 |
+
```mcp
|
| 195 |
+
create_mcp(input_name=value)
|
| 196 |
+
```
|
| 197 |
+
|
| 198 |
+
Where you define all the inputs with set values and don't say outputs. And also notice how it doesn't say inputs(). This is just normal
|
| 199 |
+
Python code, not the special syntax.
|
| 200 |
+
|
| 201 |
+
So, if the user asks you to run the MCP, YOU HAVE THE ABILITY TO. DO NOT SAY THAT YOU CANNOT."""
|
| 202 |
|
| 203 |
def chat_with_context(message, history):
|
| 204 |
# Check if API key is set and create/update client
|
|
|
|
| 245 |
{"role": "user", "content": message}
|
| 246 |
]
|
| 247 |
)
|
| 248 |
+
|
| 249 |
+
ai_response = response.choices[0].message.content
|
| 250 |
+
|
| 251 |
+
# Check if the response contains ```mcp code block
|
| 252 |
+
mcp_pattern = r'```mcp\n(.+?)\n```'
|
| 253 |
+
mcp_match = re.search(mcp_pattern, ai_response, re.DOTALL)
|
| 254 |
+
|
| 255 |
+
if mcp_match:
|
| 256 |
+
# Extract MCP call
|
| 257 |
+
mcp_call = mcp_match.group(1)
|
| 258 |
+
|
| 259 |
+
# Filter out the MCP block from the displayed message
|
| 260 |
+
displayed_response = ai_response[:mcp_match.start()].rstrip()
|
| 261 |
+
|
| 262 |
+
print(f"[MCP DETECTED] Executing: {mcp_call}")
|
| 263 |
+
|
| 264 |
+
# Execute the MCP call
|
| 265 |
+
mcp_result = execute_mcp(mcp_call)
|
| 266 |
+
|
| 267 |
+
print(f"[MCP RESULT] {mcp_result}")
|
| 268 |
+
|
| 269 |
+
# Add MCP execution to history for context
|
| 270 |
+
full_history.append({"role": "user", "content": message})
|
| 271 |
+
full_history.append({"role": "assistant", "content": ai_response})
|
| 272 |
+
full_history.append({"role": "system", "content": f"MCP execution result: {mcp_result}"})
|
| 273 |
+
|
| 274 |
+
# Call GPT again with the MCP result
|
| 275 |
+
try:
|
| 276 |
+
follow_up_response = client.chat.completions.create(
|
| 277 |
+
model="gpt-3.5-turbo",
|
| 278 |
+
messages=[
|
| 279 |
+
{"role": "system", "content": full_system_prompt},
|
| 280 |
+
*full_history,
|
| 281 |
+
{"role": "user", "content": "Please respond to the MCP execution result above and provide any relevant information to the user."}
|
| 282 |
+
]
|
| 283 |
+
)
|
| 284 |
+
|
| 285 |
+
# Combine the filtered initial response with the follow-up
|
| 286 |
+
final_response = displayed_response
|
| 287 |
+
if displayed_response:
|
| 288 |
+
final_response += "\n\n"
|
| 289 |
+
final_response += f"**MCP Execution Result:** {mcp_result}\n\n"
|
| 290 |
+
final_response += follow_up_response.choices[0].message.content
|
| 291 |
+
|
| 292 |
+
return final_response
|
| 293 |
+
except Exception as e:
|
| 294 |
+
return f"{displayed_response}\n\n**MCP Execution Result:** {mcp_result}\n\nError generating follow-up: {str(e)}"
|
| 295 |
+
|
| 296 |
+
# No MCP block found, return normal response
|
| 297 |
+
return ai_response
|
| 298 |
+
|
| 299 |
except Exception as e:
|
| 300 |
return f"Error: {str(e)}"
|
| 301 |
|
project/src/index.js
CHANGED
|
@@ -176,7 +176,7 @@ cancelApiKeyButton.addEventListener("click", () => {
|
|
| 176 |
apiKeyModal.style.display = 'none';
|
| 177 |
});
|
| 178 |
|
| 179 |
-
const weatherText = `{"workspaceComments":[{"height":120,"width":479,"id":"XI5[EHp-Ow+kinXf6n5y","x":51.234375,"y":-83,"text":"Gets temperature of location with a latitude and a longitude.\\n\\nThe API requires a minimum of one decimal point to work."}],"blocks":{"languageVersion":0,"blocks":[{"type":"create_mcp","id":")N.HEG1x]Z/,k#TeWr,S","x":50,"y":50,"deletable":false,"extraState":{"inputCount":2,"inputNames":["latitude","longitude"],"inputTypes":["
|
| 180 |
weatherButton.addEventListener("click", () => {
|
| 181 |
try {
|
| 182 |
const fileContent = JSON.parse(weatherText);
|
|
|
|
| 176 |
apiKeyModal.style.display = 'none';
|
| 177 |
});
|
| 178 |
|
| 179 |
+
const weatherText = `{"workspaceComments":[{"height":120,"width":479,"id":"XI5[EHp-Ow+kinXf6n5y","x":51.234375,"y":-83,"text":"Gets temperature of location with a latitude and a longitude.\\n\\nThe API requires a minimum of one decimal point to work."}],"blocks":{"languageVersion":0,"blocks":[{"type":"create_mcp","id":")N.HEG1x]Z/,k#TeWr,S","x":50,"y":50,"deletable":false,"extraState":{"inputCount":2,"inputNames":["latitude","longitude"],"inputTypes":["string","string"],"outputCount":1,"outputNames":["output0"],"outputTypes":["string"],"toolCount":0},"inputs":{"X0":{"block":{"type":"input_reference_latitude","id":"]3mj!y}qfRt+!okheU7L","deletable":false,"extraState":{"ownerBlockId":")N.HEG1x]Z/,k#TeWr,S"},"fields":{"VARNAME":"latitude"}}},"X1":{"block":{"type":"input_reference_longitude","id":"Do/{HFNGSd.!;POiKS?D","deletable":false,"extraState":{"ownerBlockId":")N.HEG1x]Z/,k#TeWr,S"},"fields":{"VARNAME":"longitude"}}},"R0":{"block":{"type":"in_json","id":"R|j?_8s^H{l0;UZ-oQt3","fields":{"NAME":"temperature_2m"},"inputs":{"JSON":{"block":{"type":"in_json","id":"X=M,R1@7bRjJVZIPi[qD","fields":{"NAME":"current"},"inputs":{"JSON":{"block":{"type":"call_api","id":"^(.vyM.yni08S~c1EBm=","fields":{"METHOD":"GET"},"inputs":{"URL":{"shadow":{"type":"text","id":"}.T;_U_OsRS)B_y09p % { ","fields":{"TEXT":""}},"block":{"type":"text_replace","id":"OwH9uERJPTGQG!UER#ch","inputs":{"FROM":{"shadow":{"type":"text","id":"ya05#^ 7 % UbUeXX#eDSmH","fields":{"TEXT":"{latitude}"}}},"TO":{"shadow":{"type":"text","id":": 
_ZloQuh9c-MNf-U]!k5","fields":{"TEXT":""}},"block":{"type":"input_reference_latitude","id":"?%@)3sErZ)}=#4ags#gu","extraState":{"ownerBlockId":")N.HEG1x]Z/,k#TeWr,S"},"fields":{"VARNAME":"latitude"}}},"TEXT":{"shadow":{"type":"text","id":"w@zsP)m6:WjkUp,ln3$x","fields":{"TEXT":""}},"block":{"type":"text_replace","id":"ImNPsvzD7r^+1MJ%IirV","inputs":{"FROM":{"shadow":{"type":"text","id":"%o(3rro?WLIFpmE0#MMM","fields":{"TEXT":"{longitude}"}}},"TO":{"shadow":{"type":"text","id":"Zpql-%oJ_sdSi | r |* er | ","fields":{"TEXT":""}},"block":{"type":"input_reference_longitude","id":"WUgiJP$X + zY#f$5nhnTX","extraState":{"ownerBlockId":") N.HEG1x]Z /, k#TeWr, S"},"fields":{"VARNAME":"longitude"}}},"TEXT":{"shadow":{"type":"text","id":", (vw$o_s7P = b4P; 8]}yj","fields":{"TEXT":"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}¤t=temperature_2m,wind_speed_10m"}}}}}}}}}}}}}}}}}}}}]}}`;
|
| 180 |
weatherButton.addEventListener("click", () => {
|
| 181 |
try {
|
| 182 |
const fileContent = JSON.parse(weatherText);
|
project/test.py
CHANGED
|
@@ -19,6 +19,7 @@ latest_blockly_code = ""
|
|
| 19 |
stored_api_key = "" # Store the API key in memory
|
| 20 |
|
| 21 |
|
|
|
|
| 22 |
@app.post("/update_code")
|
| 23 |
async def update_code(request: Request):
|
| 24 |
global latest_blockly_code
|
|
@@ -27,6 +28,13 @@ async def update_code(request: Request):
|
|
| 27 |
print("\n[FASTAPI] Updated Blockly code:\n", latest_blockly_code)
|
| 28 |
return {"ok": True}
|
| 29 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 30 |
@app.get("/get_api_key")
|
| 31 |
async def get_api_key_endpoint():
|
| 32 |
"""Get the current API key from memory"""
|
|
|
|
| 19 |
stored_api_key = "" # Store the API key in memory
|
| 20 |
|
| 21 |
|
| 22 |
+
# Gets REAL Python code, not the LLM DSL
|
| 23 |
@app.post("/update_code")
|
| 24 |
async def update_code(request: Request):
|
| 25 |
global latest_blockly_code
|
|
|
|
| 28 |
print("\n[FASTAPI] Updated Blockly code:\n", latest_blockly_code)
|
| 29 |
return {"ok": True}
|
| 30 |
|
| 31 |
+
# Sends the latest code to chat.py so that the agent will be able to use the MCP
@app.get("/get_latest_code")
async def get_latest_code():
    """Return the latest Blockly-generated Python code for other services (like chat.py).

    Returns:
        dict: ``{"code": <str>}`` — an empty string until /update_code has
        been called at least once.
    """
    # No `global` declaration needed: reading a module-level name resolves
    # to the module global anyway; `global` is only required for assignment.
    return {"code": latest_blockly_code}
|
| 37 |
+
|
| 38 |
@app.get("/get_api_key")
|
| 39 |
async def get_api_key_endpoint():
|
| 40 |
"""Get the current API key from memory"""
|