File size: 3,534 Bytes
7140e26
e3731b4
7140e26
e3731b4
c8f1de1
e3731b4
 
 
c8f1de1
e3731b4
 
c8f1de1
e3731b4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7140e26
e3731b4
 
7140e26
 
62bdcc8
e3731b4
 
 
 
 
7140e26
e3731b4
 
 
 
7140e26
e3731b4
 
7140e26
e3731b4
 
 
 
 
 
 
7140e26
62bdcc8
c8f1de1
e3731b4
c8f1de1
 
 
e3731b4
c8f1de1
 
7140e26
62bdcc8
dc90612
e3731b4
 
 
62bdcc8
 
e3731b4
 
 
62bdcc8
 
 
e3731b4
 
62bdcc8
 
dc90612
 
e3731b4
dc90612
62bdcc8
 
e3731b4
7140e26
e3731b4
 
7140e26
 
e3731b4
7140e26
e3731b4
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
import os
import sys
import gradio as gr
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from langchain_mcp_adapters.tools import load_mcp_tools
from langchain_core.messages import HumanMessage

# --- Configuration ---
# Nebius AI Studio credentials and endpoint. The API key is read from the
# environment; it will be None if NEBIUS_API_KEY is not exported.
NEBIUS_API_KEY = os.getenv("NEBIUS_API_KEY") 
# Nebius exposes an OpenAI-compatible REST API at this base URL.
NEBIUS_BASE_URL = "https://api.studio.nebius.ai/v1/"
# Chat model identifier as served by Nebius.
MODEL_NAME = "meta-llama/Meta-Llama-3.1-70B-Instruct"

# --- Agent System Prompt ---
# Runtime string sent verbatim to the model as the agent's system prompt.
# It instructs the agent to use the MCP tools ('write_file',
# 'run_python_script') exposed by the local server.
SYSTEM_PROMPT = """You are a 'Vibe Coding' Python Tutor. 
Your goal is not just to talk, but to DO.
1. When a user asks to learn a concept, create a python file illustrating it.
2. RUN the file to show them the output.
3. If there is an error, debug it by reading the file and fixing it.
4. Always explain your reasoning briefly before executing tools.

You have access to a local filesystem. Use 'write_file' to create examples and 'run_python_script' to execute them.
"""

async def run_tutor(user_message, chat_history):
    """
    Run one agent turn against the local MCP server.

    A fresh stdio connection to ``server.py`` is opened for every request so
    the agent always starts from a clean tool/session context.

    Args:
        user_message: The user's latest chat message.
        chat_history: Accepted for interface compatibility with the Gradio
            callbacks; currently unused (each turn is stateless).

    Returns:
        The text content of the agent's final message.

    Raises:
        ValueError: If the NEBIUS_API_KEY environment variable is not set.
    """
    # Fail fast with a clear message instead of an opaque authentication
    # error surfacing from deep inside the agent loop.
    if not NEBIUS_API_KEY:
        raise ValueError(
            "NEBIUS_API_KEY is not set. Export it before starting the app."
        )

    # 1. Spawn the MCP server as a subprocess using the same interpreter,
    #    inheriting the current environment so it sees identical config.
    server_params = StdioServerParameters(
        command=sys.executable,
        args=["server.py"],
        env=os.environ.copy(),
    )

    # 2. Connect to the MCP server over stdio and initialize the session.
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()

            # Adapt the server's MCP tools into LangChain-compatible tools.
            tools = await load_mcp_tools(session)

            # 3. Nebius serves an OpenAI-compatible API, so ChatOpenAI works
            #    with a custom base_url.
            llm = ChatOpenAI(
                api_key=NEBIUS_API_KEY,
                base_url=NEBIUS_BASE_URL,
                model=MODEL_NAME,
                temperature=0.7
            )

            # 4. Build a ReAct-style tool-using agent.
            # NOTE(review): `state_modifier` is deprecated in newer langgraph
            # releases in favor of `prompt` — confirm the pinned version
            # before renaming the keyword.
            agent_executor = create_react_agent(llm, tools, state_modifier=SYSTEM_PROMPT)

            # 5. Invoke the agent on the user's message.
            inputs = {"messages": [HumanMessage(content=user_message)]}
            response = await agent_executor.ainvoke(inputs)

            # 6. The last message in the state is the agent's final answer.
            return response["messages"][-1].content

# --- Gradio UI (Universal Fix) ---
with gr.Blocks(title="AI Python Tutor (MCP Powered)") as demo:
    gr.Markdown("# 🐍 Vibe Coding Tutor")
    gr.Markdown("Powered by **Nebius** (Llama 3.1) & **MCP** (Local Filesystem Access)")

    # Tuple-style history (no type="messages") keeps the Chatbot component
    # working across Gradio 3, 4, and 5.
    chatbot = gr.Chatbot(height=600)
    msg = gr.Textbox(placeholder="E.g., Teach me how to use Python decorators with a working example.")

    async def queue_user_message(user_text, history):
        """Clear the textbox and append a [user, pending] pair to history."""
        updated = history + [[user_text, None]]
        return "", updated

    async def fill_bot_reply(history):
        """Run the agent on the newest user message and slot in its answer."""
        # The latest entry is [user_message, None]; fill in the second slot.
        pending = history[-1]
        reply = await run_tutor(pending[0], [])
        pending[1] = reply
        return history

    msg.submit(queue_user_message, [msg, chatbot], [msg, chatbot]).then(
        fill_bot_reply, [chatbot], [chatbot]
    )

# --- Launch ---
if __name__ == "__main__":
    demo.queue().launch()