File size: 4,422 Bytes
83c5f9d
 
 
1ce17ff
8791d59
 
1315b27
83c5f9d
 
812086d
83c5f9d
 
c508ed0
1ce17ff
 
83c5f9d
8791d59
 
1315b27
 
83c5f9d
1315b27
83c5f9d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c508ed0
 
 
 
 
 
 
 
 
 
83c5f9d
1ce17ff
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
83c5f9d
3677973
 
83c5f9d
812086d
 
 
 
 
b36d7d0
 
 
 
 
 
 
1ce17ff
 
b36d7d0
 
812086d
 
 
 
1315b27
83c5f9d
 
 
812086d
 
83c5f9d
1315b27
 
83c5f9d
812086d
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
"""Service pour initialiser le serveur MCP avec FastMCP"""

from mcp.server.fastmcp import FastMCP
from typing import Dict, Any, Optional
import logging

from fastapi import FastAPI

from services.stance_model_manager import stance_model_manager
from services.label_model_manager import kpa_model_manager  
from services.stt_service import speech_to_text
from services.tts_service import text_to_speech
from services.generate_model_manager import generate_model_manager
from services.topic_service import topic_service
from services.chat_service import generate_chat_response

logger = logging.getLogger(__name__)

# Create the FastMCP server instance.
# json_response=True: tool results are serialized as JSON responses.
# stateless_http=False: keep the server stateful so client sessions persist.
mcp_server = FastMCP("NLP-Debater-MCP", json_response=True, stateless_http=False)  # Stateful for sessions

# Tool definitions (unchanged, OK)
@mcp_server.tool()
def detect_stance(topic: str, argument: str) -> Dict[str, Any]:
    """Classify an argument's stance (pro/con) toward a topic.

    Raises:
        ValueError: if the stance model has not been loaded.
    """
    if not stance_model_manager.model_loaded:
        raise ValueError("Modèle stance non chargé")
    prediction = stance_model_manager.predict(topic, argument)
    # Expose exactly the fields the MCP client expects, in a fixed order.
    fields = ("predicted_stance", "confidence", "probability_con", "probability_pro")
    return {field: prediction[field] for field in fields}

@mcp_server.tool()
def match_keypoint_argument(argument: str, key_point: str) -> Dict[str, Any]:
    """Predict whether a key point matches the given argument (KPA task).

    Raises:
        ValueError: if the KPA model has not been loaded.
    """
    if not kpa_model_manager.model_loaded:
        raise ValueError("Modèle KPA non chargé")
    outcome = kpa_model_manager.predict(argument, key_point)
    # Forward only the documented result fields to the caller.
    return {
        key: outcome[key]
        for key in ("prediction", "label", "confidence", "probabilities")
    }

@mcp_server.tool()
def transcribe_audio(audio_path: str) -> str:
    """Transcribe the audio file at *audio_path* to text (speech-to-text)."""
    transcript = speech_to_text(audio_path)
    return transcript

@mcp_server.tool()
def generate_speech(text: str, voice: str = "Aaliyah-PlayAI", format: str = "wav") -> str:
    """Synthesize *text* to speech with the given voice and audio format (TTS)."""
    audio = text_to_speech(text, voice, format)
    return audio

@mcp_server.tool()
def generate_argument(topic: str, position: str) -> Dict[str, Any]:
    """Generate an argument for a given topic and position.

    Raises:
        ValueError: if the generation model has not been loaded.
    """
    if not generate_model_manager.model_loaded:
        raise ValueError("Modèle de génération non chargé")
    generated_text = generate_model_manager.generate(topic=topic, position=position)
    return {"topic": topic, "position": position, "argument": generated_text}

@mcp_server.tool()
def extract_topic(text: str) -> Dict[str, Any]:
    """Extract a topic from the given text/argument."""
    # Lazily initialize the topic service the first time it is needed.
    if not topic_service.initialized:
        topic_service.initialize()
    extracted = topic_service.extract_topic(text)
    return {"text": text, "topic": extracted}

@mcp_server.tool()
def voice_chat(user_input: str, conversation_id: Optional[str] = None) -> Dict[str, Any]:
    """Generate a chatbot response for voice chat (English only).

    The optional conversation_id lets the chat service keep per-conversation
    context; it is echoed back in the result.
    """
    reply = generate_chat_response(
        user_input=user_input,
        conversation_id=conversation_id,
    )
    return {
        "user_input": user_input,
        "conversation_id": conversation_id,
        "response": reply,
    }

@mcp_server.resource("debate://prompt")
def get_debate_prompt() -> str:
    """Return the (French) system prompt used to generate PRO debate arguments."""
    prompt = "Tu es un expert en débat. Génère 3 arguments PRO pour le topic donné. Sois concis et persuasif."
    return prompt

# Health tool (registered before initialization)
@mcp_server.tool()
def health_check() -> Dict[str, Any]:
    """Health check for the MCP server.

    Returns:
        A dict with a static "healthy" status and the list of registered
        tool names. The list is hardcoded to avoid the problems the
        original author hit with list_tools() — presumably its async API;
        TODO confirm and keep it in sync with the @mcp_server.tool()
        registrations above.
    """
    # Fixed: the original wrapped this plain list literal in try/except,
    # but a literal assignment cannot raise — the except branch was dead
    # code, so it has been removed. The returned value is unchanged.
    tool_names = [
        "detect_stance",
        "match_keypoint_argument",
        "transcribe_audio",
        "generate_speech",
        "generate_argument",
        "extract_topic",
        "voice_chat",
        "health_check",
    ]
    return {"status": "healthy", "tools": tool_names}

def init_mcp_server(app: FastAPI) -> None:
    """Build the MCP ASGI app and mount it on the given FastAPI application.

    The server is mounted at /api/v1/mcp; FastAPI manages the lifespan
    automatically for mounted sub-applications.
    """
    # streamable_http_app() returns the ASGI application for mounting
    # (it natively handles /health, /tools, etc.).
    asgi_app = mcp_server.streamable_http_app()

    app.mount("/api/v1/mcp", asgi_app)

    logger.info("✓ Serveur MCP monté sur /api/v1/mcp avec tools NLP/STT/TTS")