File size: 3,546 Bytes
03977cf 5fb4696 674469e 5fb4696 674469e 2da4544 674469e 5fb4696 03977cf 2da4544 03977cf 2da4544 5fb4696 9db766f 674469e 2da4544 9db766f 674469e 2da4544 674469e 2da4544 674469e 2da4544 674469e 2da4544 674469e 2da4544 674469e 2da4544 674469e 2da4544 9db766f 2da4544 9db766f 2da4544 9db766f 2da4544 9db766f 2da4544 56dc677 2da4544 56dc677 2da4544 56dc677 2da4544 674469e 2da4544 674469e 2da4544 674469e 2da4544 674469e 2da4544 674469e 2da4544 674469e 5fb4696 2da4544 674469e 2da4544 5fb4696 9db766f 2da4544 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 |
# --- Bootstrap: logging + module path (must run before project imports) ---
import sys
from pathlib import Path
import logging
from contextlib import asynccontextmanager
# Configure root logging first so loggers created by later project imports
# inherit this level and format.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger(__name__)
# Add app directory so sibling modules (config, routes, services) import
# cleanly regardless of the current working directory.
app_dir = Path(__file__).parent
sys.path.insert(0, str(app_dir))
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
from config import (
API_TITLE, API_DESCRIPTION, API_VERSION,
HUGGINGFACE_API_KEY, HUGGINGFACE_STANCE_MODEL_ID, HUGGINGFACE_LABEL_MODEL_ID,
HOST, PORT, RELOAD,
CORS_ORIGINS, CORS_METHODS, CORS_HEADERS, CORS_CREDENTIALS,
PRELOAD_MODELS_ON_STARTUP, LOAD_STANCE_MODEL, LOAD_KPA_MODEL
)
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan handler.

    On startup, optionally preload the HuggingFace stance and KPA models
    (each guarded by its own config flag and try/except so one failure does
    not block the API). On shutdown, log and exit.
    """
    banner = "=" * 60
    logger.info(banner)
    logger.info("π API STARTUP - Loading HuggingFace models...")
    logger.info(banner)

    if PRELOAD_MODELS_ON_STARTUP:
        # Stance-detection model (best-effort: log and continue on failure).
        if LOAD_STANCE_MODEL:
            try:
                from services.stance_model_manager import load_model as load_stance
                load_stance(HUGGINGFACE_STANCE_MODEL_ID, HUGGINGFACE_API_KEY)
                logger.info("β Stance model loaded")
            except Exception as e:
                logger.error(f"β Failed loading stance model: {e}")
        # Key-point-analysis model (best-effort as well).
        if LOAD_KPA_MODEL:
            try:
                from services.label_model_manager import load_model as load_kpa
                load_kpa(HUGGINGFACE_LABEL_MODEL_ID, HUGGINGFACE_API_KEY)
                logger.info("β KPA model loaded")
            except Exception as e:
                logger.error(f"β Failed loading KPA model: {e}")

    logger.info("β Startup complete. API ready.")
    yield  # application serves requests here
    logger.info("π Shutting down...")
# ------------- FASTAPI APP -------------
# Application instance; metadata comes from config, and the lifespan handler
# above preloads models on startup.
app = FastAPI(
    title=API_TITLE,
    description=API_DESCRIPTION,
    version=API_VERSION,
    lifespan=lifespan
)
# ------------- CORS -------------
# Cross-origin policy is driven entirely by config values.
app.add_middleware(
    CORSMiddleware,
    allow_origins=CORS_ORIGINS,
    allow_credentials=CORS_CREDENTIALS,
    allow_methods=CORS_METHODS,
    allow_headers=CORS_HEADERS,
)
# ============ ROUTES ============
# Each router is imported inside try/except so a broken optional feature
# (e.g. missing dependency or credentials) degrades gracefully instead of
# preventing the whole API from starting.

# STT route (Groq Whisper)
try:
    from routes.stt_routes import router as stt_router
    app.include_router(stt_router, prefix="/api/v1/stt", tags=["Speech To Text"])
    logger.info("β STT route loaded (Groq Whisper)")
except Exception as e:
    logger.warning(f"β Failed loading STT route: {e}")

# TTS route (Groq PlayAI TTS)
try:
    from routes.tts_routes import router as tts_router
    app.include_router(tts_router, prefix="/api/v1/tts", tags=["Text To Speech"])
    logger.info("β TTS route loaded (Groq PlayAI TTS)")
except Exception as e:
    logger.warning(f"β Failed loading TTS route: {e}")

# Main NLP system routes
try:
    from routes import api_router
    app.include_router(api_router)
    logger.info("β Main API routes loaded")
except Exception as e:
    logger.warning(f"β Failed loading main API routes: {e}")
# ------------------ BASIC ROUTES ------------------
@app.get("/health")
async def health():
    """Liveness probe: report that the service is up."""
    payload = {"status": "healthy", "service": "NLP Debater + Groq Voice"}
    return payload
@app.get("/")
async def root():
return {
"message": "NLP Debater API with Groq Voice Support",
"docs": "/docs",
"voice_stt": "/api/v1/stt",
"voice_tts": "/api/v1/tts"
}
# Script entry point: run the dev server directly. HOST/PORT/RELOAD come from
# config; RELOAD enables uvicorn's auto-reload for development.
if __name__ == "__main__":
    uvicorn.run("main:app", host=HOST, port=PORT, reload=RELOAD)
|