"""Main FastAPI application entry point"""
import sys
from pathlib import Path
import os
import subprocess
import logging

# Configure logging first
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Add the app directory to Python path to ensure imports work
app_dir = Path(__file__).parent
if str(app_dir) not in sys.path:
    sys.path.insert(0, str(app_dir))


def install_ffmpeg():
    """Install ffmpeg on Hugging Face Spaces"""
    try:
        # Check if ffmpeg is already installed
        result = subprocess.run(["which", "ffmpeg"], capture_output=True, text=True)
        if result.returncode == 0:
            logger.info("✓ ffmpeg is already installed")
            # Verify it works
            version_result = subprocess.run(["ffmpeg", "-version"], capture_output=True, text=True)
            if version_result.returncode == 0:
                logger.info(f"✓ ffmpeg version: {version_result.stdout.split()[2]}")
                return True

        logger.info("Installing ffmpeg...")
        # Update package list and install ffmpeg
        subprocess.run(["apt-get", "update"], check=True, capture_output=True)
        subprocess.run(["apt-get", "install", "-y", "ffmpeg"], check=True, capture_output=True)

        # Verify installation
        verify_result = subprocess.run(["ffmpeg", "-version"], capture_output=True, text=True)
        if verify_result.returncode == 0:
            logger.info(f"✓ ffmpeg installed successfully: {verify_result.stdout.split()[2]}")
            return True
        else:
            logger.error("✗ ffmpeg installation verification failed")
            return False
    except Exception as e:
        logger.error(f"✗ Failed to install ffmpeg: {e}")
        return False


# Install ffmpeg before importing other modules
logger.info("===== Checking system dependencies =====")
install_ffmpeg()
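
# NOTE (assumption): the apt-get calls above require root inside the container.
# On Hugging Face Spaces that do not run as root, system packages are usually
# declared in a packages.txt file instead (one Debian package per line, e.g.
# "ffmpeg") or installed in the Space's Dockerfile; in that case install_ffmpeg()
# only serves to confirm that the binary is present.
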
from contextlib import asynccontextmanager
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
from config import (
    API_TITLE,
    API_DESCRIPTION,
    API_VERSION,
    HUGGINGFACE_API_KEY,
    HUGGINGFACE_STANCE_MODEL_ID,
    HUGGINGFACE_LABEL_MODEL_ID,
    HOST,
    PORT,
    RELOAD,
    CORS_ORIGINS,
    CORS_CREDENTIALS,
    CORS_METHODS,
    CORS_HEADERS,
)
from services import stance_model_manager, kpa_model_manager
from routes import api_router


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Load models on startup and cleanup on shutdown"""
    # Startup: Load all models
    logger.info("Loading models on startup...")

    # Load stance detection model
    try:
        logger.info(f"Loading stance model from Hugging Face: {HUGGINGFACE_STANCE_MODEL_ID}")
        stance_model_manager.load_model(HUGGINGFACE_STANCE_MODEL_ID, HUGGINGFACE_API_KEY)
        logger.info("✓ Stance model loaded successfully")
    except Exception as e:
        logger.error(f"✗ Failed to load stance model: {str(e)}")
        logger.error("⚠️ Stance detection endpoints will not work!")

    # Load KPA (label) model
    try:
        logger.info(f"Loading KPA model from Hugging Face: {HUGGINGFACE_LABEL_MODEL_ID}")
        kpa_model_manager.load_model(HUGGINGFACE_LABEL_MODEL_ID, HUGGINGFACE_API_KEY)
        logger.info("✓ KPA model loaded successfully")
    except Exception as e:
        logger.error(f"✗ Failed to load KPA model: {str(e)}")
        logger.error("⚠️ KPA/Label prediction endpoints will not work!")

    # Load STT and Chatbot models
    try:
        from services.stt_service import load_stt_model
        from services.chatbot_service import load_chatbot_model

        logger.info("Loading STT and Chatbot models...")
        load_stt_model()
        load_chatbot_model()
        logger.info("✓ STT and Chatbot models loaded successfully")
    except Exception as e:
        logger.error(f"✗ Failed to load STT/Chatbot models: {str(e)}")
        logger.error("⚠️ Audio endpoints may not work properly!")

    logger.info("✓ API startup complete")
    logger.info("📚 API Documentation: https://nlp-debater-project-fastapi-backend-models.hf.space/docs")

    yield  # Application runs here

    # Shutdown: Cleanup (if needed)
    logger.info("Shutting down API...")
    # Currently no cleanup needed, but you can add it here if necessary


# Create FastAPI application
app = FastAPI(
    title=API_TITLE,
    description=API_DESCRIPTION,
    version=API_VERSION,
    docs_url="/docs",
    redoc_url="/redoc",
    lifespan=lifespan,
)

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=CORS_ORIGINS,
    allow_credentials=CORS_CREDENTIALS,
    allow_methods=CORS_METHODS,
    allow_headers=CORS_HEADERS,
)

# Include API routes
app.include_router(api_router)


# Health check endpoint
@app.get("/")
async def root():
    """Health check endpoint"""
    return {
        "message": "NLP Debater API is running!",
        "status": "healthy",
        "docs": "/docs"
    }


@app.get("/health")
async def health_check():
    """Health check endpoint"""
    return {
        "status": "healthy",
        "message": "API is running successfully"
    }


@app.get("/check-ffmpeg")
async def check_ffmpeg():
    """Check if ffmpeg is available"""
    try:
        result = subprocess.run(["ffmpeg", "-version"], capture_output=True, text=True)
        if result.returncode == 0:
            return {
                "status": "available",
                "version": result.stdout.split('\n')[0],
                "message": "ffmpeg is ready for audio processing"
            }
        else:
            return {"status": "error", "error": result.stderr}
    except FileNotFoundError:
        return {"status": "ffmpeg not found"}


if __name__ == "__main__":
    # Run the API server
    # Access at: http://localhost:8000
    # API docs at: http://localhost:8000/docs
    logger.info(f"🚀 Starting server on {HOST}:{PORT}")
    logger.info(f"📚 Documentation available at: http://{HOST}:{PORT}/docs")

    uvicorn.run(
        "main:app",
        host=HOST,
        port=PORT,
        reload=RELOAD,
        log_level="info"
    )
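
# Alternatively (assuming this file is named main.py), the app can be served
# with the uvicorn CLI, e.g.:
#   uvicorn main:app --host 0.0.0.0 --port 8000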