# services/chatbot_service.py (CONFIRMED WORKING VERSION)
import logging
import random

from transformers import pipeline

logger = logging.getLogger(__name__)

# Global chatbot pipeline, loaded lazily on first use
chatbot_pipeline = None
def load_chatbot_model():
    """Load the free DialoGPT chatbot model into the global pipeline."""
    global chatbot_pipeline
    try:
        logger.info("Loading DialoGPT chatbot model...")
        chatbot_pipeline = pipeline(
            "text-generation",
            model="microsoft/DialoGPT-small",
            device="cpu",
        )
        logger.info("✓ Chatbot model loaded successfully")
    except Exception as e:
        logger.error(f"✗ Failed to load chatbot model: {str(e)}")
        chatbot_pipeline = None
async def get_chatbot_response(user_text: str, user_id: str = "default") -> str:
    """Generate a chatbot response for the given user text using the free model."""
    global chatbot_pipeline
    try:
        # Lazily load the model on first call
        if chatbot_pipeline is None:
            load_chatbot_model()
        if chatbot_pipeline is None:
            return get_fallback_response(user_text)

        logger.info(f"Generating chatbot response for: '{user_text}'")

        # Prepare the prompt
        prompt = f"User: {user_text}\nAssistant:"

        # Generate a response
        response = chatbot_pipeline(
            prompt,
            max_new_tokens=100,
            do_sample=True,
            temperature=0.7,
            top_p=0.9,
            pad_token_id=chatbot_pipeline.tokenizer.eos_token_id,
        )

        # The pipeline returns the prompt plus the generated continuation
        generated_text = response[0]["generated_text"]

        # Keep only the assistant's part of the generated text
        if "Assistant:" in generated_text:
            bot_response = generated_text.split("Assistant:")[-1].strip()
        else:
            bot_response = generated_text.replace(prompt, "").strip()

        # Fall back if the model produced an empty response
        if not bot_response:
            bot_response = get_fallback_response(user_text)

        logger.info(f"✓ Response generated: '{bot_response}'")
        return bot_response

    except Exception as e:
        logger.error(f"✗ Chatbot response failed: {str(e)}")
        return get_fallback_response(user_text)
def get_fallback_response(user_text: str) -> str:
    """Provide a canned fallback response when the model is unavailable."""
    fallback_responses = [
        f"I understand you said: '{user_text}'. How can I help you with that?",
        f"That's interesting! Regarding '{user_text}', what would you like to know?",
        f"Thanks for your message about '{user_text}'. How can I assist you further?",
    ]
    return random.choice(fallback_responses)
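

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative, not part of the original service): runs
# a single request through the coroutine above. The prompt text is an
# assumption; in the backend this module is presumably awaited from a FastAPI
# route, and that wiring is not shown here.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import asyncio

    # Warm the model up front, then exercise the coroutine once.
    load_chatbot_model()
    reply = asyncio.run(get_chatbot_response("Hello, how are you today?"))
    print(reply)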