# inplace-demo-streamlit/backup/streamlit_app.py
"""A minimal, multi-turn chatbot with an Anthropic-inspired theme, powered by GPT-OSS-20B."""
import os

import streamlit as st
from huggingface_hub import InferenceClient
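# Assumed dependencies (e.g. listed in requirements.txt): streamlit, huggingface_hub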
# Page header
st.title("💬 GPT-OSS-20B Chatbot")
st.markdown("*A clean, multi-turn chatbot powered by GPT-OSS-20B*")
# Initialize Hugging Face client
hf_token = os.getenv("HF_TOKEN")
# if not hf_token:
#     st.error("Please set your HF_TOKEN environment variable.")
#     st.stop()
client = InferenceClient(
    model="openai/gpt-oss-20b",
    token=hf_token,
)
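# Optional hardening (sketch, not part of the original app): on Streamlit Community Cloud
# the token is often read from st.secrets instead of the environment, e.g.
#     hf_token = os.getenv("HF_TOKEN") or st.secrets.get("HF_TOKEN")
# and recent huggingface_hub releases also let you pin an inference provider, e.g.
#     client = InferenceClient(model="openai/gpt-oss-20b", provider="auto", token=hf_token)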
# Initialize chat history
if "messages" not in st.session_state:
    st.session_state.messages = []
# Sidebar configuration
with st.sidebar:
    st.header("⚙️ Configuration")

    # Model parameters
    system_prompt = st.text_area(
        "System message",
        value="You are a helpful assistant.",
        help="Define the AI assistant's behavior and personality",
    )

    st.subheader("Generation Parameters")
    max_tokens = st.slider("Max new tokens", 1, 2048, 512)
    temperature = st.slider("Temperature", 0.1, 2.0, 0.7)
    top_p = st.slider("Top-p", 0.1, 1.0, 0.95)

    # Chat management
    st.divider()
    if st.button("🗑️ Clear Chat History", use_container_width=True):
        st.session_state.messages = []
        st.rerun()

    # Display chat stats
    if st.session_state.messages:
        st.caption(f"Messages in conversation: {len(st.session_state.messages)}")
# Main chat interface
st.subheader("Chat")

# Display chat history
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
# Chat input
if prompt := st.chat_input("Type your message here..."):
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Display user message
    with st.chat_message("user"):
        st.markdown(prompt)

    # Generate and display assistant response
    with st.chat_message("assistant"):
        with st.spinner("Thinking..."):
            try:
                # Prepend the system prompt to the conversation for the API call
                messages_for_api = [{"role": "system", "content": system_prompt}]
                messages_for_api.extend(st.session_state.messages)

                # Call the Hugging Face chat-completion API
                response = client.chat_completion(
                    messages=messages_for_api,
                    max_tokens=max_tokens,
                    temperature=temperature,
                    top_p=top_p,
                    stream=False,
                )

                # Extract and display the assistant's reply
                reply = response.choices[0].message.content
                st.markdown(reply)

                # Add assistant response to chat history
                st.session_state.messages.append({"role": "assistant", "content": reply})
            except Exception as e:
                st.error(f"Error generating response: {e}")
                # Drop the just-appended user message so a failed turn is not kept in history
                st.session_state.messages.pop()
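# A streaming variant is also possible (sketch, not part of the original app): pass
# stream=True to chat_completion and render the chunks with st.write_stream, e.g.
#     stream = client.chat_completion(messages=messages_for_api, max_tokens=max_tokens,
#                                     temperature=temperature, top_p=top_p, stream=True)
#     reply = st.write_stream(chunk.choices[0].delta.content or "" for chunk in stream)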
# Footer
st.divider()
st.caption("Built with Streamlit • Powered by GPT-OSS-20B • Styled with Anthropic theme")
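# The "Anthropic theme" mentioned above is assumed to come from a Streamlit theme config,
# e.g. a .streamlit/config.toml along these lines (illustrative values, not the repo's actual ones):
#     [theme]
#     primaryColor = "#CC785C"
#     backgroundColor = "#FAF9F5"
#     secondaryBackgroundColor = "#F0EEE6"
#     textColor = "#191919"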