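"""Home Dental Expert: a Streamlit chat app that streams a time-of-day welcome message
and follow-up answers from Azure OpenAI (gpt-4o) via LangChain, using a dental system prompt."""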
import streamlit as st
from datetime import datetime
from typing import Any

from langchain_openai import AzureChatOpenAI
from langchain.schema import AIMessage, BaseMessage, HumanMessage, SystemMessage
from langchain.callbacks.base import BaseCallbackHandler

from prompts import dental_system_message, welcome_message, update

# --- Azure OpenAI Config ---
OPENAI_API_KEY = "86b631a9c0294e9698e327c59ff5ac2c"
OPENAI_API_TYPE = "azure"
OPENAI_API_BASE = "https://davidfearn-gpt4.openai.azure.com"
OPENAI_API_VERSION = "2024-08-01-preview"
OPENAI_MODEL = "gpt-4o"
# --- Page Config ---
st.set_page_config(page_title="Home Dental Expert", page_icon="🦷")
st.subheader("🦷 Home Dental Expert")
haleon_system_message = SystemMessage(content=dental_system_message)
# --- Streaming Output Handler ---
class StreamlitCallbackHandler(BaseCallbackHandler):
    """Streams tokens into a Streamlit container as they arrive."""

    def __init__(self, container):
        self.container = container
        self.text = ""

    def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
        self.text += token
        self.container.markdown(self.text + "▌")


def get_llm_response_with_context(messages: list[BaseMessage], stream_container):
    """Send the system prompt plus chat history to Azure OpenAI, streaming tokens into stream_container."""
    llm = AzureChatOpenAI(
        openai_api_version=OPENAI_API_VERSION,
        openai_api_key=OPENAI_API_KEY,
        azure_endpoint=OPENAI_API_BASE,
        openai_api_type=OPENAI_API_TYPE,
        deployment_name=OPENAI_MODEL,
        temperature=0.7,
        streaming=True,
        callbacks=[StreamlitCallbackHandler(stream_container)],
    )
    full_convo = [haleon_system_message] + messages
    assert all(isinstance(m, BaseMessage) for m in full_convo), "One or more messages is not a BaseMessage type"
    return llm.invoke(full_convo)
# --- Session State Init ---
if "messages" not in st.session_state:
    st.session_state.messages = []
if "greeted" not in st.session_state:
    st.session_state.greeted = False
if "welcome_response" not in st.session_state:
    st.session_state.welcome_response = None
# --- Display welcome message at top if it exists ---
if st.session_state.welcome_response:
    with st.chat_message("assistant"):
        st.markdown(st.session_state.welcome_response)
def get_time_of_day():
    """Return 'morning' before 12pm, 'afternoon' before 5pm, and 'evening' otherwise."""
    now = datetime.now().hour
    if now < 12:
        return "morning"
    elif now < 17:
        return "afternoon"
    else:
        return "evening"
# --- Stream the welcome message only once ---
if not st.session_state.greeted:
    with st.chat_message("assistant"):
        stream_container = st.empty()
        time_of_day = get_time_of_day()
        latest_update = update

        llm = AzureChatOpenAI(
            openai_api_version=OPENAI_API_VERSION,
            openai_api_key=OPENAI_API_KEY,
            azure_endpoint=OPENAI_API_BASE,
            openai_api_type=OPENAI_API_TYPE,
            deployment_name=OPENAI_MODEL,
            temperature=0.7,
            streaming=True,
            callbacks=[StreamlitCallbackHandler(stream_container)],
        )

        messages = [
            haleon_system_message,
            HumanMessage(content=f"Give a warm, friendly greeting specific to the time of day being the {time_of_day}, as if the user just opened the daily dental journaling app. They have used this app before, so you can assume they are familiar with it. This app is designed to be used specifically before or after they brush their teeth. Also comment on the latest update, as this will be the focus for today's discussion: {latest_update}. Ask a follow-up question. DON'T USE #Hash #Tags. Feel free to use emojis."),
        ]

        response = llm.invoke(messages)

        # Save the greeting so it can be shown at the top on later reruns
        st.session_state.welcome_response = response.content
        st.session_state.greeted = True
# --- Chat History Display ---
for msg in st.session_state.messages:
    with st.chat_message("user" if isinstance(msg, HumanMessage) else "assistant"):
        st.markdown(msg.content)
# --- Chat Input ---
user_input = st.chat_input("Type your message...")

# --- On User Message ---
if user_input:
    st.chat_message("user").markdown(user_input)
    st.session_state.messages.append(HumanMessage(content=user_input))

    with st.chat_message("assistant"):
        stream_container = st.empty()
        response = get_llm_response_with_context(st.session_state.messages, stream_container)
        st.session_state.messages.append(AIMessage(content=response.content))
        stream_container.markdown(response.content)
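# To run this app locally (assuming Streamlit and the LangChain/OpenAI packages are installed):
#   streamlit run app.py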