Commit · 55ff70d
Parent(s): 28f7cda
v3
app.py CHANGED
@@ -423,7 +423,7 @@ def convert_to_messages_format(chat_history):
 
     return messages
 
-async def stream_agent_response(question: str, chat_history: List) -> List[Dict]:
+async def stream_agent_response(question: str, chat_history: List[Tuple[str, str]]) -> str:
     """Procesa la pregunta del usuario y devuelve la respuesta del agente con memoria de conversación."""
     global agent  # Make sure we can modify the agent's memory
 
@@ -432,13 +432,11 @@ async def stream_agent_response(question: str, chat_history: List) -> List[Dict]
     messages = []
 
     # Add previous chat history in the correct format for the agent
-    for msg in chat_history:
-        if msg["role"] == "user":
-            messages.append(HumanMessage(content=msg["content"]))
-        elif msg["role"] == "assistant":
-            messages.append(AIMessage(content=msg["content"]))
+    for user_msg, assistant_msg in chat_history:
+        if user_msg:
+            messages.append(HumanMessage(content=user_msg))
+        if assistant_msg:
+            messages.append(AIMessage(content=assistant_msg))
 
     # Add current user's question
     user_message = HumanMessage(content=question)
@@ -797,7 +795,7 @@ def create_application():
     # Create the UI components
     demo, chatbot, question_input, submit_button, streaming_output_display = create_ui()
 
-    def user_message(user_input: str, chat_history: List[Dict]) -> Tuple[str, List[Dict]]:
+    def user_message(user_input: str, chat_history: List[Tuple[str, str]]) -> Tuple[str, List[Tuple[str, str]]]:
         """Add user message to chat history and clear input."""
         if not user_input.strip():
             return "", chat_history
@@ -808,33 +806,30 @@ def create_application():
         if chat_history is None:
             chat_history = []
 
-        # Add user message
-        chat_history.append({"role": "user", "content": user_input})
-
-        # Add empty assistant response
-        chat_history.append({"role": "assistant", "content": ""})
+        # Add user message and empty assistant response
+        chat_history.append((user_input, None))
 
         # Clear the input
         return "", chat_history
 
-    async def bot_response(chat_history: List[Dict]) -> List[Dict]:
+    async def bot_response(chat_history: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
         """Get bot response and update chat history."""
-        if not chat_history:
+        if not chat_history:
             return chat_history
-
+
+        # Get the last user message (first element of the last tuple if it exists)
+        if not chat_history[-1][0] or chat_history[-1][1] is not None:
+            return chat_history
+
         try:
-            if len(chat_history) < 2:
-                return chat_history
-
-            question = chat_history[-2]["content"]
+            question = chat_history[-1][0]
             logger.info(f"Processing question: {question}")
 
             # Call the agent and get the response
-            assistant_message = await stream_agent_response(question, chat_history[:-2])
+            assistant_message = await stream_agent_response(question, chat_history[:-1])
 
             # Update the assistant's message in the chat history
-            chat_history[-1]["content"] = assistant_message
+            chat_history[-1] = (question, assistant_message)
 
             logger.info("Response generation complete")
             return chat_history
@@ -842,7 +837,8 @@ def create_application():
        except Exception as e:
            error_msg = f"## ❌ Error\n\nError al procesar la solicitud:\n\n```\n{str(e)}\n```"
            logger.error(error_msg, exc_info=True)
-           chat_history[-1]["content"] = error_msg
+           if chat_history and len(chat_history[-1]) == 2 and chat_history[-1][1] is None:
+               chat_history[-1] = (chat_history[-1][0], error_msg)
            return chat_history
 
     # Event handlers
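For reference, here is a minimal, self-contained sketch of the tuple-based history flow the new handlers implement. `History`, `fake_agent`, and the `main()` driver are illustrative stand-ins and are not part of app.py; only the handler bodies mirror the logic added in this commit.

import asyncio
from typing import List, Optional, Tuple

# One chat turn: (user text, assistant text); the assistant slot is None until answered.
History = List[Tuple[str, Optional[str]]]

async def fake_agent(question: str, history: History) -> str:
    # Stand-in for stream_agent_response(); the real call goes to the LangChain agent.
    return f"Echo: {question} ({len(history)} previous turns)"

def user_message(user_input: str, chat_history: History) -> Tuple[str, History]:
    """Append the user's turn with an empty assistant slot and clear the input box."""
    if not user_input.strip():
        return "", chat_history
    chat_history = chat_history or []
    chat_history.append((user_input, None))
    return "", chat_history

async def bot_response(chat_history: History) -> History:
    """Fill in the assistant half of the last tuple, leaving earlier turns untouched."""
    if not chat_history or chat_history[-1][1] is not None:
        return chat_history
    question = chat_history[-1][0]
    answer = await fake_agent(question, chat_history[:-1])
    chat_history[-1] = (question, answer)
    return chat_history

async def main() -> None:
    history: History = []
    _, history = user_message("Hola", history)
    history = await bot_response(history)
    print(history)  # [('Hola', 'Echo: Hola (0 previous turns)')]

asyncio.run(main())

In the Gradio app itself these two handlers would be chained under the `# Event handlers` section (for example via `submit_button.click(user_message, ...).then(bot_response, ...)`), but that wiring is outside this diff.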