Dave Aitel
09/01/2024, 6:40 PMelse:
# all streaming responses are langchain Pydantic v1 models
# which we don't convert to AIMessage/AIMessageChunks for sanity.
# they are converted in handle_delta_events and when the stream is finished.
# initialize the list of deltas with an empty delta
# to facilitate comparison with the previous delta
deltas = [langchain_core.messages.AIMessageChunk(content="")]
def ensure_valid_message_order(messages):
    """Normalize a chat message list into a valid ordering.

    Guarantees on the returned list:
      * Exactly one SystemMessage at the front (a default "helpful
        assistant" one is inserted if none is present).
      * At least one user-role message appears after the SystemMessage.
      * The sequence does not end with an AIMessage (a "Please continue."
        HumanMessage is appended if it would).

    Args:
        messages: iterable of langchain_core message objects (or objects
            exposing a ``role`` attribute for raw user-role messages).

    Returns:
        A new list; the input list is not mutated in place.
    """
    from langchain_core.messages import HumanMessage, SystemMessage, AIMessage

    # First SystemMessage found (if any) gets promoted to the front.
    system_message = next(
        (msg for msg in messages if isinstance(msg, SystemMessage)), None
    )

    # A "user" message is either a HumanMessage or anything carrying an
    # explicit role == 'user' attribute (e.g. non-langchain message objects).
    has_user_message = any(
        isinstance(msg, HumanMessage)
        or (hasattr(msg, 'role') and msg.role == 'user')
        for msg in messages
    )

    new_messages = []
    if system_message is not None:
        new_messages.append(system_message)
        # BUG FIX: filter by identity, not equality. Pydantic messages with
        # identical content compare equal, so `msg != system_message` also
        # dropped any duplicate system messages from the remainder.
        messages = [msg for msg in messages if msg is not system_message]
    else:
        # No SystemMessage present: supply a default one.
        new_messages.append(SystemMessage(content="You are a helpful assistant."))

    # Models require at least one user turn; inject a placeholder if missing.
    if not has_user_message:
        new_messages.append(HumanMessage(content="Please continue."))

    new_messages.extend(messages)

    # BUG FIX: the original checked new_messages[-1] BEFORE extending with the
    # remaining messages, when the list could only hold System/Human entries,
    # making the branch dead code. The check belongs here, after the extend:
    # a conversation must not end on an AIMessage.
    if isinstance(new_messages[-1], AIMessage):
        new_messages.append(HumanMessage(content="Please continue."))

    return new_messages
# Normalize the conversation (system message first, at least one user turn)
# before handing it to the model.
input_messages = ensure_valid_message_order(input_messages)