diff --git a/src/backend/base/langflow/base/models/model.py b/src/backend/base/langflow/base/models/model.py
index 35853b2c2852..6a2aeda1d534 100644
--- a/src/backend/base/langflow/base/models/model.py
+++ b/src/backend/base/langflow/base/models/model.py
@@ -143,20 +143,24 @@ def get_chat_result(
     messages: list[Union[BaseMessage]] = []
     if not input_value and not system_message:
         raise ValueError("The message you want to send to the model is empty.")
-    if system_message:
-        messages.append(SystemMessage(content=system_message))
+    system_message_added = False
     if input_value:
         if isinstance(input_value, Message):
             with warnings.catch_warnings():
                 warnings.simplefilter("ignore")
                 if "prompt" in input_value:
                     prompt = input_value.load_lc_prompt()
+                    if system_message:
+                        prompt.messages = [SystemMessage(content=system_message)] + prompt.messages
+                        system_message_added = True
                     runnable = prompt | runnable
                 else:
                     messages.append(input_value.to_lc_message())
         else:
             messages.append(HumanMessage(content=input_value))
+    if system_message and not system_message_added:
+        messages.append(SystemMessage(content=system_message))
     inputs: Union[list, dict] = messages or {}
     try:
         runnable = runnable.with_config(  # type: ignore