Skip to content

processing

Class info

Classes

Name Children Inherits
ChatMessage
llmling_agent.messaging.messages
Common message format for all UI types.

    🛈 DocStrings

    Helper functions for common message processing logic.

    finalize_message async

    finalize_message(
        message: ChatMessage[Any],
        previous_message: ChatMessage[Any] | None,
        node: MessageNode[Any, Any],
        connections: ConnectionManager,
        original_message: ChatMessage[Any] | None,
        wait_for_connections: bool | None = None,
    ) -> ChatMessage[Any]
    

    Handle message finalization and routing.

    Parameters:

    Name Type Description Default
    message ChatMessage[Any]

    The response message to finalize

    required
    previous_message ChatMessage[Any] | None

    The original user message (if any)

    required
    node MessageNode[Any, Any]

    The message node that produced the message

    required
    connections ConnectionManager

    Connection manager for routing

    required
    original_message ChatMessage[Any] | None

    The original ChatMessage if forwarded, None otherwise

    required
    wait_for_connections bool | None

    Whether to wait for connected nodes

    None

    Returns:

    Type Description
    ChatMessage[Any]

    The finalized message

    Source code in src/llmling_agent/messaging/processing.py
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    async def finalize_message(
        message: ChatMessage[Any],
        previous_message: ChatMessage[Any] | None,
        node: MessageNode[Any, Any],
        connections: ConnectionManager,
        original_message: ChatMessage[Any] | None,
        wait_for_connections: bool | None = None,
    ) -> ChatMessage[Any]:
        """Finalize a response message and route it to connected nodes.
    
        Args:
            message: The response message to finalize
            previous_message: The original user message (if any); not read by this
                function's body, kept for interface compatibility
            node: The message node that produced the message
            connections: Connection manager for routing
            original_message: The original ChatMessage if forwarded, None otherwise
            wait_for_connections: Whether to wait for connected nodes
    
        Returns:
            The finalized message
        """
        # When the input arrived as a forwarded message, extend the response's
        # forwarding chain to include it.
        result = message.forwarded(original_message) if original_message else message
    
        # Announce the finalized message on the node's outbound signal.
        node.message_sent.emit(result)
    
        # Persist the message through the node's logging hook.
        await node.log_message(result)
    
        # Fan the message out to every connected node, optionally blocking
        # until the connected nodes have processed it.
        await connections.route_message(result, wait=wait_for_connections)
    
        return result
    

    prepare_prompts async

    prepare_prompts(
        *prompt: PromptCompatible | ChatMessage[Any],
    ) -> tuple[ChatMessage[Any], list[BaseContent | str], ChatMessage[Any] | None]
    

    Prepare prompts for processing.

    Extracted from MessageNode.pre_run logic.

    Parameters:

    Name Type Description Default
    *prompt PromptCompatible | ChatMessage[Any]

    The prompt(s) to prepare.

    ()

    Returns:

    Type Description
    tuple[ChatMessage[Any], list[BaseContent | str], ChatMessage[Any] | None]

    A tuple of:

    - Either the incoming message, or a constructed incoming message based on the prompt(s).
    - A list of prompts to be sent to the model.
    - The original ChatMessage if forwarded, None otherwise.

    Source code in src/llmling_agent/messaging/processing.py
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    async def prepare_prompts(
        *prompt: PromptCompatible | ChatMessage[Any],
    ) -> tuple[ChatMessage[Any], list[BaseContent | str], ChatMessage[Any] | None]:
        """Normalize incoming prompt(s) into a request message plus model prompts.
    
        Extracted from MessageNode.pre_run logic.
    
        Args:
            *prompt: The prompt(s) to prepare.
    
        Returns:
            A tuple of:
                - Either the incoming message, or a constructed incoming message
                  based on the prompt(s).
                - A list of prompts to be sent to the model.
                - The original ChatMessage if forwarded, None otherwise
        """
        # A single ChatMessage argument means this call is a forwarded message
        # coming from another node rather than raw user prompt material.
        is_forwarded = len(prompt) == 1 and isinstance(prompt[0], ChatMessage)
        if is_forwarded:
            incoming = prompt[0]
            # Record the hop in the message's chain, then turn it into a request.
            # NOTE(review): forwarding a message to itself appears deliberate
            # (it marks the message as having come through its source) — and
            # presumably to_request() resets cost info to avoid double-counting;
            # confirm both against ChatMessage.
            request = incoming.forwarded(incoming).to_request()
            model_prompts = await convert_prompts([request.content])
            return request, model_prompts, incoming
    
        model_prompts = await convert_prompts(prompt)
        # Translate any non-string content into pydantic-ai parts before
        # constructing the user message.
        parts = [p if isinstance(p, str) else p.to_pydantic_ai() for p in model_prompts]
        request = ChatMessage.user_prompt(message=parts)
        return request, model_prompts, None