Skip to content

conversation

Class info

Classes

Name Children Inherits
MessageHistory
llmling_agent.agent.conversation
Manages conversation state and system prompts.
    SessionQuery
    llmling_agent_config.session
    Query configuration for session recovery.
      StorageManager
      llmling_agent.storage.manager
      Manages multiple storage providers.

        🛈 DocStrings

        Conversation management for LLMling agent.

        MessageHistory

        Manages conversation state and system prompts.

        Source code in src/llmling_agent/agent/conversation.py
         39
         40
         41
         42
         43
         44
         45
         46
         47
         48
         49
         50
         51
         52
         53
         54
         55
         56
         57
         58
         59
         60
         61
         62
         63
         64
         65
         66
         67
         68
         69
         70
         71
         72
         73
         74
         75
         76
         77
         78
         79
         80
         81
         82
         83
         84
         85
         86
         87
         88
         89
         90
         91
         92
         93
         94
         95
         96
         97
         98
         99
        100
        101
        102
        103
        104
        105
        106
        107
        108
        109
        110
        111
        112
        113
        114
        115
        116
        117
        118
        119
        120
        121
        122
        123
        124
        125
        126
        127
        128
        129
        130
        131
        132
        133
        134
        135
        136
        137
        138
        139
        140
        141
        142
        143
        144
        145
        146
        147
        148
        149
        150
        151
        152
        153
        154
        155
        156
        157
        158
        159
        160
        161
        162
        163
        164
        165
        166
        167
        168
        169
        170
        171
        172
        173
        174
        175
        176
        177
        178
        179
        180
        181
        182
        183
        184
        185
        186
        187
        188
        189
        190
        191
        192
        193
        194
        195
        196
        197
        198
        199
        200
        201
        202
        203
        204
        205
        206
        207
        208
        209
        210
        211
        212
        213
        214
        215
        216
        217
        218
        219
        220
        221
        222
        223
        224
        225
        226
        227
        228
        229
        230
        231
        232
        233
        234
        235
        236
        237
        238
        239
        240
        241
        242
        243
        244
        245
        246
        247
        248
        249
        250
        251
        252
        253
        254
        255
        256
        257
        258
        259
        260
        261
        262
        263
        264
        265
        266
        267
        268
        269
        270
        271
        272
        273
        274
        275
        276
        277
        278
        279
        280
        281
        282
        283
        284
        285
        286
        287
        288
        289
        290
        291
        292
        293
        294
        295
        296
        297
        298
        299
        300
        301
        302
        303
        304
        305
        306
        307
        308
        309
        310
        311
        312
        313
        314
        315
        316
        317
        318
        319
        320
        321
        322
        323
        324
        325
        326
        327
        328
        329
        330
        331
        332
        333
        334
        335
        336
        337
        338
        339
        340
        341
        342
        343
        344
        345
        346
        347
        348
        349
        350
        351
        352
        353
        354
        355
        356
        357
        358
        359
        360
        361
        362
        363
        364
        365
        366
        367
        368
        369
        370
        371
        372
        373
        374
        375
        376
        377
        378
        379
        380
        381
        382
        383
        384
        385
        386
        387
        388
        389
        390
        391
        392
        393
        394
        395
        396
        397
        398
        399
        400
        401
        402
        403
        404
        405
        406
        407
        408
        409
        410
        411
        412
        413
        414
        415
        416
        417
        418
        419
        420
        421
        422
        423
        424
        425
        426
        427
        428
        429
        430
        431
        432
        433
        434
        435
        436
        437
        438
        439
        440
        441
        442
        443
        444
        445
        446
        447
        448
        449
        450
        451
        452
        453
        454
        455
        456
        457
        458
        459
        460
        461
        462
        463
        464
        465
        466
        467
        468
        469
        470
        471
        472
        473
        474
        475
        class MessageHistory:
            """Manages conversation state and system prompts."""
        
            @dataclass(frozen=True)
            class HistoryCleared:
                """Emitted when chat history is cleared."""
        
                session_id: str
                timestamp: datetime = field(default_factory=get_now)
        
            history_cleared = Signal(HistoryCleared)
        
            def __init__(
                self,
                storage: StorageManager | None = None,
                converter: ConversionManager | None = None,
                *,
                messages: list[ChatMessage[Any]] | None = None,
                session_config: MemoryConfig | None = None,
                resources: Sequence[PromptType | str] = (),
            ) -> None:
                """Initialize conversation manager.
        
                Args:
                    storage: Storage manager for persistence
                    converter: Content converter for file processing
                    messages: Optional list of initial messages
                    session_config: Optional MemoryConfig
                    resources: Optional paths to load as context
                """
                from llmling_agent.messaging import ChatMessageList
                from llmling_agent.prompts.conversion_manager import ConversionManager
                from llmling_agent_config.storage import MemoryStorageConfig, StorageConfig
        
                self._storage = storage or StorageManager(
                    config=StorageConfig(providers=[MemoryStorageConfig()])
                )
                self._converter = converter or ConversionManager([])
                self.chat_messages = ChatMessageList()
                if messages:
                    self.chat_messages.extend(messages)
                self._last_messages: list[ChatMessage[Any]] = []
                self._pending_messages: deque[ChatMessage[Any]] = deque()
                self._config = session_config
                self._resources = list(resources)  # Store for async loading
                # Generate new ID if none provided
                self.id = str(uuid4())
        
                if session_config and session_config.session:
                    self._current_history = self.storage.filter_messages.sync(session_config.session)
                    if session_config.session.name:
                        self.id = session_config.session.name
        
                # Note: max_messages and max_tokens will be handled in add_message/get_history
                # to maintain the rolling window during conversation
        
            @property
            def storage(self) -> StorageManager:
                return self._storage
        
            def get_initialization_tasks(self) -> list[Coroutine[Any, Any, Any]]:
                """Get all initialization coroutines."""
                self._resources = []  # Clear so we dont load again on async init
                return [self.load_context_source(source) for source in self._resources]
        
            async def __aenter__(self) -> Self:
                """Initialize when used standalone."""
                if tasks := self.get_initialization_tasks():
                    await asyncio.gather(*tasks)
                return self
        
            async def __aexit__(
                self,
                exc_type: type[BaseException] | None,
                exc_val: BaseException | None,
                exc_tb: TracebackType | None,
            ) -> None:
                """Clean up any pending messages."""
                self._pending_messages.clear()
        
            def __bool__(self) -> bool:
                return bool(self._pending_messages) or bool(self.chat_messages)
        
            def __repr__(self) -> str:
                return f"MessageHistory(id={self.id!r})"
        
            def __prompt__(self) -> str:
                if not self.chat_messages:
                    return "No conversation history"
        
                last_msgs = self.chat_messages[-2:]
                parts = ["Recent conversation:"]
                parts.extend(msg.format() for msg in last_msgs)
                return "\n".join(parts)
        
            def __contains__(self, item: Any) -> bool:
                """Check if item is in history."""
                return item in self.chat_messages
        
            def __len__(self) -> int:
                """Get length of history."""
                return len(self.chat_messages)
        
            def get_message_tokens(self, message: ChatMessage[Any]) -> int:
                """Get token count for a single message."""
                content = "\n".join(message.format())
                return count_tokens(content, message.model_name)
        
            async def format_history(
                self,
                *,
                max_tokens: int | None = None,
                include_system: bool = False,
                format_template: str | None = None,
                num_messages: int | None = None,  # Add this parameter
            ) -> str:
                """Format conversation history as a single context message.
        
                Args:
                    max_tokens: Optional limit to include only last N tokens
                    include_system: Whether to include system messages
                    format_template: Optional custom format (defaults to agent/message pairs)
                    num_messages: Optional limit to include only last N messages
                """
                template = format_template or "Agent {agent}: {content}\n"
                messages: list[str] = []
                token_count = 0
        
                # Get messages, optionally limited
                history: Sequence[ChatMessage[Any]] = self.chat_messages
                if num_messages:
                    history = history[-num_messages:]
        
                if max_tokens:
                    history = list(reversed(history))  # Start from newest when token limited
        
                for msg in history:
                    name = msg.name or msg.role.title()
                    formatted = template.format(agent=name, content=str(msg.content))
        
                    if max_tokens:
                        # Count tokens in this message
                        if msg.cost_info:
                            msg_tokens = msg.cost_info.token_usage.total_tokens
                        else:
                            # Fallback to tiktoken if no cost info
                            msg_tokens = self.get_message_tokens(msg)
        
                        if token_count + msg_tokens > max_tokens:
                            break
                        token_count += msg_tokens
                        # Add to front since we're going backwards
                        messages.insert(0, formatted)
                    else:
                        messages.append(formatted)
        
                return "\n".join(messages)
        
            async def load_context_source(self, source: PromptType | str) -> None:
                """Load context from a single source."""
                from llmling_agent.prompts.prompts import BasePrompt
        
                try:
                    match source:
                        case str():
                            await self.add_context_from_path(source)
                        case BasePrompt():
                            await self.add_context_from_prompt(source)
                except Exception:
                    logger.exception(
                        "Failed to load context",
                        source="file" if isinstance(source, str) else source.type,
                    )
        
            def load_history_from_database(
                self,
                session: SessionIdType | SessionQuery = None,
                *,
                since: datetime | None = None,
                until: datetime | None = None,
                roles: set[MessageRole] | None = None,
                limit: int | None = None,
            ) -> None:
                """Load conversation history from database.
        
                Args:
                    session: Session ID or query config
                    since: Only include messages after this time (override)
                    until: Only include messages before this time (override)
                    roles: Only include messages with these roles (override)
                    limit: Maximum number of messages to return (override)
                """
                from llmling_agent_config.session import SessionQuery
        
                match session:
                    case SessionQuery() as query:
                        # Override query params if provided
                        if since is not None or until is not None or roles or limit:
                            update = {
                                "since": since.isoformat() if since else None,
                                "until": until.isoformat() if until else None,
                                "roles": roles,
                                "limit": limit,
                            }
                            query = query.model_copy(update=update)
                        if query.name:
                            self.id = query.name
                    case str() | UUID():
                        self.id = str(session)
                        query = SessionQuery(
                            name=self.id,
                            since=since.isoformat() if since else None,
                            until=until.isoformat() if until else None,
                            roles=roles,
                            limit=limit,
                        )
                    case None:
                        # Use current session ID
                        query = SessionQuery(
                            name=self.id,
                            since=since.isoformat() if since else None,
                            until=until.isoformat() if until else None,
                            roles=roles,
                            limit=limit,
                        )
                    case _ as unreachable:
                        assert_never(unreachable)
                self.chat_messages.clear()
                self.chat_messages.extend(self.storage.filter_messages.sync(query))
        
            def get_history(
                self,
                include_pending: bool = True,
                do_filter: bool = True,
            ) -> list[ChatMessage[Any]]:
                """Get conversation history.
        
                Args:
                    include_pending: Whether to include pending messages
                    do_filter: Whether to apply memory config limits (max_tokens, max_messages)
        
                Returns:
                    Filtered list of messages in chronological order
                """
                if include_pending and self._pending_messages:
                    self.chat_messages.extend(self._pending_messages)
                    self._pending_messages.clear()
        
                # 2. Start with original history
                history: Sequence[ChatMessage[Any]] = self.chat_messages
        
                # 3. Only filter if needed
                if do_filter and self._config:
                    # First filter by message count (simple slice)
                    if self._config.max_messages:
                        history = history[-self._config.max_messages :]
        
                    # Then filter by tokens if needed
                    if self._config.max_tokens:
                        token_count = 0
                        filtered = []
                        # Collect messages from newest to oldest until we hit the limit
                        for msg in reversed(history):
                            msg_tokens = self.get_message_tokens(msg)
                            if token_count + msg_tokens > self._config.max_tokens:
                                break
                            token_count += msg_tokens
                            filtered.append(msg)
                        history = list(reversed(filtered))
        
                return list(history)
        
            def get_pending_messages(self) -> list[ChatMessage[Any]]:
                """Get messages that will be included in next interaction."""
                return list(self._pending_messages)
        
            def clear_pending(self) -> None:
                """Clear pending messages without adding them to history."""
                self._pending_messages.clear()
        
            def set_history(self, history: list[ChatMessage[Any]]) -> None:
                """Update conversation history after run."""
                self.chat_messages.clear()
                self.chat_messages.extend(history)
        
            def clear(self) -> None:
                """Clear conversation history and prompts."""
                from llmling_agent.messaging import ChatMessageList
        
                self.chat_messages = ChatMessageList()
                self._last_messages = []
                event = self.HistoryCleared(session_id=str(self.id))
                self.history_cleared.emit(event)
        
            @asynccontextmanager
            async def temporary_state(
                self,
                history: list[AnyPromptType] | SessionQuery | None = None,
                *,
                replace_history: bool = False,
            ) -> AsyncIterator[Self]:
                """Temporarily set conversation history.
        
                Args:
                    history: Optional list of prompts to use as temporary history.
                            Can be strings, BasePrompts, or other prompt types.
                    replace_history: If True, only use provided history. If False, append
                            to existing history.
                """
                from toprompt import to_prompt
        
                from llmling_agent.messaging import ChatMessage, ChatMessageList
        
                old_history = self.chat_messages.copy()
                try:
                    messages: Sequence[ChatMessage[Any]] = ChatMessageList()
                    if history is not None:
                        if isinstance(history, SessionQuery):
                            messages = await self.storage.filter_messages(history)
                        else:
                            messages = [
                                ChatMessage.user_prompt(message=prompt)
                                for p in history
                                if (prompt := await to_prompt(p))
                            ]
        
                    if replace_history:
                        self.chat_messages = ChatMessageList(messages)
                    else:
                        self.chat_messages.extend(messages)
        
                    yield self
        
                finally:
                    self.chat_messages = old_history
        
            def add_chat_messages(self, messages: Sequence[ChatMessage[Any]]) -> None:
                """Add new messages to history and update last_messages."""
                self._last_messages = list(messages)
                self.chat_messages.extend(messages)
        
            @property
            def last_run_messages(self) -> list[ChatMessage[Any]]:
                """Get messages from the last run converted to our format."""
                return self._last_messages
        
            def add_context_message(
                self,
                content: str,
                source: str | None = None,
                **metadata: Any,
            ) -> None:
                """Add a context message.
        
                Args:
                    content: Text content to add
                    source: Description of content source
                    **metadata: Additional metadata to include with the message
                """
                from llmling_agent.messaging import ChatMessage
        
                meta_str = ""
                if metadata:
                    meta_str = "\n".join(f"{k}: {v}" for k, v in metadata.items())
                    meta_str = f"\nMetadata:\n{meta_str}\n"
        
                header = f"Content from {source}:" if source else "Additional context:"
                formatted = f"{header}{meta_str}\n{content}\n"
        
                chat_message = ChatMessage(
                    content=formatted,
                    role="user",
                    name="user",
                    metadata=metadata,
                    conversation_id="context",  # TODO: should probably allow DB field to be NULL
                )
                self._pending_messages.append(chat_message)
        
            async def add_context_from_path(
                self,
                path: JoinablePathLike,
                *,
                convert_to_md: bool = False,
                **metadata: Any,
            ) -> None:
                """Add file or URL content as context message.
        
                Args:
                    path: Any UPath-supported path
                    convert_to_md: Whether to convert content to markdown
                    **metadata: Additional metadata to include with the message
        
                Raises:
                    ValueError: If content cannot be loaded or converted
                """
                path_obj = to_upath(path)
                if convert_to_md:
                    content = await self._converter.convert_file(path)
                    source = f"markdown:{path_obj.name}"
                else:
                    content = await read_path(path)
                    source = f"{path_obj.protocol}:{path_obj.name}"
                self.add_context_message(content, source=source, **metadata)
        
            async def add_context_from_prompt(
                self,
                prompt: PromptType,
                metadata: dict[str, Any] | None = None,
                **kwargs: Any,
            ) -> None:
                """Add rendered prompt content as context message.
        
                Args:
                    prompt: LLMling prompt (static, dynamic, or file-based)
                    metadata: Additional metadata to include with the message
                    kwargs: Optional kwargs for prompt formatting
                """
                try:
                    # Format the prompt using LLMling's prompt system
                    messages = await prompt.format(kwargs)
                    # Extract text content from all messages
                    content = "\n\n".join(msg.get_text_content() for msg in messages)
        
                    self.add_context_message(
                        content,
                        source=f"prompt:{prompt.name or prompt.type}",
                        prompt_args=kwargs,
                        **(metadata or {}),
                    )
                except Exception as e:
                    msg = f"Failed to format prompt: {e}"
                    raise ValueError(msg) from e
        
            def get_history_tokens(self) -> int:
                """Get token count for current history."""
                # Use cost_info if available
                return self.chat_messages.get_history_tokens()
        

        last_run_messages property

        last_run_messages: list[ChatMessage[Any]]
        

        Get messages from the last run converted to our format.

        HistoryCleared dataclass

        Emitted when chat history is cleared.

        Source code in src/llmling_agent/agent/conversation.py
        42
        43
        44
        45
        46
        47
        @dataclass(frozen=True)
        class HistoryCleared:
            """Emitted when chat history is cleared."""

            # ID of the session whose history was cleared.
            session_id: str
            # Timestamp of the clear; default_factory evaluates per instance.
            timestamp: datetime = field(default_factory=get_now)
        

        __aenter__ async

        __aenter__() -> Self
        

        Initialize when used standalone.

        Source code in src/llmling_agent/agent/conversation.py
        104
        105
        106
        107
        108
        async def __aenter__(self) -> Self:
            """Run any pending initialization work when used standalone."""
            init_coros = self.get_initialization_tasks()
            if init_coros:
                # Load all context sources concurrently.
                await asyncio.gather(*init_coros)
            return self
        

        __aexit__ async

        __aexit__(
            exc_type: type[BaseException] | None,
            exc_val: BaseException | None,
            exc_tb: TracebackType | None,
        ) -> None
        

        Clean up any pending messages.

        Source code in src/llmling_agent/agent/conversation.py
        110
        111
        112
        113
        114
        115
        116
        117
        async def __aexit__(
            self,
            exc_type: type[BaseException] | None,
            exc_val: BaseException | None,
            exc_tb: TracebackType | None,
        ) -> None:
            """Clean up any pending messages."""
            # Discard queued context messages regardless of whether an
            # exception occurred; exc_* arguments are intentionally ignored.
            self._pending_messages.clear()
        

        __contains__

        __contains__(item: Any) -> bool
        

        Check if item is in history.

        Source code in src/llmling_agent/agent/conversation.py
        134
        135
        136
        def __contains__(self, item: Any) -> bool:
            """Check if item is in history."""
            # Membership is delegated to the committed message list;
            # pending (not yet committed) messages are not checked.
            return item in self.chat_messages
        

        __init__

        __init__(
            storage: StorageManager | None = None,
            converter: ConversionManager | None = None,
            *,
            messages: list[ChatMessage[Any]] | None = None,
            session_config: MemoryConfig | None = None,
            resources: Sequence[PromptType | str] = ()
        ) -> None
        

        Initialize conversation manager.

        Parameters:

        Name Type Description Default
        storage StorageManager | None

        Storage manager for persistence

        None
        converter ConversionManager | None

        Content converter for file processing

        None
        messages list[ChatMessage[Any]] | None

        Optional list of initial messages

        None
        session_config MemoryConfig | None

        Optional MemoryConfig

        None
        resources Sequence[PromptType | str]

        Optional paths to load as context

        ()
        Source code in src/llmling_agent/agent/conversation.py
        51
        52
        53
        54
        55
        56
        57
        58
        59
        60
        61
        62
        63
        64
        65
        66
        67
        68
        69
        70
        71
        72
        73
        74
        75
        76
        77
        78
        79
        80
        81
        82
        83
        84
        85
        86
        87
        88
        89
        90
        def __init__(
            self,
            storage: StorageManager | None = None,
            converter: ConversionManager | None = None,
            *,
            messages: list[ChatMessage[Any]] | None = None,
            session_config: MemoryConfig | None = None,
            resources: Sequence[PromptType | str] = (),
        ) -> None:
            """Initialize conversation manager.

            Args:
                storage: Storage manager for persistence
                converter: Content converter for file processing
                messages: Optional list of initial messages
                session_config: Optional MemoryConfig
                resources: Optional paths to load as context
            """
            from llmling_agent.messaging import ChatMessageList
            from llmling_agent.prompts.conversion_manager import ConversionManager
            from llmling_agent_config.storage import MemoryStorageConfig, StorageConfig

            # Fall back to an in-memory provider when no storage is supplied.
            self._storage = storage or StorageManager(
                config=StorageConfig(providers=[MemoryStorageConfig()])
            )
            self._converter = converter or ConversionManager([])
            self.chat_messages = ChatMessageList()
            if messages:
                self.chat_messages.extend(messages)
            self._last_messages: list[ChatMessage[Any]] = []
            self._pending_messages: deque[ChatMessage[Any]] = deque()
            self._config = session_config
            self._resources = list(resources)  # Store for async loading
            # Generate new ID if none provided
            self.id = str(uuid4())

            if session_config and session_config.session:
                # NOTE(review): _current_history is assigned but not visibly read
                # anywhere in this class — confirm intended use.
                self._current_history = self.storage.filter_messages.sync(session_config.session)
                if session_config.session.name:
                    self.id = session_config.session.name
        

        __len__

        __len__() -> int
        

        Get length of history.

        Source code in src/llmling_agent/agent/conversation.py
        138
        139
        140
        def __len__(self) -> int:
            """Get length of history."""
            # Counts only committed messages, not the pending queue.
            return len(self.chat_messages)
        

        add_chat_messages

        add_chat_messages(messages: Sequence[ChatMessage[Any]]) -> None
        

        Add new messages to history and update last_messages.

        Source code in src/llmling_agent/agent/conversation.py
        375
        376
        377
        378
        def add_chat_messages(self, messages: Sequence[ChatMessage[Any]]) -> None:
            """Append the given messages to history and record them as the last run."""
            incoming = list(messages)
            # Remember exactly what this run produced before committing it.
            self._last_messages = incoming
            self.chat_messages.extend(incoming)
        

        add_context_from_path async

        add_context_from_path(
            path: JoinablePathLike, *, convert_to_md: bool = False, **metadata: Any
        ) -> None
        

        Add file or URL content as context message.

        Parameters:

        Name Type Description Default
        path JoinablePathLike

        Any UPath-supported path

        required
        convert_to_md bool

        Whether to convert content to markdown

        False
        **metadata Any

        Additional metadata to include with the message

        {}

        Raises:

        Type Description
        ValueError

        If content cannot be loaded or converted

        Source code in src/llmling_agent/agent/conversation.py
        417
        418
        419
        420
        421
        422
        423
        424
        425
        426
        427
        428
        429
        430
        431
        432
        433
        434
        435
        436
        437
        438
        439
        440
        441
        async def add_context_from_path(
            self,
            path: JoinablePathLike,
            *,
            convert_to_md: bool = False,
            **metadata: Any,
        ) -> None:
            """Add file or URL content as context message.

            Args:
                path: Any UPath-supported path
                convert_to_md: Whether to convert content to markdown
                **metadata: Additional metadata to include with the message

            Raises:
                ValueError: If content cannot be loaded or converted
            """
            resolved = to_upath(path)
            if convert_to_md:
                # Route the raw content through the converter for a markdown rendering.
                text = await self._converter.convert_file(path)
                origin = f"markdown:{resolved.name}"
            else:
                text = await read_path(path)
                origin = f"{resolved.protocol}:{resolved.name}"
            self.add_context_message(text, source=origin, **metadata)
        

        add_context_from_prompt async

        add_context_from_prompt(
            prompt: PromptType, metadata: dict[str, Any] | None = None, **kwargs: Any
        ) -> None
        

        Add rendered prompt content as context message.

        Parameters:

        Name Type Description Default
        prompt PromptType

        LLMling prompt (static, dynamic, or file-based)

        required
        metadata dict[str, Any] | None

        Additional metadata to include with the message

        None
        kwargs Any

        Optional kwargs for prompt formatting

        {}
        Source code in src/llmling_agent/agent/conversation.py
        443
        444
        445
        446
        447
        448
        449
        450
        451
        452
        453
        454
        455
        456
        457
        458
        459
        460
        461
        462
        463
        464
        465
        466
        467
        468
        469
        470
        async def add_context_from_prompt(
            self,
            prompt: PromptType,
            metadata: dict[str, Any] | None = None,
            **kwargs: Any,
        ) -> None:
            """Add rendered prompt content as context message.

            Args:
                prompt: LLMling prompt (static, dynamic, or file-based)
                metadata: Additional metadata to include with the message
                kwargs: Optional kwargs for prompt formatting
            """
            try:
                # Render the prompt, then merge the text of every resulting message.
                rendered = await prompt.format(kwargs)
                parts = [m.get_text_content() for m in rendered]
                extra = metadata or {}
                self.add_context_message(
                    "\n\n".join(parts),
                    source=f"prompt:{prompt.name or prompt.type}",
                    prompt_args=kwargs,
                    **extra,
                )
            except Exception as e:
                msg = f"Failed to format prompt: {e}"
                raise ValueError(msg) from e
        

        add_context_message

        add_context_message(content: str, source: str | None = None, **metadata: Any) -> None
        

        Add a context message.

        Parameters:

        Name Type Description Default
        content str

        Text content to add

        required
        source str | None

        Description of content source

        None
        **metadata Any

        Additional metadata to include with the message

        {}
        Source code in src/llmling_agent/agent/conversation.py
        385
        386
        387
        388
        389
        390
        391
        392
        393
        394
        395
        396
        397
        398
        399
        400
        401
        402
        403
        404
        405
        406
        407
        408
        409
        410
        411
        412
        413
        414
        415
        def add_context_message(
            self,
            content: str,
            source: str | None = None,
            **metadata: Any,
        ) -> None:
            """Add a context message.

            Args:
                content: Text content to add
                source: Description of content source
                **metadata: Additional metadata to include with the message
            """
            from llmling_agent.messaging import ChatMessage

            if metadata:
                pairs = [f"{k}: {v}" for k, v in metadata.items()]
                meta_str = "\nMetadata:\n" + "\n".join(pairs) + "\n"
            else:
                meta_str = ""

            header = f"Content from {source}:" if source else "Additional context:"
            formatted = f"{header}{meta_str}\n{content}\n"

            message = ChatMessage(
                content=formatted,
                role="user",
                name="user",
                metadata=metadata,
                conversation_id="context",  # TODO: should probably allow DB field to be NULL
            )
            # Queued until the next interaction picks pending messages up.
            self._pending_messages.append(message)
        

        clear

        clear() -> None
        

        Clear conversation history and prompts.

        Source code in src/llmling_agent/agent/conversation.py
        324
        325
        326
        327
        328
        329
        330
        331
        def clear(self) -> None:
            """Clear conversation history and prompts."""
            from llmling_agent.messaging import ChatMessageList

            self.chat_messages = ChatMessageList()
            self._last_messages = []
            # Notify listeners that this session's history was wiped.
            cleared = self.HistoryCleared(session_id=str(self.id))
            self.history_cleared.emit(cleared)
        

        clear_pending

        clear_pending() -> None
        

        Clear pending messages without adding them to history.

        Source code in src/llmling_agent/agent/conversation.py
        315
        316
        317
        def clear_pending(self) -> None:
            """Drop all queued messages without committing them to history."""
            # Empty the container in place so external aliases stay valid.
            while self._pending_messages:
                self._pending_messages.pop()
        

        format_history async

        format_history(
            *,
            max_tokens: int | None = None,
            include_system: bool = False,
            format_template: str | None = None,
            num_messages: int | None = None
        ) -> str
        

        Format conversation history as a single context message.

        Parameters:

        Name Type Description Default
        max_tokens int | None

        Optional limit to include only last N tokens

        None
        include_system bool

        Whether to include system messages

        False
        format_template str | None

        Optional custom format (defaults to agent/message pairs)

        None
        num_messages int | None

        Optional limit to include only last N messages

        None
        Source code in src/llmling_agent/agent/conversation.py
        147
        148
        149
        150
        151
        152
        153
        154
        155
        156
        157
        158
        159
        160
        161
        162
        163
        164
        165
        166
        167
        168
        169
        170
        171
        172
        173
        174
        175
        176
        177
        178
        179
        180
        181
        182
        183
        184
        185
        186
        187
        188
        189
        190
        191
        192
        193
        194
        195
        async def format_history(
            self,
            *,
            max_tokens: int | None = None,
            include_system: bool = False,
            format_template: str | None = None,
            num_messages: int | None = None,
        ) -> str:
            """Format conversation history as a single context message.

            Args:
                max_tokens: Optional limit to include only last N tokens
                include_system: Whether to include system messages
                format_template: Optional custom format (defaults to agent/message pairs)
                num_messages: Optional limit to include only last N messages
            """
            template = format_template or "Agent {agent}: {content}\n"

            source: Sequence[ChatMessage[Any]] = self.chat_messages
            if num_messages:
                source = source[-num_messages:]

            if not max_tokens:
                # No token budget: render everything in chronological order.
                lines = [
                    template.format(agent=m.name or m.role.title(), content=str(m.content))
                    for m in source
                ]
                return "\n".join(lines)

            # Token-budgeted path: walk newest-first, stop once the budget is hit.
            lines = []
            used = 0
            for m in reversed(source):
                rendered = template.format(agent=m.name or m.role.title(), content=str(m.content))
                if m.cost_info:
                    cost = m.cost_info.token_usage.total_tokens
                else:
                    # Fall back to tokenizer-based counting when no cost info exists.
                    cost = self.get_message_tokens(m)
                if used + cost > max_tokens:
                    break
                used += cost
                # Prepend: we iterate backwards but must return chronological order.
                lines.insert(0, rendered)
            return "\n".join(lines)
        

        get_history

        get_history(include_pending: bool = True, do_filter: bool = True) -> list[ChatMessage[Any]]
        

        Get conversation history.

        Parameters:

        Name Type Description Default
        include_pending bool

        Whether to include pending messages

        True
        do_filter bool

        Whether to apply memory config limits (max_tokens, max_messages)

        True

        Returns:

        Type Description
        list[ChatMessage[Any]]

        Filtered list of messages in chronological order

        Source code in src/llmling_agent/agent/conversation.py
        269
        270
        271
        272
        273
        274
        275
        276
        277
        278
        279
        280
        281
        282
        283
        284
        285
        286
        287
        288
        289
        290
        291
        292
        293
        294
        295
        296
        297
        298
        299
        300
        301
        302
        303
        304
        305
        306
        307
        308
        309
        def get_history(
            self,
            include_pending: bool = True,
            do_filter: bool = True,
        ) -> list[ChatMessage[Any]]:
            """Get conversation history.

            Args:
                include_pending: Whether to include pending messages
                do_filter: Whether to apply memory config limits (max_tokens, max_messages)

            Returns:
                Filtered list of messages in chronological order
            """
            # Promote queued context messages into the permanent history first.
            if include_pending and self._pending_messages:
                self.chat_messages.extend(self._pending_messages)
                self._pending_messages.clear()

            result: Sequence[ChatMessage[Any]] = self.chat_messages

            if do_filter and self._config:
                max_messages = self._config.max_messages
                if max_messages:
                    result = result[-max_messages:]

                budget = self._config.max_tokens
                if budget:
                    # Walk newest-first, keeping messages until the budget is spent.
                    kept: list[ChatMessage[Any]] = []
                    spent = 0
                    for msg in reversed(result):
                        cost = self.get_message_tokens(msg)
                        if spent + cost > budget:
                            break
                        spent += cost
                        kept.append(msg)
                    kept.reverse()
                    result = kept

            return list(result)
        

        get_history_tokens

        get_history_tokens() -> int
        

        Get token count for current history.

        Source code in src/llmling_agent/agent/conversation.py
        472
        473
        474
        475
        def get_history_tokens(self) -> int:
            """Return the total token count of the current history.

            Delegates to the message list, which uses cost_info when available.
            """
            return self.chat_messages.get_history_tokens()
        

        get_initialization_tasks

        get_initialization_tasks() -> list[Coroutine[Any, Any, Any]]
        

        Get all initialization coroutines.

        Source code in src/llmling_agent/agent/conversation.py
         99
        100
        101
        102
        def get_initialization_tasks(self) -> list[Coroutine[Any, Any, Any]]:
            """Get all initialization coroutines.

            Returns one ``load_context_source`` coroutine per configured resource,
            then clears the stored list so a later async init does not load them twice.
            """
            # Snapshot the resources BEFORE clearing: the previous version cleared
            # first, so the comprehension iterated an empty list and always
            # returned [] — no resource was ever scheduled for loading.
            pending_resources = self._resources
            self._resources = []  # Clear so we don't load again on async init
            return [self.load_context_source(source) for source in pending_resources]
        

        get_message_tokens

        get_message_tokens(message: ChatMessage[Any]) -> int
        

        Get token count for a single message.

        Source code in src/llmling_agent/agent/conversation.py
        142
        143
        144
        145
        def get_message_tokens(self, message: ChatMessage[Any]) -> int:
            """Count tokens for one message using its formatted representation."""
            text = "\n".join(message.format())
            return count_tokens(text, message.model_name)
        

        get_pending_messages

        get_pending_messages() -> list[ChatMessage[Any]]
        

        Get messages that will be included in next interaction.

        Source code in src/llmling_agent/agent/conversation.py
        311
        312
        313
        def get_pending_messages(self) -> list[ChatMessage[Any]]:
            """Return a snapshot of messages queued for the next interaction."""
            return [*self._pending_messages]
        

        load_context_source async

        load_context_source(source: PromptType | str) -> None
        

        Load context from a single source.

        Source code in src/llmling_agent/agent/conversation.py
        197
        198
        199
        200
        201
        202
        203
        204
        205
        206
        207
        208
        209
        210
        211
        async def load_context_source(self, source: PromptType | str) -> None:
            """Load context from a single source.

            Failures are logged rather than raised so one bad source does not
            abort the caller.
            """
            from llmling_agent.prompts.prompts import BasePrompt

            try:
                if isinstance(source, str):
                    await self.add_context_from_path(source)
                elif isinstance(source, BasePrompt):
                    await self.add_context_from_prompt(source)
            except Exception:
                logger.exception(
                    "Failed to load context",
                    source="file" if isinstance(source, str) else source.type,
                )
        

        load_history_from_database

        load_history_from_database(
            session: SessionIdType | SessionQuery = None,
            *,
            since: datetime | None = None,
            until: datetime | None = None,
            roles: set[MessageRole] | None = None,
            limit: int | None = None
        ) -> None
        

        Load conversation history from database.

        Parameters:

        Name Type Description Default
        session SessionIdType | SessionQuery

        Session ID or query config

        None
        since datetime | None

        Only include messages after this time (override)

        None
        until datetime | None

        Only include messages before this time (override)

        None
        roles set[MessageRole] | None

        Only include messages with these roles (override)

        None
        limit int | None

        Maximum number of messages to return (override)

        None
        Source code in src/llmling_agent/agent/conversation.py
        213
        214
        215
        216
        217
        218
        219
        220
        221
        222
        223
        224
        225
        226
        227
        228
        229
        230
        231
        232
        233
        234
        235
        236
        237
        238
        239
        240
        241
        242
        243
        244
        245
        246
        247
        248
        249
        250
        251
        252
        253
        254
        255
        256
        257
        258
        259
        260
        261
        262
        263
        264
        265
        266
        267
        def load_history_from_database(
            self,
            session: SessionIdType | SessionQuery = None,
            *,
            since: datetime | None = None,
            until: datetime | None = None,
            roles: set[MessageRole] | None = None,
            limit: int | None = None,
        ) -> None:
            """Load conversation history from database.
        
            Args:
                session: Session ID or query config
                since: Only include messages after this time (override)
                until: Only include messages before this time (override)
                roles: Only include messages with these roles (override)
                limit: Maximum number of messages to return (override)
            """
            from llmling_agent_config.session import SessionQuery
        
            match session:
                case SessionQuery() as query:
                    # Override query params if provided
                    if since is not None or until is not None or roles or limit:
                        update = {
                            "since": since.isoformat() if since else None,
                            "until": until.isoformat() if until else None,
                            "roles": roles,
                            "limit": limit,
                        }
                        query = query.model_copy(update=update)
                    if query.name:
                        self.id = query.name
                case str() | UUID():
                    self.id = str(session)
                    query = SessionQuery(
                        name=self.id,
                        since=since.isoformat() if since else None,
                        until=until.isoformat() if until else None,
                        roles=roles,
                        limit=limit,
                    )
                case None:
                    # Use current session ID
                    query = SessionQuery(
                        name=self.id,
                        since=since.isoformat() if since else None,
                        until=until.isoformat() if until else None,
                        roles=roles,
                        limit=limit,
                    )
                case _ as unreachable:
                    assert_never(unreachable)
            self.chat_messages.clear()
            self.chat_messages.extend(self.storage.filter_messages.sync(query))
        

        set_history

        set_history(history: list[ChatMessage[Any]]) -> None
        

        Update conversation history after run.

        Source code in src/llmling_agent/agent/conversation.py
        319
        320
        321
        322
        def set_history(self, history: list[ChatMessage[Any]]) -> None:
            """Replace the conversation history in place after a run."""
            messages = self.chat_messages
            messages.clear()
            messages.extend(history)
        

        temporary_state async

        temporary_state(
            history: list[AnyPromptType] | SessionQuery | None = None, *, replace_history: bool = False
        ) -> AsyncIterator[Self]
        

        Temporarily set conversation history.

        Parameters:

        Name Type Description Default
        history list[AnyPromptType] | SessionQuery | None

        Optional list of prompts to use as temporary history. Can be strings, BasePrompts, or other prompt types.

        None
        replace_history bool

        If True, only use provided history. If False, append to existing history.

        False
        Source code in src/llmling_agent/agent/conversation.py
        333
        334
        335
        336
        337
        338
        339
        340
        341
        342
        343
        344
        345
        346
        347
        348
        349
        350
        351
        352
        353
        354
        355
        356
        357
        358
        359
        360
        361
        362
        363
        364
        365
        366
        367
        368
        369
        370
        371
        372
        373
        @asynccontextmanager
        async def temporary_state(
            self,
            history: list[AnyPromptType] | SessionQuery | None = None,
            *,
            replace_history: bool = False,
        ) -> AsyncIterator[Self]:
            """Temporarily set conversation history.

            Args:
                history: Optional list of prompts to use as temporary history.
                        Can be strings, BasePrompts, or other prompt types.
                replace_history: If True, only use provided history. If False, append
                        to existing history.
            """
            from toprompt import to_prompt

            from llmling_agent.messaging import ChatMessage, ChatMessageList

            saved = self.chat_messages.copy()
            try:
                temp: Sequence[ChatMessage[Any]] = ChatMessageList()
                if history is not None:
                    if isinstance(history, SessionQuery):
                        temp = await self.storage.filter_messages(history)
                    else:
                        # Render each prompt; skip any that renders to a falsy value.
                        temp = []
                        for item in history:
                            rendered = await to_prompt(item)
                            if rendered:
                                temp.append(ChatMessage.user_prompt(message=rendered))

                if replace_history:
                    self.chat_messages = ChatMessageList(temp)
                else:
                    self.chat_messages.extend(temp)

                yield self

            finally:
                # Always restore the pre-context history, even on error.
                self.chat_messages = saved