
resource_providers

Class info

Classes

| Name | Module | Description |
|------|--------|-------------|
| `AggregatingResourceProvider` | `llmling_agent.resource_providers.aggregating` | Provider that combines resources from multiple providers. |
| `MCPResourceProvider` | `llmling_agent.resource_providers.mcp_provider` | Resource provider for a single MCP server. |
| `ResourceProvider` | `llmling_agent.resource_providers.base` | Base class for resource providers. |
| `StaticResourceProvider` | `llmling_agent.resource_providers.static` | Provider for pre-configured tools, prompts and resources. |

        🛈 DocStrings

        Resource provider implementations.

        AggregatingResourceProvider

        Bases: ResourceProvider

        Provider that combines resources from multiple providers.
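
        A minimal usage sketch. The two static providers and the "greeting" prompt name are placeholders; any ResourceProvider subclasses can be aggregated.

        ```python
        import asyncio

        from llmling_agent.resource_providers.aggregating import AggregatingResourceProvider
        from llmling_agent.resource_providers.static import StaticResourceProvider


        async def main() -> None:
            # Any ResourceProvider instances can be aggregated.
            local = StaticResourceProvider(name="local")
            shared = StaticResourceProvider(name="shared")
            combined = AggregatingResourceProvider([local, shared], name="combined")

            tools = await combined.get_tools()      # union of tools from all providers
            prompts = await combined.get_prompts()  # union of prompts from all providers
            print(len(tools), len(prompts))

            # Prompt lookup falls through the providers in order; "greeting" is a
            # placeholder name and raises KeyError if no provider defines it.
            try:
                parts = await combined.get_request_parts("greeting", {"name": "Ada"})
            except KeyError:
                parts = []
            print(parts)


        asyncio.run(main())
        ```

        Because the provider keeps a reference to the list you pass in, appending another provider to that list later is picked up on the next query.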

        Source code in src/llmling_agent/resource_providers/aggregating.py, lines 18-54
        class AggregatingResourceProvider(ResourceProvider):
            """Provider that combines resources from multiple providers."""
        
            def __init__(self, providers: list[ResourceProvider], name: str = "aggregating"):
                """Initialize provider with list of providers to aggregate.
        
                Args:
                    providers: Resource providers to aggregate (stores reference to list)
                    name: Name for this provider
                """
                super().__init__(name=name)
                # Store reference to the providers list for dynamic updates
                self.providers = providers
        
            async def get_tools(self) -> list[Tool]:
                """Get tools from all providers."""
                return [t for provider in self.providers for t in await provider.get_tools()]
        
            async def get_prompts(self) -> list[BasePrompt]:
                """Get prompts from all providers."""
                return [p for provider in self.providers for p in await provider.get_prompts()]
        
            async def get_resources(self) -> list[ResourceInfo]:
                """Get resources from all providers."""
                return [r for provider in self.providers for r in await provider.get_resources()]
        
            async def get_request_parts(
                self, name: str, arguments: dict[str, str] | None = None
            ) -> list[ModelRequestPart]:
                """Try to get prompt from first provider that has it."""
                for provider in self.providers:
                    try:
                        return await provider.get_request_parts(name, arguments)
                    except KeyError:
                        continue
                msg = f"Prompt {name!r} not found in any provider"
                raise KeyError(msg)
        

        __init__

        __init__(providers: list[ResourceProvider], name: str = 'aggregating')
        

        Initialize provider with list of providers to aggregate.

        Parameters:

        | Name | Type | Description | Default |
        |------|------|-------------|---------|
        | `providers` | `list[ResourceProvider]` | Resource providers to aggregate (stores reference to list) | *required* |
        | `name` | `str` | Name for this provider | `'aggregating'` |

        Source code in src/llmling_agent/resource_providers/aggregating.py, lines 21-30
        def __init__(self, providers: list[ResourceProvider], name: str = "aggregating"):
            """Initialize provider with list of providers to aggregate.
        
            Args:
                providers: Resource providers to aggregate (stores reference to list)
                name: Name for this provider
            """
            super().__init__(name=name)
            # Store reference to the providers list for dynamic updates
            self.providers = providers
        

        get_prompts async

        get_prompts() -> list[BasePrompt]
        

        Get prompts from all providers.

        Source code in src/llmling_agent/resource_providers/aggregating.py, lines 36-38
        async def get_prompts(self) -> list[BasePrompt]:
            """Get prompts from all providers."""
            return [p for provider in self.providers for p in await provider.get_prompts()]
        

        get_request_parts async

        get_request_parts(
            name: str, arguments: dict[str, str] | None = None
        ) -> list[ModelRequestPart]
        

        Try to get prompt from first provider that has it.

        Source code in src/llmling_agent/resource_providers/aggregating.py, lines 44-54
        async def get_request_parts(
            self, name: str, arguments: dict[str, str] | None = None
        ) -> list[ModelRequestPart]:
            """Try to get prompt from first provider that has it."""
            for provider in self.providers:
                try:
                    return await provider.get_request_parts(name, arguments)
                except KeyError:
                    continue
            msg = f"Prompt {name!r} not found in any provider"
            raise KeyError(msg)
        

        get_resources async

        get_resources() -> list[ResourceInfo]
        

        Get resources from all providers.

        Source code in src/llmling_agent/resource_providers/aggregating.py, lines 40-42
        async def get_resources(self) -> list[ResourceInfo]:
            """Get resources from all providers."""
            return [r for provider in self.providers for r in await provider.get_resources()]
        

        get_tools async

        get_tools() -> list[Tool]
        

        Get tools from all providers.

        Source code in src/llmling_agent/resource_providers/aggregating.py, lines 32-34
        async def get_tools(self) -> list[Tool]:
            """Get tools from all providers."""
            return [t for provider in self.providers for t in await provider.get_tools()]
        

        MCPResourceProvider

        Bases: ResourceProvider

        Resource provider for a single MCP server.
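
        A minimal sketch, assuming a stdio MCP server launched via the command string below. The command is a placeholder, and the exact string format accepted here depends on BaseMCPServerConfig.from_string; passing a full MCPServerConfig also works.

        ```python
        import asyncio

        from llmling_agent.resource_providers.mcp_provider import MCPResourceProvider


        async def main() -> None:
            # The string form is parsed via BaseMCPServerConfig.from_string.
            provider = MCPResourceProvider("uvx mcp-server-fetch", name="fetch")

            # Entering the context connects the underlying MCPClient; leaving it cleans up.
            async with provider:
                tools = await provider.get_tools()          # cached after the first call
                resources = await provider.get_resources()  # also cached
                print([t.name for t in tools], len(resources))


        asyncio.run(main())
        ```

        Tool, prompt, and resource lists are cached; server-side change notifications invalidate the caches, and the enabled/disabled state of each tool is restored after a refresh.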

        Source code in src/llmling_agent/resource_providers/mcp_provider.py, lines 32-226
        class MCPResourceProvider(ResourceProvider):
            """Resource provider for a single MCP server."""
        
            def __init__(
                self,
                server: MCPServerConfig | str,
                name: str = "mcp",
                owner: str | None = None,
                context: NodeContext | None = None,
                source: Literal["pool", "node"] = "node",
                elicitation_callback: ElicitationHandler | None = None,
                sampling_callback: ClientSamplingHandler | None = None,
                progress_handler: RichProgressCallback | None = None,
                accessible_roots: list[str] | None = None,
            ):
                from llmling_agent.mcp_server import MCPClient
        
                super().__init__(name, owner=owner)
                self.server = (
                    BaseMCPServerConfig.from_string(server) if isinstance(server, str) else server
                )
                self.context = context
                self.source = source
                self.exit_stack = AsyncExitStack()
                self._progress_handler = progress_handler
                self._accessible_roots = accessible_roots
                self._elicitation_callback = elicitation_callback
                self._sampling_callback = sampling_callback
        
                # Tool caching
                self._tools_cache: list[Tool] | None = None
                self._saved_enabled_states: dict[str, bool] = {}
        
                # Prompt caching
                self._prompts_cache: list[MCPClientPrompt] | None = None
        
                # Resource caching
                self._resources_cache: list[ResourceInfo] | None = None
        
                self.client = MCPClient(
                    config=self.server,
                    elicitation_callback=self._elicitation_callback,
                    sampling_callback=self._sampling_callback,
                    progress_handler=self._progress_handler,
                    accessible_roots=self._accessible_roots,
                    tool_change_callback=self._on_tools_changed,
                    prompt_change_callback=self._on_prompts_changed,
                    resource_change_callback=self._on_resources_changed,
                )
        
            def __repr__(self) -> str:
                return f"MCPResourceProvider({self.server!r}, source={self.source!r})"
        
            async def __aenter__(self) -> Self:
                try:
                    await self.exit_stack.enter_async_context(self.client)
                except Exception as e:
                    # Clean up in case of error
                    await self.__aexit__(type(e), e, e.__traceback__)
                    msg = "Failed to initialize MCP manager"
                    raise RuntimeError(msg) from e
        
                return self
        
            async def __aexit__(
                self,
                exc_type: type[BaseException] | None,
                exc_val: BaseException | None,
                exc_tb: TracebackType | None,
            ):
                try:
                    try:
                        # Clean up exit stack (which includes MCP clients)
                        await self.exit_stack.aclose()
                    except RuntimeError as e:
                        if "different task" in str(e):
                            # Handle task context mismatch
                            current_task = asyncio.current_task()
                            if current_task:
                                loop = asyncio.get_running_loop()
                                await loop.create_task(self.exit_stack.aclose())
                        else:
                            raise
        
                except Exception as e:
                    msg = "Error during MCP manager cleanup"
                    logger.exception(msg, exc_info=e)
                    raise RuntimeError(msg) from e
        
            async def _on_tools_changed(self) -> None:
                """Callback when tools change on the MCP server."""
                logger.info("MCP tool list changed, refreshing provider cache")
                self._saved_enabled_states = {t.name: t.enabled for t in self._tools_cache or []}
                self._tools_cache = None
        
            async def _on_prompts_changed(self) -> None:
                """Callback when prompts change on the MCP server."""
                logger.info("MCP prompt list changed, refreshing provider cache")
                self._prompts_cache = None
        
            async def _on_resources_changed(self) -> None:
                """Callback when resources change on the MCP server."""
                logger.info("MCP resource list changed, refreshing provider cache")
                self._resources_cache = None
        
            async def refresh_tools_cache(self) -> None:
                """Refresh the tools cache by fetching from client."""
                try:
                    # Get fresh tools from client
                    mcp_tools = await self.client.list_tools()
                    all_tools: list[Tool] = []
        
                    for tool in mcp_tools:
                        try:
                            tool_info = self.client.convert_tool(tool)
                            all_tools.append(tool_info)
                        except Exception:
                            logger.exception("Failed to create MCP tool", name=tool.name)
                            continue
        
                    # Restore enabled states from saved states
                    for tool_info in all_tools:
                        if tool_info.name in self._saved_enabled_states:
                            tool_info.enabled = self._saved_enabled_states[tool_info.name]
        
                    self._tools_cache = all_tools
                    logger.debug("Refreshed MCP tools cache", num_tools=len(all_tools))
                except Exception:
                    logger.exception("Failed to refresh MCP tools cache")
                    self._tools_cache = []
        
            async def get_tools(self) -> list[Tool]:
                """Get cached tools, refreshing if necessary."""
                if self._tools_cache is None:
                    await self.refresh_tools_cache()
        
                return self._tools_cache or []
        
            async def refresh_prompts_cache(self) -> None:
                """Refresh the prompts cache by fetching from client."""
                from llmling_agent.prompts.prompts import MCPClientPrompt
        
                try:
                    result = await self.client.list_prompts()
                    all_prompts: list[MCPClientPrompt] = []
        
                    for prompt in result:
                        try:
                            converted = MCPClientPrompt.from_fastmcp(self.client, prompt)
                            all_prompts.append(converted)
                        except Exception:
                            logger.exception("Failed to convert prompt", name=prompt.name)
                            continue
        
                    self._prompts_cache = all_prompts
                    logger.debug("Refreshed MCP prompts cache", num_prompts=len(all_prompts))
                except Exception:
                    logger.exception("Failed to refresh MCP prompts cache")
                    self._prompts_cache = []
        
            async def get_prompts(self) -> list[MCPClientPrompt]:  # type: ignore
                """Get cached prompts, refreshing if necessary."""
                if self._prompts_cache is None:
                    await self.refresh_prompts_cache()
        
                return self._prompts_cache or []
        
            async def refresh_resources_cache(self) -> None:
                """Refresh the resources cache by fetching from client."""
                try:
                    result = await self.client.list_resources()
                    all_resources: list[ResourceInfo] = []
        
                    for resource in result:
                        try:
                            converted = await ResourceInfo.from_mcp_resource(resource)
                            all_resources.append(converted)
                        except Exception:
                            logger.exception("Failed to convert resource", name=resource.name)
                            continue
        
                    self._resources_cache = all_resources
                    logger.debug(
                        "Refreshed MCP resources cache", num_resources=len(all_resources)
                    )
                except Exception:
                    logger.exception("Failed to refresh MCP resources cache")
                    self._resources_cache = []
        
            async def get_resources(self) -> list[ResourceInfo]:
                """Get cached resources, refreshing if necessary."""
                if self._resources_cache is None:
                    await self.refresh_resources_cache()
        
                return self._resources_cache or []
        

        get_prompts async

        get_prompts() -> list[MCPClientPrompt]
        

        Get cached prompts, refreshing if necessary.

        Source code in src/llmling_agent/resource_providers/mcp_provider.py, lines 192-197
        async def get_prompts(self) -> list[MCPClientPrompt]:  # type: ignore
            """Get cached prompts, refreshing if necessary."""
            if self._prompts_cache is None:
                await self.refresh_prompts_cache()
        
            return self._prompts_cache or []
        

        get_resources async

        get_resources() -> list[ResourceInfo]
        

        Get cached resources, refreshing if necessary.

        Source code in src/llmling_agent/resource_providers/mcp_provider.py, lines 221-226
        async def get_resources(self) -> list[ResourceInfo]:
            """Get cached resources, refreshing if necessary."""
            if self._resources_cache is None:
                await self.refresh_resources_cache()
        
            return self._resources_cache or []
        

        get_tools async

        get_tools() -> list[Tool]
        

        Get cached tools, refreshing if necessary.

        Source code in src/llmling_agent/resource_providers/mcp_provider.py, lines 163-168
        async def get_tools(self) -> list[Tool]:
            """Get cached tools, refreshing if necessary."""
            if self._tools_cache is None:
                await self.refresh_tools_cache()
        
            return self._tools_cache or []
        

        refresh_prompts_cache async

        refresh_prompts_cache() -> None
        

        Refresh the prompts cache by fetching from client.

        Source code in src/llmling_agent/resource_providers/mcp_provider.py, lines 170-190
        async def refresh_prompts_cache(self) -> None:
            """Refresh the prompts cache by fetching from client."""
            from llmling_agent.prompts.prompts import MCPClientPrompt
        
            try:
                result = await self.client.list_prompts()
                all_prompts: list[MCPClientPrompt] = []
        
                for prompt in result:
                    try:
                        converted = MCPClientPrompt.from_fastmcp(self.client, prompt)
                        all_prompts.append(converted)
                    except Exception:
                        logger.exception("Failed to convert prompt", name=prompt.name)
                        continue
        
                self._prompts_cache = all_prompts
                logger.debug("Refreshed MCP prompts cache", num_prompts=len(all_prompts))
            except Exception:
                logger.exception("Failed to refresh MCP prompts cache")
                self._prompts_cache = []
        

        refresh_resources_cache async

        refresh_resources_cache() -> None
        

        Refresh the resources cache by fetching from client.

        Source code in src/llmling_agent/resource_providers/mcp_provider.py, lines 199-219
        async def refresh_resources_cache(self) -> None:
            """Refresh the resources cache by fetching from client."""
            try:
                result = await self.client.list_resources()
                all_resources: list[ResourceInfo] = []
        
                for resource in result:
                    try:
                        converted = await ResourceInfo.from_mcp_resource(resource)
                        all_resources.append(converted)
                    except Exception:
                        logger.exception("Failed to convert resource", name=resource.name)
                        continue
        
                self._resources_cache = all_resources
                logger.debug(
                    "Refreshed MCP resources cache", num_resources=len(all_resources)
                )
            except Exception:
                logger.exception("Failed to refresh MCP resources cache")
                self._resources_cache = []
        

        refresh_tools_cache async

        refresh_tools_cache() -> None
        

        Refresh the tools cache by fetching from client.

        Source code in src/llmling_agent/resource_providers/mcp_provider.py, lines 137-161
        async def refresh_tools_cache(self) -> None:
            """Refresh the tools cache by fetching from client."""
            try:
                # Get fresh tools from client
                mcp_tools = await self.client.list_tools()
                all_tools: list[Tool] = []
        
                for tool in mcp_tools:
                    try:
                        tool_info = self.client.convert_tool(tool)
                        all_tools.append(tool_info)
                    except Exception:
                        logger.exception("Failed to create MCP tool", name=tool.name)
                        continue
        
                # Restore enabled states from saved states
                for tool_info in all_tools:
                    if tool_info.name in self._saved_enabled_states:
                        tool_info.enabled = self._saved_enabled_states[tool_info.name]
        
                self._tools_cache = all_tools
                logger.debug("Refreshed MCP tools cache", num_tools=len(all_tools))
            except Exception:
                logger.exception("Failed to refresh MCP tools cache")
                self._tools_cache = []
        

        ResourceProvider

        Base class for resource providers.

        Provides tools, prompts, and other resources to agents. Default implementations return empty lists - override as needed.
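
        A minimal subclass sketch, assuming `Tool` is importable from `llmling_agent.tools.base` as in the StaticResourceProvider source further down; only the methods you override serve anything, the rest fall back to the empty defaults.

        ```python
        from llmling_agent.resource_providers.base import ResourceProvider
        from llmling_agent.tools.base import Tool


        def shout(text: str) -> str:
            """Return the text in upper case."""
            return text.upper()


        class ShoutProvider(ResourceProvider):
            """Serves a single tool; prompts and resources stay empty."""

            async def get_tools(self) -> list[Tool]:
                return [Tool.from_callable(shout)]
        ```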

        Source code in src/llmling_agent/resource_providers/base.py, lines 23-90
        class ResourceProvider:
            """Base class for resource providers.
        
            Provides tools, prompts, and other resources to agents.
            Default implementations return empty lists - override as needed.
            """
        
            def __init__(self, name: str, owner: str | None = None):
                """Initialize the resource provider."""
                self.name = name
                self.owner = owner
                self.log = logger.bind(name=self.name, owner=self.owner)
        
            async def __aenter__(self) -> Self:
                """Async context entry if required."""
                return self
        
            async def __aexit__(
                self,
                exc_type: type[BaseException] | None,
                exc_val: BaseException | None,
                exc_tb: TracebackType | None,
            ):
                """Async context cleanup if required."""
        
            def __repr__(self) -> str:
                return f"{self.__class__.__name__}(name={self.name!r})"
        
            async def get_tools(self) -> list[Tool]:
                """Get available tools. Override to provide tools."""
                return []
        
            async def get_prompts(self) -> list[BasePrompt]:
                """Get available prompts. Override to provide prompts."""
                return []
        
            async def get_resources(self) -> list[ResourceInfo]:
                """Get available resources. Override to provide resources."""
                return []
        
            async def get_request_parts(
                self, name: str, arguments: dict[str, str] | None = None
            ) -> list[ModelRequestPart]:
                """Get a prompt formatted with arguments.
        
                Args:
                    name: Name of the prompt to format
                    arguments: Optional arguments for prompt formatting
        
                Returns:
                    Single chat message with merged content
        
                Raises:
                    KeyError: If prompt not found
                    ValueError: If formatting fails
                """
                prompts = await self.get_prompts()
                prompt = next((p for p in prompts if p.name == name), None)
                if not prompt:
                    msg = f"Prompt {name!r} not found"
                    raise KeyError(msg)
        
                messages = await prompt.format(arguments or {})
                if not messages:
                    msg = f"Prompt {name!r} produced no messages"
                    raise ValueError(msg)
        
                return [p for prompt_msg in messages for p in prompt_msg.to_pydantic_parts()]
        

        __aenter__ async

        __aenter__() -> Self
        

        Async context entry if required.

        Source code in src/llmling_agent/resource_providers/base.py, lines 36-38
        async def __aenter__(self) -> Self:
            """Async context entry if required."""
            return self
        

        __aexit__ async

        __aexit__(
            exc_type: type[BaseException] | None,
            exc_val: BaseException | None,
            exc_tb: TracebackType | None,
        )
        

        Async context cleanup if required.

        Source code in src/llmling_agent/resource_providers/base.py, lines 40-46
        async def __aexit__(
            self,
            exc_type: type[BaseException] | None,
            exc_val: BaseException | None,
            exc_tb: TracebackType | None,
        ):
            """Async context cleanup if required."""
        

        __init__

        __init__(name: str, owner: str | None = None)
        

        Initialize the resource provider.

        Source code in src/llmling_agent/resource_providers/base.py, lines 30-34
        def __init__(self, name: str, owner: str | None = None):
            """Initialize the resource provider."""
            self.name = name
            self.owner = owner
            self.log = logger.bind(name=self.name, owner=self.owner)
        

        get_prompts async

        get_prompts() -> list[BasePrompt]
        

        Get available prompts. Override to provide prompts.

        Source code in src/llmling_agent/resource_providers/base.py, lines 55-57
        async def get_prompts(self) -> list[BasePrompt]:
            """Get available prompts. Override to provide prompts."""
            return []
        

        get_request_parts async

        get_request_parts(
            name: str, arguments: dict[str, str] | None = None
        ) -> list[ModelRequestPart]
        

        Get a prompt formatted with arguments.
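
        A small sketch; the "greeting" prompt and its "name" argument are placeholders for a prompt the provider actually exposes via get_prompts().

        ```python
        from llmling_agent.resource_providers.base import ResourceProvider


        async def render_greeting(provider: ResourceProvider):
            # Placeholder prompt name and argument.
            parts = await provider.get_request_parts("greeting", {"name": "Ada"})
            # parts is a list of ModelRequestPart objects; an unknown prompt name
            # raises KeyError, and a prompt that formats to no messages raises ValueError.
            return parts
        ```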

        Parameters:

        | Name | Type | Description | Default |
        |------|------|-------------|---------|
        | `name` | `str` | Name of the prompt to format | *required* |
        | `arguments` | `dict[str, str] \| None` | Optional arguments for prompt formatting | `None` |

        Returns:

        | Type | Description |
        |------|-------------|
        | `list[ModelRequestPart]` | Single chat message with merged content |

        Raises:

        | Type | Description |
        |------|-------------|
        | `KeyError` | If prompt not found |
        | `ValueError` | If formatting fails |

        Source code in src/llmling_agent/resource_providers/base.py, lines 63-90
        async def get_request_parts(
            self, name: str, arguments: dict[str, str] | None = None
        ) -> list[ModelRequestPart]:
            """Get a prompt formatted with arguments.
        
            Args:
                name: Name of the prompt to format
                arguments: Optional arguments for prompt formatting
        
            Returns:
                Single chat message with merged content
        
            Raises:
                KeyError: If prompt not found
                ValueError: If formatting fails
            """
            prompts = await self.get_prompts()
            prompt = next((p for p in prompts if p.name == name), None)
            if not prompt:
                msg = f"Prompt {name!r} not found"
                raise KeyError(msg)
        
            messages = await prompt.format(arguments or {})
            if not messages:
                msg = f"Prompt {name!r} produced no messages"
                raise ValueError(msg)
        
            return [p for prompt_msg in messages for p in prompt_msg.to_pydantic_parts()]
        

        get_resources async

        get_resources() -> list[ResourceInfo]
        

        Get available resources. Override to provide resources.

        Source code in src/llmling_agent/resource_providers/base.py, lines 59-61
        async def get_resources(self) -> list[ResourceInfo]:
            """Get available resources. Override to provide resources."""
            return []
        

        get_tools async

        get_tools() -> list[Tool]
        

        Get available tools. Override to provide tools.

        Source code in src/llmling_agent/resource_providers/base.py, lines 51-53
        async def get_tools(self) -> list[Tool]:
            """Get available tools. Override to provide tools."""
            return []
        

        StaticResourceProvider

        Bases: ResourceProvider

        Provider for pre-configured tools, prompts and resources.

        Allows creating a provider that serves a fixed set of resources passed during initialization. Useful for converting static configurations to the common ResourceProvider interface.
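
        A minimal sketch that registers tools via the tool decorator described below:

        ```python
        from llmling_agent.resource_providers.static import StaticResourceProvider

        provider = StaticResourceProvider(name="demo")


        @provider.tool(description="Add two integers")
        def add(a: int, b: int) -> int:
            return a + b


        @provider.tool
        def echo(text: str) -> str:
            return text


        # From async code, e.g. inside an agent:
        #   tools = await provider.get_tools()   # the two registered tools
        #   tool = await provider.get_tool("add")
        ```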

        Source code in src/llmling_agent/resource_providers/static.py, lines 18-201
        class StaticResourceProvider(ResourceProvider):
            """Provider for pre-configured tools, prompts and resources.
        
            Allows creating a provider that serves a fixed set of resources
            passed during initialization. Useful for converting static configurations
            to the common ResourceProvider interface.
            """
        
            def __init__(
                self,
                name: str = "static",
                tools: Sequence[Tool] | None = None,
                prompts: Sequence[BasePrompt] | None = None,
                resources: Sequence[ResourceInfo] | None = None,
            ):
                """Initialize provider with static resources.
        
                Args:
                    name: Name of the provider
                    tools: Optional list of tools to serve
                    prompts: Optional list of prompts to serve
                    resources: Optional list of resources to serve
                """
                super().__init__(name=name)
                self._tools = list(tools) if tools else []
                self._prompts = list(prompts) if prompts else []
                self._resources = list(resources) if resources else []
        
            async def get_tools(self) -> list[Tool]:
                """Get pre-configured tools."""
                return self._tools
        
            async def get_tool(self, name: str) -> Tool | None:
                """Get tool by name."""
                return next((tool for tool in self._tools if tool.name == name), None)
        
            async def get_prompts(self) -> list[BasePrompt]:
                """Get pre-configured prompts."""
                return self._prompts
        
            async def get_resources(self) -> list[ResourceInfo]:
                """Get pre-configured resources."""
                return self._resources
        
            def add_tool(self, tool: Tool) -> None:
                """Add a tool to this provider.
        
                Args:
                    tool: Tool to add
                """
                self._tools.append(tool)
        
            def remove_tool(self, name: str) -> bool:
                """Remove a tool by name.
        
                Args:
                    name: Name of tool to remove
        
                Returns:
                    True if tool was found and removed, False otherwise
                """
                for i, tool in enumerate(self._tools):
                    if tool.name == name:
                        self._tools.pop(i)
                        return True
                return False
        
            def add_prompt(self, prompt: BasePrompt) -> None:
                """Add a prompt to this provider.
        
                Args:
                    prompt: Prompt to add
                """
                self._prompts.append(prompt)
        
            def remove_prompt(self, name: str) -> bool:
                """Remove a prompt by name.
        
                Args:
                    name: Name of prompt to remove
        
                Returns:
                    True if prompt was found and removed, False otherwise
                """
                for i, prompt in enumerate(self._prompts):
                    if prompt.name == name:
                        self._prompts.pop(i)
                        return True
                return False
        
            def add_resource(self, resource: ResourceInfo) -> None:
                """Add a resource to this provider.
        
                Args:
                    resource: Resource to add
                """
                self._resources.append(resource)
        
            def remove_resource(self, name: str) -> bool:
                """Remove a resource by name.
        
                Args:
                    name: Name of resource to remove
        
                Returns:
                    True if resource was found and removed, False otherwise
                """
                for i, resource in enumerate(self._resources):
                    if resource.name == name:
                        self._resources.pop(i)
                        return True
                return False
        
            @overload
            def tool(self, func: Callable[..., Any]) -> Callable[..., Any]: ...
        
            @overload
            def tool(
                self,
                *,
                name: str | None = None,
                description: str | None = None,
                enabled: bool = True,
                requires_confirmation: bool = False,
                metadata: dict[str, str] | None = None,
                **kwargs: Any,
            ) -> Callable[[Callable[..., Any]], Callable[..., Any]]: ...
        
            def tool(
                self,
                func: Callable[..., Any] | None = None,
                *,
                name: str | None = None,
                description: str | None = None,
                enabled: bool = True,
                requires_confirmation: bool = False,
                metadata: dict[str, str] | None = None,
                **kwargs: Any,
            ) -> Callable[..., Any] | Callable[[Callable[..., Any]], Callable[..., Any]]:
                """Decorator to register a function as a tool.
        
                Can be used with or without parameters:
        
                ```python
                # Without parameters
                @provider.tool
                def my_function(x: int) -> str:
                    return str(x)
        
                # With parameters
                @provider.tool(name="custom_name", description="Custom description")
                def another_function(y: str) -> str:
                    return y.upper()
                ```
        
                Args:
                    func: Function to register (when used without parentheses)
                    name: Override for tool name
                    description: Override for tool description
                    enabled: Whether tool is initially enabled
                    requires_confirmation: Whether execution needs confirmation
                    metadata: Additional tool metadata
                    **kwargs: Additional arguments passed to Tool.from_callable
                """
                from llmling_agent.tools.base import Tool
        
                def decorator(f: Callable[..., Any]) -> Callable[..., Any]:
                    tool = Tool.from_callable(
                        f,
                        name_override=name,
                        description_override=description,
                        enabled=enabled,
                        requires_confirmation=requires_confirmation,
                        metadata=metadata or {},
                        **kwargs,
                    )
                    self.add_tool(tool)
                    return f
        
                if func is None:
                    # Called with arguments: @provider.tool(...)
                    return decorator
                # Called without arguments: @provider.tool
                return decorator(func)
        

        __init__

        __init__(
            name: str = "static",
            tools: Sequence[Tool] | None = None,
            prompts: Sequence[BasePrompt] | None = None,
            resources: Sequence[ResourceInfo] | None = None,
        )
        

        Initialize provider with static resources.

        Parameters:

        | Name | Type | Description | Default |
        |------|------|-------------|---------|
        | `name` | `str` | Name of the provider | `'static'` |
        | `tools` | `Sequence[Tool] \| None` | Optional list of tools to serve | `None` |
        | `prompts` | `Sequence[BasePrompt] \| None` | Optional list of prompts to serve | `None` |
        | `resources` | `Sequence[ResourceInfo] \| None` | Optional list of resources to serve | `None` |

        Source code in src/llmling_agent/resource_providers/static.py, lines 26-44
        def __init__(
            self,
            name: str = "static",
            tools: Sequence[Tool] | None = None,
            prompts: Sequence[BasePrompt] | None = None,
            resources: Sequence[ResourceInfo] | None = None,
        ):
            """Initialize provider with static resources.
        
            Args:
                name: Name of the provider
                tools: Optional list of tools to serve
                prompts: Optional list of prompts to serve
                resources: Optional list of resources to serve
            """
            super().__init__(name=name)
            self._tools = list(tools) if tools else []
            self._prompts = list(prompts) if prompts else []
            self._resources = list(resources) if resources else []
        

        add_prompt

        add_prompt(prompt: BasePrompt) -> None
        

        Add a prompt to this provider.

        Parameters:

        | Name | Type | Description | Default |
        |------|------|-------------|---------|
        | `prompt` | `BasePrompt` | Prompt to add | *required* |

        Source code in src/llmling_agent/resource_providers/static.py, lines 85-91
        def add_prompt(self, prompt: BasePrompt) -> None:
            """Add a prompt to this provider.
        
            Args:
                prompt: Prompt to add
            """
            self._prompts.append(prompt)
        

        add_resource

        add_resource(resource: ResourceInfo) -> None
        

        Add a resource to this provider.

        Parameters:

        | Name | Type | Description | Default |
        |------|------|-------------|---------|
        | `resource` | `ResourceInfo` | Resource to add | *required* |

        Source code in src/llmling_agent/resource_providers/static.py, lines 108-114
        def add_resource(self, resource: ResourceInfo) -> None:
            """Add a resource to this provider.
        
            Args:
                resource: Resource to add
            """
            self._resources.append(resource)
        

        add_tool

        add_tool(tool: Tool) -> None
        

        Add a tool to this provider.

        Parameters:

        | Name | Type | Description | Default |
        |------|------|-------------|---------|
        | `tool` | `Tool` | Tool to add | *required* |

        Source code in src/llmling_agent/resource_providers/static.py, lines 62-68
        def add_tool(self, tool: Tool) -> None:
            """Add a tool to this provider.
        
            Args:
                tool: Tool to add
            """
            self._tools.append(tool)
        

        get_prompts async

        get_prompts() -> list[BasePrompt]
        

        Get pre-configured prompts.

        Source code in src/llmling_agent/resource_providers/static.py, lines 54-56
        async def get_prompts(self) -> list[BasePrompt]:
            """Get pre-configured prompts."""
            return self._prompts
        

        get_resources async

        get_resources() -> list[ResourceInfo]
        

        Get pre-configured resources.

        Source code in src/llmling_agent/resource_providers/static.py, lines 58-60
        async def get_resources(self) -> list[ResourceInfo]:
            """Get pre-configured resources."""
            return self._resources
        

        get_tool async

        get_tool(name: str) -> Tool | None
        

        Get tool by name.

        Source code in src/llmling_agent/resource_providers/static.py, lines 50-52
        async def get_tool(self, name: str) -> Tool | None:
            """Get tool by name."""
            return next((tool for tool in self._tools if tool.name == name), None)
        

        get_tools async

        get_tools() -> list[Tool]
        

        Get pre-configured tools.

        Source code in src/llmling_agent/resource_providers/static.py, lines 46-48
        async def get_tools(self) -> list[Tool]:
            """Get pre-configured tools."""
            return self._tools
        

        remove_prompt

        remove_prompt(name: str) -> bool
        

        Remove a prompt by name.

        Parameters:

        | Name | Type | Description | Default |
        |------|------|-------------|---------|
        | `name` | `str` | Name of prompt to remove | *required* |

        Returns:

        | Type | Description |
        |------|-------------|
        | `bool` | True if prompt was found and removed, False otherwise |

        Source code in src/llmling_agent/resource_providers/static.py, lines 93-106
        def remove_prompt(self, name: str) -> bool:
            """Remove a prompt by name.
        
            Args:
                name: Name of prompt to remove
        
            Returns:
                True if prompt was found and removed, False otherwise
            """
            for i, prompt in enumerate(self._prompts):
                if prompt.name == name:
                    self._prompts.pop(i)
                    return True
            return False
        

        remove_resource

        remove_resource(name: str) -> bool
        

        Remove a resource by name.

        Parameters:

        | Name | Type | Description | Default |
        |------|------|-------------|---------|
        | `name` | `str` | Name of resource to remove | *required* |

        Returns:

        | Type | Description |
        |------|-------------|
        | `bool` | True if resource was found and removed, False otherwise |

        Source code in src/llmling_agent/resource_providers/static.py, lines 116-129
        def remove_resource(self, name: str) -> bool:
            """Remove a resource by name.
        
            Args:
                name: Name of resource to remove
        
            Returns:
                True if resource was found and removed, False otherwise
            """
            for i, resource in enumerate(self._resources):
                if resource.name == name:
                    self._resources.pop(i)
                    return True
            return False
        

        remove_tool

        remove_tool(name: str) -> bool
        

        Remove a tool by name.

        Parameters:

        | Name | Type | Description | Default |
        |------|------|-------------|---------|
        | `name` | `str` | Name of tool to remove | *required* |

        Returns:

        | Type | Description |
        |------|-------------|
        | `bool` | True if tool was found and removed, False otherwise |

        Source code in src/llmling_agent/resource_providers/static.py, lines 70-83
        def remove_tool(self, name: str) -> bool:
            """Remove a tool by name.
        
            Args:
                name: Name of tool to remove
        
            Returns:
                True if tool was found and removed, False otherwise
            """
            for i, tool in enumerate(self._tools):
                if tool.name == name:
                    self._tools.pop(i)
                    return True
            return False
        

        tool

        tool(func: Callable[..., Any]) -> Callable[..., Any]
        
        tool(
            *,
            name: str | None = None,
            description: str | None = None,
            enabled: bool = True,
            requires_confirmation: bool = False,
            metadata: dict[str, str] | None = None,
            **kwargs: Any
        ) -> Callable[[Callable[..., Any]], Callable[..., Any]]
        
        tool(
            func: Callable[..., Any] | None = None,
            *,
            name: str | None = None,
            description: str | None = None,
            enabled: bool = True,
            requires_confirmation: bool = False,
            metadata: dict[str, str] | None = None,
            **kwargs: Any
        ) -> Callable[..., Any] | Callable[[Callable[..., Any]], Callable[..., Any]]
        

        Decorator to register a function as a tool.

        Can be used with or without parameters:

        # Without parameters
        @provider.tool
        def my_function(x: int) -> str:
            return str(x)
        
        # With parameters
        @provider.tool(name="custom_name", description="Custom description")
        def another_function(y: str) -> str:
            return y.upper()
        

        Parameters:

        | Name | Type | Description | Default |
        |------|------|-------------|---------|
        | `func` | `Callable[..., Any] \| None` | Function to register (when used without parentheses) | `None` |
        | `name` | `str \| None` | Override for tool name | `None` |
        | `description` | `str \| None` | Override for tool description | `None` |
        | `enabled` | `bool` | Whether tool is initially enabled | `True` |
        | `requires_confirmation` | `bool` | Whether execution needs confirmation | `False` |
        | `metadata` | `dict[str, str] \| None` | Additional tool metadata | `None` |
        | `**kwargs` | `Any` | Additional arguments passed to Tool.from_callable | `{}` |

        Source code in src/llmling_agent/resource_providers/static.py, lines 146-201
        def tool(
            self,
            func: Callable[..., Any] | None = None,
            *,
            name: str | None = None,
            description: str | None = None,
            enabled: bool = True,
            requires_confirmation: bool = False,
            metadata: dict[str, str] | None = None,
            **kwargs: Any,
        ) -> Callable[..., Any] | Callable[[Callable[..., Any]], Callable[..., Any]]:
            """Decorator to register a function as a tool.
        
            Can be used with or without parameters:
        
            ```python
            # Without parameters
            @provider.tool
            def my_function(x: int) -> str:
                return str(x)
        
            # With parameters
            @provider.tool(name="custom_name", description="Custom description")
            def another_function(y: str) -> str:
                return y.upper()
            ```
        
            Args:
                func: Function to register (when used without parentheses)
                name: Override for tool name
                description: Override for tool description
                enabled: Whether tool is initially enabled
                requires_confirmation: Whether execution needs confirmation
                metadata: Additional tool metadata
                **kwargs: Additional arguments passed to Tool.from_callable
            """
            from llmling_agent.tools.base import Tool
        
            def decorator(f: Callable[..., Any]) -> Callable[..., Any]:
                tool = Tool.from_callable(
                    f,
                    name_override=name,
                    description_override=description,
                    enabled=enabled,
                    requires_confirmation=requires_confirmation,
                    metadata=metadata or {},
                    **kwargs,
                )
                self.add_tool(tool)
                return f
        
            if func is None:
                # Called with arguments: @provider.tool(...)
                return decorator
            # Called without arguments: @provider.tool
            return decorator(func)