
base

Class info

Classes

Name           Module                      Description
ToolContext    llmling_agent.tools.base    Context for tool execution confirmation.
ToolInfo       llmling_agent.tools.base    Information about a registered tool.
ToolParameter  llmling_agent.tools.base    Information about a tool parameter.

        🛈 DocStrings

        Base tool classes.

        ToolContext dataclass

        Context for tool execution confirmation.

        Source code in src/llmling_agent/tools/base.py
        @dataclass(frozen=True)
        class ToolContext:
            """Context for tool execution confirmation."""
        
            name: str
            """Name of the tool being executed"""
        
            args: dict[str, Any]
            """Arguments being passed to the tool"""
        
            schema: py2openai.OpenAIFunctionTool
            """Complete OpenAI function schema"""
        
            runtime_ctx: AgentContext
            """Runtime context from agent"""
        
            @property
            def description(self) -> str | None:
                """Get tool description from schema."""
                return self.schema["function"].get("description")
        
            @property
            def parameters(self) -> ToolParameters:
                """Get parameter definitions from schema."""
                return self.schema["function"].get("parameters", {})  # type: ignore
        
            def __str__(self) -> str:
                """Format tool context for logging/display."""
                return (
                    f"Tool: {self.name}\n"
                    f"Arguments: {self.args}\n"
                    f"Description: {self.description or 'N/A'}"
                )
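
        Example (a minimal sketch): the schema below is a hand-written OpenAI function schema, and None stands in for a real AgentContext, which the agent supplies at confirmation time (dataclasses do not enforce the annotation at runtime).

        from llmling_agent.tools.base import ToolContext

        schema = {  # hand-written OpenAI function schema, for illustration only
            "type": "function",
            "function": {
                "name": "greet",
                "description": "Greet a user by name.",
                "parameters": {
                    "type": "object",
                    "properties": {"user": {"type": "string"}},
                    "required": ["user"],
                },
            },
        }

        ctx = ToolContext(
            name="greet",
            args={"user": "Ada"},
            schema=schema,     # type: ignore[arg-type]
            runtime_ctx=None,  # type: ignore[arg-type]  # placeholder for a real AgentContext
        )
        print(ctx)             # Tool: greet / Arguments: {'user': 'Ada'} / Description: Greet a user by name.
        print(ctx.parameters)  # the "parameters" sub-schema shown above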
        

        args instance-attribute

        args: dict[str, Any]
        

        Arguments being passed to the tool

        description property

        description: str | None
        

        Get tool description from schema.

        name instance-attribute

        name: str
        

        Name of the tool being executed

        parameters property

        parameters: ToolParameters
        

        Get parameter definitions from schema.

        runtime_ctx instance-attribute

        runtime_ctx: AgentContext
        

        Runtime context from agent

        schema instance-attribute

        schema: OpenAIFunctionTool
        

        Complete OpenAI function schema

        __str__

        __str__() -> str
        

        Format tool context for logging/display.

        Source code in src/llmling_agent/tools/base.py
        def __str__(self) -> str:
            """Format tool context for logging/display."""
            return (
                f"Tool: {self.name}\n"
                f"Arguments: {self.args}\n"
                f"Description: {self.description or 'N/A'}"
            )
        

        ToolInfo dataclass

        Information about a registered tool.

        Source code in src/llmling_agent/tools/base.py
        @dataclass
        class ToolInfo:
            """Information about a registered tool."""
        
            callable: LLMCallableTool
            """The actual tool implementation"""
        
            enabled: bool = True
            """Whether the tool is currently enabled"""
        
            source: ToolSource = "runtime"
            """Where the tool came from."""
        
            priority: int = 100
            """Priority for tool execution (lower = higher priority)"""
        
            requires_confirmation: bool = False
            """Whether tool execution needs explicit confirmation"""
        
            requires_capability: str | None = None
            """Optional capability required to use this tool"""
        
            agent_name: str | None = None
            """The agent name as an identifier for agent-as-a-tool."""
        
            metadata: dict[str, str] = field(default_factory=dict)
            """Additional tool metadata"""
        
            cache_enabled: bool = False
            """Whether to enable caching for this tool."""
        
            @property
            def schema(self) -> py2openai.OpenAIFunctionTool:
                """Get the OpenAI function schema for the tool."""
                return self.callable.get_schema()
        
            @property
            def name(self) -> str:
                """Get tool name."""
                return self.callable.name
        
            @property
            def description(self) -> str | None:
                """Get tool description."""
                return self.callable.description
        
            def matches_filter(self, state: Literal["all", "enabled", "disabled"]) -> bool:
                """Check if tool matches state filter."""
                match state:
                    case "all":
                        return True
                    case "enabled":
                        return self.enabled
                    case "disabled":
                        return not self.enabled
        
            @property
            def parameters(self) -> list[ToolParameter]:
                """Get information about tool parameters."""
                schema = self.schema["function"]
                properties: dict[str, Property] = schema.get("properties", {})  # type: ignore
                required: list[str] = schema.get("required", [])  # type: ignore
        
                return [
                    ToolParameter(
                        name=name,
                        required=name in required,
                        type_info=details.get("type"),
                        description=details.get("description"),
                    )
                    for name, details in properties.items()
                ]
        
            def format_info(self, indent: str = "  ") -> str:
                """Format complete tool information."""
                lines = [f"{indent}{self.name}"]
                if self.description:
                    lines.append(f"{indent}  {self.description}")
                if self.parameters:
                    lines.append(f"{indent}  Parameters:")
                    lines.extend(f"{indent}    {param}" for param in self.parameters)
                if self.metadata:
                    lines.append(f"{indent}  Metadata:")
                    lines.extend(f"{indent}    {k}: {v}" for k, v in self.metadata.items())
                return "\n".join(lines)
        
            async def execute(self, *args: Any, **kwargs: Any) -> Any:
                """Execute tool, handling both sync and async cases."""
                fn = track_tool(self.name)(self.callable.callable)
                return await execute(fn, *args, **kwargs, use_thread=True)
        
            @classmethod
            def from_code(
                cls,
                code: str,
                name: str | None = None,
                description: str | None = None,
            ) -> Self:
                """Create a tool from a code string."""
                namespace: dict[str, Any] = {}
                exec(code, namespace)
                func = next((v for v in namespace.values() if callable(v)), None)
                if not func:
                    msg = "No callable found in provided code"
                    raise ValueError(msg)
                return cls.from_callable(
                    func, name_override=name, description_override=description
                )
        
            @classmethod
            def from_callable(
                cls,
                fn: Callable[..., Any] | str,
                *,
                name_override: str | None = None,
                description_override: str | None = None,
                schema_override: py2openai.OpenAIFunctionDefinition | None = None,
                **kwargs: Any,
            ) -> Self:
                tool = LLMCallableTool.from_callable(
                    fn,
                    name_override=name_override,
                    description_override=description_override,
                    schema_override=schema_override,
                )
                return cls(tool, **kwargs)
        
            @classmethod
            def from_crewai_tool(
                cls,
                tool: Any,
                *,
                name_override: str | None = None,
                description_override: str | None = None,
                schema_override: py2openai.OpenAIFunctionDefinition | None = None,
                **kwargs: Any,
            ) -> Self:
                """Allows importing CrewAI tools."""
                # validate_import("crewai_tools", "crewai")
                try:
                    from crewai.tools import BaseTool as CrewAiBaseTool
                except ImportError as e:
                    msg = "crewai package not found. Please install it with 'pip install crewai'"
                    raise ImportError(msg) from e
        
                if not isinstance(tool, CrewAiBaseTool):
                    msg = f"Expected CrewAI BaseTool, got {type(tool)}"
                    raise TypeError(msg)
        
                return cls.from_callable(
                    tool._run,
                    name_override=name_override or tool.__class__.__name__.removesuffix("Tool"),
                    description_override=description_override or tool.description,
                    schema_override=schema_override,
                    **kwargs,
                )
        
            @classmethod
            def from_langchain_tool(
                cls,
                tool: Any,
                *,
                name_override: str | None = None,
                description_override: str | None = None,
                schema_override: py2openai.OpenAIFunctionDefinition | None = None,
                **kwargs: Any,
            ) -> Self:
                """Create a tool from a LangChain tool."""
                # validate_import("langchain_core", "langchain")
                try:
                    from langchain_core.tools import BaseTool as LangChainBaseTool
                except ImportError as e:
                    msg = "langchain-core package not found."
                    raise ImportError(msg) from e
        
                if not isinstance(tool, LangChainBaseTool):
                    msg = f"Expected LangChain BaseTool, got {type(tool)}"
                    raise TypeError(msg)
        
                return cls.from_callable(
                    tool.invoke,
                    name_override=name_override or tool.name,
                    description_override=description_override or tool.description,
                    schema_override=schema_override,
                    **kwargs,
                )
        
            @classmethod
            def from_autogen_tool(
                cls,
                tool: Any,
                *,
                name_override: str | None = None,
                description_override: str | None = None,
                schema_override: py2openai.OpenAIFunctionDefinition | None = None,
                **kwargs: Any,
            ) -> Self:
                """Create a tool from an AutoGen tool."""
                # validate_import("autogen_core", "autogen")
                try:
                    from autogen_core import CancellationToken
                    from autogen_core.tools import BaseTool
                except ImportError as e:
                    msg = "autogen_core package not found."
                    raise ImportError(msg) from e

                if not isinstance(tool, BaseTool):
                    msg = f"Expected AutoGen BaseTool, got {type(tool)}"
                    raise TypeError(msg)
                token = CancellationToken()
        
                input_model = tool.__class__.__orig_bases__[0].__args__[0]  # type: ignore
        
                name = name_override or tool.name or tool.__class__.__name__.removesuffix("Tool")
                description = (
                    description_override
                    or tool.description
                    or inspect.getdoc(tool.__class__)
                    or ""
                )
        
                async def wrapper(**kwargs: Any) -> Any:
                    # Convert kwargs to the expected input model
                    model = input_model(**kwargs)
                    return await tool.run(model, cancellation_token=token)
        
                return cls.from_callable(
                    wrapper,  # type: ignore
                    name_override=name,
                    description_override=description,
                    schema_override=schema_override,
                    **kwargs,
                )
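
        Example (a sketch; the add function and the keyword arguments are illustrative): from_callable wraps a plain Python function and forwards extra keyword arguments to the ToolInfo fields.

        from llmling_agent.tools.base import ToolInfo

        def add(a: int, b: int) -> int:
            """Add two integers."""
            return a + b

        info = ToolInfo.from_callable(add, requires_confirmation=True, metadata={"category": "math"})
        print(info.name)                       # "add" (taken from the function)
        print(info.description)                # "Add two integers."
        print(info.matches_filter("enabled"))  # True (enabled defaults to True)
        print(info.format_info())              # name, description and parameter list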
        

        agent_name class-attribute instance-attribute

        agent_name: str | None = None
        

        The agent name as an identifier for agent-as-a-tool.

        cache_enabled class-attribute instance-attribute

        cache_enabled: bool = False
        

        Whether to enable caching for this tool.

        callable instance-attribute

        callable: LLMCallableTool
        

        The actual tool implementation

        description property

        description: str | None
        

        Get tool description.

        enabled class-attribute instance-attribute

        enabled: bool = True
        

        Whether the tool is currently enabled

        metadata class-attribute instance-attribute

        metadata: dict[str, str] = field(default_factory=dict)
        

        Additional tool metadata

        name property

        name: str
        

        Get tool name.

        parameters property

        parameters: list[ToolParameter]
        

        Get information about tool parameters.

        priority class-attribute instance-attribute

        priority: int = 100
        

        Priority for tool execution (lower = higher priority)

        requires_capability class-attribute instance-attribute

        requires_capability: str | None = None
        

        Optional capability required to use this tool

        requires_confirmation class-attribute instance-attribute

        requires_confirmation: bool = False
        

        Whether tool execution needs explicit confirmation

        schema property

        schema: OpenAIFunctionTool
        

        Get the OpenAI function schema for the tool.

        source class-attribute instance-attribute

        source: ToolSource = 'runtime'
        

        Where the tool came from.

        execute async

        execute(*args: Any, **kwargs: Any) -> Any
        

        Execute tool, handling both sync and async cases.

        Source code in src/llmling_agent/tools/base.py
        async def execute(self, *args: Any, **kwargs: Any) -> Any:
            """Execute tool, handling both sync and async cases."""
            fn = track_tool(self.name)(self.callable.callable)
            return await execute(fn, *args, **kwargs, use_thread=True)
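
        Example (a sketch with an illustrative function): execute is always awaited, so synchronous and asynchronous callables are called the same way.

        import asyncio

        from llmling_agent.tools.base import ToolInfo

        def add(a: int, b: int) -> int:
            """Add two integers."""
            return a + b

        info = ToolInfo.from_callable(add)
        print(asyncio.run(info.execute(a=1, b=2)))  # 2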
        

        format_info

        format_info(indent: str = '  ') -> str
        

        Format complete tool information.

        Source code in src/llmling_agent/tools/base.py
        def format_info(self, indent: str = "  ") -> str:
            """Format complete tool information."""
            lines = [f"{indent}{self.name}"]
            if self.description:
                lines.append(f"{indent}  {self.description}")
            if self.parameters:
                lines.append(f"{indent}  Parameters:")
                lines.extend(f"{indent}    {param}" for param in self.parameters)
            if self.metadata:
                lines.append(f"{indent}  Metadata:")
                lines.extend(f"{indent}    {k}: {v}" for k, v in self.metadata.items())
            return "\n".join(lines)
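
        Example (a sketch; the add function is illustrative, and the exact type names come from the generated schema):

        from llmling_agent.tools.base import ToolInfo

        def add(a: int, b: int) -> int:
            """Add two integers."""
            return a + b

        print(ToolInfo.from_callable(add).format_info())
        # Output shape, roughly:
        #   add
        #     Add two integers.
        #     Parameters:
        #       a*: integer
        #       b*: integer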
        

        from_autogen_tool classmethod

        from_autogen_tool(
            tool: Any,
            *,
            name_override: str | None = None,
            description_override: str | None = None,
            schema_override: OpenAIFunctionDefinition | None = None,
            **kwargs: Any,
        ) -> Self
        

        Create a tool from an AutoGen tool.

        Source code in src/llmling_agent/tools/base.py
        @classmethod
        def from_autogen_tool(
            cls,
            tool: Any,
            *,
            name_override: str | None = None,
            description_override: str | None = None,
            schema_override: py2openai.OpenAIFunctionDefinition | None = None,
            **kwargs: Any,
        ) -> Self:
            """Create a tool from an AutoGen tool."""
            # validate_import("autogen_core", "autogen")
            try:
                from autogen_core import CancellationToken
                from autogen_core.tools import BaseTool
            except ImportError as e:
                msg = "autogen_core package not found."
                raise ImportError(msg) from e

            if not isinstance(tool, BaseTool):
                msg = f"Expected AutoGen BaseTool, got {type(tool)}"
                raise TypeError(msg)
            token = CancellationToken()
        
            input_model = tool.__class__.__orig_bases__[0].__args__[0]  # type: ignore
        
            name = name_override or tool.name or tool.__class__.__name__.removesuffix("Tool")
            description = (
                description_override
                or tool.description
                or inspect.getdoc(tool.__class__)
                or ""
            )
        
            async def wrapper(**kwargs: Any) -> Any:
                # Convert kwargs to the expected input model
                model = input_model(**kwargs)
                return await tool.run(model, cancellation_token=token)
        
            return cls.from_callable(
                wrapper,  # type: ignore
                name_override=name,
                description_override=description,
                schema_override=schema_override,
                **kwargs,
            )
        

        from_code classmethod

        from_code(code: str, name: str | None = None, description: str | None = None) -> Self
        

        Create a tool from a code string.

        Source code in src/llmling_agent/tools/base.py
        @classmethod
        def from_code(
            cls,
            code: str,
            name: str | None = None,
            description: str | None = None,
        ) -> Self:
            """Create a tool from a code string."""
            namespace: dict[str, Any] = {}
            exec(code, namespace)
            func = next((v for v in namespace.values() if callable(v)), None)
            if not func:
                msg = "No callable found in provided code"
                raise ValueError(msg)
            return cls.from_callable(
                func, name_override=name, description_override=description
            )
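
        Example (a sketch; the code string is illustrative). from_code runs the string with exec and picks up the first callable it defines, so only trusted input should be passed.

        from llmling_agent.tools.base import ToolInfo

        source = '''
        def shout(text: str) -> str:
            """Upper-case the given text."""
            return text.upper()
        '''

        info = ToolInfo.from_code(source, name="shout", description="Upper-case a string.")
        print(info.name)  # "shout"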
        

        from_crewai_tool classmethod

        from_crewai_tool(
            tool: Any,
            *,
            name_override: str | None = None,
            description_override: str | None = None,
            schema_override: OpenAIFunctionDefinition | None = None,
            **kwargs: Any,
        ) -> Self
        

        Allows importing CrewAI tools.

        Source code in src/llmling_agent/tools/base.py
        @classmethod
        def from_crewai_tool(
            cls,
            tool: Any,
            *,
            name_override: str | None = None,
            description_override: str | None = None,
            schema_override: py2openai.OpenAIFunctionDefinition | None = None,
            **kwargs: Any,
        ) -> Self:
            """Allows importing CrewAI tools."""
            # validate_import("crewai_tools", "crewai")
            try:
                from crewai.tools import BaseTool as CrewAiBaseTool
            except ImportError as e:
                msg = "crewai package not found. Please install it with 'pip install crewai'"
                raise ImportError(msg) from e
        
            if not isinstance(tool, CrewAiBaseTool):
                msg = f"Expected CrewAI BaseTool, got {type(tool)}"
                raise TypeError(msg)
        
            return cls.from_callable(
                tool._run,
                name_override=name_override or tool.__class__.__name__.removesuffix("Tool"),
                description_override=description_override or tool.description,
                schema_override=schema_override,
                **kwargs,
            )
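
        Example (a sketch, assuming crewai is installed; WordCountTool follows the subclass pattern from CrewAI's documentation and is not part of this library):

        from crewai.tools import BaseTool as CrewAiBaseTool
        from pydantic import BaseModel

        from llmling_agent.tools.base import ToolInfo

        class WordCountInput(BaseModel):
            text: str

        class WordCountTool(CrewAiBaseTool):
            name: str = "word_count"
            description: str = "Count the words in a text."
            args_schema: type[BaseModel] = WordCountInput

            def _run(self, text: str) -> int:
                return len(text.split())

        info = ToolInfo.from_crewai_tool(WordCountTool())
        print(info.name)  # "WordCount" (class name with the "Tool" suffix stripped)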
        

        from_langchain_tool classmethod

        from_langchain_tool(
            tool: Any,
            *,
            name_override: str | None = None,
            description_override: str | None = None,
            schema_override: OpenAIFunctionDefinition | None = None,
            **kwargs: Any,
        ) -> Self
        

        Create a tool from a LangChain tool.

        Source code in src/llmling_agent/tools/base.py
        @classmethod
        def from_langchain_tool(
            cls,
            tool: Any,
            *,
            name_override: str | None = None,
            description_override: str | None = None,
            schema_override: py2openai.OpenAIFunctionDefinition | None = None,
            **kwargs: Any,
        ) -> Self:
            """Create a tool from a LangChain tool."""
            # validate_import("langchain_core", "langchain")
            try:
                from langchain_core.tools import BaseTool as LangChainBaseTool
            except ImportError as e:
                msg = "langchain-core package not found."
                raise ImportError(msg) from e
        
            if not isinstance(tool, LangChainBaseTool):
                msg = f"Expected LangChain BaseTool, got {type(tool)}"
                raise TypeError(msg)
        
            return cls.from_callable(
                tool.invoke,
                name_override=name_override or tool.name,
                description_override=description_override or tool.description,
                schema_override=schema_override,
                **kwargs,
            )
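
        Example (a sketch, assuming langchain-core is installed; the decorated function is illustrative):

        from langchain_core.tools import tool

        from llmling_agent.tools.base import ToolInfo

        @tool
        def search_docs(query: str) -> str:
            """Search the documentation for a query."""
            return f"Results for {query!r}"

        info = ToolInfo.from_langchain_tool(search_docs)
        print(info.name)  # "search_docs"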
        

        matches_filter

        matches_filter(state: Literal['all', 'enabled', 'disabled']) -> bool
        

        Check if tool matches state filter.

        Source code in src/llmling_agent/tools/base.py
        def matches_filter(self, state: Literal["all", "enabled", "disabled"]) -> bool:
            """Check if tool matches state filter."""
            match state:
                case "all":
                    return True
                case "enabled":
                    return self.enabled
                case "disabled":
                    return not self.enabled
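
        Example (a sketch with an illustrative function): the filter simply reflects the enabled flag.

        from llmling_agent.tools.base import ToolInfo

        def ping() -> str:
            """Return a liveness marker."""
            return "pong"

        info = ToolInfo.from_callable(ping)
        info.enabled = False
        print(info.matches_filter("all"))       # True
        print(info.matches_filter("enabled"))   # False
        print(info.matches_filter("disabled"))  # True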
        

        ToolParameter dataclass

        Information about a tool parameter.

        Source code in src/llmling_agent/tools/base.py
        @dataclass
        class ToolParameter:
            """Information about a tool parameter."""
        
            name: str
            required: bool
            type_info: str | None = None
            description: str | None = None
        
            def __str__(self) -> str:
                """Format parameter info."""
                req = "*" if self.required else ""
                type_str = f": {self.type_info}" if self.type_info else ""
                desc = f" - {self.description}" if self.description else ""
                return f"{self.name}{req}{type_str}{desc}"
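
        Example (a sketch; the values are illustrative):

        from llmling_agent.tools.base import ToolParameter

        param = ToolParameter(name="query", required=True, type_info="string", description="Search term")
        print(param)  # query*: string - Search term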
        

        __str__

        __str__() -> str
        

        Format parameter info.

        Source code in src/llmling_agent/tools/base.py
        def __str__(self) -> str:
            """Format parameter info."""
            req = "*" if self.required else ""
            type_str = f": {self.type_info}" if self.type_info else ""
            desc = f" - {self.description}" if self.description else ""
            return f"{self.name}{req}{type_str}{desc}"