Tool

Base classes

| Name | Children | Inherits |
|------|----------|----------|
| `typing.Generic` (abstract base class for generic types) | | |

⋔ Inheritance diagram

graph TD
  tool["base.Tool"]
  generic["typing.Generic"]
  object["builtins.object"]
  generic --> tool
  object --> generic

🛈 DocStrings

Information about a registered tool.

Source code in src/llmling_agent/tools/base.py
@dataclass
class Tool[TOutputType = Any]:
    """Information about a registered tool."""

    callable: Callable[..., TOutputType]
    """The actual tool implementation"""

    name: str
    """The name of the tool."""

    description: str = ""
    """The description of the tool."""

    schema_override: schemez.OpenAIFunctionDefinition | None = None
    """Schema override. If not set, the schema is inferred from the callable."""

    hints: ToolHints | None = None
    """Hints for the tool."""

    import_path: str | None = None
    """The import path for the tool."""

    enabled: bool = True
    """Whether the tool is currently enabled"""

    source: ToolSource = "dynamic"
    """Where the tool came from."""

    requires_confirmation: bool = False
    """Whether tool execution needs explicit confirmation"""

    agent_name: str | None = None
    """The agent name as an identifier for agent-as-a-tool."""

    metadata: dict[str, str] = field(default_factory=dict)
    """Additional tool metadata"""

    category: ToolKind | None = None
    """The category of the tool."""

    __repr__ = dataclasses_no_defaults_repr

    def to_pydantic_ai(self) -> PydanticAiTool:
        """Convert tool to Pydantic AI tool."""
        metadata = {
            **self.metadata,
            "agent_name": self.agent_name,
            "category": self.category,
        }
        return PydanticAiTool(
            function=self.callable,
            name=self.name,
            # takes_ctx=self.takes_ctx,
            # max_retries=self.max_retries,
            description=self.description,
            requires_approval=self.requires_confirmation,
            metadata=metadata,
        )

    @property
    def schema_obj(self) -> FunctionSchema:
        """Get the OpenAI function schema for the tool."""
        return schemez.create_schema(
            self.callable,
            name_override=self.name,
            description_override=self.description,
        )

    @property
    def schema(self) -> schemez.OpenAIFunctionTool:
        """Get the OpenAI function schema for the tool."""
        schema = self.schema_obj.model_dump_openai()
        if self.schema_override:
            schema["function"] = self.schema_override
        return schema

    def matches_filter(self, state: ToolState) -> bool:
        """Check if tool matches state filter."""
        match state:
            case "all":
                return True
            case "enabled":
                return self.enabled
            case "disabled":
                return not self.enabled

    @property
    def parameters(self) -> list[ToolParameter]:
        """Get information about tool parameters."""
        schema = self.schema["function"]
        properties: dict[str, Property] = schema.get("properties", {})  # type: ignore
        required: list[str] = schema.get("required", [])  # type: ignore

        return [
            ToolParameter(
                name=name,
                required=name in required,
                type_info=details.get("type"),
                description=details.get("description"),
            )
            for name, details in properties.items()
        ]

    def format_info(self, indent: str = "  ") -> str:
        """Format complete tool information."""
        lines = [f"{indent}{self.name}"]
        if self.description:
            lines.append(f"{indent}  {self.description}")
        if self.parameters:
            lines.append(f"{indent}  Parameters:")
            lines.extend(f"{indent}    {param}" for param in self.parameters)
        if self.metadata:
            lines.append(f"{indent}  Metadata:")
            lines.extend(f"{indent}    {k}: {v}" for k, v in self.metadata.items())
        return "\n".join(lines)

    @logfire.instrument("Executing tool {self.name} with args={args}, kwargs={kwargs}")
    async def execute(self, *args: Any, **kwargs: Any) -> Any:
        """Execute tool, handling both sync and async cases."""
        return await execute(self.callable, *args, **kwargs, use_thread=True)

    @classmethod
    def from_code(
        cls,
        code: str,
        name: str | None = None,
        description: str | None = None,
    ) -> Tool[Any]:
        """Create a tool from a code string."""
        namespace: dict[str, Any] = {}
        exec(code, namespace)
        func = next((v for v in namespace.values() if callable(v)), None)
        if not func:
            msg = "No callable found in provided code"
            raise ValueError(msg)
        return cls.from_callable(
            func,  # pyright: ignore[reportArgumentType]
            name_override=name,
            description_override=description,
        )

    @classmethod
    def from_callable(
        cls,
        fn: Callable[..., TOutputType | Awaitable[TOutputType]] | str,
        *,
        name_override: str | None = None,
        description_override: str | None = None,
        schema_override: schemez.OpenAIFunctionDefinition | None = None,
        hints: ToolHints | None = None,
        **kwargs: Any,
    ) -> Tool[TOutputType]:
        if isinstance(fn, str):
            import_path = fn
            from llmling_agent.utils import importing

            callable_obj = importing.import_callable(fn)
            name = getattr(callable_obj, "__name__", "unknown")
            import_path = fn
        else:
            callable_obj = fn
            module = fn.__module__
            if hasattr(fn, "__qualname__"):  # Regular function
                name = fn.__name__
                import_path = f"{module}.{fn.__qualname__}"
            else:  # Instance with __call__ method
                name = fn.__class__.__name__
                import_path = f"{module}.{fn.__class__.__qualname__}"

        return cls(
            callable=callable_obj,  # pyright: ignore[reportArgumentType]
            name=name_override or name,
            description=description_override or inspect.getdoc(callable_obj) or "",
            import_path=import_path,
            schema_override=schema_override,
            hints=hints,
            **kwargs,
        )

    @classmethod
    def from_crewai_tool(
        cls,
        tool: Any,
        *,
        name_override: str | None = None,
        description_override: str | None = None,
        schema_override: schemez.OpenAIFunctionDefinition | None = None,
        **kwargs: Any,
    ) -> Tool[Any]:
        """Allows importing crewai tools."""
        # vaidate_import("crewai_tools", "crewai")
        try:
            from crewai.tools import (  # type: ignore[import-not-found]
                BaseTool as CrewAiBaseTool,
            )
        except ImportError as e:
            msg = "crewai package not found. Please install it with 'pip install crewai'"
            raise ImportError(msg) from e

        if not isinstance(tool, CrewAiBaseTool):
            msg = f"Expected CrewAI BaseTool, got {type(tool)}"
            raise TypeError(msg)

        return cls.from_callable(
            tool._run,
            name_override=name_override or tool.__class__.__name__.removesuffix("Tool"),
            description_override=description_override or tool.description,
            schema_override=schema_override,
            **kwargs,
        )

    @classmethod
    def from_langchain_tool(
        cls,
        tool: Any,
        *,
        name_override: str | None = None,
        description_override: str | None = None,
        schema_override: schemez.OpenAIFunctionDefinition | None = None,
        **kwargs: Any,
    ) -> Tool[Any]:
        """Create a tool from a LangChain tool."""
        # vaidate_import("langchain_core", "langchain")
        try:
            from langchain_core.tools import (  # type: ignore[import-not-found]
                BaseTool as LangChainBaseTool,
            )
        except ImportError as e:
            msg = "langchain-core package not found."
            raise ImportError(msg) from e

        if not isinstance(tool, LangChainBaseTool):
            msg = f"Expected LangChain BaseTool, got {type(tool)}"
            raise TypeError(msg)

        return cls.from_callable(
            tool.invoke,
            name_override=name_override or tool.name,
            description_override=description_override or tool.description,
            schema_override=schema_override,
            **kwargs,
        )

    @classmethod
    def from_autogen_tool(
        cls,
        tool: Any,
        *,
        name_override: str | None = None,
        description_override: str | None = None,
        schema_override: schemez.OpenAIFunctionDefinition | None = None,
        **kwargs: Any,
    ) -> Tool[Any]:
        """Create a tool from a AutoGen tool."""
        # vaidate_import("autogen_core", "autogen")
        try:
            from autogen_core import CancellationToken  # type: ignore[import-not-found]
            from autogen_core.tools import BaseTool  # type: ignore[import-not-found]
        except ImportError as e:
            msg = "autogent_core package not found."
            raise ImportError(msg) from e

        if not isinstance(tool, BaseTool):
            msg = f"Expected AutoGent BaseTool, got {type(tool)}"
            raise TypeError(msg)
        token = CancellationToken()

        input_model = tool.__class__.__orig_bases__[0].__args__[0]

        name = name_override or tool.name or tool.__class__.__name__.removesuffix("Tool")
        description = (
            description_override or tool.description or inspect.getdoc(tool.__class__) or ""
        )

        async def wrapper(**kwargs: Any) -> Any:
            # Convert kwargs to the expected input model
            model = input_model(**kwargs)
            return await tool.run(model, cancellation_token=token)

        return cls.from_callable(
            wrapper,
            name_override=name,
            description_override=description,
            schema_override=schema_override,
            **kwargs,
        )

    def to_mcp_tool(self) -> MCPTool:
        """Convert internal Tool to MCP Tool."""
        schema = self.schema
        from mcp.types import Tool as MCPTool, ToolAnnotations

        return MCPTool(
            name=schema["function"]["name"],
            description=schema["function"]["description"],
            inputSchema=schema["function"]["parameters"],  # pyright: ignore
            annotations=ToolAnnotations(
                title=self.name,
                readOnlyHint=self.hints.read_only if self.hints else None,
                destructiveHint=self.hints.destructive if self.hints else None,
                idempotentHint=self.hints.idempotent if self.hints else None,
                openWorldHint=self.hints.open_world if self.hints else None,
            ),
        )
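
For orientation, here is a minimal usage sketch based on the signatures above. It assumes Tool is importable from llmling_agent.tools.base (the module path in the source listing) and that the schema is inferred from the callable's type hints and docstring via schemez; treat it as an illustration, not canonical API documentation.

from llmling_agent.tools.base import Tool

def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b

# Wrap a plain function; name and description default to the function's
# __name__ and docstring unless overridden.
tool = Tool.from_callable(add, name_override="add_numbers")
print(tool.name)         # "add_numbers"
print(tool.description)  # "Add two integers."
print(tool.import_path)  # module-qualified path of `add`
print(tool.schema)       # OpenAI-style function tool schema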

agent_name class-attribute instance-attribute

agent_name: str | None = None

The agent name as an identifier for agent-as-a-tool.

callable instance-attribute

callable: Callable[..., TOutputType]

The actual tool implementation

category class-attribute instance-attribute

category: ToolKind | None = None

The category of the tool.

description class-attribute instance-attribute

description: str = ''

The description of the tool.

enabled class-attribute instance-attribute

enabled: bool = True

Whether the tool is currently enabled

hints class-attribute instance-attribute

hints: ToolHints | None = None

Hints for the tool.

import_path class-attribute instance-attribute

import_path: str | None = None

The import path for the tool.

metadata class-attribute instance-attribute

metadata: dict[str, str] = field(default_factory=dict)

Additional tool metadata

name instance-attribute

name: str

The name of the tool.

parameters property

parameters: list[ToolParameter]

Get information about tool parameters.

requires_confirmation class-attribute instance-attribute

requires_confirmation: bool = False

Whether tool execution needs explicit confirmation

schema property

schema: OpenAIFunctionTool

Get the OpenAI function schema for the tool.

schema_obj property

schema_obj: FunctionSchema

Get the OpenAI function schema for the tool.

schema_override class-attribute instance-attribute

schema_override: OpenAIFunctionDefinition | None = None

Schema override. If not set, the schema is inferred from the callable.

source class-attribute instance-attribute

source: ToolSource = 'dynamic'

Where the tool came from.

execute async

execute(*args: Any, **kwargs: Any) -> Any

Execute tool, handling both sync and async cases.

Source code in src/llmling_agent/tools/base.py
@logfire.instrument("Executing tool {self.name} with args={args}, kwargs={kwargs}")
async def execute(self, *args: Any, **kwargs: Any) -> Any:
    """Execute tool, handling both sync and async cases."""
    return await execute(self.callable, *args, **kwargs, use_thread=True)
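
Since execute wraps both synchronous and asynchronous callables (running sync ones in a thread), it is always awaited the same way. A small sketch, assuming Tool is importable from llmling_agent.tools.base:

import asyncio
from llmling_agent.tools.base import Tool

def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b

async def main() -> None:
    tool = Tool.from_callable(add)
    result = await tool.execute(1, 2)  # sync callable is run in a thread
    print(result)                      # 3

asyncio.run(main())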

format_info

format_info(indent: str = '  ') -> str

Format complete tool information.

Source code in src/llmling_agent/tools/base.py
def format_info(self, indent: str = "  ") -> str:
    """Format complete tool information."""
    lines = [f"{indent}{self.name}"]
    if self.description:
        lines.append(f"{indent}  {self.description}")
    if self.parameters:
        lines.append(f"{indent}  Parameters:")
        lines.extend(f"{indent}    {param}" for param in self.parameters)
    if self.metadata:
        lines.append(f"{indent}  Metadata:")
        lines.extend(f"{indent}    {k}: {v}" for k, v in self.metadata.items())
    return "\n".join(lines)

from_autogen_tool classmethod

from_autogen_tool(
    tool: Any,
    *,
    name_override: str | None = None,
    description_override: str | None = None,
    schema_override: OpenAIFunctionDefinition | None = None,
    **kwargs: Any
) -> Tool[Any]

Create a tool from an AutoGen tool.

Source code in src/llmling_agent/tools/base.py
@classmethod
def from_autogen_tool(
    cls,
    tool: Any,
    *,
    name_override: str | None = None,
    description_override: str | None = None,
    schema_override: schemez.OpenAIFunctionDefinition | None = None,
    **kwargs: Any,
) -> Tool[Any]:
    """Create a tool from a AutoGen tool."""
    # vaidate_import("autogen_core", "autogen")
    try:
        from autogen_core import CancellationToken  # type: ignore[import-not-found]
        from autogen_core.tools import BaseTool  # type: ignore[import-not-found]
    except ImportError as e:
        msg = "autogent_core package not found."
        raise ImportError(msg) from e

    if not isinstance(tool, BaseTool):
        msg = f"Expected AutoGent BaseTool, got {type(tool)}"
        raise TypeError(msg)
    token = CancellationToken()

    input_model = tool.__class__.__orig_bases__[0].__args__[0]

    name = name_override or tool.name or tool.__class__.__name__.removesuffix("Tool")
    description = (
        description_override or tool.description or inspect.getdoc(tool.__class__) or ""
    )

    async def wrapper(**kwargs: Any) -> Any:
        # Convert kwargs to the expected input model
        model = input_model(**kwargs)
        return await tool.run(model, cancellation_token=token)

    return cls.from_callable(
        wrapper,
        name_override=name,
        description_override=description,
        schema_override=schema_override,
        **kwargs,
    )
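
A hedged sketch: `my_autogen_tool` below is a hypothetical placeholder for an existing autogen_core.tools.BaseTool instance created elsewhere (it is not defined here), so this only shows the call shape:

from llmling_agent.tools.base import Tool

# `my_autogen_tool` is assumed to be an autogen_core BaseTool instance;
# the autogen-core package must be installed.
tool = Tool.from_autogen_tool(my_autogen_tool)
print(tool.name)  # the AutoGen tool's name, or its class name with "Tool" stripped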

from_code classmethod

from_code(
    code: str, name: str | None = None, description: str | None = None
) -> Tool[Any]

Create a tool from a code string.

Source code in src/llmling_agent/tools/base.py
@classmethod
def from_code(
    cls,
    code: str,
    name: str | None = None,
    description: str | None = None,
) -> Tool[Any]:
    """Create a tool from a code string."""
    namespace: dict[str, Any] = {}
    exec(code, namespace)
    func = next((v for v in namespace.values() if callable(v)), None)
    if not func:
        msg = "No callable found in provided code"
        raise ValueError(msg)
    return cls.from_callable(
        func,  # pyright: ignore[reportArgumentType]
        name_override=name,
        description_override=description,
    )
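
A minimal sketch, assuming Tool is importable from llmling_agent.tools.base. Note that from_code uses exec, so it should only be given trusted code:

from llmling_agent.tools.base import Tool

code = '''
def multiply(x: int, y: int) -> int:
    """Multiply two integers."""
    return x * y
'''

tool = Tool.from_code(code)  # picks up the first callable defined by the snippet
print(tool.name)             # "multiply"
print(tool.description)      # "Multiply two integers."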

from_crewai_tool classmethod

from_crewai_tool(
    tool: Any,
    *,
    name_override: str | None = None,
    description_override: str | None = None,
    schema_override: OpenAIFunctionDefinition | None = None,
    **kwargs: Any
) -> Tool[Any]

Allows importing crewai tools.

Source code in src/llmling_agent/tools/base.py
@classmethod
def from_crewai_tool(
    cls,
    tool: Any,
    *,
    name_override: str | None = None,
    description_override: str | None = None,
    schema_override: schemez.OpenAIFunctionDefinition | None = None,
    **kwargs: Any,
) -> Tool[Any]:
    """Allows importing crewai tools."""
    # vaidate_import("crewai_tools", "crewai")
    try:
        from crewai.tools import (  # type: ignore[import-not-found]
            BaseTool as CrewAiBaseTool,
        )
    except ImportError as e:
        msg = "crewai package not found. Please install it with 'pip install crewai'"
        raise ImportError(msg) from e

    if not isinstance(tool, CrewAiBaseTool):
        msg = f"Expected CrewAI BaseTool, got {type(tool)}"
        raise TypeError(msg)

    return cls.from_callable(
        tool._run,
        name_override=name_override or tool.__class__.__name__.removesuffix("Tool"),
        description_override=description_override or tool.description,
        schema_override=schema_override,
        **kwargs,
    )
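
A hedged sketch: `my_crewai_tool` is a hypothetical placeholder for an existing instance of a crewai BaseTool subclass (not defined here), so this only illustrates the call shape:

from llmling_agent.tools.base import Tool

# `my_crewai_tool` is assumed to be a crewai.tools.BaseTool instance created
# elsewhere; the crewai package must be installed.
tool = Tool.from_crewai_tool(my_crewai_tool)  # wraps the tool's _run method
print(tool.name)  # class name with a trailing "Tool" stripped, unless overridden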

from_langchain_tool classmethod

from_langchain_tool(
    tool: Any,
    *,
    name_override: str | None = None,
    description_override: str | None = None,
    schema_override: OpenAIFunctionDefinition | None = None,
    **kwargs: Any
) -> Tool[Any]

Create a tool from a LangChain tool.

Source code in src/llmling_agent/tools/base.py
@classmethod
def from_langchain_tool(
    cls,
    tool: Any,
    *,
    name_override: str | None = None,
    description_override: str | None = None,
    schema_override: schemez.OpenAIFunctionDefinition | None = None,
    **kwargs: Any,
) -> Tool[Any]:
    """Create a tool from a LangChain tool."""
    # vaidate_import("langchain_core", "langchain")
    try:
        from langchain_core.tools import (  # type: ignore[import-not-found]
            BaseTool as LangChainBaseTool,
        )
    except ImportError as e:
        msg = "langchain-core package not found."
        raise ImportError(msg) from e

    if not isinstance(tool, LangChainBaseTool):
        msg = f"Expected LangChain BaseTool, got {type(tool)}"
        raise TypeError(msg)

    return cls.from_callable(
        tool.invoke,
        name_override=name_override or tool.name,
        description_override=description_override or tool.description,
        schema_override=schema_override,
        **kwargs,
    )
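
A sketch assuming langchain-core is installed; the @tool decorator from langchain_core.tools is used here only to produce a BaseTool instance to convert:

from langchain_core.tools import tool as lc_tool

from llmling_agent.tools.base import Tool

@lc_tool
def search(query: str) -> str:
    """Search for a query string."""
    return f"results for {query}"

wrapped = Tool.from_langchain_tool(search)  # wraps the LangChain tool's invoke
print(wrapped.name)                         # "search"
print(wrapped.description)                  # "Search for a query string."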

matches_filter

matches_filter(state: ToolState) -> bool

Check if tool matches state filter.

Source code in src/llmling_agent/tools/base.py
def matches_filter(self, state: ToolState) -> bool:
    """Check if tool matches state filter."""
    match state:
        case "all":
            return True
        case "enabled":
            return self.enabled
        case "disabled":
            return not self.enabled
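
A small sketch, assuming Tool is importable from llmling_agent.tools.base; extra keyword arguments to from_callable are forwarded to the dataclass, so enabled can be set there:

from llmling_agent.tools.base import Tool

def ping() -> str:
    """Return pong."""
    return "pong"

tool = Tool.from_callable(ping, enabled=False)
print(tool.matches_filter("all"))       # True
print(tool.matches_filter("enabled"))   # False
print(tool.matches_filter("disabled"))  # True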

to_mcp_tool

to_mcp_tool() -> MCPTool

Convert internal Tool to MCP Tool.

Source code in src/llmling_agent/tools/base.py
def to_mcp_tool(self) -> MCPTool:
    """Convert internal Tool to MCP Tool."""
    schema = self.schema
    from mcp.types import Tool as MCPTool, ToolAnnotations

    return MCPTool(
        name=schema["function"]["name"],
        description=schema["function"]["description"],
        inputSchema=schema["function"]["parameters"],  # pyright: ignore
        annotations=ToolAnnotations(
            title=self.name,
            readOnlyHint=self.hints.read_only if self.hints else None,
            destructiveHint=self.hints.destructive if self.hints else None,
            idempotentHint=self.hints.idempotent if self.hints else None,
            openWorldHint=self.hints.open_world if self.hints else None,
        ),
    )
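
A sketch assuming the mcp package is installed (it is imported inside the method) and Tool is importable from llmling_agent.tools.base:

from llmling_agent.tools.base import Tool

def read_file(path: str) -> str:
    """Read a text file and return its contents."""
    with open(path, encoding="utf-8") as f:
        return f.read()

mcp_tool = Tool.from_callable(read_file).to_mcp_tool()
print(mcp_tool.name)         # "read_file"
print(mcp_tool.inputSchema)  # JSON schema for the `path` parameter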

to_pydantic_ai

to_pydantic_ai() -> PydanticAiTool

Convert tool to Pydantic AI tool.

Source code in src/llmling_agent/tools/base.py
def to_pydantic_ai(self) -> PydanticAiTool:
    """Convert tool to Pydantic AI tool."""
    metadata = {
        **self.metadata,
        "agent_name": self.agent_name,
        "category": self.category,
    }
    return PydanticAiTool(
        function=self.callable,
        name=self.name,
        # takes_ctx=self.takes_ctx,
        # max_retries=self.max_retries,
        description=self.description,
        requires_approval=self.requires_confirmation,
        metadata=metadata,
    )
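
A sketch assuming pydantic-ai is installed and Tool is importable from llmling_agent.tools.base; the returned object is a pydantic_ai Tool that can be registered with an agent:

import random

from llmling_agent.tools.base import Tool

def roll_die() -> int:
    """Roll a six-sided die."""
    return random.randint(1, 6)

pai_tool = Tool.from_callable(roll_die, requires_confirmation=True).to_pydantic_ai()
print(pai_tool.name)  # "roll_die"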
