"""
Tool definition utilities for the Copilot SDK.
Provides a decorator-based API for defining tools with automatic JSON schema
generation from Pydantic models.
"""
from __future__ import annotations
import inspect
import json
from collections.abc import Awaitable, Callable
from dataclasses import dataclass, field
from typing import Any, Literal, TypeVar, get_type_hints, overload
from pydantic import BaseModel
# Outcome categories a tool invocation can report back to the runtime.
ToolResultType = Literal["success", "failure", "rejected", "denied", "timeout"]
@dataclass
class ToolBinaryResult:
    """Binary content returned by a tool, delivered to the LLM alongside text."""

    # Raw payload; presumably base64-encoded text — TODO confirm wire format.
    data: str = ""
    # MIME type of the payload (e.g. "image/png") — assumed; verify against consumers.
    mime_type: str = ""
    # Content category label; semantics not visible here — confirm with the runtime.
    type: str = ""
    # Human-readable description of the content.
    description: str = ""
@dataclass
class ToolResult:
    """Result of a tool invocation."""

    # Text surfaced to the LLM as the tool's output.
    text_result_for_llm: str = ""
    # Outcome category; one of the ToolResultType literals.
    result_type: ToolResultType = "success"
    # Detailed error information kept for debugging; deliberately NOT shown
    # to the LLM (see the exception handling in define_tool).
    error: str | None = None
    # Optional binary payloads accompanying the text result.
    binary_results_for_llm: list[ToolBinaryResult] | None = None
    # Free-form log text for the session — purpose assumed; confirm with callers.
    session_log: str | None = None
    # Arbitrary telemetry attached to the invocation.
    tool_telemetry: dict[str, Any] | None = None
    # Internal flag: True when this result was synthesized from a handler
    # exception rather than returned by the handler; excluded from repr.
    _from_exception: bool = field(default=False, repr=False)
@dataclass
class ToolInvocation:
    """Context passed to a tool handler when invoked."""

    # Identifier of the session the call belongs to.
    session_id: str = ""
    # Unique identifier for this specific tool call.
    tool_call_id: str = ""
    # Name of the tool being invoked.
    tool_name: str = ""
    # Raw arguments for the call; validated against the tool's Pydantic
    # params model (when one is declared) before reaching the handler.
    arguments: Any = None
# A handler receives the invocation context and produces a ToolResult,
# either synchronously or as an awaitable.
ToolHandler = Callable[[ToolInvocation], ToolResult | Awaitable[ToolResult]]
@dataclass
class Tool:
    """A registered tool: name, LLM-facing description, schema, and handler."""

    # Unique tool name presented to the LLM.
    name: str
    # Description of what the tool does, shown to the LLM.
    description: str
    # Normalized async-compatible handler wrapping the user function.
    handler: ToolHandler
    # JSON schema for the tool's arguments (None when no schema was derived).
    parameters: dict[str, Any] | None = None
    # Explicit opt-in to shadow a built-in tool with the same name.
    overrides_built_in_tool: bool = False
    # When True, the tool executes without a permission prompt.
    skip_permission: bool = False
# T: the Pydantic params-model type; R: the handler's raw return type.
T = TypeVar("T", bound=BaseModel)
R = TypeVar("R")
# Overload: decorator form — @define_tool(...) returns a decorator that
# converts the decorated function into a Tool.
@overload
def define_tool(
    name: str | None = None,
    *,
    description: str | None = None,
    overrides_built_in_tool: bool = False,
    skip_permission: bool = False,
) -> Callable[[Callable[..., Any]], Tool]: ...
# Overload: direct form — handler and params model supplied explicitly;
# returns the Tool immediately.
@overload
def define_tool(
    name: str,
    *,
    description: str | None = None,
    handler: Callable[[T, ToolInvocation], R],
    params_type: type[T],
    overrides_built_in_tool: bool = False,
    skip_permission: bool = False,
) -> Tool: ...
def define_tool(
    name: str | None = None,
    *,
    description: str | None = None,
    handler: Callable[[Any, ToolInvocation], Any] | None = None,
    params_type: type[BaseModel] | None = None,
    overrides_built_in_tool: bool = False,
    skip_permission: bool = False,
) -> Tool | Callable[[Callable[[Any, ToolInvocation], Any]], Tool]:
    """
    Define a tool with automatic JSON schema generation from Pydantic models.

    Can be used as a decorator (with or without parentheses) or as a function:

    Decorator usage (recommended):

        from pydantic import BaseModel, Field

        class LookupIssueParams(BaseModel):
            id: str = Field(description="Issue identifier")

        @define_tool(description="Fetch issue details")
        def lookup_issue(params: LookupIssueParams) -> str:
            return fetch_issue(params.id).summary

    Function usage:

        tool = define_tool(
            "lookup_issue",
            description="Fetch issue details",
            handler=lambda params, inv: fetch_issue(params.id).summary,
            params_type=LookupIssueParams
        )

    Args:
        name: The tool name (defaults to function name)
        description: Description of what the tool does (shown to the LLM)
        handler: Optional handler function (if not using as decorator)
        params_type: Optional Pydantic model type for parameters (inferred from
            type hints when using as decorator)
        overrides_built_in_tool: When True, explicitly indicates this tool is intended
            to override a built-in tool of the same name. If not set and the
            name clashes with a built-in tool, the runtime will return an error.
        skip_permission: When True, the tool can execute without a permission prompt.
    Returns:
        A Tool instance
    """
    # Support bare-decorator usage (@define_tool with no parentheses): the
    # decorated function arrives as the positional ``name`` argument.
    # Previously this returned the inner decorator un-applied, silently
    # binding the user's name to a closure instead of a Tool.
    bare_fn: Callable[..., Any] | None = None
    if callable(name) and not isinstance(name, str):
        bare_fn = name
        name = None

    def decorator(fn: Callable[..., Any]) -> Tool:
        tool_name = name if name is not None else getattr(fn, "__name__", "unknown")
        sig = inspect.signature(fn)
        param_names = list(sig.parameters.keys())
        hints = get_type_hints(fn)
        num_params = len(param_names)
        # Detect handler signature:
        #   - 0 params: handler()
        #   - 1 param, ToolInvocation: handler(invocation)
        #   - 1 param, Pydantic model: handler(params)
        #   - 2 params: handler(params, invocation)
        ptype = params_type
        first_param_type = hints.get(param_names[0]) if param_names else None
        if num_params == 0:
            takes_params = False
            takes_invocation = False
        elif num_params == 1 and first_param_type is ToolInvocation:
            takes_params = False
            takes_invocation = True
        else:
            takes_params = True
            takes_invocation = num_params >= 2
            # Infer the params model from the first parameter's annotation
            # when not given explicitly.
            if ptype is None and _is_pydantic_model(first_param_type):
                ptype = first_param_type
        # Generate the JSON schema advertised to the LLM from the Pydantic model.
        schema = None
        if ptype is not None and _is_pydantic_model(ptype):
            schema = ptype.model_json_schema()

        async def wrapped_handler(invocation: ToolInvocation) -> ToolResult:
            try:
                # Build positional args to match the detected signature.
                call_args = []
                if takes_params:
                    args = invocation.arguments or {}
                    if ptype is not None and _is_pydantic_model(ptype):
                        call_args.append(ptype.model_validate(args))
                    else:
                        call_args.append(args)
                if takes_invocation:
                    call_args.append(invocation)
                result = fn(*call_args)
                if inspect.isawaitable(result):
                    result = await result
                return _normalize_result(result)
            except Exception as exc:
                # Don't expose detailed error information to the LLM for security reasons.
                # The actual error is stored in the 'error' field for debugging.
                return ToolResult(
                    text_result_for_llm=(
                        "Invoking this tool produced an error. "
                        "Detailed information is not available."
                    ),
                    result_type="failure",
                    error=str(exc),
                    tool_telemetry={},
                    _from_exception=True,
                )

        return Tool(
            name=tool_name,
            description=description or "",
            parameters=schema,
            handler=wrapped_handler,
            overrides_built_in_tool=overrides_built_in_tool,
            skip_permission=skip_permission,
        )

    # Bare-decorator form: build the Tool from the decorated function now.
    if bare_fn is not None:
        return decorator(bare_fn)
    # If handler is provided, call decorator immediately
    if handler is not None:
        if name is None:
            raise ValueError("name is required when using define_tool with handler=")
        return decorator(handler)
    # Otherwise return decorator for @define_tool(...) usage
    return decorator
def _is_pydantic_model(t: Any) -> bool:
"""Check if a type is a Pydantic BaseModel subclass."""
try:
return isinstance(t, type) and issubclass(t, BaseModel)
except TypeError:
return False
def _normalize_result(result: Any) -> ToolResult:
    """
    Coerce an arbitrary handler return value into a ToolResult.

    - ToolResult instances pass through untouched
    - None becomes an empty success
    - strings become the LLM-facing text as-is
    - anything else is JSON-serialized (Pydantic models via model_dump)

    Raises:
        TypeError: if the value cannot be JSON-serialized.
    """
    if isinstance(result, ToolResult):
        return result
    if result is None:
        return ToolResult(text_result_for_llm="", result_type="success")
    if isinstance(result, str):
        return ToolResult(text_result_for_llm=result, result_type="success")

    def _encode(obj: Any) -> Any:
        # Let Pydantic models serialize themselves; reject anything else.
        if isinstance(obj, BaseModel):
            return obj.model_dump()
        raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable")

    try:
        serialized = json.dumps(result, default=_encode)
    except (TypeError, ValueError) as exc:
        raise TypeError(f"Failed to serialize tool result: {exc}") from exc
    return ToolResult(text_result_for_llm=serialized, result_type="success")