Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion sentry_sdk/integrations/pydantic_ai/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,20 @@ class PydanticAIIntegration(Integration):
identifier = "pydantic_ai"
origin = f"auto.ai.{identifier}"

def __init__(self, include_prompts: bool = True) -> None:
def __init__(
self, include_prompts: bool = True, handled_tool_call_exceptions: bool = True
) -> None:
"""
Initialize the Pydantic AI integration.

Args:
include_prompts: Whether to include prompts and messages in span data.
Requires send_default_pii=True. Defaults to True.
handled_tool_call_exceptions: Capture tool call exceptions that Pydantic AI
internally prevents from bubbling up.
"""
self.include_prompts = include_prompts
self.handled_tool_call_exceptions = handled_tool_call_exceptions

@staticmethod
def setup_once() -> None:
Expand Down
23 changes: 17 additions & 6 deletions sentry_sdk/integrations/pydantic_ai/patches/tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,7 @@
import sentry_sdk

from ..spans import execute_tool_span, update_execute_tool_span
from ..utils import (
_capture_exception,
get_current_agent,
)
from ..utils import _capture_exception, get_current_agent

from typing import TYPE_CHECKING

Expand All @@ -23,6 +20,7 @@

try:
from pydantic_ai._tool_manager import ToolManager # type: ignore
from pydantic_ai.exceptions import ToolRetryError # type: ignore
except ImportError:
raise DidNotEnable("pydantic-ai not installed")

Expand Down Expand Up @@ -82,8 +80,21 @@ async def wrapped_call_tool(
)
update_execute_tool_span(span, result)
return result
except Exception as exc:
_capture_exception(exc)
except ToolRetryError as exc:
# Avoid circular import due to multi-file integration structure
from sentry_sdk.integrations.pydantic_ai import (
PydanticAIIntegration,
)

integration = sentry_sdk.get_client().get_integration(
PydanticAIIntegration
)
if (
integration is None
or not integration.handled_tool_call_exceptions
):
raise exc from None
_capture_exception(exc, handled=True)
raise exc from None

# No span context - just call original
Expand Down
4 changes: 2 additions & 2 deletions sentry_sdk/integrations/pydantic_ai/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,12 +206,12 @@ def _set_available_tools(span: "sentry_sdk.tracing.Span", agent: "Any") -> None:
pass


def _capture_exception(exc: "Any") -> None:
def _capture_exception(exc: "Any", handled: bool = False) -> None:
set_span_errored()

event, hint = event_from_exception(
exc,
client_options=sentry_sdk.get_client().options,
mechanism={"type": "pydantic_ai", "handled": False},
mechanism={"type": "pydantic_ai", "handled": handled},
)
sentry_sdk.capture_event(event, hint=hint)
155 changes: 155 additions & 0 deletions tests/integrations/pydantic_ai/test_pydantic_ai.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,14 @@
import asyncio
import pytest

from typing import Annotated
from pydantic import Field

from sentry_sdk.integrations.pydantic_ai import PydanticAIIntegration

from pydantic_ai import Agent
from pydantic_ai.models.test import TestModel
from pydantic_ai.exceptions import ModelRetry, UnexpectedModelBehavior


@pytest.fixture
Expand Down Expand Up @@ -277,6 +281,157 @@ def add_numbers(a: int, b: int) -> int:
assert "add_numbers" in available_tools_str


@pytest.mark.parametrize(
    "handled_tool_call_exceptions",
    [False, True],
)
@pytest.mark.asyncio
async def test_agent_with_tool_model_retry(
    sentry_init, capture_events, test_agent, handled_tool_call_exceptions
):
    """
    Test that a handled exception is captured when a tool raises ModelRetry.

    The tool fails once with ModelRetry and succeeds on the retry, so two
    execute_tool spans are expected: the failed first attempt (input only)
    and the successful second attempt (input and output). An error event is
    only expected when handled_tool_call_exceptions is enabled.
    """

    retries = 0

    @test_agent.tool_plain
    def add_numbers(a: int, b: int) -> float:
        """Add two numbers together, but raises an exception on the first attempt."""
        nonlocal retries
        if retries == 0:
            retries += 1
            raise ModelRetry(message="Try again with the same arguments.")
        return a + b

    sentry_init(
        integrations=[
            PydanticAIIntegration(
                handled_tool_call_exceptions=handled_tool_call_exceptions
            )
        ],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )

    events = capture_events()

    result = await test_agent.run("What is 5 + 3?")

    assert result is not None

    if handled_tool_call_exceptions:
        (error, transaction) = events
    else:
        (transaction,) = events
    spans = transaction["spans"]

    if handled_tool_call_exceptions:
        assert error["level"] == "error"
        assert error["exception"]["values"][0]["mechanism"]["handled"]

    # Find child span types (invoke_agent is the transaction, not a child span)
    chat_spans = [s for s in spans if s["op"] == "gen_ai.chat"]
    tool_spans = [s for s in spans if s["op"] == "gen_ai.execute_tool"]

    # tool_spans[1] is indexed below, so require both the failed attempt and
    # the successful retry (a bare `>= 1` would turn a missing retry span
    # into an IndexError instead of a clear assertion failure).
    assert len(tool_spans) >= 2

    # First tool span: the attempt that raised ModelRetry. Only the input is
    # checked, since the tool never produced an output.
    model_retry_tool_span = tool_spans[0]
    assert "execute_tool" in model_retry_tool_span["description"]
    assert model_retry_tool_span["data"]["gen_ai.operation.name"] == "execute_tool"
    assert model_retry_tool_span["data"]["gen_ai.tool.type"] == "function"
    assert model_retry_tool_span["data"]["gen_ai.tool.name"] == "add_numbers"
    assert "gen_ai.tool.input" in model_retry_tool_span["data"]

    # Second tool span: the successful retry, which records both input and output.
    tool_span = tool_spans[1]
    assert "execute_tool" in tool_span["description"]
    assert tool_span["data"]["gen_ai.operation.name"] == "execute_tool"
    assert tool_span["data"]["gen_ai.tool.type"] == "function"
    assert tool_span["data"]["gen_ai.tool.name"] == "add_numbers"
    assert "gen_ai.tool.input" in tool_span["data"]
    assert "gen_ai.tool.output" in tool_span["data"]

    # Check chat spans have available_tools
    for chat_span in chat_spans:
        assert "gen_ai.request.available_tools" in chat_span["data"]
        available_tools_str = chat_span["data"]["gen_ai.request.available_tools"]
        # Available tools is serialized as a string
        assert "add_numbers" in available_tools_str


@pytest.mark.parametrize(
    "handled_tool_call_exceptions",
    [False, True],
)
@pytest.mark.asyncio
async def test_agent_with_tool_validation_error(
    sentry_init, capture_events, test_agent, handled_tool_call_exceptions
):
    """
    Test that a handled exception is captured when a tool has unsatisfiable constraints.

    The tool argument constraint (gt=0 and lt=0) can never validate, so the
    run ends with UnexpectedModelBehavior; an extra handled error event is
    only emitted when handled_tool_call_exceptions is enabled.
    """

    @test_agent.tool_plain
    def add_numbers(a: Annotated[int, Field(gt=0, lt=0)], b: int) -> int:
        """Add two numbers together."""
        return a + b

    sentry_init(
        integrations=[
            PydanticAIIntegration(
                handled_tool_call_exceptions=handled_tool_call_exceptions
            )
        ],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )

    events = capture_events()

    result = None
    with pytest.raises(UnexpectedModelBehavior):
        result = await test_agent.run("What is 5 + 3?")

    # The run raised before producing a result.
    assert result is None

    if handled_tool_call_exceptions:
        error, model_behaviour_error, transaction = events
    else:
        error = None
        model_behaviour_error, transaction = events
    child_spans = transaction["spans"]

    if handled_tool_call_exceptions:
        assert error["level"] == "error"
        mechanism = error["exception"]["values"][0]["mechanism"]
        assert mechanism["handled"]

    # Child spans only — invoke_agent is the transaction itself.
    chat_spans = [span for span in child_spans if span["op"] == "gen_ai.chat"]
    tool_spans = [
        span for span in child_spans if span["op"] == "gen_ai.execute_tool"
    ]

    # At least one tool invocation must have been recorded.
    assert tool_spans

    # The first tool span corresponds to the failed validation attempt.
    failed_tool_span = tool_spans[0]
    failed_data = failed_tool_span["data"]
    assert "execute_tool" in failed_tool_span["description"]
    assert failed_data["gen_ai.operation.name"] == "execute_tool"
    assert failed_data["gen_ai.tool.type"] == "function"
    assert failed_data["gen_ai.tool.name"] == "add_numbers"
    assert "gen_ai.tool.input" in failed_data

    # Every chat span advertises the registered tools.
    for chat_span in chat_spans:
        chat_data = chat_span["data"]
        assert "gen_ai.request.available_tools" in chat_data
        # Available tools is serialized as a string
        assert "add_numbers" in chat_data["gen_ai.request.available_tools"]


@pytest.mark.asyncio
async def test_agent_with_tools_streaming(sentry_init, capture_events, test_agent):
"""
Expand Down
Loading