Skip to content

Commit 9e306d4

Browse files
feat(anthropic): Emit gen_ai.chat spans for messages.stream()
1 parent fd84837 commit 9e306d4

2 files changed

Lines changed: 644 additions & 27 deletions

File tree

sentry_sdk/integrations/anthropic.py

Lines changed: 185 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@
3838
Omit = None
3939

4040
from anthropic.resources import AsyncMessages, Messages
41+
from anthropic.lib.streaming._messages import MessageStreamManager
4142

4243
from anthropic.types import (
4344
MessageStartEvent,
@@ -59,7 +60,13 @@
5960
from sentry_sdk._types import TextPart
6061

6162
from anthropic import AsyncStream
62-
from anthropic.types import RawMessageStreamEvent
63+
from anthropic.types import (
64+
RawMessageStreamEvent,
65+
MessageParam,
66+
ModelParam,
67+
TextBlockParam,
68+
ToolUnionParam,
69+
)
6370

6471

6572
class _RecordedUsage:
@@ -84,6 +91,11 @@ def setup_once() -> None:
8491
Messages.create = _wrap_message_create(Messages.create)
8592
AsyncMessages.create = _wrap_message_create_async(AsyncMessages.create)
8693

94+
Messages.stream = _wrap_message_stream(Messages.stream)
95+
MessageStreamManager.__enter__ = _wrap_message_stream_manager_enter(
96+
MessageStreamManager.__enter__
97+
)
98+
8799

88100
def _capture_exception(exc: "Any") -> None:
89101
set_span_errored()
@@ -253,27 +265,32 @@ def _transform_system_instructions(
253265
]
254266

255267

256-
def _set_input_data(
257-
span: "Span", kwargs: "dict[str, Any]", integration: "AnthropicIntegration"
268+
def _common_set_input_data(
269+
span: "Span",
270+
integration: "AnthropicIntegration",
271+
max_tokens: "int",
272+
messages: "Iterable[MessageParam]",
273+
model: "ModelParam",
274+
system: "Optional[Union[str, Iterable[TextBlockParam]]]",
275+
temperature: "Optional[float]",
276+
top_k: "Optional[int]",
277+
top_p: "Optional[float]",
278+
tools: "Optional[Iterable[ToolUnionParam]]",
258279
) -> None:
259280
"""
260281
Set input data for the span based on the provided keyword arguments for the anthropic message creation.
261282
"""
262283
span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
263-
system_instructions: "Union[str, Iterable[TextBlockParam]]" = kwargs.get("system") # type: ignore
264-
messages = kwargs.get("messages")
265284
if (
266285
messages is not None
267286
and len(messages) > 0
268287
and should_send_default_pii()
269288
and integration.include_prompts
270289
):
271-
if isinstance(system_instructions, str) or isinstance(
272-
system_instructions, Iterable
273-
):
290+
if isinstance(system, str) or isinstance(system, Iterable):
274291
span.set_data(
275292
SPANDATA.GEN_AI_SYSTEM_INSTRUCTIONS,
276-
json.dumps(_transform_system_instructions(system_instructions)),
293+
json.dumps(_transform_system_instructions(system)),
277294
)
278295

279296
normalized_messages = []
@@ -329,27 +346,69 @@ def _set_input_data(
329346
span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages_data, unpack=False
330347
)
331348

332-
span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, kwargs.get("stream", False))
349+
if max_tokens is not None and _is_given(max_tokens):
350+
span.set_data(SPANDATA.GEN_AI_REQUEST_MAX_TOKENS, max_tokens)
351+
if model is not None and _is_given(model):
352+
span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model)
353+
if temperature is not None and _is_given(temperature):
354+
span.set_data(SPANDATA.GEN_AI_REQUEST_TEMPERATURE, temperature)
355+
if top_k is not None and _is_given(top_k):
356+
span.set_data(SPANDATA.GEN_AI_REQUEST_TOP_K, top_k)
357+
if top_p is not None and _is_given(top_p):
358+
span.set_data(SPANDATA.GEN_AI_REQUEST_TOP_P, top_p)
333359

334-
kwargs_keys_to_attributes = {
335-
"max_tokens": SPANDATA.GEN_AI_REQUEST_MAX_TOKENS,
336-
"model": SPANDATA.GEN_AI_REQUEST_MODEL,
337-
"temperature": SPANDATA.GEN_AI_REQUEST_TEMPERATURE,
338-
"top_k": SPANDATA.GEN_AI_REQUEST_TOP_K,
339-
"top_p": SPANDATA.GEN_AI_REQUEST_TOP_P,
340-
}
341-
for key, attribute in kwargs_keys_to_attributes.items():
342-
value = kwargs.get(key)
343-
344-
if value is not None and _is_given(value):
345-
span.set_data(attribute, value)
346-
347-
# Input attributes: Tools
348-
tools = kwargs.get("tools")
349360
if tools is not None and _is_given(tools) and len(tools) > 0:
350361
span.set_data(SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS, safe_serialize(tools))
351362

352363

364+
def _set_create_input_data(
    span: "Span", kwargs: "dict[str, Any]", integration: "AnthropicIntegration"
) -> None:
    """
    Record request attributes for a messages.create() call on the given span.

    Records whether the caller asked for a streaming response, then pulls the
    relevant keyword arguments out of ``kwargs`` and forwards them to the
    shared input-data helper. Missing kwargs are forwarded as None.
    """
    span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, kwargs.get("stream", False))

    forwarded = (
        "max_tokens",
        "messages",
        "model",
        "system",
        "temperature",
        "top_k",
        "top_p",
        "tools",
    )
    _common_set_input_data(
        span=span,
        integration=integration,
        **{name: kwargs.get(name) for name in forwarded},
    )
385+
386+
def _set_stream_input_data(
    span: "Span",
    integration: "AnthropicIntegration",
    max_tokens: "int",
    messages: "Iterable[MessageParam]",
    model: "ModelParam",
    system: "Optional[Union[str, Iterable[TextBlockParam]]]",
    temperature: "Optional[float]",
    top_k: "Optional[int]",
    top_p: "Optional[float]",
    tools: "Optional[Iterable[ToolUnionParam]]",
) -> None:
    """
    Record request attributes for a messages.stream() call on the given span.

    Currently forwards every argument unchanged to the shared input-data
    helper used by both the create() and stream() code paths.
    """
    request_fields = {
        "max_tokens": max_tokens,
        "messages": messages,
        "model": model,
        "system": system,
        "temperature": temperature,
        "top_k": top_k,
        "top_p": top_p,
        "tools": tools,
    }
    _common_set_input_data(span=span, integration=integration, **request_fields)
410+
411+
353412
def _set_output_data(
354413
span: "Span",
355414
integration: "AnthropicIntegration",
@@ -543,7 +602,7 @@ def _sentry_patched_create_common(f: "Any", *args: "Any", **kwargs: "Any") -> "A
543602
)
544603
span.__enter__()
545604

546-
_set_input_data(span, kwargs, integration)
605+
_set_create_input_data(span, kwargs, integration)
547606

548607
result = yield f, args, kwargs
549608

@@ -664,6 +723,106 @@ async def _sentry_patched_create_async(*args: "Any", **kwargs: "Any") -> "Any":
664723
return _sentry_patched_create_async
665724

666725

726+
def _sentry_patched_stream_common(
    result,
    max_tokens: "int",
    messages: "Iterable[MessageParam]",
    model: "ModelParam",
    system: "Union[str, Iterable[TextBlockParam]]",
    temperature: "float",
    top_k: "int",
    top_p: "float",
    tools: "Iterable[ToolUnionParam]",
):
    """
    Open a gen_ai.chat span for a messages.stream() call.

    Returns ``result`` unchanged when the integration is disabled or when
    ``messages`` is missing or not iterable. Otherwise starts a span, records
    the request attributes on it, and hands ``result`` together with the open
    span to ``_patch_streaming_response_iterator``.
    """
    integration = sentry_sdk.get_client().get_integration(AnthropicIntegration)
    if integration is None or messages is None:
        return result

    # Bail out on non-iterable message payloads instead of failing later
    # while recording span data.
    try:
        iter(messages)
    except TypeError:
        return result

    model_name = "" if model is None else model

    span = get_start_span_function()(
        op=OP.GEN_AI_CHAT,
        name=f"chat {model_name}".strip(),
        origin=AnthropicIntegration.origin,
    )
    span.__enter__()

    span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)
    _set_stream_input_data(
        span,
        integration,
        max_tokens=max_tokens,
        messages=messages,
        model=model_name,
        system=system,
        temperature=temperature,
        top_k=top_k,
        top_p=top_p,
        tools=tools,
    )
    _patch_streaming_response_iterator(result, span, integration)

    return result
776+
777+
778+
def _wrap_message_stream(f: "Any") -> "Any":
779+
"""
780+
Attaches user-provided arguments to the returned context manager.
781+
The attributes are set on `gen_ai.chat` spans in the patch for the context manager.
782+
"""
783+
784+
@wraps(f)
785+
def _sentry_patched_stream(*args, **kwargs):
786+
stream = f(*args, **kwargs)
787+
788+
stream._max_tokens = kwargs.get("max_tokens")
789+
stream._messages = kwargs.get("messages")
790+
stream._model = kwargs.get("model")
791+
stream._system = kwargs.get("system")
792+
stream._temperature = kwargs.get("temperature")
793+
stream._top_k = kwargs.get("top_k")
794+
stream._top_p = kwargs.get("top_p")
795+
stream._tools = kwargs.get("tools")
796+
797+
return stream
798+
799+
return _sentry_patched_stream
800+
801+
802+
def _wrap_message_stream_manager_enter(f: "Any") -> "Any":
803+
"""
804+
Creates and manages `gen_ai.chat` spans.
805+
"""
806+
807+
@wraps(f)
808+
def _sentry_patched_enter(self):
809+
stream = f(self)
810+
_sentry_patched_stream_common(
811+
stream,
812+
max_tokens=self._max_tokens,
813+
messages=self._messages,
814+
model=self._model,
815+
system=self._system,
816+
temperature=self._temperature,
817+
top_k=self._top_k,
818+
top_p=self._top_p,
819+
tools=self._tools,
820+
)
821+
return stream
822+
823+
return _sentry_patched_enter
824+
825+
667826
def _is_given(obj: "Any") -> bool:
668827
"""
669828
Check for givenness safely across different anthropic versions.

0 commit comments

Comments (0)