feat(mistralai): implement emitting events in addition to current behavior #2890

Status: Open · wants to merge 4 commits into base: main
@@ -1,2 +1,3 @@
class Config:
    exception_logger = None
    use_legacy_attributes = True
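
This flag is what the rest of the PR keys off: leaving use_legacy_attributes at True keeps the current span-attribute behavior, while setting it to False switches the instrumentation to the new event-emission path (see should_emit_events in the utils change below). A minimal opt-in sketch, assuming the package's instrumentor class is named MistralAiInstrumentor (that name is an assumption, not shown in this diff):

from opentelemetry.instrumentation.mistralai import MistralAiInstrumentor
from opentelemetry.instrumentation.mistralai.config import Config

# Assumed opt-in: turn off legacy attributes so events are emitted instead.
Config.use_legacy_attributes = False
MistralAiInstrumentor().instrument()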
@@ -0,0 +1,98 @@
from dataclasses import asdict
from enum import Enum
from typing import Union

from opentelemetry._events import Event, EventLogger
from opentelemetry.instrumentation.mistralai.event_models import (
    ChoiceEvent,
    MessageEvent,
)
from opentelemetry.instrumentation.mistralai.utils import (
    should_emit_events,
    should_send_prompts,
)
from opentelemetry.semconv._incubating.attributes import (
    gen_ai_attributes as GenAIAttributes,
)


class Roles(Enum):
    USER = "user"
    ASSISTANT = "assistant"
    SYSTEM = "system"
    TOOL = "tool"


VALID_MESSAGE_ROLES = {role.value for role in Roles}
"""The valid roles for naming the message event."""

EVENT_ATTRIBUTES = {GenAIAttributes.GEN_AI_SYSTEM: "mistral_ai"}
"""The attributes to be used for the event."""


def emit_event(
    event: Union[MessageEvent, ChoiceEvent], event_logger: Union[EventLogger, None]
) -> None:
    """
    Emit an event to the OpenTelemetry SDK.

    Args:
        event: The event to emit.
        event_logger: The event logger to emit with; a no-op if None.
    """
    if not should_emit_events() or not event_logger:
        return

    if isinstance(event, MessageEvent):
        _emit_message_event(event, event_logger)
    elif isinstance(event, ChoiceEvent):
        _emit_choice_event(event, event_logger)
    else:
        raise TypeError("Unsupported event type")


def _emit_message_event(event: MessageEvent, event_logger: EventLogger) -> None:
    body = asdict(event)

    if event.role in VALID_MESSAGE_ROLES:
        name = "gen_ai.{}.message".format(event.role)
        # According to the semantic conventions, the role is conditionally required
        # if available and not equal to the role in the event name. So, remove the
        # role from the body if it is the same as the one in the event name.
        body.pop("role", None)
    else:
        name = "gen_ai.user.message"

    # According to the semantic conventions, only the assistant role has tool calls
    if event.role != Roles.ASSISTANT.value and event.tool_calls is not None:
        del body["tool_calls"]
    elif event.tool_calls is None:
        del body["tool_calls"]

    if not should_send_prompts():
        del body["content"]
        if body.get("tool_calls") is not None:
            for tool_call in body["tool_calls"]:
                tool_call["function"].pop("arguments", None)

    event_logger.emit(Event(name=name, body=body, attributes=EVENT_ATTRIBUTES))


def _emit_choice_event(event: ChoiceEvent, event_logger: EventLogger) -> None:
    body = asdict(event)
    if event.message["role"] == Roles.ASSISTANT.value:
        # According to the semantic conventions, the role is conditionally required
        # if available and not equal to "assistant", so remove the role from the
        # body if it is "assistant".
        body["message"].pop("role", None)

    if event.tool_calls is None:
        del body["tool_calls"]

    if not should_send_prompts():
        body["message"].pop("content", None)
        if body.get("tool_calls") is not None:
            for tool_call in body["tool_calls"]:
                tool_call["function"].pop("arguments", None)

    event_logger.emit(
        Event(name="gen_ai.choice", body=body, attributes=EVENT_ATTRIBUTES)
    )
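
For orientation, a minimal sketch of driving emit_event directly, assuming an event logger obtained through the OpenTelemetry events API (the logger name below is illustrative):

from opentelemetry._events import get_event_logger
from opentelemetry.instrumentation.mistralai.event_models import MessageEvent

# Illustrative usage: this is a no-op unless should_emit_events() is true,
# i.e. Config.use_legacy_attributes has been set to False.
event_logger = get_event_logger("opentelemetry.instrumentation.mistralai")
emit_event(MessageEvent(content="Tell me a joke about OpenTelemetry"), event_logger)

The guard at the top of emit_event keeps a call like this safe to make unconditionally from the instrumentation code.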
@@ -0,0 +1,41 @@
from dataclasses import dataclass
from typing import Any, List, Literal, Optional, TypedDict


class _FunctionToolCall(TypedDict):
    function_name: str
    arguments: Optional[dict[str, Any]]


class ToolCall(TypedDict):
    """Represents a tool call in the AI model."""

    id: str
    function: _FunctionToolCall
    type: Literal["function"]


class CompletionMessage(TypedDict):
    """Represents a message in the AI model."""

    content: Any
    role: str = "assistant"


@dataclass
class MessageEvent:
    """Represents an input event for the AI model."""

    content: Any
    role: str = "user"
    tool_calls: Optional[List[ToolCall]] = None


@dataclass
class ChoiceEvent:
    """Represents a completion event for the AI model."""

    index: int
    message: CompletionMessage
    finish_reason: str = "unknown"
    tool_calls: Optional[List[ToolCall]] = None
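
A short sketch of how these models pair up with the emitter above (all values illustrative). Note that TypedDict subclasses such as CompletionMessage do not apply class-level defaults at runtime, so the role key still has to be provided when the dict is built:

# Illustrative values only.
message = MessageEvent(content="Tell me a joke about OpenTelemetry", role="user")
choice = ChoiceEvent(
    index=0,
    message={"content": "Why did the span time out? It lost its context.", "role": "assistant"},
    finish_reason="stop",
)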
@@ -1,7 +1,18 @@
import logging
import os
import traceback

from opentelemetry import context as context_api
from opentelemetry.instrumentation.mistralai.config import Config

TRACELOOP_TRACE_CONTENT = "TRACELOOP_TRACE_CONTENT"


def should_send_prompts():
    return (
        os.getenv(TRACELOOP_TRACE_CONTENT) or "true"
    ).lower() == "true" or context_api.get_value("override_enable_content_tracing")


def dont_throw(func):
    """
@@ -26,3 +37,12 @@ def wrapper(*args, **kwargs):
            Config.exception_logger(e)

    return wrapper


def should_emit_events() -> bool:
    """
    Checks whether the instrumentation should emit events instead of the
    legacy attributes, i.e. whether use_legacy_attributes is disabled.
    """

    return not Config.use_legacy_attributes
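
The two switches compose independently: TRACELOOP_TRACE_CONTENT (defaulting to true) or the context override gates message content, while use_legacy_attributes gates the event pathway. A small sketch of one combination, using only the helpers above and assuming no context override is set:

import os

from opentelemetry.instrumentation.mistralai.config import Config
from opentelemetry.instrumentation.mistralai.utils import (
    should_emit_events,
    should_send_prompts,
)

os.environ["TRACELOOP_TRACE_CONTENT"] = "false"
Config.use_legacy_attributes = False

assert should_emit_events()       # events are emitted...
assert not should_send_prompts()  # ...but prompt/completion content is dropped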
@@ -0,0 +1,62 @@
interactions:
- request:
    body: '{"messages": [{"role": "user", "content": "Tell me a joke about OpenTelemetry"}],
      "safe_prompt": false, "model": "mistral-tiny", "stream": false}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '145'
      content-type:
      - application/json
      host:
      - api.mistral.ai
      user-agent:
      - mistral-client-python/0.2.0
    method: POST
    uri: https://api.mistral.ai/v1/chat/completions
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAAAzyRQY8TMQyF/0rWF0AKaGbbaWkuSNw4cUHiwKLKTdxOIImj2IWOqv53lKLu0U/f
        e5afrxADOPi4ptVxN2yP4XlYT3TA3Q53I04DbvyE0wos8OEXeQUHfkb94DnXRBq5gAXfCJUCuHE7
        bjbb4XlaWcgcKIGDHEUbpvcay9LZmaMnAffjCrEEuoAbLGQSwROBu0LjROAARaIoFu0eLkql7/4+
        LybEYL5WKt8oUSZtizm0WE4GTcZqlI3OZCo2XT69lJfymTyehUxU8xeLUrgjDT3dwcZnJcPH+3Bi
        DsZz+UNNsB8nTz3i7Zc32czUqFsfyyTHRNYU1q56DmS4GbrUhLGY//1cjCyilOXpHVhQ5rT3mJKA
        K+eUbhaOsUSZ941QuIADUa5gIfGpNj48uJ8Wzo96auNcda/8m4qAG8eeq5helWnbC3t851Veb263
        fwAAAP//AwBaDk/J7QEAAA==
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 8898667d8c2a8e48-TLV
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Sat, 25 May 2024 20:50:54 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      x-kong-proxy-latency:
      - '6'
      x-kong-request-id:
      - 9695304667dcf5089c27ca7e9e074810
      x-kong-upstream-latency:
      - '739'
    status:
      code: 200
      message: OK
version: 1
@@ -0,0 +1,62 @@
interactions:
- request:
    body: '{"messages": [{"role": "user", "content": "Tell me a joke about OpenTelemetry"}],
      "safe_prompt": false, "model": "mistral-tiny", "stream": false}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '145'
      content-type:
      - application/json
      host:
      - api.mistral.ai
      user-agent:
      - mistral-client-python/0.2.0
    method: POST
    uri: https://api.mistral.ai/v1/chat/completions
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAAAzyRQY8TMQyF/0rWF0AKaGbbaWkuSNw4cUHiwKLKTdxOIImj2IWOqv53lKLu0U/f
        e5afrxADOPi4ptVxN2yP4XlYT3TA3Q53I04DbvyE0wos8OEXeQUHfkb94DnXRBq5gAXfCJUCuHE7
        bjbb4XlaWcgcKIGDHEUbpvcay9LZmaMnAffjCrEEuoAbLGQSwROBu0LjROAARaIoFu0eLkql7/4+
        LybEYL5WKt8oUSZtizm0WE4GTcZqlI3OZCo2XT69lJfymTyehUxU8xeLUrgjDT3dwcZnJcPH+3Bi
        DsZz+UNNsB8nTz3i7Zc32czUqFsfyyTHRNYU1q56DmS4GbrUhLGY//1cjCyilOXpHVhQ5rT3mJKA
        K+eUbhaOsUSZ941QuIADUa5gIfGpNj48uJ8Wzo96auNcda/8m4qAG8eeq5helWnbC3t851Veb263
        fwAAAP//AwBaDk/J7QEAAA==
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 8898667d8c2a8e48-TLV
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Sat, 25 May 2024 20:50:54 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      x-kong-proxy-latency:
      - '6'
      x-kong-request-id:
      - 9695304667dcf5089c27ca7e9e074810
      x-kong-upstream-latency:
      - '739'
    status:
      code: 200
      message: OK
version: 1
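
Both cassettes record the same vcrpy interaction; the response body is stored gzip-compressed and base64-encoded under YAML's !!binary tag. A quick sketch for inspecting such a payload while debugging tests (the truncated string must be replaced with the full base64 block from the cassette):

import base64
import gzip

# Paste the full !!binary payload from the cassette; truncated here.
encoded = "H4sIAAAAAAAAAzyRQY8TMQyF..."
print(gzip.decompress(base64.b64decode(encoded)).decode("utf-8"))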