From 42e5c5d157521e04c67d59560bdda1cdfd787412 Mon Sep 17 00:00:00 2001 From: Nicole Cybul Date: Fri, 18 Jul 2025 11:25:05 -0400 Subject: [PATCH 1/3] select metadata keys to tag from kwargs --- ddtrace/llmobs/_integrations/litellm.py | 4 +- ddtrace/llmobs/_integrations/utils.py | 76 +++++++++++++++++++++++-- 2 files changed, 74 insertions(+), 6 deletions(-) diff --git a/ddtrace/llmobs/_integrations/litellm.py b/ddtrace/llmobs/_integrations/litellm.py index ded75c9753f..e808103509c 100644 --- a/ddtrace/llmobs/_integrations/litellm.py +++ b/ddtrace/llmobs/_integrations/litellm.py @@ -71,9 +71,9 @@ def _llmobs_set_tags( # use Open AI helpers since response format will match Open AI if self.is_completion_operation(operation): - openai_set_meta_tags_from_completion(span, kwargs, response) + openai_set_meta_tags_from_completion(span, kwargs, response, integration_name="litellm") else: - openai_set_meta_tags_from_chat(span, kwargs, response) + openai_set_meta_tags_from_chat(span, kwargs, response, integration_name="litellm") # custom logic for updating metadata on litellm spans self._update_litellm_metadata(span, kwargs, operation) diff --git a/ddtrace/llmobs/_integrations/utils.py b/ddtrace/llmobs/_integrations/utils.py index 8bc81164447..a5b3835d4c2 100644 --- a/ddtrace/llmobs/_integrations/utils.py +++ b/ddtrace/llmobs/_integrations/utils.py @@ -50,6 +50,57 @@ LITELLM_ROUTER_INSTANCE_KEY, ) +LITELLM_METADATA_CHAT_KEYS = ( + "timeout", + "temperature", + "top_p", + "n", + "stream", + "stream_options", + "stop", + "max_completion_tokens", + "max_tokens", + "modalities", + "prediction", + "presence_penalty", + "frequency_penalty", + "logit_bias", + "user", + "response_format", + "seed", + "tool_choice", + "parallel_tool_calls", + "logprobs", + "top_logprobs", + "deployment_id", + "reasoning_effort", + "base_url", + "api_base", + "api_version", + "model_list", +) +LITELLM_METADATA_COMPLETION_KEYS = ( + "best_of", + "echo", + "frequency_penalty", + "logit_bias", + "logprobs", + "max_tokens", + "n", + "presence_penalty", + "stop", + "stream", + "stream_options", + "suffix", + "temperature", + "top_p", + "user", + "api_base", + "api_version", + "model_list", + "custom_llm_provider", +) + def extract_model_name_google(instance, model_name_attr): """Extract the model name from the instance. 
@@ -289,12 +340,14 @@ def get_messages_from_converse_content(role: str, content: List[Dict[str, Any]]) return messages -def openai_set_meta_tags_from_completion(span: Span, kwargs: Dict[str, Any], completions: Any) -> None: +def openai_set_meta_tags_from_completion( + span: Span, kwargs: Dict[str, Any], completions: Any, integration_name: str = "openai" +) -> None: """Extract prompt/response tags from a completion and set them as temporary "_ml_obs.meta.*" tags.""" prompt = kwargs.get("prompt", "") if isinstance(prompt, str): prompt = [prompt] - parameters = {k: v for k, v in kwargs.items() if k not in OPENAI_SKIPPED_COMPLETION_TAGS} + parameters = get_metadata_from_kwargs(kwargs, integration_name, "completion") output_messages = [{"content": ""}] if not span.error and completions: choices = getattr(completions, "choices", completions) @@ -308,7 +361,9 @@ def openai_set_meta_tags_from_completion(span: Span, kwargs: Dict[str, Any], com ) -def openai_set_meta_tags_from_chat(span: Span, kwargs: Dict[str, Any], messages: Optional[Any]) -> None: +def openai_set_meta_tags_from_chat( + span: Span, kwargs: Dict[str, Any], messages: Optional[Any], integration_name: str = "openai" +) -> None: """Extract prompt/response tags from a chat completion and set them as temporary "_ml_obs.meta.*" tags.""" input_messages = [] for m in kwargs.get("messages", []): @@ -332,7 +387,7 @@ def openai_set_meta_tags_from_chat(span: Span, kwargs: Dict[str, Any], messages: for tool_call in tool_calls ] input_messages.append(processed_message) - parameters = {k: v for k, v in kwargs.items() if k not in OPENAI_SKIPPED_CHAT_TAGS} + parameters = get_metadata_from_kwargs(kwargs, integration_name, "chat") span._set_ctx_items({INPUT_MESSAGES: input_messages, METADATA: parameters}) if span.error or not messages: @@ -404,6 +459,19 @@ def openai_set_meta_tags_from_chat(span: Span, kwargs: Dict[str, Any], messages: span._set_ctx_item(OUTPUT_MESSAGES, output_messages) +def get_metadata_from_kwargs( + kwargs: Dict[str, Any], integration_name: str = "openai", operation: str = "chat" +) -> Dict[str, Any]: + metadata = {} + if integration_name == "openai": + keys_to_skip = OPENAI_SKIPPED_CHAT_TAGS if operation == "chat" else OPENAI_SKIPPED_COMPLETION_TAGS + metadata = {k: v for k, v in kwargs.items() if k not in keys_to_skip} + elif integration_name == "litellm": + keys_to_include = LITELLM_METADATA_CHAT_KEYS if operation == "chat" else LITELLM_METADATA_COMPLETION_KEYS + metadata = {k: v for k, v in kwargs.items() if k in keys_to_include} + return metadata + + def openai_get_input_messages_from_response_input( messages: Optional[Union[str, List[Dict[str, Any]]]] ) -> List[Dict[str, Any]]: From 83fe788289dbbcb167b7931fb1956aa3933c6fc3 Mon Sep 17 00:00:00 2001 From: Nicole Cybul Date: Fri, 18 Jul 2025 11:51:05 -0400 Subject: [PATCH 2/3] add release note --- .../notes/litellm-scrub-metadata-135109a6a5324111.yaml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 releasenotes/notes/litellm-scrub-metadata-135109a6a5324111.yaml diff --git a/releasenotes/notes/litellm-scrub-metadata-135109a6a5324111.yaml b/releasenotes/notes/litellm-scrub-metadata-135109a6a5324111.yaml new file mode 100644 index 00000000000..5d2ba17ccf6 --- /dev/null +++ b/releasenotes/notes/litellm-scrub-metadata-135109a6a5324111.yaml @@ -0,0 +1,4 @@ +fixes: + - | + litellm: This fix resolves an issue where potentially sensitive parameters were being tagged as metadata + on spans produced by the LiteLLM integration. 
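
Reviewer sketch (illustrative, not part of the patch): the litellm branch of the
new get_metadata_from_kwargs() helper above selects metadata via an allowlist
rather than a denylist. A minimal, self-contained Python rendering of that
behavior follows; the allowlist is an abridged subset of the
LITELLM_METADATA_CHAT_KEYS tuple added in PATCH 1/3, and the sample kwargs
(including the credential values) are invented for illustration.

from typing import Any, Dict

# Abridged subset of the LITELLM_METADATA_CHAT_KEYS allowlist added in PATCH 1/3.
LITELLM_METADATA_CHAT_KEYS = ("temperature", "top_p", "max_tokens", "user")

def litellm_chat_metadata(kwargs: Dict[str, Any]) -> Dict[str, Any]:
    # Mirrors the litellm branch of get_metadata_from_kwargs(): a kwarg is
    # tagged as span metadata only if its key appears on the allowlist.
    return {k: v for k, v in kwargs.items() if k in LITELLM_METADATA_CHAT_KEYS}

call_kwargs = {
    "temperature": 0.2,
    "api_key": "sk-example",           # hypothetical credential: dropped
    "aws_secret_access_key": "dummy",  # hypothetical provider secret: dropped
}
print(litellm_chat_metadata(call_kwargs))  # {'temperature': 0.2}

Unknown or provider-specific keys fall through silently, which is the point of
the allowlist: a key must be affirmatively known-safe to be tagged.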
From 498696576930a9678cc5928a4f0644a981aa3bb1 Mon Sep 17 00:00:00 2001
From: ncybul <124532568+ncybul@users.noreply.github.com>
Date: Mon, 21 Jul 2025 10:46:48 -0400
Subject: [PATCH 3/3] Update
 releasenotes/notes/litellm-scrub-metadata-135109a6a5324111.yaml

Co-authored-by: Yun Kim <35776586+Yun-Kim@users.noreply.github.com>
---
 .../notes/litellm-scrub-metadata-135109a6a5324111.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/releasenotes/notes/litellm-scrub-metadata-135109a6a5324111.yaml b/releasenotes/notes/litellm-scrub-metadata-135109a6a5324111.yaml
index 5d2ba17ccf6..f539ef0b5a8 100644
--- a/releasenotes/notes/litellm-scrub-metadata-135109a6a5324111.yaml
+++ b/releasenotes/notes/litellm-scrub-metadata-135109a6a5324111.yaml
@@ -1,4 +1,4 @@
 fixes:
   - |
-    litellm: This fix resolves an issue where potentially sensitive parameters were being tagged as metadata
-    on spans produced by the LiteLLM integration.
+    litellm: This fix resolves an issue where potentially sensitive parameters were being tagged as metadata on LLM Observability spans.
+    Now, metadata tags on LiteLLM spans are based on an allowlist instead of a denylist.
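
Reviewer sketch (illustrative, not part of the patch): why the release note
distinguishes an allowlist from a denylist. The denylist and allowlist below
are abridged stand-ins for OPENAI_SKIPPED_CHAT_TAGS and
LITELLM_METADATA_CHAT_KEYS, and the kwargs (including both secret values) are
invented for illustration; assuming Python 3.

from typing import Any, Dict

call_kwargs: Dict[str, Any] = {
    "temperature": 0.7,
    "api_key": "sk-example",   # sensitive value, invented for illustration
    "client_secret": "dummy",  # sensitive value, invented for illustration
}

# Old behavior (denylist): any kwarg not explicitly skipped leaks into metadata.
denylist = ("messages", "model", "tools", "functions")
old_metadata = {k: v for k, v in call_kwargs.items() if k not in denylist}

# New behavior (allowlist): only known-safe parameters are tagged.
allowlist = ("temperature", "top_p", "max_tokens", "user")
new_metadata = {k: v for k, v in call_kwargs.items() if k in allowlist}

print(old_metadata)  # includes 'api_key' and 'client_secret'
print(new_metadata)  # {'temperature': 0.7}

With a denylist, every newly introduced or provider-specific parameter is
tagged unless someone remembers to add it to the skip set; with an allowlist,
the failure mode is a missing metadata key rather than a leaked secret.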