50
50
LITELLM_ROUTER_INSTANCE_KEY ,
51
51
)
52
52
53
# Allowlist of litellm chat-completion request kwargs that are copied into
# span metadata by get_metadata_from_kwargs(); any kwarg not listed here is
# dropped. Order matches the upstream litellm chat API parameter listing.
LITELLM_METADATA_CHAT_KEYS = (
    "timeout", "temperature", "top_p", "n",
    "stream", "stream_options", "stop",
    "max_completion_tokens", "max_tokens",
    "modalities", "prediction",
    "presence_penalty", "frequency_penalty", "logit_bias",
    "user", "response_format", "seed",
    "tool_choice", "parallel_tool_calls",
    "logprobs", "top_logprobs",
    "deployment_id", "reasoning_effort",
    "base_url", "api_base", "api_version", "model_list",
)
82
# Allowlist of litellm text-completion request kwargs that are copied into
# span metadata by get_metadata_from_kwargs(); any kwarg not listed here is
# dropped. Order matches the upstream litellm text-completion API listing.
LITELLM_METADATA_COMPLETION_KEYS = (
    "best_of", "echo", "frequency_penalty", "logit_bias",
    "logprobs", "max_tokens", "n", "presence_penalty",
    "stop", "stream", "stream_options", "suffix",
    "temperature", "top_p", "user",
    "api_base", "api_version", "model_list", "custom_llm_provider",
)
103
+
53
104
54
105
def extract_model_name_google (instance , model_name_attr ):
55
106
"""Extract the model name from the instance.
@@ -299,12 +350,14 @@ def get_messages_from_converse_content(role: str, content: list):
299
350
return messages
300
351
301
352
302
- def openai_set_meta_tags_from_completion (span : Span , kwargs : Dict [str , Any ], completions : Any ) -> None :
353
+ def openai_set_meta_tags_from_completion (
354
+ span : Span , kwargs : Dict [str , Any ], completions : Any , integration_name : str = "openai"
355
+ ) -> None :
303
356
"""Extract prompt/response tags from a completion and set them as temporary "_ml_obs.meta.*" tags."""
304
357
prompt = kwargs .get ("prompt" , "" )
305
358
if isinstance (prompt , str ):
306
359
prompt = [prompt ]
307
- parameters = { k : v for k , v in kwargs . items () if k not in OPENAI_SKIPPED_COMPLETION_TAGS }
360
+ parameters = get_metadata_from_kwargs ( kwargs , integration_name , "completion" )
308
361
output_messages = [{"content" : "" }]
309
362
if not span .error and completions :
310
363
choices = getattr (completions , "choices" , completions )
@@ -318,15 +371,17 @@ def openai_set_meta_tags_from_completion(span: Span, kwargs: Dict[str, Any], com
318
371
)
319
372
320
373
321
- def openai_set_meta_tags_from_chat (span : Span , kwargs : Dict [str , Any ], messages : Optional [Any ]) -> None :
374
+ def openai_set_meta_tags_from_chat (
375
+ span : Span , kwargs : Dict [str , Any ], messages : Optional [Any ], integration_name : str = "openai"
376
+ ) -> None :
322
377
"""Extract prompt/response tags from a chat completion and set them as temporary "_ml_obs.meta.*" tags."""
323
378
input_messages = []
324
379
for m in kwargs .get ("messages" , []):
325
380
tool_call_id = m .get ("tool_call_id" )
326
381
if tool_call_id :
327
382
core .dispatch (DISPATCH_ON_TOOL_CALL_OUTPUT_USED , (tool_call_id , span ))
328
383
input_messages .append ({"content" : str (_get_attr (m , "content" , "" )), "role" : str (_get_attr (m , "role" , "" ))})
329
- parameters = { k : v for k , v in kwargs . items () if k not in OPENAI_SKIPPED_CHAT_TAGS }
384
+ parameters = get_metadata_from_kwargs ( kwargs , integration_name , "chat" )
330
385
span ._set_ctx_items ({INPUT_MESSAGES : input_messages , METADATA : parameters })
331
386
332
387
if span .error or not messages :
@@ -398,6 +453,19 @@ def openai_set_meta_tags_from_chat(span: Span, kwargs: Dict[str, Any], messages:
398
453
span ._set_ctx_item (OUTPUT_MESSAGES , output_messages )
399
454
400
455
456
def get_metadata_from_kwargs(
    kwargs: Dict[str, Any], integration_name: str = "openai", operation: str = "chat"
) -> Dict[str, Any]:
    """Select the request kwargs that should be recorded as span metadata.

    For the "openai" integration a denylist is applied: every kwarg is kept
    except those in OPENAI_SKIPPED_CHAT_TAGS / OPENAI_SKIPPED_COMPLETION_TAGS
    (chosen by ``operation``). For "litellm" an allowlist is applied instead,
    keeping only keys in LITELLM_METADATA_CHAT_KEYS /
    LITELLM_METADATA_COMPLETION_KEYS. Any other integration name yields an
    empty dict.
    """
    if integration_name == "openai":
        denied = OPENAI_SKIPPED_CHAT_TAGS if operation == "chat" else OPENAI_SKIPPED_COMPLETION_TAGS
        return {key: value for key, value in kwargs.items() if key not in denied}
    if integration_name == "litellm":
        allowed = LITELLM_METADATA_CHAT_KEYS if operation == "chat" else LITELLM_METADATA_COMPLETION_KEYS
        return {key: value for key, value in kwargs.items() if key in allowed}
    # Unknown integration: record no metadata rather than guessing a policy.
    return {}
467
+
468
+
401
469
def openai_get_input_messages_from_response_input (
402
470
messages : Optional [Union [str , List [Dict [str , Any ]]]]
403
471
) -> List [Dict [str , Any ]]:
0 commit comments