diff --git a/ddtrace/llmobs/_constants.py b/ddtrace/llmobs/_constants.py index 3c61053df68..1f1fbaa3eee 100644 --- a/ddtrace/llmobs/_constants.py +++ b/ddtrace/llmobs/_constants.py @@ -13,7 +13,6 @@ INPUT_DOCUMENTS = "_ml_obs.meta.input.documents" INPUT_MESSAGES = "_ml_obs.meta.input.messages" INPUT_VALUE = "_ml_obs.meta.input.value" -INPUT_PARAMETERS = "_ml_obs.meta.input.parameters" INPUT_PROMPT = "_ml_obs.meta.input.prompt" OUTPUT_DOCUMENTS = "_ml_obs.meta.output.documents" diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index 768f4bdb292..91fd7959b50 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -35,7 +35,6 @@ from ddtrace.llmobs._constants import ANNOTATIONS_CONTEXT_ID from ddtrace.llmobs._constants import INPUT_DOCUMENTS from ddtrace.llmobs._constants import INPUT_MESSAGES -from ddtrace.llmobs._constants import INPUT_PARAMETERS from ddtrace.llmobs._constants import INPUT_PROMPT from ddtrace.llmobs._constants import INPUT_VALUE from ddtrace.llmobs._constants import METADATA @@ -148,8 +147,6 @@ def _llmobs_span_event(cls, span: Span) -> Dict[str, Any]: meta["model_name"] = span._get_ctx_item(MODEL_NAME) meta["model_provider"] = (span._get_ctx_item(MODEL_PROVIDER) or "custom").lower() meta["metadata"] = span._get_ctx_item(METADATA) or {} - if span._get_ctx_item(INPUT_PARAMETERS): - meta["input"]["parameters"] = span._get_ctx_item(INPUT_PARAMETERS) if span_kind == "llm" and span._get_ctx_item(INPUT_MESSAGES) is not None: meta["input"]["messages"] = span._get_ctx_item(INPUT_MESSAGES) if span._get_ctx_item(INPUT_VALUE) is not None: @@ -710,7 +707,6 @@ def retrieval( def annotate( cls, span: Optional[Span] = None, - parameters: Optional[Dict[str, Any]] = None, prompt: Optional[dict] = None, input_data: Optional[Any] = None, output_data: Optional[Any] = None, @@ -720,7 +716,7 @@ def annotate( _name: Optional[str] = None, ) -> None: """ - Sets parameters, inputs, outputs, tags, and metrics as provided for a given LLMObs 
span. + Sets metadata, inputs, outputs, tags, and metrics as provided for a given LLMObs span. Note that with the exception of tags, this method will override any existing values for the provided fields. :param Span span: Span to annotate. If no span is provided, the current active span will be used. @@ -747,7 +743,6 @@ def annotate( {"name": str, "id": str, "text": str, "score": float}, or a list of dictionaries with the same signature. - other: any JSON serializable type. - :param parameters: (DEPRECATED) Dictionary of JSON serializable key-value pairs to set as input parameters. :param metadata: Dictionary of JSON serializable key-value metadata pairs relevant to the input/output operation described by the LLMObs span. :param tags: Dictionary of JSON serializable key-value tag pairs to set or update on the LLMObs span @@ -773,9 +768,6 @@ def annotate( if tags is not None: cls._tag_span_tags(span, tags) span_kind = span._get_ctx_item(SPAN_KIND) - if parameters is not None: - log.warning("Setting parameters is deprecated, please set parameters and other metadata as tags instead.") - cls._tag_params(span, parameters) if _name is not None: span.name = _name if prompt is not None: @@ -803,16 +795,6 @@ def _tag_prompt(span, prompt: dict) -> None: log.warning("Failed to validate prompt with error: ", exc_info=True) return - @staticmethod - def _tag_params(span: Span, params: Dict[str, Any]) -> None: - """Tags input parameters for a given LLMObs span. - Will be mapped to span's `meta.input.parameters` field. - """ - if not isinstance(params, dict): - log.warning("parameters must be a dictionary of key-value pairs.") - return - span._set_ctx_item(INPUT_PARAMETERS, params) - @classmethod def _tag_llm_io(cls, span, input_messages=None, output_messages=None): """Tags input/output messages for LLM-kind spans. 
diff --git a/releasenotes/notes/remove-llmobs-parameters-annotation-97406f9cece3fac4.yaml b/releasenotes/notes/remove-llmobs-parameters-annotation-97406f9cece3fac4.yaml new file mode 100644 index 00000000000..4f92056ce53 --- /dev/null +++ b/releasenotes/notes/remove-llmobs-parameters-annotation-97406f9cece3fac4.yaml @@ -0,0 +1,4 @@ +--- +upgrade: + - | + LLM Observability: Removes the deprecated ``parameters`` argument from ``LLMObs.annotate()``. Use ``metadata`` instead. diff --git a/tests/llmobs/_utils.py b/tests/llmobs/_utils.py index 8343aee530e..f9cff969c8a 100644 --- a/tests/llmobs/_utils.py +++ b/tests/llmobs/_utils.py @@ -62,7 +62,6 @@ def _expected_llmobs_llm_span_event( input_documents=None, output_messages=None, output_value=None, - parameters=None, metadata=None, token_metrics=None, model_name=None, @@ -78,7 +77,6 @@ span_kind: either "llm" or "agent" or "embedding" input_messages: list of input messages in format {"content": "...", "optional_role", "..."} output_messages: list of output messages in format {"content": "...", "optional_role", "..."} - parameters: dict of input parameters metadata: dict of metadata key value pairs token_metrics: dict of token metrics (e.g. 
prompt_tokens, completion_tokens, total_tokens) model_name: name of the model @@ -112,8 +110,6 @@ def _expected_llmobs_llm_span_event( if model_provider is not None: meta_dict.update({"model_provider": model_provider}) meta_dict.update({"metadata": metadata or {}}) - if parameters is not None: - meta_dict["input"].update({"parameters": parameters}) span_event["meta"].update(meta_dict) if token_metrics is not None: span_event["metrics"].update(token_metrics) @@ -126,7 +122,6 @@ def _expected_llmobs_non_llm_span_event( input_value=None, output_value=None, output_documents=None, - parameters=None, metadata=None, token_metrics=None, tags=None, @@ -140,7 +135,6 @@ def _expected_llmobs_non_llm_span_event( span_kind: one of "workflow", "task", "tool", "retrieval" input_value: input value string output_value: output value string - parameters: dict of input parameters metadata: dict of metadata key value pairs token_metrics: dict of token metrics (e.g. prompt_tokens, completion_tokens, total_tokens) tags: dict of tags to add/override on span @@ -160,8 +154,6 @@ def _expected_llmobs_non_llm_span_event( meta_dict["output"].update({"value": output_value}) if input_value is not None: meta_dict["input"].update({"value": input_value}) - if parameters is not None: - meta_dict["input"].update({"parameters": parameters}) meta_dict.update({"metadata": metadata or {}}) if output_value is not None: meta_dict["output"].update({"value": output_value}) @@ -275,7 +267,6 @@ def _completion_event(): "model_provider": "openai", "input": { "messages": [{"content": "who broke enigma?"}], - "parameters": {"temperature": 0, "max_tokens": 256}, }, "output": { "messages": [ @@ -284,6 +275,7 @@ def _completion_event(): } ] }, + "metadata": {"temperature": 0, "max_tokens": 256}, }, "metrics": {"input_tokens": 64, "output_tokens": 128, "total_tokens": 192}, } @@ -312,7 +304,6 @@ def _chat_completion_event(): }, {"role": "user", "content": "I am a hobbit looking to go to Mordor"}, ], - "parameters": 
{"temperature": 0.9, "max_tokens": 256}, }, "output": { "messages": [ @@ -322,6 +313,7 @@ def _chat_completion_event(): }, ] }, + "metadata": {"temperature": 0.9, "max_tokens": 256}, }, "metrics": {"input_tokens": 64, "output_tokens": 128, "total_tokens": 192}, } diff --git a/tests/llmobs/test_llmobs.py b/tests/llmobs/test_llmobs.py index 004b77b5764..086e867c787 100644 --- a/tests/llmobs/test_llmobs.py +++ b/tests/llmobs/test_llmobs.py @@ -114,14 +114,6 @@ def test_input_messages_are_set(tracer, llmobs_events): assert llmobs_events[0]["meta"]["input"]["messages"] == [{"content": "message", "role": "user"}] -def test_input_parameters_are_set(tracer, llmobs_events): - """Test that input parameters are set on the span event if they are present on the span.""" - with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: - llm_span._set_ctx_item(const.SPAN_KIND, "llm") - llm_span._set_ctx_item(const.INPUT_PARAMETERS, {"key": "value"}) - assert llmobs_events[0]["meta"]["input"]["parameters"] == {"key": "value"} - - def test_output_messages_are_set(tracer, llmobs_events): """Test that output messages are set on the span event if they are present on the span.""" with tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: diff --git a/tests/llmobs/test_llmobs_decorators.py b/tests/llmobs/test_llmobs_decorators.py index 056de72ee96..c293c4ca46d 100644 --- a/tests/llmobs/test_llmobs_decorators.py +++ b/tests/llmobs/test_llmobs_decorators.py @@ -271,7 +271,7 @@ def test_llm_annotate(llmobs, llmobs_events): @llm(model_name="test_model", model_provider="test_provider", name="test_function", session_id="test_session_id") def f(): llmobs.annotate( - parameters={"temperature": 0.9, "max_tokens": 50}, + metadata={"temperature": 0.9, "max_tokens": 50}, input_data=[{"content": "test_prompt"}], output_data=[{"content": "test_response"}], tags={"custom_tag": "tag_value"}, @@ -287,7 +287,7 @@ def f(): model_provider="test_provider", input_messages=[{"content": 
"test_prompt"}], output_messages=[{"content": "test_response"}], - parameters={"temperature": 0.9, "max_tokens": 50}, + metadata={"temperature": 0.9, "max_tokens": 50}, token_metrics={"input_tokens": 10, "output_tokens": 20, "total_tokens": 30}, tags={"custom_tag": "tag_value"}, session_id="test_session_id", @@ -298,7 +298,7 @@ def test_llm_annotate_raw_string_io(llmobs, llmobs_events): @llm(model_name="test_model", model_provider="test_provider", name="test_function", session_id="test_session_id") def f(): llmobs.annotate( - parameters={"temperature": 0.9, "max_tokens": 50}, + metadata={"temperature": 0.9, "max_tokens": 50}, input_data="test_prompt", output_data="test_response", tags={"custom_tag": "tag_value"}, @@ -314,7 +314,7 @@ def f(): model_provider="test_provider", input_messages=[{"content": "test_prompt"}], output_messages=[{"content": "test_response"}], - parameters={"temperature": 0.9, "max_tokens": 50}, + metadata={"temperature": 0.9, "max_tokens": 50}, token_metrics={"input_tokens": 10, "output_tokens": 20, "total_tokens": 30}, tags={"custom_tag": "tag_value"}, session_id="test_session_id", diff --git a/tests/llmobs/test_llmobs_service.py b/tests/llmobs/test_llmobs_service.py index de428999147..deac267fac0 100644 --- a/tests/llmobs/test_llmobs_service.py +++ b/tests/llmobs/test_llmobs_service.py @@ -13,7 +13,6 @@ from ddtrace.llmobs import LLMObs as llmobs_service from ddtrace.llmobs._constants import INPUT_DOCUMENTS from ddtrace.llmobs._constants import INPUT_MESSAGES -from ddtrace.llmobs._constants import INPUT_PARAMETERS from ddtrace.llmobs._constants import INPUT_PROMPT from ddtrace.llmobs._constants import INPUT_VALUE from ddtrace.llmobs._constants import IS_EVALUATION_SPAN @@ -356,33 +355,24 @@ def test_embedding_span(llmobs, llmobs_events): def test_annotate_no_active_span_logs_warning(llmobs, mock_llmobs_logs): - llmobs.annotate(parameters={"test": "test"}) + llmobs.annotate(metadata={"test": "test"}) 
mock_llmobs_logs.warning.assert_called_once_with("No span provided and no active LLMObs-generated span found.") def test_annotate_non_llm_span_logs_warning(llmobs, mock_llmobs_logs): dummy_tracer = DummyTracer() with dummy_tracer.trace("root") as non_llmobs_span: - llmobs.annotate(span=non_llmobs_span, parameters={"test": "test"}) + llmobs.annotate(span=non_llmobs_span, metadata={"test": "test"}) mock_llmobs_logs.warning.assert_called_once_with("Span must be an LLMObs-generated span.") def test_annotate_finished_span_does_nothing(llmobs, mock_llmobs_logs): with llmobs.llm(model_name="test_model", name="test_llm_call", model_provider="test_provider") as span: pass - llmobs.annotate(span=span, parameters={"test": "test"}) + llmobs.annotate(span=span, metadata={"test": "test"}) mock_llmobs_logs.warning.assert_called_once_with("Cannot annotate a finished span.") -def test_annotate_parameters(llmobs, mock_llmobs_logs): - with llmobs.llm(model_name="test_model", name="test_llm_call", model_provider="test_provider") as span: - llmobs.annotate(span=span, parameters={"temperature": 0.9, "max_tokens": 50}) - assert span._get_ctx_item(INPUT_PARAMETERS) == {"temperature": 0.9, "max_tokens": 50} - mock_llmobs_logs.warning.assert_called_once_with( - "Setting parameters is deprecated, please set parameters and other metadata as tags instead." - ) - - def test_annotate_metadata(llmobs): with llmobs.llm(model_name="test_model", name="test_llm_call", model_provider="test_provider") as span: llmobs.annotate(span=span, metadata={"temperature": 0.5, "max_tokens": 20, "top_k": 10, "n": 3})