Skip to content

Commit 3debca0

Browse files
authored
ref(ai): Rename generate_text to text_completion (#5885)
1 parent 7ee6918 commit 3debca0

File tree

5 files changed: +26 additions, −18 deletions

sentry_sdk/consts.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -921,7 +921,7 @@ class OP:
921921
GEN_AI_CREATE_AGENT = "gen_ai.create_agent"
922922
GEN_AI_EMBEDDINGS = "gen_ai.embeddings"
923923
GEN_AI_EXECUTE_TOOL = "gen_ai.execute_tool"
924-
GEN_AI_GENERATE_TEXT = "gen_ai.generate_text"
924+
GEN_AI_TEXT_COMPLETION = "gen_ai.text_completion"
925925
GEN_AI_HANDOFF = "gen_ai.handoff"
926926
GEN_AI_PIPELINE = "gen_ai.pipeline"
927927
GEN_AI_INVOKE_AGENT = "gen_ai.invoke_agent"

sentry_sdk/integrations/huggingface_hub.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ def setup_once() -> None:
4141
huggingface_hub.inference._client.InferenceClient.text_generation = (
4242
_wrap_huggingface_task(
4343
huggingface_hub.inference._client.InferenceClient.text_generation,
44-
OP.GEN_AI_GENERATE_TEXT,
44+
OP.GEN_AI_TEXT_COMPLETION,
4545
)
4646
)
4747
huggingface_hub.inference._client.InferenceClient.chat_completion = (

sentry_sdk/integrations/langchain.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -377,13 +377,13 @@ def on_llm_start(
377377
watched_span = self._create_span(
378378
run_id,
379379
parent_run_id,
380-
op=OP.GEN_AI_GENERATE_TEXT,
381-
name=f"generate_text {model}".strip(),
380+
op=OP.GEN_AI_TEXT_COMPLETION,
381+
name=f"text_completion {model}".strip(),
382382
origin=LangchainIntegration.origin,
383383
)
384384
span = watched_span.span
385385

386-
span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "generate_text")
386+
span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "text_completion")
387387

388388
pipeline_name = kwargs.get("name")
389389
if pipeline_name:

tests/integrations/huggingface_hub/test_huggingface_hub.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -507,12 +507,12 @@ def test_text_generation(
507507

508508
assert span is not None
509509

510-
assert span["op"] == "gen_ai.generate_text"
511-
assert span["description"] == "generate_text test-model"
510+
assert span["op"] == "gen_ai.text_completion"
511+
assert span["description"] == "text_completion test-model"
512512
assert span["origin"] == "auto.ai.huggingface_hub"
513513

514514
expected_data = {
515-
"gen_ai.operation.name": "generate_text",
515+
"gen_ai.operation.name": "text_completion",
516516
"gen_ai.request.model": "test-model",
517517
"gen_ai.response.finish_reasons": "length",
518518
"gen_ai.response.streaming": False,
@@ -576,12 +576,12 @@ def test_text_generation_streaming(
576576

577577
assert span is not None
578578

579-
assert span["op"] == "gen_ai.generate_text"
580-
assert span["description"] == "generate_text test-model"
579+
assert span["op"] == "gen_ai.text_completion"
580+
assert span["description"] == "text_completion test-model"
581581
assert span["origin"] == "auto.ai.huggingface_hub"
582582

583583
expected_data = {
584-
"gen_ai.operation.name": "generate_text",
584+
"gen_ai.operation.name": "text_completion",
585585
"gen_ai.request.model": "test-model",
586586
"gen_ai.response.finish_reasons": "length",
587587
"gen_ai.response.streaming": True,

tests/integrations/langchain/test_langchain.py

Lines changed: 15 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -153,12 +153,14 @@ def test_langchain_text_completion(
153153
assert tx["type"] == "transaction"
154154

155155
llm_spans = [
156-
span for span in tx.get("spans", []) if span.get("op") == "gen_ai.generate_text"
156+
span
157+
for span in tx.get("spans", [])
158+
if span.get("op") == "gen_ai.text_completion"
157159
]
158160
assert len(llm_spans) > 0
159161

160162
llm_span = llm_spans[0]
161-
assert llm_span["description"] == "generate_text gpt-3.5-turbo"
163+
assert llm_span["description"] == "text_completion gpt-3.5-turbo"
162164
assert llm_span["data"]["gen_ai.system"] == "openai"
163165
assert llm_span["data"]["gen_ai.pipeline.name"] == "my-snazzy-pipeline"
164166
assert llm_span["data"]["gen_ai.request.model"] == "gpt-3.5-turbo"
@@ -1294,12 +1296,14 @@ def test_langchain_message_truncation(sentry_init, capture_events):
12941296
assert tx["type"] == "transaction"
12951297

12961298
llm_spans = [
1297-
span for span in tx.get("spans", []) if span.get("op") == "gen_ai.generate_text"
1299+
span
1300+
for span in tx.get("spans", [])
1301+
if span.get("op") == "gen_ai.text_completion"
12981302
]
12991303
assert len(llm_spans) > 0
13001304

13011305
llm_span = llm_spans[0]
1302-
assert llm_span["data"]["gen_ai.operation.name"] == "generate_text"
1306+
assert llm_span["data"]["gen_ai.operation.name"] == "text_completion"
13031307
assert llm_span["data"][SPANDATA.GEN_AI_PIPELINE_NAME] == "my_pipeline"
13041308

13051309
assert SPANDATA.GEN_AI_REQUEST_MESSAGES in llm_span["data"]
@@ -2010,12 +2014,14 @@ def test_langchain_response_model_extraction(
20102014
assert tx["type"] == "transaction"
20112015

20122016
llm_spans = [
2013-
span for span in tx.get("spans", []) if span.get("op") == "gen_ai.generate_text"
2017+
span
2018+
for span in tx.get("spans", [])
2019+
if span.get("op") == "gen_ai.text_completion"
20142020
]
20152021
assert len(llm_spans) > 0
20162022

20172023
llm_span = llm_spans[0]
2018-
assert llm_span["data"]["gen_ai.operation.name"] == "generate_text"
2024+
assert llm_span["data"]["gen_ai.operation.name"] == "text_completion"
20192025

20202026
if expected_model is not None:
20212027
assert SPANDATA.GEN_AI_RESPONSE_MODEL in llm_span["data"]
@@ -2311,7 +2317,9 @@ def test_langchain_ai_system_detection(
23112317
assert tx["type"] == "transaction"
23122318

23132319
llm_spans = [
2314-
span for span in tx.get("spans", []) if span.get("op") == "gen_ai.generate_text"
2320+
span
2321+
for span in tx.get("spans", [])
2322+
if span.get("op") == "gen_ai.text_completion"
23152323
]
23162324
assert len(llm_spans) > 0
23172325

Comments (0)