Skip to content

Commit dab9734

Browse files
authored
Merge branch 'main' into drop-logging-handler-setup-genai
2 parents 74c7448 + 7e49db4 commit dab9734

10 files changed

Lines changed: 168 additions & 19 deletions

File tree

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -115,6 +115,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
115115
([#4175](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/4175))
116116
- `opentelemetry-docker-tests` Fix docker-tests' assumption about the scope of metrics in the Postgres-SQLAlchemy case
117117
([#4258](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/4258))
118+
- `opentelemetry-instrumentation-threading`: fix AttributeError when a Thread's run() is called without start() having been called first
119+
([#4246](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/4246))
118120

119121
### Breaking changes
120122

gen-requirements.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,3 +7,5 @@ requests
77
tomli
88
tomli_w
99
hatch
10+
# TODO: stick with virtualenv < 21 until a new hatch release
11+
virtualenv<21

instrumentation-genai/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
| --------------- | ------------------ | --------------- | -------------- |
44
| [opentelemetry-instrumentation-anthropic](./opentelemetry-instrumentation-anthropic) | anthropic >= 0.16.0 | No | development
55
| [opentelemetry-instrumentation-claude-agent-sdk](./opentelemetry-instrumentation-claude-agent-sdk) | claude-agent-sdk >= 0.1.14 | No | development
6-
| [opentelemetry-instrumentation-google-genai](./opentelemetry-instrumentation-google-genai) | google-genai >= 1.0.0 | No | development
6+
| [opentelemetry-instrumentation-google-genai](./opentelemetry-instrumentation-google-genai) | google-genai >= 1.32.0 | No | development
77
| [opentelemetry-instrumentation-langchain](./opentelemetry-instrumentation-langchain) | langchain >= 0.3.21 | No | development
88
| [opentelemetry-instrumentation-openai-agents-v2](./opentelemetry-instrumentation-openai-agents-v2) | openai-agents >= 0.3.3 | No | development
99
| [opentelemetry-instrumentation-openai-v2](./opentelemetry-instrumentation-openai-v2) | openai >= 1.26.0 | Yes | development

instrumentation-genai/opentelemetry-instrumentation-google-genai/src/opentelemetry/instrumentation/google_genai/package.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,4 +12,4 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
_instruments = ("google-genai >= 1.0.0",)
15+
_instruments = ("google-genai >= 1.32.0",)

instrumentation-genai/opentelemetry-instrumentation-google-genai/tests/generate_content/test_e2e.py

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,14 @@
4040
from google.genai import types
4141
from vcr.record_mode import RecordMode
4242

43+
try:
44+
# These modules are only supported in python >= 3.10
45+
from aiohttp.client_exceptions import ClientConnectionError
46+
from vcr.stubs import aiohttp_stubs
47+
except ImportError:
48+
ClientConnectionError = None
49+
aiohttp_stubs = None
50+
4351
from opentelemetry.instrumentation._semconv import (
4452
OTEL_SEMCONV_STABILITY_OPT_IN,
4553
_OpenTelemetrySemanticConventionStability,
@@ -135,6 +143,9 @@ def _redact_headers(headers):
135143

136144

137145
def _before_record_request(request):
146+
# aiohttp reports the request method in lower case while it is recorded in the cassette in upper case.
147+
if request.method:
148+
request.method = request.method.upper()
138149
if request.headers:
139150
_redact_headers(request.headers)
140151
uri = request.uri
@@ -316,6 +327,48 @@ def setup_vcr(vcr):
316327
return vcr
317328

318329

330+
@pytest.fixture(name="patch_vcr_aiohttp_stream", scope="module", autouse=True)
331+
def fixture_patch_vcr_aiohttp_stream():
332+
# Prevents the async tests from getting stuck in an infinite loop when streaming
333+
# a VCR cassette with aiohttp stubs.
334+
# https://github.com/kevin1024/vcrpy/issues/927
335+
if ClientConnectionError is None or aiohttp_stubs is None:
336+
return
337+
338+
class _ReplayMockStream(aiohttp_stubs.MockStream):
339+
# Keep vcrpy's stream behavior, but ignore aiohttp's
340+
# close-time ClientConnectionError("Connection closed") during
341+
# cassette replay, where the full response is already buffered
342+
# and this condition should be treated as normal EOF.
343+
def set_exception(self, exc):
344+
if isinstance(exc, ClientConnectionError) and exc.args == (
345+
"Connection closed",
346+
):
347+
return
348+
super().set_exception(exc)
349+
350+
class _ReplayMockClientResponse(aiohttp_stubs.MockClientResponse):
351+
def __init__(self, *args, **kwargs):
352+
super().__init__(*args, **kwargs)
353+
self._mock_content_stream = None
354+
355+
@property
356+
def content(self):
357+
# vcrpy's aiohttp MockClientResponse.content creates a fresh stream object
358+
# on every property access. google-genai async streaming repeatedly reads
359+
# response.content.readline() and expects the same stream instance until EOF is
360+
# reached.
361+
if self._mock_content_stream is None:
362+
body = self._body or b""
363+
stream = _ReplayMockStream()
364+
stream.feed_data(body)
365+
stream.feed_eof()
366+
self._mock_content_stream = stream
367+
return self._mock_content_stream
368+
369+
aiohttp_stubs.MockClientResponse = _ReplayMockClientResponse
370+
371+
319372
@pytest.fixture(name="instrumentor")
320373
def fixture_instrumentor():
321374
return GoogleGenAiSdkInstrumentor()

instrumentation-genai/opentelemetry-instrumentation-google-genai/tests/requirements.latest.txt

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,8 +40,10 @@ pytest==7.4.4
4040
pytest-asyncio==0.21.0
4141
pytest-vcr==1.0.2
4242

43-
google-auth==2.38.0
44-
google-genai==1.32.0
43+
google-auth==2.47.0
44+
45+
google-genai==1.47.0; python_version < "3.10"
46+
google-genai==1.64.0; python_version >= "3.10"
4547

4648
# Install locally from the folder. This path is relative to the
4749
# root directory, given invocation from "tox" at root level.

instrumentation/opentelemetry-instrumentation-confluent-kafka/src/opentelemetry/instrumentation/confluent_kafka/__init__.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -277,6 +277,9 @@ def instrumentation_dependencies(self) -> Collection[str]:
277277
return _instruments
278278

279279
def _instrument(self, **kwargs):
280+
# TODO: should probably wrap methods directly instead of going through
281+
# these classes. Hopefully it'll make the patching work if called after
282+
# the original classes have already been imported, #4270
280283
self._original_kafka_producer = confluent_kafka.Producer
281284
self._original_kafka_consumer = confluent_kafka.Consumer
282285

instrumentation/opentelemetry-instrumentation-confluent-kafka/tests/test_instrumentation.py

Lines changed: 47 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -12,11 +12,11 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
# pylint: disable=no-name-in-module
16-
17-
from confluent_kafka import Consumer, Producer
15+
# pylint: disable=no-name-in-module,import-outside-toplevel
1816

1917
from opentelemetry.instrumentation.confluent_kafka import (
18+
AutoInstrumentedConsumer,
19+
AutoInstrumentedProducer,
2020
ConfluentKafkaInstrumentor,
2121
ProxiedConsumer,
2222
ProxiedProducer,
@@ -41,6 +41,8 @@
4141

4242
class TestConfluentKafka(TestBase):
4343
def test_instrument_api(self) -> None:
44+
from confluent_kafka import Consumer, Producer # noqa: PLC0415
45+
4446
instrumentation = ConfluentKafkaInstrumentor()
4547

4648
producer = Producer({"bootstrap.servers": "localhost:29092"})
@@ -51,16 +53,22 @@ def test_instrument_api(self) -> None:
5153
producer = instrumentation.uninstrument_producer(producer)
5254
self.assertEqual(producer.__class__, Producer)
5355

54-
producer = Producer({"bootstrap.servers": "localhost:29092"})
55-
producer = instrumentation.instrument_producer(producer)
56+
consumer = Consumer(
57+
{
58+
"bootstrap.servers": "localhost:29092",
59+
"group.id": "mygroup",
60+
"auto.offset.reset": "earliest",
61+
}
62+
)
5663

57-
self.assertEqual(producer.__class__, ProxiedProducer)
64+
consumer = instrumentation.instrument_consumer(consumer)
65+
self.assertEqual(consumer.__class__, ProxiedConsumer)
5866

59-
producer = instrumentation.uninstrument_producer(producer)
60-
self.assertEqual(producer.__class__, Producer)
67+
consumer = instrumentation.uninstrument_consumer(consumer)
68+
self.assertEqual(consumer.__class__, Consumer)
6169

6270
consumer = Consumer(
63-
{
71+
**{
6472
"bootstrap.servers": "localhost:29092",
6573
"group.id": "mygroup",
6674
"auto.offset.reset": "earliest",
@@ -73,7 +81,37 @@ def test_instrument_api(self) -> None:
7381
consumer = instrumentation.uninstrument_consumer(consumer)
7482
self.assertEqual(consumer.__class__, Consumer)
7583

84+
def test_instrument_api_with_instrument(self) -> None:
85+
ConfluentKafkaInstrumentor().instrument()
86+
87+
from confluent_kafka import Consumer, Producer # noqa: PLC0415
88+
89+
producer = Producer({"bootstrap.servers": "localhost:29092"})
90+
self.assertEqual(producer.__class__, AutoInstrumentedProducer)
91+
92+
consumer = Consumer(
93+
{
94+
"bootstrap.servers": "localhost:29092",
95+
"group.id": "mygroup",
96+
"auto.offset.reset": "earliest",
97+
}
98+
)
99+
self.assertEqual(consumer.__class__, AutoInstrumentedConsumer)
100+
101+
consumer = Consumer(
102+
**{
103+
"bootstrap.servers": "localhost:29092",
104+
"group.id": "mygroup",
105+
"auto.offset.reset": "earliest",
106+
}
107+
)
108+
self.assertEqual(consumer.__class__, AutoInstrumentedConsumer)
109+
110+
ConfluentKafkaInstrumentor().uninstrument()
111+
76112
def test_consumer_commit_method_exists(self) -> None:
113+
from confluent_kafka import Consumer # noqa: PLC0415
114+
77115
instrumentation = ConfluentKafkaInstrumentor()
78116

79117
consumer = Consumer(

instrumentation/opentelemetry-instrumentation-threading/src/opentelemetry/instrumentation/threading/__init__.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -147,7 +147,8 @@ def __wrap_threading_run(
147147
) -> R:
148148
token = None
149149
try:
150-
token = context.attach(instance._otel_context)
150+
if hasattr(instance, "_otel_context"):
151+
token = context.attach(instance._otel_context)
151152
return call_wrapped(*args, **kwargs)
152153
finally:
153154
if token is not None:

instrumentation/opentelemetry-instrumentation-threading/tests/test_threading.py

Lines changed: 53 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,11 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15+
from __future__ import annotations
16+
1517
import threading
1618
from concurrent.futures import ( # pylint: disable=no-name-in-module; TODO #4199
19+
Future,
1720
ThreadPoolExecutor,
1821
)
1922
from typing import List
@@ -66,7 +69,7 @@ def test_trace_context_propagation_in_thread_pool_with_multiple_workers(
6669
executor = ThreadPoolExecutor(max_workers=max_workers)
6770

6871
expected_span_contexts: List[trace.SpanContext] = []
69-
futures_list = []
72+
futures_list: List[Future[trace.SpanContext]] = []
7073
for num in range(max_workers):
7174
with self._tracer.start_as_current_span(f"trace_{num}") as span:
7275
expected_span_context = span.get_span_context()
@@ -125,23 +128,23 @@ def fake_func(self):
125128
def get_current_span_context_for_test() -> trace.SpanContext:
126129
return trace.get_current_span().get_span_context()
127130

128-
def print_square(self, num):
131+
def print_square(self, num: int | float) -> int | float:
129132
with self._tracer.start_as_current_span("square"):
130133
return num * num
131134

132-
def print_cube(self, num):
135+
def print_cube(self, num: int | float) -> int | float:
133136
with self._tracer.start_as_current_span("cube"):
134137
return num * num * num
135138

136-
def print_square_with_thread(self, num):
139+
def print_square_with_thread(self, num: int | float) -> int | float:
137140
with self._tracer.start_as_current_span("square"):
138141
cube_thread = threading.Thread(target=self.print_cube, args=(10,))
139142

140143
cube_thread.start()
141144
cube_thread.join()
142145
return num * num
143146

144-
def calculate(self, num):
147+
def calculate(self, num: int | float) -> None:
145148
with self._tracer.start_as_current_span("calculate"):
146149
square_thread = threading.Thread(
147150
target=self.print_square, args=(num,)
@@ -294,3 +297,48 @@ def test_threadpool_with_valid_context_token(self, mock_detach: MagicMock):
294297
future = executor.submit(self.get_current_span_context_for_test)
295298
future.result()
296299
mock_detach.assert_called_once()
300+
301+
def test_threading_run_without_start(self):
302+
square_thread = threading.Thread(target=self.print_square, args=(10,))
303+
with self._tracer.start_as_current_span("root"):
304+
square_thread.run()
305+
306+
spans = self.memory_exporter.get_finished_spans()
307+
self.assertEqual(len(spans), 2)
308+
root_span = next(span for span in spans if span.name == "root")
309+
self.assertIsNotNone(root_span)
310+
self.assertIsNone(root_span.parent)
311+
square_span = next(span for span in spans if span.name == "square")
312+
self.assertIsNotNone(square_span)
313+
self.assertIs(square_span.parent, root_span.get_span_context())
314+
315+
def test_threading_run_with_custom_run(self):
316+
_tracer = self._tracer
317+
318+
class ThreadWithCustomRun(threading.Thread):
319+
def run(self):
320+
# don't call super().run() on purpose
321+
# Thread.run() cannot be called twice
322+
with _tracer.start_as_current_span("square"):
323+
pass
324+
325+
square_thread = ThreadWithCustomRun(
326+
target=self.print_square, args=(10,)
327+
)
328+
with self._tracer.start_as_current_span("run_1"):
329+
square_thread.run()
330+
with self._tracer.start_as_current_span("run_2"):
331+
square_thread.run()
332+
333+
spans = self.memory_exporter.get_finished_spans()
334+
self.assertEqual(len(spans), 4)
335+
run_1_span = next(span for span in spans if span.name == "run_1")
336+
run_2_span = next(span for span in spans if span.name == "run_2")
337+
square_spans = [span for span in spans if span.name == "square"]
338+
square_spans.sort(key=lambda x: x.start_time or 0)
339+
run_1_child_span = square_spans[0]
340+
run_2_child_span = square_spans[1]
341+
self.assertIs(run_1_child_span.parent, run_1_span.get_span_context())
342+
self.assertIs(run_2_child_span.parent, run_2_span.get_span_context())
343+
self.assertIsNone(run_1_span.parent)
344+
self.assertIsNone(run_2_span.parent)

0 commit comments

Comments
 (0)