Skip to content

Commit 7d5ea5d

Browse files
committed
add ProtoJSON compatibility tests
1 parent 6a0c46f commit 7d5ea5d

File tree

6 files changed

+759
-408
lines changed

6 files changed

+759
-408
lines changed

exporter/opentelemetry-exporter-otlp-json-common/test-requirements.txt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ importlib-metadata==6.11.0
33
iniconfig==2.0.0
44
packaging==24.0
55
pluggy==1.6.0
6+
protobuf==6.31.1
67
py-cpuinfo==9.0.0
78
pytest==7.4.4
89
tomli==2.0.1
@@ -14,4 +15,6 @@ zipp==3.19.2
1415
-e opentelemetry-semantic-conventions
1516
-e tests/opentelemetry-test-utils
1617
-e opentelemetry-proto-json
18+
-e opentelemetry-proto
1719
-e exporter/opentelemetry-exporter-otlp-json-common
20+
-e exporter/opentelemetry-exporter-otlp-proto-common

exporter/opentelemetry-exporter-otlp-json-common/tests/__init__.py

Lines changed: 329 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,50 @@
1515
import dataclasses
1616
import unittest
1717

18+
from opentelemetry._logs import LogRecord, SeverityNumber
19+
from opentelemetry.sdk._logs import ReadableLogRecord
20+
from opentelemetry.sdk.metrics.export import (
21+
AggregationTemporality,
22+
Buckets,
23+
ExponentialHistogram,
24+
ExponentialHistogramDataPoint,
25+
Gauge,
26+
Histogram,
27+
HistogramDataPoint,
28+
Metric,
29+
MetricsData,
30+
NumberDataPoint,
31+
ResourceMetrics,
32+
ScopeMetrics,
33+
Sum,
34+
)
35+
from opentelemetry.sdk.resources import Resource
36+
from opentelemetry.sdk.trace import SpanContext, _Span
37+
from opentelemetry.sdk.util.instrumentation import InstrumentationScope
38+
from opentelemetry.trace import (
39+
NonRecordingSpan,
40+
TraceFlags,
41+
set_span_in_context,
42+
)
43+
44+
# ---------------------------------------------------------------------------
45+
# Shared constants
46+
# ---------------------------------------------------------------------------
47+
48+
# Fixed trace/span identifiers shared by the span, log, and metric fixtures
# below so serialized output is deterministic across test runs.
TRACE_ID = 0x3E0C63257DE34C926F9EFCD03927272E
SPAN_ID = 0x34BF92DEEFC58C92
PARENT_SPAN_ID = 0x1111111111111111
# Fixed timestamps, in nanoseconds since the Unix epoch.
BASE_TIME = 683647322 * 10**9
START_TIME = 1641946015139533244
TIME = 1641946016139533244

# Sentinel distinguishing "argument not supplied" from an explicit None,
# since None is itself a meaningful value for instrumentation_scope.
_UNSET = object()
56+
57+
58+
# ---------------------------------------------------------------------------
59+
# Assertion utilities
60+
# ---------------------------------------------------------------------------
61+
1862

1963
def _is_none_equivalent(val_a, val_b):
2064
"""Check if two values should be treated as equal because one is None
@@ -61,3 +105,288 @@ def assert_proto_json_equal(
61105
test_case.assertEqual(
62106
obj_a, obj_b, f"Mismatch at {path}: {obj_a!r} != {obj_b!r}"
63107
)
108+
109+
110+
# ---------------------------------------------------------------------------
111+
# Span builders
112+
# ---------------------------------------------------------------------------
113+
114+
115+
def make_span_unended(
    name="test-span",
    trace_id=TRACE_ID,
    span_id=SPAN_ID,
    parent=None,
    resource=None,
    instrumentation_scope=None,
    events=(),
    links=(),
    start_time=BASE_TIME,
):
    """Build and start — but do not end — an SDK ``_Span`` test fixture.

    All arguments have deterministic defaults so two calls with the same
    arguments produce spans that serialize identically.
    """
    ctx = SpanContext(
        trace_id,
        span_id,
        is_remote=False,
        trace_flags=TraceFlags(TraceFlags.SAMPLED),
    )
    new_span = _Span(
        name=name,
        context=ctx,
        parent=parent,
        # Fall back to an empty Resource when the caller supplied none.
        resource=resource if resource is not None else Resource({}),
        instrumentation_scope=instrumentation_scope,
        events=events,
        links=links,
    )
    new_span.start(start_time=start_time)
    return new_span
144+
145+
146+
def make_span(
    name="test-span",
    trace_id=TRACE_ID,
    span_id=SPAN_ID,
    parent=None,
    resource=None,
    instrumentation_scope=None,
    events=(),
    links=(),
    start_time=BASE_TIME,
    end_time=BASE_TIME + 50 * 10**6,
):
    """Build a started-and-ended SDK ``_Span`` test fixture.

    Delegates to :func:`make_span_unended` and then ends the span at
    ``end_time`` (default: 50 ms after ``start_time``'s default).
    """
    finished = make_span_unended(
        name=name,
        trace_id=trace_id,
        span_id=span_id,
        parent=parent,
        resource=resource,
        instrumentation_scope=instrumentation_scope,
        events=events,
        links=links,
        start_time=start_time,
    )
    finished.end(end_time=end_time)
    return finished
171+
172+
173+
# ---------------------------------------------------------------------------
174+
# Log builders
175+
# ---------------------------------------------------------------------------
176+
177+
178+
def make_log_context(trace_id=TRACE_ID, span_id=SPAN_ID):
    """Return a Context carrying a sampled, non-recording span.

    Used to attach trace correlation (trace_id/span_id) to log fixtures.
    """
    span_ctx = SpanContext(trace_id, span_id, False, TraceFlags(0x01))
    carrier = NonRecordingSpan(span_ctx)
    return set_span_in_context(carrier)
184+
185+
186+
def make_log(
    body="test log message",
    severity_text="INFO",
    severity_number=SeverityNumber.INFO,
    attributes=None,
    timestamp=TIME,
    observed_timestamp=TIME + 1000,
    resource=None,
    instrumentation_scope=_UNSET,
    event_name=None,
    context=None,
    limits=None,
):
    """Build a ``ReadableLogRecord`` test fixture with deterministic defaults.

    ``instrumentation_scope`` defaults to the ``_UNSET`` sentinel so callers
    can pass an explicit ``None`` to mean "no scope"; ``context`` and
    ``limits`` are only forwarded when explicitly supplied.
    """
    record_kwargs = {
        "timestamp": timestamp,
        "observed_timestamp": observed_timestamp,
        "severity_text": severity_text,
        "severity_number": severity_number,
        "body": body,
        "attributes": attributes or {},
        "event_name": event_name,
    }
    if context is not None:
        record_kwargs["context"] = context

    # Distinguish "not supplied" (use default scope) from an explicit None.
    if instrumentation_scope is _UNSET:
        scope = InstrumentationScope("test_scope", "1.0")
    else:
        scope = instrumentation_scope

    wrapper_kwargs = {
        "resource": resource or Resource({}),
        "instrumentation_scope": scope,
    }
    if limits is not None:
        wrapper_kwargs["limits"] = limits

    return ReadableLogRecord(LogRecord(**record_kwargs), **wrapper_kwargs)
221+
222+
223+
# ---------------------------------------------------------------------------
224+
# Metric builders
225+
# ---------------------------------------------------------------------------
226+
227+
228+
def make_metrics_data(
    metrics,
    resource_attrs=None,
    resource_schema_url=None,
    scope_name="test_scope",
    scope_version="1.0",
    scope_schema_url=None,
):
    """Wrap *metrics* in a single-resource, single-scope ``MetricsData``.

    ``scope_schema_url`` is applied to both the InstrumentationScope and the
    ScopeMetrics wrapper, mirroring how the SDK populates both fields.
    """
    scope = InstrumentationScope(
        name=scope_name,
        version=scope_version,
        schema_url=scope_schema_url,
    )
    scope_metrics = ScopeMetrics(
        scope=scope,
        metrics=metrics,
        schema_url=scope_schema_url,
    )
    resource_metrics = ResourceMetrics(
        resource=Resource(
            attributes=resource_attrs or {},
            schema_url=resource_schema_url,
        ),
        scope_metrics=[scope_metrics],
        schema_url=resource_schema_url,
    )
    return MetricsData(resource_metrics=[resource_metrics])
258+
259+
260+
def make_sum(
    name="test_sum",
    value=33,
    attributes=None,
    temporality=AggregationTemporality.CUMULATIVE,
    is_monotonic=True,
    description="desc",
    unit="s",
):
    """Build a ``Metric`` carrying a single-point ``Sum``."""
    point = NumberDataPoint(
        attributes=attributes or {"a": 1},
        start_time_unix_nano=START_TIME,
        time_unix_nano=TIME,
        value=value,
    )
    sum_data = Sum(
        data_points=[point],
        aggregation_temporality=temporality,
        is_monotonic=is_monotonic,
    )
    return Metric(
        name=name,
        description=description,
        unit=unit,
        data=sum_data,
    )
286+
287+
288+
def make_gauge(
    name="test_gauge",
    value=9000,
    attributes=None,
    description="desc",
    unit="1",
):
    """Build a ``Metric`` carrying a single-point ``Gauge``.

    Gauges have no aggregation window, so the data point carries no start
    timestamp (``start_time_unix_nano=None``).
    """
    point = NumberDataPoint(
        attributes=attributes or {"a": 1},
        start_time_unix_nano=None,
        time_unix_nano=TIME,
        value=value,
    )
    return Metric(
        name=name,
        description=description,
        unit=unit,
        data=Gauge(data_points=[point]),
    )
310+
311+
312+
def make_histogram(
    name="test_histogram",
    attributes=None,
    count=5,
    sum_value=67,
    bucket_counts=None,
    explicit_bounds=None,
    min_value=8,
    max_value=18,
    exemplars=None,
    temporality=AggregationTemporality.DELTA,
    description="desc",
    unit="ms",
):
    """Build a ``Metric`` carrying a single-point ``Histogram``."""
    point = HistogramDataPoint(
        attributes=attributes or {"a": 1},
        start_time_unix_nano=START_TIME,
        time_unix_nano=TIME,
        count=count,
        sum=sum_value,
        # Two buckets split at 10.0/20.0 by default.
        bucket_counts=bucket_counts or [1, 4],
        explicit_bounds=explicit_bounds or [10.0, 20.0],
        min=min_value,
        max=max_value,
        exemplars=exemplars or [],
    )
    histogram = Histogram(
        data_points=[point],
        aggregation_temporality=temporality,
    )
    return Metric(
        name=name,
        description=description,
        unit=unit,
        data=histogram,
    )
348+
349+
350+
def make_exponential_histogram(
    name="test_exp_hist",
    attributes=None,
    count=10,
    sum_value=100.5,
    scale=1,
    zero_count=2,
    positive=None,
    negative=None,
    flags=0,
    min_value=1.0,
    max_value=50.0,
    exemplars=None,
    temporality=AggregationTemporality.CUMULATIVE,
    description="desc",
    unit="s",
):
    """Build a ``Metric`` carrying a single-point ``ExponentialHistogram``."""
    # Default bucket layouts when the caller does not supply its own.
    pos_buckets = positive or Buckets(offset=0, bucket_counts=[1, 2, 3])
    neg_buckets = negative or Buckets(offset=1, bucket_counts=[1])
    point = ExponentialHistogramDataPoint(
        attributes=attributes or {"a": 1},
        start_time_unix_nano=START_TIME,
        time_unix_nano=TIME,
        count=count,
        sum=sum_value,
        scale=scale,
        zero_count=zero_count,
        positive=pos_buckets,
        negative=neg_buckets,
        flags=flags,
        min=min_value,
        max=max_value,
        exemplars=exemplars or [],
    )
    exp_histogram = ExponentialHistogram(
        data_points=[point],
        aggregation_temporality=temporality,
    )
    return Metric(
        name=name,
        description=description,
        unit=unit,
        data=exp_histogram,
    )

0 commit comments

Comments
 (0)