Skip to content

Commit 243cdb7

Browse files
committed
Fixes
1 parent dfbdd8a commit 243cdb7

File tree

3 files changed

+120
-120
lines changed
  • exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http

3 files changed

+120
-120
lines changed

exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/_log_exporter/__init__.py

Lines changed: 59 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -203,69 +203,69 @@ def export(
203203
_logger.warning("Exporter already shutdown, ignoring batch")
204204
return LogRecordExportResult.FAILURE
205205

206-
finish_export = self._metrics.start_export(len(batch))
206+
with self._metrics.export_operation(len(batch)) as result:
207+
serialized_data = encode_logs(batch).SerializeToString()
208+
deadline_sec = time() + self._timeout
209+
for retry_num in range(_MAX_RETRYS):
210+
# multiplying by a random number between .8 and 1.2 introduces a +/-20% jitter to each backoff.
211+
backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2)
212+
export_error: Optional[Exception] = None
213+
try:
214+
resp = self._export(serialized_data, deadline_sec - time())
215+
if resp.ok:
216+
return LogRecordExportResult.SUCCESS
217+
except requests.exceptions.RequestException as error:
218+
reason = error
219+
export_error = error
220+
retryable = isinstance(error, ConnectionError)
221+
status_code = None
222+
else:
223+
reason = resp.reason
224+
retryable = _is_retryable(resp)
225+
status_code = resp.status_code
207226

208-
serialized_data = encode_logs(batch).SerializeToString()
209-
deadline_sec = time() + self._timeout
210-
for retry_num in range(_MAX_RETRYS):
211-
# multiplying by a random number between .8 and 1.2 introduces a +/-20% jitter to each backoff.
212-
backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2)
213-
export_error: Optional[Exception] = None
214-
try:
215-
resp = self._export(serialized_data, deadline_sec - time())
216-
if resp.ok:
217-
finish_export(None, None)
218-
return LogRecordExportResult.SUCCESS
219-
except requests.exceptions.RequestException as error:
220-
reason = error
221-
export_error = error
222-
retryable = isinstance(error, ConnectionError)
223-
status_code = None
224-
else:
225-
reason = resp.reason
226-
retryable = _is_retryable(resp)
227-
status_code = resp.status_code
227+
if not retryable:
228+
_logger.error(
229+
"Failed to export logs batch code: %s, reason: %s",
230+
status_code,
231+
reason,
232+
)
233+
error_attrs = (
234+
{HTTP_RESPONSE_STATUS_CODE: status_code}
235+
if status_code is not None
236+
else None
237+
)
238+
result.error = export_error
239+
result.error_attrs = error_attrs
240+
return LogRecordExportResult.FAILURE
228241

229-
if not retryable:
230-
_logger.error(
231-
"Failed to export logs batch code: %s, reason: %s",
232-
status_code,
242+
if (
243+
retry_num + 1 == _MAX_RETRYS
244+
or backoff_seconds > (deadline_sec - time())
245+
or self._shutdown
246+
):
247+
_logger.error(
248+
"Failed to export logs batch due to timeout, "
249+
"max retries or shutdown."
250+
)
251+
error_attrs = (
252+
{HTTP_RESPONSE_STATUS_CODE: status_code}
253+
if status_code is not None
254+
else None
255+
)
256+
result.error = export_error
257+
result.error_attrs = error_attrs
258+
return LogRecordExportResult.FAILURE
259+
_logger.warning(
260+
"Transient error %s encountered while exporting logs batch, retrying in %.2fs.",
233261
reason,
262+
backoff_seconds,
234263
)
235-
error_attrs = (
236-
{HTTP_RESPONSE_STATUS_CODE: status_code}
237-
if status_code is not None
238-
else None
239-
)
240-
finish_export(export_error, error_attrs)
241-
return LogRecordExportResult.FAILURE
242-
243-
if (
244-
retry_num + 1 == _MAX_RETRYS
245-
or backoff_seconds > (deadline_sec - time())
246-
or self._shutdown
247-
):
248-
_logger.error(
249-
"Failed to export logs batch due to timeout, "
250-
"max retries or shutdown."
251-
)
252-
error_attrs = (
253-
{HTTP_RESPONSE_STATUS_CODE: status_code}
254-
if status_code is not None
255-
else None
256-
)
257-
finish_export(export_error, error_attrs)
258-
return LogRecordExportResult.FAILURE
259-
_logger.warning(
260-
"Transient error %s encountered while exporting logs batch, retrying in %.2fs.",
261-
reason,
262-
backoff_seconds,
263-
)
264-
shutdown = self._shutdown_is_occuring.wait(backoff_seconds)
265-
if shutdown:
266-
_logger.warning("Shutdown in progress, aborting retry.")
267-
break
268-
return LogRecordExportResult.FAILURE
264+
shutdown = self._shutdown_is_occuring.wait(backoff_seconds)
265+
if shutdown:
266+
_logger.warning("Shutdown in progress, aborting retry.")
267+
break
268+
return LogRecordExportResult.FAILURE
269269

270270
def force_flush(self, timeout_millis: float = 10_000) -> bool:
271271
"""Nothing is buffered in this exporter, so this method does nothing."""

exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/metric_exporter/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -395,8 +395,8 @@ def force_flush(self, timeout_millis: float = 10_000) -> bool:
395395

396396
def set_meter_provider(self, meter_provider: MeterProvider) -> None:
397397
self._metrics = ExporterMetrics(
398-
OtelComponentTypeValues.OTLP_HTTP_METRIC_EXPORTER.value,
399-
"metric_data_point",
398+
OtelComponentTypeValues.OTLP_HTTP_METRIC_EXPORTER,
399+
"metrics",
400400
urlparse(self._endpoint),
401401
meter_provider,
402402
)

exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/trace_exporter/__init__.py

Lines changed: 59 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -196,69 +196,69 @@ def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
196196
_logger.warning("Exporter already shutdown, ignoring batch")
197197
return SpanExportResult.FAILURE
198198

199-
finish_export = self._metrics.start_export(len(spans))
199+
with self._metrics.export_operation(len(spans)) as result:
200+
serialized_data = encode_spans(spans).SerializePartialToString()
201+
deadline_sec = time() + self._timeout
202+
for retry_num in range(_MAX_RETRYS):
203+
# multiplying by a random number between .8 and 1.2 introduces a +/-20% jitter to each backoff.
204+
backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2)
205+
export_error: Optional[Exception] = None
206+
try:
207+
resp = self._export(serialized_data, deadline_sec - time())
208+
if resp.ok:
209+
return SpanExportResult.SUCCESS
210+
except requests.exceptions.RequestException as error:
211+
reason = error
212+
export_error = error
213+
retryable = isinstance(error, ConnectionError)
214+
status_code = None
215+
else:
216+
reason = resp.reason
217+
retryable = _is_retryable(resp)
218+
status_code = resp.status_code
200219

201-
serialized_data = encode_spans(spans).SerializePartialToString()
202-
deadline_sec = time() + self._timeout
203-
for retry_num in range(_MAX_RETRYS):
204-
# multiplying by a random number between .8 and 1.2 introduces a +/-20% jitter to each backoff.
205-
backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2)
206-
export_error: Optional[Exception] = None
207-
try:
208-
resp = self._export(serialized_data, deadline_sec - time())
209-
if resp.ok:
210-
finish_export(None, None)
211-
return SpanExportResult.SUCCESS
212-
except requests.exceptions.RequestException as error:
213-
reason = error
214-
export_error = error
215-
retryable = isinstance(error, ConnectionError)
216-
status_code = None
217-
else:
218-
reason = resp.reason
219-
retryable = _is_retryable(resp)
220-
status_code = resp.status_code
220+
if not retryable:
221+
_logger.error(
222+
"Failed to export span batch code: %s, reason: %s",
223+
status_code,
224+
reason,
225+
)
226+
error_attrs = (
227+
{HTTP_RESPONSE_STATUS_CODE: status_code}
228+
if status_code is not None
229+
else None
230+
)
231+
result.error = export_error
232+
result.error_attrs = error_attrs
233+
return SpanExportResult.FAILURE
221234

222-
if not retryable:
223-
_logger.error(
224-
"Failed to export span batch code: %s, reason: %s",
225-
status_code,
235+
if (
236+
retry_num + 1 == _MAX_RETRYS
237+
or backoff_seconds > (deadline_sec - time())
238+
or self._shutdown
239+
):
240+
_logger.error(
241+
"Failed to export span batch due to timeout, "
242+
"max retries or shutdown."
243+
)
244+
error_attrs = (
245+
{HTTP_RESPONSE_STATUS_CODE: status_code}
246+
if status_code is not None
247+
else None
248+
)
249+
result.error = export_error
250+
result.error_attrs = error_attrs
251+
return SpanExportResult.FAILURE
252+
_logger.warning(
253+
"Transient error %s encountered while exporting span batch, retrying in %.2fs.",
226254
reason,
255+
backoff_seconds,
227256
)
228-
error_attrs = (
229-
{HTTP_RESPONSE_STATUS_CODE: status_code}
230-
if status_code is not None
231-
else None
232-
)
233-
finish_export(export_error, error_attrs)
234-
return SpanExportResult.FAILURE
235-
236-
if (
237-
retry_num + 1 == _MAX_RETRYS
238-
or backoff_seconds > (deadline_sec - time())
239-
or self._shutdown
240-
):
241-
_logger.error(
242-
"Failed to export span batch due to timeout, "
243-
"max retries or shutdown."
244-
)
245-
error_attrs = (
246-
{HTTP_RESPONSE_STATUS_CODE: status_code}
247-
if status_code is not None
248-
else None
249-
)
250-
finish_export(export_error, error_attrs)
251-
return SpanExportResult.FAILURE
252-
_logger.warning(
253-
"Transient error %s encountered while exporting span batch, retrying in %.2fs.",
254-
reason,
255-
backoff_seconds,
256-
)
257-
shutdown = self._shutdown_in_progress.wait(backoff_seconds)
258-
if shutdown:
259-
_logger.warning("Shutdown in progress, aborting retry.")
260-
break
261-
return SpanExportResult.FAILURE
257+
shutdown = self._shutdown_in_progress.wait(backoff_seconds)
258+
if shutdown:
259+
_logger.warning("Shutdown in progress, aborting retry.")
260+
break
261+
return SpanExportResult.FAILURE
262262

263263
def shutdown(self):
264264
if self._shutdown:

0 commit comments

Comments
 (0)