 2  2   import logging
 3  3   import os
 4  4   import pickle
 5  -   from contextlib import suppress
 6  5   from typing import Optional, cast
 7  6
 8  7   from llama_index.core.workflow import (
13 12       WorkflowTimeoutError,
14 13   )
15 14   from llama_index.core.workflow.handler import WorkflowHandler  # type: ignore
16  -   from openinference.instrumentation.llama_index import (
17  -       LlamaIndexInstrumentor,
18  -       get_current_span,
19  -   )
20  -   from opentelemetry import trace
21  -   from opentelemetry.sdk.trace import TracerProvider
22  -   from opentelemetry.sdk.trace.export import BatchSpanProcessor
23 15   from uipath._cli._runtime._contracts import (
24 16       UiPathBaseRuntime,
25 17       UiPathErrorCategory,
28 20       UiPathRuntimeStatus,
29 21   )
30 22   from uipath._cli._runtime._hitl import HitlProcessor, HitlReader
31  -   from uipath.tracing import TracingManager
32 23
33  -   from .._tracing._oteladapter import LlamaIndexExporter
   24 + from .._utils._config import LlamaIndexConfig
34 25   from ._context import UiPathLlamaIndexRuntimeContext
35 26   from ._exception import UiPathLlamaIndexRuntimeError
36 27
@@ -58,19 +49,6 @@ async def execute(self) -> Optional[UiPathRuntimeResult]:
 58  49          """
 59  50          await self.validate()
 60  51
 61   -          self.trace_provider = TracerProvider()
 62   -          self.tracer = self.trace_provider.get_tracer("uipath.llamaindex.runtime")
 63   -
 64   -          with suppress(Exception):
 65   -              trace.set_tracer_provider(self.trace_provider)
 66   -              self.trace_provider.add_span_processor(
 67   -                  BatchSpanProcessor(LlamaIndexExporter())
 68   -              )
 69   -
 70   -          LlamaIndexInstrumentor().instrument(tracer_provider=self.trace_provider)
 71   -
 72   -          TracingManager.register_current_span_provider(get_current_span)
 73   -
 74  52          try:
 75  53              if self.context.resume is False and self.context.job_id is None:
 76  54                  # Delete the previous graph state file at debug time
@@ -176,8 +154,6 @@ async def execute(self) -> Optional[UiPathRuntimeResult]:
176 154                  detail,
177 155                  UiPathErrorCategory.USER,
178 156              ) from e
179   -          finally:
180   -              self.trace_provider.shutdown()
181 157
182 158      async def validate(self) -> None:
183 159          """Validate runtime inputs and load Llama agent configuration."""
@@ -193,12 +169,14 @@ async def validate(self) -> None:
193 169              ) from e
194 170
195 171          if self.context.config is None:
196   -              raise UiPathLlamaIndexRuntimeError(
197   -                  "CONFIG_MISSING",
198   -                  "Invalid configuration",
199   -                  "Failed to load configuration",
200   -                  UiPathErrorCategory.DEPLOYMENT,
201   -              )
    172 +          self.context.config = LlamaIndexConfig()
    173 +          if not self.context.config.exists:
    174 +              raise UiPathLlamaIndexRuntimeError(
    175 +                  "CONFIG_MISSING",
    176 +                  "Invalid configuration",
    177 +                  "Failed to load configuration",
    178 +                  UiPathErrorCategory.DEPLOYMENT,
    179 +              )
202 180
203 181          try:
204 182              self.context.config.load_config()