1212 JsonPickleSerializer ,
1313)
1414from llama_index .core .workflow .handler import WorkflowHandler
15- from openinference .instrumentation .llama_index import LlamaIndexInstrumentor , get_current_span
15+ from openinference .instrumentation .llama_index import (
16+ LlamaIndexInstrumentor ,
17+ get_current_span ,
18+ )
1619from opentelemetry import trace
1720from opentelemetry .sdk .trace import TracerProvider
1821from opentelemetry .sdk .trace .export import BatchSpanProcessor
2326 UiPathRuntimeResult ,
2427 UiPathRuntimeStatus ,
2528)
29+ from uipath .tracing import TracingManager
2630
2731from .._tracing ._oteladapter import LlamaIndexExporter
2832from ._context import UiPathLlamaIndexRuntimeContext
2933from ._exception import UiPathLlamaIndexRuntimeError
3034from ._hitl import HitlProcessor , HitlReader
3135
32- from uipath .tracing import TracingManager
33-
3436logger = logging .getLogger (__name__ )
3537
3638
@@ -74,14 +76,20 @@ async def execute(self) -> Optional[UiPathRuntimeResult]:
7476 if os .path .exists (self .state_file_path ):
7577 os .remove (self .state_file_path )
7678
79+ if self .context .workflow is None :
80+ return None
81+
7782 start_event_class = self .context .workflow ._start_event_class
78- ev = start_event_class (** self .context .input_json )
83+ ev = start_event_class (** ( self .context .input_json or {}) )
7984 await self .load_workflow_context ()
8085
86+ if self .context .workflow_context is None :
87+ return None
88+
8189 handler : WorkflowHandler = self .context .workflow .run (
8290 start_event = ev if self .context .resume else None ,
8391 ctx = self .context .workflow_context ,
84- ** self .context .input_json ,
92+ ** ( self .context .input_json or {}) ,
8593 )
8694
8795 resume_trigger : Optional [UiPathResumeTrigger ] = None
@@ -94,9 +102,10 @@ async def execute(self) -> Optional[UiPathRuntimeResult]:
94102 if self .context .resume and not response_applied :
95103 # If we are resuming, we need to apply the response to the event stream.
96104 response_applied = True
97- self .context .workflow_context .send_event (
98- await self .get_response_event ()
99- )
105+ response_event = await self .get_response_event ()
106+ if response_event :
107+ # If we have a response event, send it to the workflow context.
108+ self .context .workflow_context .send_event (response_event )
100109 else :
101110 resume_trigger = await hitl_processor .create_resume_trigger ()
102111 break
@@ -244,6 +253,9 @@ async def load_workflow_context(self):
244253 """
245254 logger .debug (f"Resumed: { self .context .resume } Input: { self .context .input_json } " )
246255
256+ if self .context .workflow is None :
257+ return
258+
247259 if not self .context .resume :
248260 self .context .workflow_context = Context (self .context .workflow )
249261 return
@@ -277,7 +289,7 @@ async def get_response_event(self) -> Optional[HumanResponseEvent]:
277289 """
278290 if self .context .input_json :
279291 # If input_json is provided, use it to create a HumanResponseEvent
280- return HumanResponseEvent (** self .context .input_json )
292+ return HumanResponseEvent (** ( self .context .input_json or {}) )
281293 # If resumed_trigger is set, fetch the feedback
282294 if self .context .resumed_trigger :
283295 feedback = await HitlReader .read (self .context .resumed_trigger )