Skip to content

Commit 0c347fd

Browse files
committed
refactor(diagnostics): extract _run_in_subprocess and _save_import_cache helpers
- Unify duplicated ProcessPoolExecutor lifecycle from _get_library_libdoc and _get_variables_libdoc into _run_in_subprocess
- Unify duplicated cache save logic into _save_import_cache
1 parent 5696868 commit 0c347fd

File tree

1 file changed

+80
-92
lines changed

1 file changed

+80
-92
lines changed

packages/robot/src/robotcode/robot/diagnostics/imports_manager.py

Lines changed: 80 additions & 92 deletions
Original file line numberDiff line numberDiff line change
@@ -1515,6 +1515,58 @@ def executor(self) -> ProcessPoolExecutor:
15151515

15161516
return self._executor
15171517

1518+
def _run_in_subprocess(self, func: Any, func_args: Tuple[Any, ...], timeout_msg: str) -> Any:
1519+
"""Run a callable in a fresh single-use subprocess and return the result.
1520+
1521+
A fresh process per import is intentional: libraries and variable files
1522+
can pollute the interpreter (e.g. via sys.modules, global state, native
1523+
extensions) and cannot be safely re-imported after on-disk changes.
1524+
"""
1525+
executor = ProcessPoolExecutor(max_workers=1, mp_context=mp.get_context("spawn"))
1526+
try:
1527+
try:
1528+
return executor.submit(func, *func_args).result(self.load_library_timeout)
1529+
except TimeoutError as e:
1530+
raise RuntimeError(
1531+
f"{timeout_msg} "
1532+
f"timed out after {self.load_library_timeout} seconds. "
1533+
"The import may be slow or blocked. "
1534+
"If required, increase the timeout by setting the ROBOTCODE_LOAD_LIBRARY_TIMEOUT "
1535+
"environment variable."
1536+
) from e
1537+
except (SystemExit, KeyboardInterrupt):
1538+
raise
1539+
except BaseException as e:
1540+
self._logger.exception(e)
1541+
raise
1542+
finally:
1543+
executor.shutdown(wait=True)
1544+
1545+
def _save_import_cache(
    self,
    section: CacheSection,
    meta: Optional[LibraryMetaData],
    result: Any,
    kind: str,
    name: str,
    args: Tuple[Any, ...],
) -> None:
    """Persist an import *result* to the disk cache, or log why it was skipped.

    Caching is strictly best-effort: any failure is logged and swallowed so
    that a broken cache can never break the import itself.

    :param section: cache section (e.g. library vs. variables) to write into.
    :param meta: metadata providing the cache key; ``None`` disables caching.
    :param result: the import result object to store.
    :param kind: human-readable kind ("library", "variables") for log/error text.
    :param name: import name, used in log/error text.
    :param args: import arguments, used in log text.
    """
    try:
        if meta is None:
            # Without metadata there is no cache key — record the skip for diagnostics.
            self._logger.debug(lambda: f"Skip caching {kind} {name}{args!r}", context_name="import")
        else:
            try:
                self.data_cache.save_entry(section, meta.cache_key, meta, result)
            except (SystemExit, KeyboardInterrupt):
                raise
            except BaseException as cause:
                # Wrap so the log below shows *which* entry could not be written.
                raise RuntimeError(f"Cannot write cache entry for {kind} '{name}'") from cause
    except (SystemExit, KeyboardInterrupt):
        raise
    except BaseException as error:
        # Best-effort: log and continue — the caller keeps the in-memory result.
        self._logger.exception(error)
1569+
15181570
def _get_library_libdoc(
15191571
self,
15201572
name: str,
@@ -1543,55 +1595,24 @@ def _get_library_libdoc(
15431595
self._logger.exception(e)
15441596

15451597
self._logger.debug(lambda: f"Load library in process {name}{args!r}", context_name="import")
1546-
# A fresh process per import is intentional: libraries can pollute the interpreter
1547-
# (e.g. via sys.modules, global state, native extensions) and cannot be safely
1548-
# re-imported after on-disk changes without unknown side effects.
1549-
executor = ProcessPoolExecutor(max_workers=1, mp_context=mp.get_context("spawn"))
1550-
try:
1551-
try:
1552-
result = executor.submit(
1553-
get_library_doc,
1554-
name,
1555-
args if not ignore_arguments else (),
1556-
working_dir,
1557-
base_dir,
1558-
self.get_resolvable_command_line_variables(),
1559-
variables,
1560-
).result(self.load_library_timeout)
15611598

1562-
except TimeoutError as e:
1563-
raise RuntimeError(
1564-
f"Loading library {name!r} with args {args!r} (working_dir={working_dir!r}, base_dir={base_dir!r}) "
1565-
f"timed out after {self.load_library_timeout} seconds. "
1566-
"The library may be slow or blocked during import. "
1567-
"If required, increase the timeout by setting the ROBOTCODE_LOAD_LIBRARY_TIMEOUT "
1568-
"environment variable."
1569-
) from e
1570-
1571-
except (SystemExit, KeyboardInterrupt):
1572-
raise
1573-
except BaseException as e:
1574-
self._logger.exception(e)
1575-
raise
1576-
finally:
1577-
executor.shutdown(wait=True)
1599+
result = self._run_in_subprocess(
1600+
get_library_doc,
1601+
(
1602+
name,
1603+
args if not ignore_arguments else (),
1604+
working_dir,
1605+
base_dir,
1606+
self.get_resolvable_command_line_variables(),
1607+
variables,
1608+
),
1609+
f"Loading library {name!r} with args {args!r} (working_dir={working_dir!r}, base_dir={base_dir!r})",
1610+
)
15781611

1579-
try:
1580-
if meta is not None:
1581-
meta.has_errors = bool(result.errors)
1612+
if meta is not None:
1613+
meta.has_errors = bool(result.errors)
15821614

1583-
try:
1584-
self.data_cache.save_entry(CacheSection.LIBRARY, meta.cache_key, meta, result)
1585-
except (SystemExit, KeyboardInterrupt):
1586-
raise
1587-
except BaseException as e:
1588-
raise RuntimeError(f"Cannot write cache entry for library '{name}'") from e
1589-
else:
1590-
self._logger.debug(lambda: f"Skip caching library {name}{args!r}", context_name="import")
1591-
except (SystemExit, KeyboardInterrupt):
1592-
raise
1593-
except BaseException as e:
1594-
self._logger.exception(e)
1615+
self._save_import_cache(CacheSection.LIBRARY, meta, result, "library", name, args)
15951616

15961617
return result, meta
15971618

@@ -1746,53 +1767,20 @@ def _get_variables_libdoc(
17461767
except BaseException as e:
17471768
self._logger.exception(e)
17481769

1749-
# A fresh process per import is intentional: variable files can pollute the
1750-
# interpreter and cannot be safely re-imported after on-disk changes.
1751-
executor = ProcessPoolExecutor(max_workers=1, mp_context=mp.get_context("spawn"))
1752-
try:
1753-
try:
1754-
result = executor.submit(
1755-
get_variables_doc,
1756-
name,
1757-
args,
1758-
working_dir,
1759-
base_dir,
1760-
self.get_resolvable_command_line_variables() if resolve_command_line_vars else None,
1761-
variables,
1762-
).result(self.load_library_timeout)
1763-
1764-
except TimeoutError as e:
1765-
raise RuntimeError(
1766-
f"Loading variables {name!r} with args {args!r} (working_dir={working_dir!r}, "
1767-
f"base_dir={base_dir!r}) "
1768-
f"timed out after {self.load_library_timeout} seconds. "
1769-
"The variables may be slow or blocked during import. "
1770-
"If required, increase the timeout by setting the ROBOTCODE_LOAD_LIBRARY_TIMEOUT "
1771-
"environment variable."
1772-
) from e
1773-
1774-
except (SystemExit, KeyboardInterrupt):
1775-
raise
1776-
except BaseException as e:
1777-
self._logger.exception(e)
1778-
raise
1779-
finally:
1780-
executor.shutdown(True)
1770+
result = self._run_in_subprocess(
1771+
get_variables_doc,
1772+
(
1773+
name,
1774+
args,
1775+
working_dir,
1776+
base_dir,
1777+
self.get_resolvable_command_line_variables() if resolve_command_line_vars else None,
1778+
variables,
1779+
),
1780+
f"Loading variables {name!r} with args {args!r} (working_dir={working_dir!r}, base_dir={base_dir!r})",
1781+
)
17811782

1782-
try:
1783-
if meta is not None:
1784-
try:
1785-
self.data_cache.save_entry(CacheSection.VARIABLES, meta.cache_key, meta, result)
1786-
except (SystemExit, KeyboardInterrupt):
1787-
raise
1788-
except BaseException as e:
1789-
raise RuntimeError(f"Cannot write cache entry for variables '{name}'") from e
1790-
else:
1791-
self._logger.debug(lambda: f"Skip caching variables {name}{args!r}", context_name="import")
1792-
except (SystemExit, KeyboardInterrupt):
1793-
raise
1794-
except BaseException as e:
1795-
self._logger.exception(e)
1783+
self._save_import_cache(CacheSection.VARIABLES, meta, result, "variables", name, args)
17961784

17971785
return result, meta
17981786

0 commit comments

Comments
 (0)