diff --git a/octobot/cli.py b/octobot/cli.py
index f88362cd26..17682fb9b4 100644
--- a/octobot/cli.py
+++ b/octobot/cli.py
@@ -15,6 +15,7 @@
# License along with OctoBot. If not, see .
import argparse
import os
+import pathlib
import sys
import multiprocessing
import asyncio
@@ -30,6 +31,7 @@
import octobot_commons.authentication as authentication
import octobot_commons.constants as common_constants
import octobot_commons.errors as errors
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
import octobot_services.api as service_api
@@ -103,6 +105,14 @@ def _disable_interface_from_param(interface_identifier, param_value, logger):
logger.info(interface_identifier.capitalize() + " interface disabled")
+def _set_user_root_from_cli(user_folder: str) -> None:
+ if not (user_folder and str(user_folder).strip()):
+ raise errors.ConfigError("User folder must be a non-empty path.")
+ if ".." in pathlib.PurePath(user_folder).parts:
+ raise errors.ConfigError("Invalid user folder: parent directory segments are not allowed.")
+ user_root_folder_provider.instance().set_root(os.path.normpath(user_folder))
+
+
def _log_environment(logger):
try:
bot_type = "cloud" if constants.IS_CLOUD_ENV else "self-hosted"
@@ -115,10 +125,12 @@ def _log_environment(logger):
def _create_configuration():
config_path = configuration.get_user_config()
- config = configuration.Configuration(config_path,
- common_constants.USER_PROFILES_FOLDER,
- constants.CONFIG_FILE_SCHEMA,
- constants.PROFILE_FILE_SCHEMA)
+ config = configuration.Configuration(
+ config_path,
+ user_root_folder_provider.get_user_profiles_folder(),
+ constants.CONFIG_FILE_SCHEMA,
+ constants.PROFILE_FILE_SCHEMA,
+ )
return config
@@ -297,7 +309,9 @@ def _load_or_create_tentacles(community_auth, config, logger):
# add tentacles folder to Python path
sys.path.append(os.path.realpath(os.getcwd()))
- if os.path.isfile(tentacles_manager_constants.USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH):
+ if os.path.isfile(
+ user_root_folder_provider.get_user_reference_tentacle_config_file_path()
+ ):
# when tentacles folder already exists
config.load_profiles_if_possible_and_necessary()
tentacles_setup_config = tentacles_manager_api.get_tentacles_setup_config(
@@ -318,9 +332,13 @@ def start_octobot(args, default_config_file=None):
print(constants.LONG_VERSION)
return
- # log folder can be overridden by the LOGS_FOLDER environment variable,
- # useful to run multiple bots from the same folder
- logger = octobot_logger.init_logger(logs_folder=constants.LOGS_FOLDER)
+ user_folder = getattr(args, "user_folder", None)
+ if user_folder:
+ _set_user_root_from_cli(user_folder)
+
+ # log folder: --log-folder overrides default (from LOGS_FOLDER env at import + default "logs")
+ logs_folder = getattr(args, "log_folder", None) or constants.LOGS_FOLDER
+ logger = octobot_logger.init_logger(logs_folder=logs_folder)
startup_messages = []
# Version
@@ -384,6 +402,11 @@ def start_octobot(args, default_config_file=None):
reset_trading_history=args.reset_trading_history,
startup_messages=startup_messages)
+ if not args.backtesting:
+ path = getattr(args, "dump_state", None)
+ if path:
+ bot.dump_state_path = os.path.normpath(path)
+
# set global bot instance
commands.set_global_bot_instance(bot)
@@ -473,6 +496,18 @@ def octobot_parser(parser, default_config_file=None):
'When disabled, the backtesting run will not be interrupted during execution',
action='store_true')
parser.add_argument('-r', '--risk', type=float, help='Force a specific risk configuration (between 0 and 1).')
+ parser.add_argument(
+ '--user-folder',
+ type=str,
+ default=None,
+ help='User data root (config, profiles, reference tentacles). Relative to the current working directory.',
+ )
+ parser.add_argument(
+ '--log-folder',
+ type=str,
+ default=None,
+ help='Log files directory. When set, overrides the LOGS_FOLDER environment variable and default "logs".',
+ )
parser.add_argument('-nw', '--no_web', help="Don't start OctoBot web interface.",
action='store_true')
parser.add_argument('-nl', '--no_logs', help="Disable OctoBot logs in backtesting.",
@@ -486,6 +521,13 @@ def octobot_parser(parser, default_config_file=None):
" exchanges configuration in your config.json without using any interface "
"(ie the web interface that handle encryption automatically).",
action='store_true')
+ parser.add_argument(
+ "--dump-state",
+ type=str,
+ default=None,
+ help="Absolute path of the JSON file where OctoBot periodically writes ProcessBotState (liveness, "
+ "next to the user config directory). Omitted in normal use; spawned DSL children pass this explicitly.",
+ )
parser.add_argument('--identifier', help="OctoBot community identifier.", type=str, nargs=1)
parser.add_argument('-o', '--strategy_optimizer', help='Start Octobot strategy optimizer. This mode will make '
'octobot play backtesting scenarii located in '
@@ -603,6 +645,8 @@ def start_background_octobot_with_args(
in_subprocess=False,
reset_trading_history=False,
default_config_file=None,
+ user_folder=None,
+ log_folder=None,
):
if backtesting_files is None:
backtesting_files = []
@@ -621,7 +665,9 @@ def start_background_octobot_with_args(
enable_backtesting_timeout=enable_backtesting_timeout,
simulate=simulate,
risk=risk,
- reset_trading_history=reset_trading_history)
+ reset_trading_history=reset_trading_history,
+ user_folder=user_folder,
+ log_folder=log_folder)
if in_subprocess:
bot_process = multiprocessing.Process(target=start_octobot, args=(args, default_config_file))
bot_process.start()
diff --git a/octobot/commands.py b/octobot/commands.py
index 25f4261582..b9a275cf57 100644
--- a/octobot/commands.py
+++ b/octobot/commands.py
@@ -39,6 +39,8 @@
import octobot.community.tentacles_packages as community_tentacles_packages
import octobot.configuration_manager as configuration_manager
+import octobot.storage.process_bot_state_dumper as process_bot_state_dumper
+
COMMANDS_LOGGER_NAME = "Commands"
IGNORED_COMMAND_WHEN_RESTART = ["-u", "--update"]
@@ -324,6 +326,11 @@ async def start_bot(bot, logger, catch=False):
         await bot.initialize()
+        if bot.dump_state_path:
+            # start the periodic ProcessBotState dump once the bot initialized successfully
+            bot._process_bot_state_dump_task = asyncio.create_task(
+                process_bot_state_dumper.run_periodic_dump_loop(bot.dump_state_path, logger, bot)
+            )
     except asyncio.CancelledError:
         logger.info("Core engine tasks cancelled.")
     except Exception as e:
         logger.exception(e)
diff --git a/octobot/configuration_manager.py b/octobot/configuration_manager.py
index e13f851a9f..8b2e9d9dd9 100644
--- a/octobot/configuration_manager.py
+++ b/octobot/configuration_manager.py
@@ -20,6 +20,7 @@
import octobot.constants as constants
import octobot_commons.configuration as configuration
import octobot_commons.constants as common_constants
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
import octobot_commons.logging as logging
import octobot_commons.json_util as json_util
import octobot_tentacles_manager.constants as tentacles_manager_constants
@@ -123,8 +124,9 @@ def init_config(
:param from_config_file: the default config file path
"""
try:
- if not os.path.exists(common_constants.USER_FOLDER):
- os.makedirs(common_constants.USER_FOLDER)
+ user_root = user_root_folder_provider.get_user_root_folder()
+ if not os.path.exists(user_root):
+ os.makedirs(user_root)
shutil.copyfile(from_config_file, config_file)
except Exception as global_exception:
@@ -169,35 +171,48 @@ def get_default_tentacles_url(version=None):
def get_user_local_config_file():
try:
- import octobot_commons.constants as commons_constants
- return f"{commons_constants.USER_FOLDER}/logging_config.ini"
+ import octobot_commons.user_root_folder_provider as user_root_folder_provider
+
+ return os.path.join(
+ user_root_folder_provider.get_user_root_folder(), "logging_config.ini"
+ )
except ImportError:
return None
def load_default_tentacles_config(profile_folder):
- if os.path.isdir(tentacles_manager_constants.USER_REFERENCE_TENTACLE_CONFIG_PATH):
- shutil.copyfile(tentacles_manager_constants.USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH,
- os.path.join(profile_folder, tentacles_manager_constants.constants.CONFIG_TENTACLES_FILE))
- shutil.copytree(tentacles_manager_constants.USER_REFERENCE_TENTACLE_SPECIFIC_CONFIG_PATH,
- os.path.join(profile_folder, tentacles_manager_constants.TENTACLES_SPECIFIC_CONFIG_FOLDER))
+ ref_path = user_root_folder_provider.get_user_reference_tentacle_config_path()
+ ref_file = user_root_folder_provider.get_user_reference_tentacle_config_file_path()
+ ref_spec = user_root_folder_provider.get_user_reference_tentacle_specific_config_path()
+ if os.path.isdir(ref_path):
+ shutil.copyfile(
+ ref_file,
+ os.path.join(profile_folder, tentacles_manager_constants.constants.CONFIG_TENTACLES_FILE),
+ )
+ shutil.copytree(
+ ref_spec,
+ os.path.join(profile_folder, tentacles_manager_constants.TENTACLES_SPECIFIC_CONFIG_FOLDER),
+ )
def migrate_from_previous_config(config):
logger = logging.get_logger(LOGGER_NAME)
# migrate tentacles configuration if necessary
- previous_tentacles_config = os.path.join(common_constants.USER_FOLDER, "tentacles_config")
- previous_tentacles_config_save = os.path.join(common_constants.USER_FOLDER, "tentacles_config.back")
- if os.path.isdir(previous_tentacles_config) and \
- not os.path.isdir(tentacles_manager_constants.USER_REFERENCE_TENTACLE_CONFIG_PATH):
+ user_root = user_root_folder_provider.get_user_root_folder()
+ ref_tent_path = user_root_folder_provider.get_user_reference_tentacle_config_path()
+ previous_tentacles_config = os.path.join(user_root, "tentacles_config")
+ previous_tentacles_config_save = os.path.join(user_root, "tentacles_config.back")
+ if os.path.isdir(previous_tentacles_config) and not os.path.isdir(ref_tent_path):
logger.info(
f"Updating your tentacles configuration located in {previous_tentacles_config} into the new format. "
f"A save of your previous tentacles config is available in {previous_tentacles_config_save}")
- shutil.copytree(previous_tentacles_config,
- tentacles_manager_constants.USER_REFERENCE_TENTACLE_CONFIG_PATH)
+ shutil.copytree(previous_tentacles_config, ref_tent_path)
shutil.move(previous_tentacles_config, previous_tentacles_config_save)
load_default_tentacles_config(
- os.path.join(common_constants.USER_PROFILES_FOLDER, common_constants.DEFAULT_PROFILE)
+ os.path.join(
+ user_root_folder_provider.get_user_profiles_folder(),
+ common_constants.DEFAULT_PROFILE,
+ )
)
# migrate global configuration if necessary
config_path = configuration.get_user_config()
diff --git a/octobot/constants.py b/octobot/constants.py
index c9e4ac1786..0957e24737 100644
--- a/octobot/constants.py
+++ b/octobot/constants.py
@@ -202,6 +202,16 @@
# logs
DEFAULT_LOGS_FOLDER = "logs"
LOGS_FOLDER = os.getenv("LOGS_FOLDER", DEFAULT_LOGS_FOLDER)
+
+# Web automation: child process sets OCTOBOT_WEB_API_KEY
+ENV_WEB_API_KEY = "OCTOBOT_WEB_API_KEY"
+WEB_API_KEY_HEADER = "X-Octobot-Api-Key"
+# Process bot state JSON next to user config (--dump-state); liveness for run_octobot_process
+PROCESS_BOT_STATE_FILE_NAME = "process_bot_state.json"
+ENV_PROCESS_BOT_STATE_DUMP_INTERVAL_SECONDS = "OCTOBOT_PROCESS_BOT_STATE_DUMP_INTERVAL_SECONDS"
+PROCESS_BOT_STATE_DUMP_INTERVAL_SECONDS = float(
+ os.getenv(ENV_PROCESS_BOT_STATE_DUMP_INTERVAL_SECONDS, "30")
+)
FORCED_LOG_LEVEL = os.getenv("FORCED_LOG_LEVEL", "")
ENV_TRADING_ENABLE_DEBUG_LOGS = os_util.parse_boolean_environment_var("ENV_TRADING_ENABLE_DEBUG_LOGS", "False")
diff --git a/octobot/logger.py b/octobot/logger.py
index 33a7ca25c2..659c24dfd6 100644
--- a/octobot/logger.py
+++ b/octobot/logger.py
@@ -38,6 +38,7 @@
import octobot.constants as constants
import octobot.configuration_manager as configuration_manager
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
BOT_CHANNEL_LOGGER = None
LOGGER_PRIORITY_LEVEL = channel_enums.ChannelConsumerPriorityLevels.OPTIONAL.value
@@ -51,7 +52,7 @@ def _log_uncaught_exceptions(ex_cls, ex, tb):
def init_logger(logs_folder: str = constants.DEFAULT_LOGS_FOLDER):
try:
if not os.path.exists(logs_folder):
- os.mkdir(logs_folder)
+ os.makedirs(logs_folder)
_load_logger_config(logs_folder)
init_bot_channel_logger()
except KeyError:
@@ -88,8 +89,9 @@ def _load_logger_config(logs_folder: str):
try:
# use local logging file to allow users to customize the log level
if not os.path.isfile(configuration_manager.get_user_local_config_file()):
- if not os.path.exists(commons_constants.USER_FOLDER):
- os.mkdir(commons_constants.USER_FOLDER)
+            user_root = user_root_folder_provider.get_user_root_folder()
+            if not os.path.exists(user_root):
+                os.makedirs(user_root)
shutil.copyfile(constants.LOGGING_CONFIG_FILE, configuration_manager.get_user_local_config_file())
logging.config.fileConfig(configuration_manager.get_user_local_config_file())
logger = logging.getLogger("Logging Configuration")
diff --git a/octobot/octobot.py b/octobot/octobot.py
index a34b78dba7..8daeb3fafe 100644
--- a/octobot/octobot.py
+++ b/octobot/octobot.py
@@ -105,6 +105,9 @@ def __init__(self, config: configuration.Configuration, community_authenticator=
self.initializer = initializer.Initializer(self)
self.task_manager = task_manager.TaskManager(self)
self._init_metadata_run_task = None
+ # optional path for periodic ProcessBotState JSON (see cli --dump-state)
+ self.dump_state_path = None
+ self._process_bot_state_dump_task = None
# Producers
self.exchange_producer = None
@@ -211,6 +214,9 @@ async def stop(self):
self.logger.debug("Stopping ...")
if self._init_metadata_run_task is not None and not self._init_metadata_run_task.done():
self._init_metadata_run_task.cancel()
+ if self._process_bot_state_dump_task is not None and not self._process_bot_state_dump_task.done():
+ self._process_bot_state_dump_task.cancel()
+ self._process_bot_state_dump_task = None
signals.SignalPublisher.instance().stop()
if self.evaluator_producer is not None:
await self.evaluator_producer.stop()
diff --git a/octobot/storage/process_bot_state_dumper.py b/octobot/storage/process_bot_state_dumper.py
new file mode 100644
index 0000000000..d611bf85c1
--- /dev/null
+++ b/octobot/storage/process_bot_state_dumper.py
@@ -0,0 +1,113 @@
+# This file is part of OctoBot (https://github.com/Drakkar-Software/OctoBot)
+# Copyright (c) 2025 Drakkar-Software, All rights reserved.
+#
+# OctoBot is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# OctoBot is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public
+# License along with OctoBot. If not, see .
+import asyncio
+import os
+import time
+import typing
+import uuid
+
+import aiofiles
+import octobot_commons.json_util as json_util
+import octobot_trading.api as trading_api
+
+import octobot.constants as octobot_app_constants
+import octobot_flow.entities.accounts.exchange_account_elements as exchange_account_elements_import
+import octobot_flow.entities.accounts.process_bot_state as process_bot_state_import
+
+
+def _synced_exchange_account_elements_for_first_trading_exchange(
+ octobot: typing.Any,
+ logger: typing.Any,
+) -> exchange_account_elements_import.ExchangeAccountElements:
+ """
+ Build one snapshot for the first trading exchange only. If several are trading, log an error
+ for each additional one (only the first is included in the dump).
+ """
+ empty = exchange_account_elements_import.ExchangeAccountElements()
+ if octobot is None or octobot.exchange_producer is None:
+ return empty
+ managers = [
+ trading_api.get_exchange_manager_from_exchange_id(exchange_manager_id)
+ for exchange_manager_id in octobot.exchange_producer.exchange_manager_ids
+ ]
+ trading_managers = trading_api.get_trading_exchanges(managers)
+ if not trading_managers:
+ return empty
+ first_exchange_manager = trading_managers[0]
+ elements = exchange_account_elements_import.ExchangeAccountElements()
+ elements.name = trading_api.get_exchange_name(first_exchange_manager)
+ elements.sync_from_exchange_manager(first_exchange_manager, [])
+ for skipped_exchange_manager in trading_managers[1:]:
+ logger.error(
+ "process bot state dump includes only the first trading exchange; dumping %s (%s). "
+ "Skipping additional trading exchange %s (%s).",
+ trading_api.get_exchange_name(first_exchange_manager),
+ trading_api.get_exchange_manager_id(first_exchange_manager),
+ trading_api.get_exchange_name(skipped_exchange_manager),
+ trading_api.get_exchange_manager_id(skipped_exchange_manager),
+ )
+ return elements
+
+
+async def _write_state_file_async(
+ state_file_path: str,
+ interval: float,
+ bot: typing.Any,
+ logger: typing.Any,
+) -> None:
+ now = time.time()
+ state = process_bot_state_import.ProcessBotState(
+ metadata=process_bot_state_import.Metadata(updated_at=now, next_updated_at=now + interval),
+ exchange_account_elements=_synced_exchange_account_elements_for_first_trading_exchange(
+ bot, logger
+ ),
+ )
+ content = state.to_dict(include_default_values=False)
+ str_content = json_util.dump_formatted_json(content)
+ full_path = os.path.abspath(state_file_path)
+ directory = os.path.dirname(full_path)
+ if directory:
+ os.makedirs(directory, exist_ok=True)
+ tmp_name = f"{full_path}.{os.getpid()}.{uuid.uuid4().hex}.tmp"
+ try:
+ async with aiofiles.open(tmp_name, mode="w", encoding="utf-8") as write_file:
+ await write_file.write(str_content)
+ os.replace(tmp_name, full_path)
+ except Exception:
+ if os.path.isfile(tmp_name):
+ try:
+ os.remove(tmp_name)
+ except OSError:
+ pass
+ raise
+
+
+async def run_periodic_dump_loop(state_file_path: str, logger, bot: typing.Any) -> None:
+ """
+ Periodically write ProcessBotState next to the user config. Cancel the task to stop.
+ """
+ interval = octobot_app_constants.PROCESS_BOT_STATE_DUMP_INTERVAL_SECONDS
+ while True:
+ try:
+ await _write_state_file_async(state_file_path, interval, bot, logger)
+ except asyncio.CancelledError:
+ raise
+ except Exception as err: # pylint: disable=broad-except
+ logger.exception("process bot state dump failed: %s", err)
+ try:
+ await asyncio.sleep(interval)
+ except asyncio.CancelledError:
+ break
diff --git a/packages/commons/octobot_commons/configuration/config_file_manager.py b/packages/commons/octobot_commons/configuration/config_file_manager.py
index 2f704b86ce..f7348a0cb7 100644
--- a/packages/commons/octobot_commons/configuration/config_file_manager.py
+++ b/packages/commons/octobot_commons/configuration/config_file_manager.py
@@ -17,6 +17,7 @@
import os
import octobot_commons.logging as logging
import octobot_commons.constants as commons_constants
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
import octobot_commons.configuration.fields_utils as fields_utils
import octobot_commons.json_util as json_util
@@ -29,7 +30,9 @@ def get_user_config() -> str:
Return user config path
:return: user config path
"""
- return os.path.join(commons_constants.USER_FOLDER, commons_constants.CONFIG_FILE)
+ return os.path.join(
+ user_root_folder_provider.get_user_root_folder(), commons_constants.CONFIG_FILE
+ )
def load(config_file, should_raise=True, fill_missing_fields=False) -> dict:
diff --git a/packages/commons/octobot_commons/constants.py b/packages/commons/octobot_commons/constants.py
index 47182e86b0..29ce0aa273 100644
--- a/packages/commons/octobot_commons/constants.py
+++ b/packages/commons/octobot_commons/constants.py
@@ -46,6 +46,8 @@ def parse_boolean_environment_var(env_key: str, default_value: str) -> bool:
CONFIG_DEBUG_OPTION = "DEV-MODE"
CONFIG_TIME_FRAME = "time_frame"
USER_FOLDER = "user"
+AUTOMATIONS_FOLDER = "automations"
+USER_AUTOMATIONS_FOLDER = f"{USER_FOLDER}/{AUTOMATIONS_FOLDER}"
CONFIG_FOLDER = "config"
CONFIG_FILE = "config.json"
SAFE_DUMP_SUFFIX = ".back"
@@ -301,4 +303,5 @@ def parse_boolean_environment_var(env_key: str, default_value: str) -> bool:
"https://tentacles.octobot.online/officials/packages/full/base/1.0.9/metadata.yaml"
)
IS_DEV_MODE_ENABLED = parse_boolean_environment_var(CONFIG_DEBUG_OPTION, "False")
-USE_MINIMAL_LIBS = parse_boolean_environment_var("USE_MINIMAL_LIBS", "false")
+ENV_USE_MINIMAL_LIBS = "USE_MINIMAL_LIBS"
+USE_MINIMAL_LIBS = parse_boolean_environment_var(ENV_USE_MINIMAL_LIBS, "false")
diff --git a/packages/commons/octobot_commons/databases/cache_manager.py b/packages/commons/octobot_commons/databases/cache_manager.py
index d4ad60b56b..fac919d0a6 100644
--- a/packages/commons/octobot_commons/databases/cache_manager.py
+++ b/packages/commons/octobot_commons/databases/cache_manager.py
@@ -23,6 +23,7 @@
import octobot_commons.symbols.symbol_util as symbol_util
import octobot_commons.errors as common_errors
import octobot_commons.tree as tree
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
class CacheManager:
@@ -350,7 +351,7 @@ def get_cache_or_build_path(
identifying_tentacles, tentacles_setup_config
)
return os.path.join(
- common_constants.USER_FOLDER,
+ user_root_folder_provider.get_user_root_folder(),
common_constants.CACHE_FOLDER,
tentacle_name,
exchange_name,
diff --git a/packages/commons/octobot_commons/databases/run_databases/run_databases_identifier.py b/packages/commons/octobot_commons/databases/run_databases/run_databases_identifier.py
index 198b2963e5..f4b3168713 100644
--- a/packages/commons/octobot_commons/databases/run_databases/run_databases_identifier.py
+++ b/packages/commons/octobot_commons/databases/run_databases/run_databases_identifier.py
@@ -19,6 +19,7 @@
import octobot_commons.databases.document_database_adaptors as adaptors
import octobot_commons.constants as constants
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
import octobot_commons.enums as enums
import octobot_commons.symbols.symbol_util as symbol_util
@@ -47,7 +48,9 @@ def __init__(
)
self.enable_storage = enable_storage
self.context = context
- self.data_path = self._merge_parts(constants.USER_FOLDER, constants.DATA_FOLDER)
+ self.data_path = self._merge_parts(
+ user_root_folder_provider.get_user_root_folder(), constants.DATA_FOLDER
+ )
self.base_path = self._merge_parts(self.data_path, self.tentacle_class)
self.suffix = (
self.database_adaptor.get_db_file_ext()
diff --git a/packages/commons/octobot_commons/dsl_interpreter/__init__.py b/packages/commons/octobot_commons/dsl_interpreter/__init__.py
index ab9bdd8090..e99aee7f4b 100644
--- a/packages/commons/octobot_commons/dsl_interpreter/__init__.py
+++ b/packages/commons/octobot_commons/dsl_interpreter/__init__.py
@@ -39,8 +39,13 @@
ExpressionOperator,
PreComputingCallOperator,
ReCallableOperatorMixin,
+ SignalableOperatorMixin,
+ OperatorSignal,
+ OperatorSignals,
ReCallingOperatorResult,
ReCallingOperatorResultKeys,
+ ProcessBoundOperatorMixin,
+ is_process_bound,
)
from octobot_commons.dsl_interpreter.interpreter_dependency import (
InterpreterDependency,
@@ -73,6 +78,11 @@
"ExpressionOperator",
"PreComputingCallOperator",
"ReCallableOperatorMixin",
+ "SignalableOperatorMixin",
+ "OperatorSignal",
+ "OperatorSignals",
+ "ProcessBoundOperatorMixin",
+ "is_process_bound",
"InterpreterDependency",
"format_parameter_value",
"resove_operator_params",
diff --git a/packages/commons/octobot_commons/dsl_interpreter/operators/__init__.py b/packages/commons/octobot_commons/dsl_interpreter/operators/__init__.py
index 7591c19131..cb60bfa323 100644
--- a/packages/commons/octobot_commons/dsl_interpreter/operators/__init__.py
+++ b/packages/commons/octobot_commons/dsl_interpreter/operators/__init__.py
@@ -50,6 +50,15 @@
ReCallingOperatorResult,
ReCallingOperatorResultKeys,
)
+from octobot_commons.dsl_interpreter.operators.signalable_operator_mixin import (
+ OperatorSignal,
+ OperatorSignals,
+ SignalableOperatorMixin,
+)
+from octobot_commons.dsl_interpreter.operators.process_bound_operator_mixin import (
+ ProcessBoundOperatorMixin,
+ is_process_bound,
+)
__all__ = [
"BinaryOperator",
@@ -65,4 +74,9 @@
"ReCallableOperatorMixin",
"ReCallingOperatorResult",
"ReCallingOperatorResultKeys",
+ "SignalableOperatorMixin",
+ "OperatorSignal",
+ "OperatorSignals",
+ "ProcessBoundOperatorMixin",
+ "is_process_bound",
]
diff --git a/packages/commons/octobot_commons/dsl_interpreter/operators/process_bound_operator_mixin.py b/packages/commons/octobot_commons/dsl_interpreter/operators/process_bound_operator_mixin.py
new file mode 100644
index 0000000000..365135ca42
--- /dev/null
+++ b/packages/commons/octobot_commons/dsl_interpreter/operators/process_bound_operator_mixin.py
@@ -0,0 +1,187 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+
+import asyncio
+import pathlib
+import socket
+import subprocess
+import time
+import typing
+
+import octobot_commons.dsl_interpreter.operator as dsl_interpreter_operator
+import octobot_commons.errors as commons_errors
+import octobot_commons.logging as commons_logging
+import octobot_commons.process_util as process_util
+
+
+class ProcessBoundOperatorMixin:
+ """
+ Identifies operators that are bound to an external process, and provides
+ portable helpers for spawning and stopping that process (no app-specific naming).
+ """
+
+ def __init__(self) -> None:
+ """``pid`` is set when a bound child process identifier becomes available."""
+ self.pid: typing.Optional[int] = None
+
+ def is_process_running(self) -> bool:
+ """Best-effort: whether ``self.pid`` refers to a running OS process."""
+ if self.pid is None:
+ return False
+ return process_util.pid_is_running(self.pid)
+
+ def request_graceful_stop(
+ self,
+ *,
+ logger: typing.Optional[typing.Any] = None,
+ ) -> dict[str, typing.Any]:
+ """Ask the bound process (``SIGTERM`` when available) to terminate."""
+ if self.pid is None:
+ raise commons_errors.DSLInterpreterError(
+ "No process id set; cannot request graceful stop."
+ )
+ return process_util.request_graceful_stop_via_sigterm(self.pid, logger=logger)
+
+ async def wait_until_pid_stopped(
+ self,
+ pid: int,
+ *,
+ logger: typing.Optional[typing.Any] = None,
+ timeout_seconds: float,
+ poll_interval: float = 0.2,
+ ) -> None:
+ """Poll until ``pid`` is gone or ``timeout_seconds`` elapses (after e.g. SIGTERM)."""
+ resolved_logger = logger or commons_logging.get_logger(self.__class__.__name__)
+ if pid <= 0:
+ resolved_logger.info(
+ "wait_until_pid_stopped: pid=%s treated as already stopped (non-positive)",
+ pid,
+ )
+ return
+ resolved_logger.info(
+ "wait_until_pid_stopped: waiting for pid=%s to exit (timeout=%ss)",
+ pid,
+ timeout_seconds,
+ )
+ deadline = time.monotonic() + timeout_seconds
+ while time.monotonic() < deadline:
+ if not process_util.pid_is_running(pid):
+ resolved_logger.info("wait_until_pid_stopped: pid=%s exited", pid)
+ return
+ await asyncio.sleep(poll_interval)
+ raise commons_errors.DSLInterpreterError(
+ f"Timed out after {timeout_seconds}s waiting for pid={pid} to exit."
+ )
+
+ def spawn_subprocess(
+ self,
+ argv: list[str],
+ *,
+ working_directory: str,
+ environment: typing.Optional[typing.Mapping[str, str]] = None,
+ hide_console_window: bool = False,
+ ) -> subprocess.Popen:
+ """Launch a child process without a shell (see :func:`process_util.spawn_managed_subprocess`)."""
+ proc = process_util.spawn_managed_subprocess(
+ argv,
+ working_directory=working_directory,
+ environment=environment,
+ hide_console_window=hide_console_window,
+ )
+ self.pid = proc.pid
+ return proc
+
+ @staticmethod
+ def reject_user_path_segment(path_value: str) -> None:
+ """Reject obvious path traversal in user-supplied relative paths."""
+ if ".." in pathlib.PurePath(path_value).parts:
+ raise commons_errors.DSLInterpreterError(
+ "Invalid path: parent directory segments are not allowed."
+ )
+
+ @staticmethod
+ def _tcp_port_is_free(bind_host: str, port: int) -> bool:
+ """True if nothing is currently bound to (host, port) for TCP."""
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ try:
+ sock.bind((bind_host, port))
+ except OSError:
+ return False
+ return True
+
+ @staticmethod
+ def find_first_free_listen_port_after_base(
+ bind_host_for_probe: str,
+ listen_port_base: int,
+ max_offset: int = 256,
+        blocklist: typing.Optional[list[int]] = None,
+    ) -> int:
+        """
+        Return the first ``listen_port_base + offset`` that is TCP-free on
+        ``bind_host_for_probe`` and not listed in ``blocklist``.
+        Returns ``listen_port``.
+        """
+ for offset_from_base in range(max_offset):
+ listen_port = listen_port_base + offset_from_base
+ if blocklist and listen_port in blocklist:
+ continue
+ if not ProcessBoundOperatorMixin._tcp_port_is_free(
+ bind_host_for_probe, listen_port
+ ):
+ continue
+ return listen_port
+ raise commons_errors.DSLInterpreterError(
+ "No free listen port found in the scanned range."
+ )
+
+ @staticmethod
+ def bind_address_for_env_and_probe_hosts(
+ params: dict,
+ bind_listen_key: str = "bind_host",
+ ) -> tuple[str, str]:
+ """
+ Effective bind/listen address from ``params``, and the host to use for local
+ port checks (``0.0.0.0`` is probed via loopback).
+ """
+ resolved_bind = params.get(bind_listen_key) or "127.0.0.1"
+ probe_bind = "127.0.0.1" if resolved_bind == "0.0.0.0" else resolved_bind
+ return resolved_bind, probe_bind
+
+ @staticmethod
+ def spawn_managed_subprocess(
+ argv: list[str],
+ *,
+ working_directory: str,
+ environment: typing.Optional[typing.Mapping[str, str]] = None,
+ hide_console_window: bool = False,
+ ) -> subprocess.Popen:
+ """
+ Launch a child process without a shell (``creationflags``: hide console on Windows when asked).
+ """
+ return process_util.spawn_managed_subprocess(
+ argv,
+ working_directory=working_directory,
+ environment=environment,
+ hide_console_window=hide_console_window,
+ )
+
+
+def is_process_bound(operator: dsl_interpreter_operator.Operator) -> bool:
+ """
+ Check if the operator is bound to an external process.
+ """
+ return isinstance(operator, ProcessBoundOperatorMixin)
diff --git a/packages/commons/octobot_commons/dsl_interpreter/operators/re_callable_operator_mixin.py b/packages/commons/octobot_commons/dsl_interpreter/operators/re_callable_operator_mixin.py
index fda68f056e..c67b57a186 100644
--- a/packages/commons/octobot_commons/dsl_interpreter/operators/re_callable_operator_mixin.py
+++ b/packages/commons/octobot_commons/dsl_interpreter/operators/re_callable_operator_mixin.py
@@ -14,9 +14,9 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import dataclasses
-import typing
-import time
import enum
+import time
+import typing
import octobot_commons.dataclasses
import octobot_commons.dsl_interpreter.operator_parameter as operator_parameter
@@ -71,7 +71,7 @@ def get_script_override(result: typing.Any) -> typing.Optional[str]:
)
@staticmethod
- def get_keyword(result: typing.Any) -> typing.Optional[str]:
+ def get_keyword(result: dict[str, typing.Any]) -> typing.Optional[str]:
"""
Returns the keyword from the re-calling operator result.
"""
diff --git a/packages/commons/octobot_commons/dsl_interpreter/operators/signalable_operator_mixin.py b/packages/commons/octobot_commons/dsl_interpreter/operators/signalable_operator_mixin.py
new file mode 100644
index 0000000000..a1814a788c
--- /dev/null
+++ b/packages/commons/octobot_commons/dsl_interpreter/operators/signalable_operator_mixin.py
@@ -0,0 +1,77 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+
+import enum
+import typing
+
+
+class OperatorSignal(enum.StrEnum):
+    """
+    Canonical operator signal strings. Call sites pass *.value where a plain str API is expected.
+    """
+    STOP = "STOP"
+    UPDATE_CONFIG = "UPDATE_CONFIG"
+
+
+class OperatorSignals:
+    """
+    Mutable map of DSL operator name to execution signal string for one interpreter/run.
+
+    ``sync`` clears and replaces ``signal_by_operator`` (same pattern as DSLExecutor before each
+    action execution).
+    """
+
+    def __init__(self):
+        # values are signal strings (see OperatorSignal): keep the annotation
+        # consistent with the documented contract and with call sites.
+        self.signal_by_operator: typing.Dict[str, str] = {}
+
+    def sync(self, signals: typing.Mapping[str, str]) -> None:
+        """
+        Replaces signals mapping with the given signals.
+        """
+        self.signal_by_operator.clear()
+        self.signal_by_operator.update(signals)
+
+
+class SignalableOperatorMixin:
+    """
+    Mixin for operators whose behavior depends on execution signals keyed by operator name.
+
+    Each instance holds an optional ``OperatorSignals`` shared for the DSL run (typically one per
+    interpreter). Callers fill the map via ``OperatorSignals.sync`` (e.g. DSLExecutor before
+    interpretation). ``get_name()`` identifies which map entry applies to ``matches_operator_signal``.
+    """
+
+    def __init__(self, signals: typing.Optional[OperatorSignals] = None):
+        self.signals: typing.Optional[OperatorSignals] = signals
+
+    def matches_operator_signal(self, signal: str) -> bool:
+        """Return whether ``self.signals`` maps this operator's name to ``signal``."""
+        if self.signals is None:
+            return False
+        return self.signals.signal_by_operator.get(self.get_name()) == signal  # type: ignore
+
+    @classmethod
+    def should_dispatch_operator_signal_for_result(  # pylint: disable=unused-argument
+        cls,
+        signal: str,
+        re_calling_result: typing.Optional[dict],
+    ) -> bool:
+        """
+        When draining dispatcher-driven operator signals for automation shutdown, whether this
+        operator should run its branch for the given previous re-calling payload.
+        Default: do nothing; subclasses override.
+        """
+        return False
diff --git a/packages/commons/octobot_commons/process_util.py b/packages/commons/octobot_commons/process_util.py
new file mode 100644
index 0000000000..08b7d61b97
--- /dev/null
+++ b/packages/commons/octobot_commons/process_util.py
@@ -0,0 +1,156 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+
+import os
+import signal
+import subprocess
+import sys
+import typing
+
+import octobot_commons.errors as commons_errors
+import octobot_commons.logging as commons_logging
+
+try:
+    import psutil
+except ImportError:  # pragma: no cover
+    psutil = None  # type: ignore
+
+
+def spawn_managed_subprocess(
+    argv: list[str],
+    *,
+    working_directory: str,
+    environment: typing.Optional[typing.Mapping[str, str]] = None,
+    hide_console_window: bool = False,
+    forward_terminal_output: bool = False,
+) -> subprocess.Popen:
+    """
+    Launch a child process without a shell (``creationflags``: hide console on Windows when asked).
+
+    When ``forward_terminal_output`` is True, the child inherits the parent stdout/stderr (live terminal
+    output). On Windows, ``hide_console_window`` is ignored in that case: ``CREATE_NO_WINDOW`` would
+    detach console output and hide logs even when streams are inherited.
+
+    When ``forward_terminal_output`` is False, stdout and stderr are discarded (``subprocess.DEVNULL``).
+    """
+    resolved_env = dict(environment) if environment is not None else os.environ.copy()
+    use_hidden_console = (
+        hide_console_window and sys.platform == "win32" and not forward_terminal_output
+    )
+    # subprocess.CREATE_NO_WINDOW exists only on Windows; tests may patch platform on Linux CI.
+    creationflags = (
+        getattr(subprocess, "CREATE_NO_WINDOW", 0) if use_hidden_console else 0
+    )
+    if forward_terminal_output:
+        child_stdout: typing.Optional[int] = None
+        child_stderr: typing.Optional[int] = None
+    else:
+        child_stdout = subprocess.DEVNULL
+        child_stderr = subprocess.DEVNULL
+    return subprocess.Popen(
+        argv,
+        cwd=working_directory,
+        env=resolved_env,
+        creationflags=creationflags,
+        stdout=child_stdout,
+        stderr=child_stderr,
+    )
+
+
+def pid_is_running(pid: int) -> bool:  # pylint: disable=too-many-return-statements
+    """Best-effort: whether ``pid`` denotes a running OS process (zombies are treated as not running)."""
+    if pid <= 0:
+        return False
+    if psutil is None:
+        # psutil failed to import (see guarded import above): fall back to a
+        # stdlib-only liveness probe instead of raising AttributeError.
+        if sys.platform == "win32":
+            # On Windows os.kill(pid, 0) maps to TerminateProcess, so it is
+            # not a safe probe: conservatively report the process as running.
+            return True
+        try:
+            os.kill(pid, 0)
+        except ProcessLookupError:
+            return False
+        except PermissionError:
+            # Not allowed to signal it, but the process does exist.
+            return True
+        return True
+    try:
+        proc = psutil.Process(pid)
+    except psutil.NoSuchProcess:
+        return False
+    except psutil.AccessDenied:
+        return True
+    try:
+        if proc.status() == psutil.STATUS_ZOMBIE:
+            return False
+    except psutil.ZombieProcess:
+        return False
+    except psutil.NoSuchProcess:
+        # PID can disappear between Process() creation and status() (e.g. SIGTERM on Windows).
+        return False
+    try:
+        return proc.is_running()
+    except psutil.ZombieProcess:
+        return False
+    except psutil.NoSuchProcess:
+        return False
+
+
+def request_graceful_stop_via_sigterm(
+    pid: int,
+    *,
+    logger: typing.Optional[typing.Any] = None,
+) -> dict[str, typing.Any]:
+    """
+    Ask the subprocess identified by ``pid`` to terminate (``SIGTERM`` when available).
+
+    Returns a small dict with ``status`` and optional ``reason`` / ``signal`` keys.
+    """
+    resolved_logger = logger or commons_logging.get_logger(__name__)
+    if pid <= 0:
+        raise commons_errors.DSLInterpreterError(
+            "Invalid pid for graceful stop via SIGTERM."
+        )
+    sigterm = getattr(signal, "SIGTERM", None)
+    if sigterm is None:
+        raise commons_errors.DSLInterpreterError(
+            "SIGTERM is not available on this platform."
+        )
+    if not pid_is_running(pid):
+        resolved_logger.info(
+            "Graceful stop: pid=%s not running, treating as already stopped",
+            pid,
+        )
+        return {"status": "already_stopped", "reason": "not_running"}
+    try:
+        os.kill(pid, sigterm)
+    except OSError as err:
+        if not pid_is_running(pid):
+            resolved_logger.info(
+                "Graceful stop: pid=%s gone after failed signal: %s",
+                pid,
+                err,
+            )
+            return {"status": "already_stopped", "reason": str(err)}
+        resolved_logger.warning(
+            "Graceful stop: failed to signal pid=%s: %s", pid, err
+        )
+        raise commons_errors.DSLInterpreterError(
+            f"Failed to send stop signal to pid={pid}: {err}"
+        ) from err
+    resolved_logger.info("Sent graceful stop signal (sigterm) to pid=%s", pid)
+    return {"status": "stopped", "signal": "sigterm"}
diff --git a/packages/commons/octobot_commons/profiles/profile_sharing.py b/packages/commons/octobot_commons/profiles/profile_sharing.py
index 737107580a..b978485e3f 100644
--- a/packages/commons/octobot_commons/profiles/profile_sharing.py
+++ b/packages/commons/octobot_commons/profiles/profile_sharing.py
@@ -47,6 +47,7 @@ def __init__(self, *args):
from octobot_commons.profiles.profile import Profile
import octobot_commons.profiles.profile_data as profile_data_import
import octobot_commons.profiles.profile_data_import as profile_data_importer
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
NON_OVERWRITTEN_PROFILE_FOLDERS = []
@@ -331,7 +332,10 @@ def _get_target_import_path(
:return: (the final target import path, True if the profile is replaced)
"""
target_import_path = os.path.join(
- bot_install_path, constants.USER_PROFILES_FOLDER, profile_name
+ bot_install_path,
+ user_root_folder_provider.get_user_root_folder(),
+ constants.PROFILES_FOLDER,
+ profile_name,
)
if replace_if_exists:
return target_import_path
diff --git a/packages/commons/octobot_commons/user_root_folder_provider.py b/packages/commons/octobot_commons/user_root_folder_provider.py
new file mode 100644
index 0000000000..fd659a1d1a
--- /dev/null
+++ b/packages/commons/octobot_commons/user_root_folder_provider.py
@@ -0,0 +1,97 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import os
+import typing
+
+import octobot_commons.constants as commons_constants
+import octobot_commons.singleton.singleton_class as singleton_class
+
+# Matches historical layout under the user root; "specific_config" matches
+# octobot_tentacles_manager.constants.TENTACLES_SPECIFIC_CONFIG_FOLDER (commons cannot import tentacles_manager).
+_REFERENCE_TENTACLES_CONFIG_DIR = "reference_tentacles_config"
+_TENTACLES_SPECIFIC_CONFIG_DIR = "specific_config"
+
+
+class UserRootFolderProvider(singleton_class.Singleton):
+    """
+    Singleton holding the effective user data root (config, profiles,
+    reference_tentacles_config, ...). While no explicit root has been set,
+    it falls back to ``commons_constants.USER_FOLDER``.
+    """
+
+    def __init__(self) -> None:
+        """Start without an explicit root: ``get_root`` then uses ``USER_FOLDER``."""
+        self._root: typing.Optional[str] = None
+
+    def get_root(self) -> str:
+        """Return the configured user data root, defaulting to ``commons_constants.USER_FOLDER``."""
+        return commons_constants.USER_FOLDER if self._root is None else self._root
+
+    def set_root(self, root: str) -> None:
+        """Record ``root`` as the user data root directory path."""
+        self._root = root
+
+    def get_user_profiles_folder(self) -> str:
+        """Return the profiles directory located under the user root."""
+        return os.path.join(self.get_root(), commons_constants.PROFILES_FOLDER)
+
+    def get_user_reference_tentacle_config_path(self) -> str:
+        """Return the reference tentacles config directory located under the user root."""
+        return os.path.join(self.get_root(), _REFERENCE_TENTACLES_CONFIG_DIR)
+
+    def get_user_reference_tentacle_config_file_path(self) -> str:
+        """Return the main tentacles config file path inside the reference config directory."""
+        return os.path.join(
+            self.get_user_reference_tentacle_config_path(),
+            commons_constants.CONFIG_TENTACLES_FILE,
+        )
+
+    def get_user_reference_tentacle_specific_config_path(self) -> str:
+        """Return the tentacles-specific config directory inside the reference config directory."""
+        return os.path.join(
+            self.get_user_reference_tentacle_config_path(),
+            _TENTACLES_SPECIFIC_CONFIG_DIR,
+        )
+
+
+def instance() -> UserRootFolderProvider:
+    """Module alias for ``UserRootFolderProvider.instance()``."""
+    return UserRootFolderProvider.instance()
+
+
+def get_user_root_folder() -> str:
+    """Module-level helper: the effective user data root."""
+    return instance().get_root()
+
+
+def get_user_profiles_folder() -> str:
+    """Module-level helper: profiles directory under the user root."""
+    return instance().get_user_profiles_folder()
+
+
+def get_user_reference_tentacle_config_path() -> str:
+    """Module-level helper: reference tentacles config directory under the user root."""
+    return instance().get_user_reference_tentacle_config_path()
+
+
+def get_user_reference_tentacle_config_file_path() -> str:
+    """Module-level helper: main tentacles config file path in the reference config."""
+    return instance().get_user_reference_tentacle_config_file_path()
+
+
+def get_user_reference_tentacle_specific_config_path() -> str:
+    """Module-level helper: tentacles-specific config directory in the reference config."""
+    return instance().get_user_reference_tentacle_specific_config_path()
diff --git a/packages/commons/tests/dsl_interpreter/operators/test_process_bound_operator_mixin.py b/packages/commons/tests/dsl_interpreter/operators/test_process_bound_operator_mixin.py
new file mode 100644
index 0000000000..b03dd48a35
--- /dev/null
+++ b/packages/commons/tests/dsl_interpreter/operators/test_process_bound_operator_mixin.py
@@ -0,0 +1,227 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import mock
+
+import pytest
+
+import octobot_commons.dsl_interpreter.operator as dsl_interpreter_operator
+import octobot_commons.dsl_interpreter.operators.process_bound_operator_mixin as process_bound_operator_mixin
+import octobot_commons.errors as commons_errors
+import octobot_commons.process_util as process_util
+
+
+class _BareOperator(dsl_interpreter_operator.Operator):
+ @staticmethod
+ def get_name() -> str:
+ return "bare_process_bound_test"
+
+
+class _BoundOperator(
+ dsl_interpreter_operator.Operator,
+ process_bound_operator_mixin.ProcessBoundOperatorMixin,
+):
+ @staticmethod
+ def get_name() -> str:
+ return "bound_process_bound_test"
+
+
+class TestProcessBoundOperatorMixinInit:
+ def test_pid_starts_none(self):
+ bound = process_bound_operator_mixin.ProcessBoundOperatorMixin()
+ assert bound.pid is None
+
+
+class TestIsProcessRunning:
+ def test_false_when_pid_not_set(self):
+ bound = process_bound_operator_mixin.ProcessBoundOperatorMixin()
+ assert bound.is_process_running() is False
+
+ def test_delegates_to_process_util(self):
+ bound = process_bound_operator_mixin.ProcessBoundOperatorMixin()
+ bound.pid = 12345
+ with mock.patch.object(process_util, "pid_is_running", return_value=True) as running_mock:
+ assert bound.is_process_running() is True
+ running_mock.assert_called_once_with(12345)
+
+
+class TestRequestGracefulStop:
+ def test_raises_when_pid_not_set(self):
+ bound = process_bound_operator_mixin.ProcessBoundOperatorMixin()
+ with pytest.raises(commons_errors.DSLInterpreterError, match="No process id set"):
+ bound.request_graceful_stop()
+
+ def test_delegates_to_process_util(self):
+ bound = process_bound_operator_mixin.ProcessBoundOperatorMixin()
+ bound.pid = 99
+ expected = {"status": "stopped", "signal": "sigterm"}
+ with mock.patch.object(
+ process_util,
+ "request_graceful_stop_via_sigterm",
+ return_value=expected,
+ ) as stop_mock:
+ result = bound.request_graceful_stop(logger=mock.sentinel.log)
+ assert result == expected
+ stop_mock.assert_called_once_with(99, logger=mock.sentinel.log)
+
+
+@pytest.mark.asyncio
+class TestWaitUntilPidStopped:
+ async def test_non_positive_pid_returns_without_poll(self):
+ bound = process_bound_operator_mixin.ProcessBoundOperatorMixin()
+ with mock.patch.object(process_util, "pid_is_running") as running_mock:
+ await bound.wait_until_pid_stopped(0, timeout_seconds=5.0, logger=mock.Mock())
+ running_mock.assert_not_called()
+
+ async def test_returns_when_pid_not_running(self):
+ bound = process_bound_operator_mixin.ProcessBoundOperatorMixin()
+ with mock.patch.object(process_util, "pid_is_running", return_value=False):
+ await bound.wait_until_pid_stopped(7, timeout_seconds=5.0)
+
+ async def test_timeout_raises_dsl_error(self):
+ bound = process_bound_operator_mixin.ProcessBoundOperatorMixin()
+ with mock.patch.object(process_util, "pid_is_running", return_value=True):
+ with pytest.raises(commons_errors.DSLInterpreterError, match="Timed out"):
+ await bound.wait_until_pid_stopped(
+ 99,
+ timeout_seconds=0.05,
+ poll_interval=0.01,
+ )
+
+
+class TestSpawnSubprocess:
+ def test_sets_self_pid_from_child_and_returns_popen(self):
+ bound = process_bound_operator_mixin.ProcessBoundOperatorMixin()
+ fake_popen = mock.Mock()
+ fake_popen.pid = 777
+ with mock.patch.object(
+ process_util,
+ "spawn_managed_subprocess",
+ return_value=fake_popen,
+ ) as spawn_mock:
+ returned = bound.spawn_subprocess(
+ ["/bin/true"],
+ working_directory="/tmp/wd",
+ environment={"A": "1"},
+ hide_console_window=True,
+ )
+ assert returned is fake_popen
+ assert bound.pid == 777
+ spawn_mock.assert_called_once_with(
+ ["/bin/true"],
+ working_directory="/tmp/wd",
+ environment={"A": "1"},
+ hide_console_window=True,
+ )
+
+
+class TestRejectUserPathSegment:
+ def test_accepts_simple_relative_path(self):
+ process_bound_operator_mixin.ProcessBoundOperatorMixin.reject_user_path_segment("bots/mybot")
+
+ def test_raises_on_parent_directory_parts(self):
+ with pytest.raises(commons_errors.DSLInterpreterError, match="parent directory"):
+ process_bound_operator_mixin.ProcessBoundOperatorMixin.reject_user_path_segment("a/../b")
+
+
+class TestFindFirstFreeListenPortAfterBase:
+ def test_returns_base_when_free(self):
+ mixin = process_bound_operator_mixin.ProcessBoundOperatorMixin
+ with mock.patch.object(mixin, "_tcp_port_is_free", return_value=True):
+ listen_port = mixin.find_first_free_listen_port_after_base("127.0.0.1", 50000)
+ assert listen_port == 50000
+
+ def test_skips_until_first_free_port(self):
+ mixin = process_bound_operator_mixin.ProcessBoundOperatorMixin
+ with mock.patch.object(mixin, "_tcp_port_is_free", side_effect=[False, False, True]):
+ listen_port = mixin.find_first_free_listen_port_after_base("127.0.0.1", 50100)
+ assert listen_port == 50102
+
+ def test_skips_blocklisted_ports(self):
+ mixin = process_bound_operator_mixin.ProcessBoundOperatorMixin
+ with mock.patch.object(mixin, "_tcp_port_is_free", return_value=True):
+ listen_port = mixin.find_first_free_listen_port_after_base(
+ "127.0.0.1",
+ 50200,
+ blocklist=[50200],
+ )
+ assert listen_port == 50201
+
+ def test_raises_when_scan_exhausted(self):
+ mixin = process_bound_operator_mixin.ProcessBoundOperatorMixin
+ with mock.patch.object(mixin, "_tcp_port_is_free", return_value=False):
+ with pytest.raises(commons_errors.DSLInterpreterError, match="No free listen port"):
+ mixin.find_first_free_listen_port_after_base("127.0.0.1", 50300, max_offset=2)
+
+
+class TestBindAddressForEnvAndProbeHosts:
+ def test_defaults_bind_and_probe_to_loopback(self):
+ resolved_bind, probe_bind = (
+ process_bound_operator_mixin.ProcessBoundOperatorMixin.bind_address_for_env_and_probe_hosts({})
+ )
+ assert resolved_bind == "127.0.0.1"
+ assert probe_bind == "127.0.0.1"
+
+ def test_any_bind_uses_loopback_probe(self):
+ resolved_bind, probe_bind = (
+ process_bound_operator_mixin.ProcessBoundOperatorMixin.bind_address_for_env_and_probe_hosts(
+ {"bind_host": "0.0.0.0"}
+ )
+ )
+ assert resolved_bind == "0.0.0.0"
+ assert probe_bind == "127.0.0.1"
+
+ def test_custom_bind_listen_key(self):
+ resolved_bind, probe_bind = (
+ process_bound_operator_mixin.ProcessBoundOperatorMixin.bind_address_for_env_and_probe_hosts(
+ {"listen_addr": "10.0.0.5"},
+ bind_listen_key="listen_addr",
+ )
+ )
+ assert resolved_bind == "10.0.0.5"
+ assert probe_bind == "10.0.0.5"
+
+
+class TestSpawnManagedSubprocessStatic:
+ def test_delegates_to_process_util(self):
+ sentinel_popen = mock.sentinel.proc
+ argv = ["a", "b"]
+ with mock.patch.object(
+ process_util,
+ "spawn_managed_subprocess",
+ return_value=sentinel_popen,
+ ) as spawn_mock:
+ result = process_bound_operator_mixin.ProcessBoundOperatorMixin.spawn_managed_subprocess(
+ argv,
+ working_directory="/proj",
+ environment=None,
+ hide_console_window=False,
+ )
+ assert result is sentinel_popen
+ spawn_mock.assert_called_once_with(
+ argv,
+ working_directory="/proj",
+ environment=None,
+ hide_console_window=False,
+ )
+
+
+class TestIsProcessBound:
+ def test_true_for_operator_using_mixin(self):
+ assert process_bound_operator_mixin.is_process_bound(_BoundOperator()) is True
+
+ def test_false_for_operator_without_mixin(self):
+ assert process_bound_operator_mixin.is_process_bound(_BareOperator()) is False
+
diff --git a/packages/commons/tests/dsl_interpreter/operators/test_re_callable_operator_mixin.py b/packages/commons/tests/dsl_interpreter/operators/test_re_callable_operator_mixin.py
index 6b727209e2..8c82b74194 100644
--- a/packages/commons/tests/dsl_interpreter/operators/test_re_callable_operator_mixin.py
+++ b/packages/commons/tests/dsl_interpreter/operators/test_re_callable_operator_mixin.py
@@ -13,8 +13,8 @@
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
-import time
import mock
+import time
import octobot_commons.dsl_interpreter as dsl_interpreter
import octobot_commons.dsl_interpreter.operator_parameter as operator_parameter
@@ -92,6 +92,12 @@ class _TestReCallableOperator(dsl_interpreter.ReCallableOperatorMixin):
def __init__(self):
pass
+ @staticmethod
+ def get_name() -> str:
+ return "test_recallable_minimal"
+
+
+
class _ReCreateScriptTestOperator(dsl_interpreter.Operator, dsl_interpreter.ReCallableOperatorMixin):
"""Operator with parameters so re_create_script can call resove_operator_params."""
diff --git a/packages/commons/tests/dsl_interpreter/operators/test_signalable_operator_mixin.py b/packages/commons/tests/dsl_interpreter/operators/test_signalable_operator_mixin.py
new file mode 100644
index 0000000000..efb152215a
--- /dev/null
+++ b/packages/commons/tests/dsl_interpreter/operators/test_signalable_operator_mixin.py
@@ -0,0 +1,93 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+
+import octobot_commons.dsl_interpreter as dsl_interpreter
+
+
+class _TestSignalOperatorA(dsl_interpreter.SignalableOperatorMixin):
+ def __init__(self, signals=None):
+ super().__init__(signals)
+
+ @staticmethod
+ def get_name() -> str:
+ return "op_signal_a"
+
+
+class _TestSignalOperatorB(dsl_interpreter.SignalableOperatorMixin):
+ def __init__(self, signals=None):
+ super().__init__(signals)
+
+ @staticmethod
+ def get_name() -> str:
+ return "op_signal_b"
+
+
+class TestSignalableOperatorMixin:
+ def test_matches_operator_signal_false_by_default(self):
+ stop = dsl_interpreter.OperatorSignal.STOP.value
+ assert _TestSignalOperatorA().matches_operator_signal(stop) is False
+ assert _TestSignalOperatorB().matches_operator_signal(stop) is False
+ shared_signals = dsl_interpreter.OperatorSignals()
+ assert _TestSignalOperatorA(shared_signals).matches_operator_signal(stop) is False
+ assert _TestSignalOperatorB(shared_signals).matches_operator_signal(stop) is False
+
+ def test_signal_for_one_operator_does_not_match_other(self):
+ stop = dsl_interpreter.OperatorSignal.STOP.value
+ shared_signals = dsl_interpreter.OperatorSignals()
+ op_signal_a = _TestSignalOperatorA(shared_signals)
+ op_signal_b = _TestSignalOperatorB(shared_signals)
+ shared_signals.sync({op_signal_a.get_name(): stop})
+ assert op_signal_a.matches_operator_signal(stop) is True
+ assert op_signal_b.matches_operator_signal(stop) is False
+ shared_signals.sync({})
+ assert op_signal_a.matches_operator_signal(stop) is False
+ assert op_signal_b.matches_operator_signal(stop) is False
+
+ def test_multiple_operator_signals_on_shared_container(self):
+ stop = dsl_interpreter.OperatorSignal.STOP.value
+ update_config = dsl_interpreter.OperatorSignal.UPDATE_CONFIG.value
+ shared_signals = dsl_interpreter.OperatorSignals()
+ op_signal_a = _TestSignalOperatorA(shared_signals)
+ op_signal_b = _TestSignalOperatorB(shared_signals)
+ shared_signals.sync(
+ {
+ op_signal_a.get_name(): stop,
+ op_signal_b.get_name(): update_config,
+ }
+ )
+ assert op_signal_a.matches_operator_signal(stop) is True
+ assert op_signal_a.matches_operator_signal(update_config) is False
+ assert op_signal_b.matches_operator_signal(update_config) is True
+ assert op_signal_b.matches_operator_signal(stop) is False
+
+ def test_should_dispatch_operator_signal_for_result_default_false(self):
+ assert _TestSignalOperatorA.should_dispatch_operator_signal_for_result(
+ dsl_interpreter.OperatorSignal.STOP.value,
+ {},
+ ) is False
+
+ def test_operator_signals_sync_replaces_map(self):
+ stop = dsl_interpreter.OperatorSignal.STOP.value
+ update_config = dsl_interpreter.OperatorSignal.UPDATE_CONFIG.value
+ operator_signals_holder = dsl_interpreter.OperatorSignals()
+ operator_signals_holder.sync({_TestSignalOperatorA.get_name(): stop})
+ assert operator_signals_holder.signal_by_operator == {
+ _TestSignalOperatorA.get_name(): stop
+ }
+ operator_signals_holder.sync({_TestSignalOperatorB.get_name(): update_config})
+ assert operator_signals_holder.signal_by_operator == {
+ _TestSignalOperatorB.get_name(): update_config
+ }
diff --git a/packages/commons/tests/profiles/test_profile_sharing.py b/packages/commons/tests/profiles/test_profile_sharing.py
index f287594815..4529b2b256 100644
--- a/packages/commons/tests/profiles/test_profile_sharing.py
+++ b/packages/commons/tests/profiles/test_profile_sharing.py
@@ -23,6 +23,7 @@
import pytest
import octobot_commons.constants as constants
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
import octobot_commons.errors as commons_errors
import octobot_commons.profiles as profiles
import octobot_commons.profiles.profile_sharing as profile_sharing
@@ -102,13 +103,13 @@ def test_import_install_profile(profile, invalid_profile):
exported_file = f"{export_path}.zip"
spec_tentacles_config = os.path.join(get_profile_path(), "specific_config")
tentacles_config = os.path.join(get_profile_path(), "tentacles_config.json")
- other_profile = os.path.join(constants.USER_PROFILES_FOLDER, "default")
+ other_profile = os.path.join(user_root_folder_provider.get_user_profiles_folder(), "default")
profile_schema = os.path.join(test_config.TEST_CONFIG_FOLDER, "profile_schema.json")
with _cleaned_tentacles(export_path,
exported_file,
tentacles_config,
dir1=other_profile,
- dir2=constants.USER_FOLDER,
+ dir2=user_root_folder_provider.get_user_root_folder(),
dir3=spec_tentacles_config):
# create fake tentacles config
shutil.copy(profile.config_file(), tentacles_config)
@@ -116,7 +117,7 @@ def test_import_install_profile(profile, invalid_profile):
shutil.copy(profile.config_file(), os.path.join(spec_tentacles_config, "t1.json"))
shutil.copy(profile.config_file(), os.path.join(spec_tentacles_config, "t2.json"))
profiles.export_profile(profile, export_path)
- imported_profile_path = os.path.join(constants.USER_PROFILES_FOLDER, "default")
+ imported_profile_path = os.path.join(user_root_folder_provider.get_user_profiles_folder(), "default")
with mock.patch.object(profile_sharing, "_ensure_unique_profile_id", mock.Mock()) \
as _ensure_unique_profile_id_mock:
imported_profile = profiles.import_profile(exported_file, profile_schema, origin_url="plop.wow")
diff --git a/packages/commons/tests/test_process_util.py b/packages/commons/tests/test_process_util.py
new file mode 100644
index 0000000000..eba9fb9393
--- /dev/null
+++ b/packages/commons/tests/test_process_util.py
@@ -0,0 +1,191 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import os
+import subprocess
+import sys
+import time
+
+import mock
+
+import pytest
+
+import octobot_commons.errors as commons_errors
+import octobot_commons.process_util as process_util
+
+
+class TestSpawnManagedSubprocess:
+ def test_popen_called_with_argv_cwd_env(self):
+ fake_handle = mock.Mock(spec=subprocess.Popen)
+ with mock.patch.object(
+ process_util.subprocess,
+ "Popen",
+ return_value=fake_handle,
+ ) as popen_mock:
+ result = process_util.spawn_managed_subprocess(
+ ["x", "y"],
+ working_directory="/work",
+ environment={"K": "V"},
+ hide_console_window=False,
+ )
+ assert result is fake_handle
+ popen_mock.assert_called_once()
+ positional_args, keywords = popen_mock.call_args
+ assert positional_args[0] == ["x", "y"]
+ assert keywords["cwd"] == "/work"
+ assert keywords["env"] == {"K": "V"}
+ assert keywords.get("creationflags", 0) == 0
+
+ def test_uses_os_environ_copy_when_environment_missing(self):
+ with mock.patch.object(process_util.subprocess, "Popen") as popen_mock, mock.patch.dict(
+ os.environ,
+ {"EXISTING_ENV_KEY": "1"},
+ clear=False,
+ ):
+ process_util.spawn_managed_subprocess([], working_directory="/w")
+ _args, keywords = popen_mock.call_args
+ assert keywords["env"]["EXISTING_ENV_KEY"] == "1"
+
+ def test_creationflags_hide_console_on_windows(self):
+ fake_handle = mock.Mock(spec=subprocess.Popen)
+ with mock.patch.object(process_util.sys, "platform", "win32"), mock.patch.object(
+ process_util.subprocess,
+ "Popen",
+ return_value=fake_handle,
+ ) as popen_mock:
+ process_util.spawn_managed_subprocess([], working_directory="/w", hide_console_window=True)
+ _args, keywords = popen_mock.call_args
+ assert keywords["creationflags"] == getattr(
+ subprocess,
+ "CREATE_NO_WINDOW",
+ 0,
+ )
+
+
+class TestPidIsRunning:
+ def test_non_positive_pid_is_false(self):
+ assert process_util.pid_is_running(0) is False
+ assert process_util.pid_is_running(-1) is False
+
+ def test_with_psutil_process_running_true(self):
+ fake_process = mock.Mock()
+ fake_process.status.return_value = process_util.psutil.STATUS_RUNNING
+ fake_process.is_running.return_value = True
+ with mock.patch.object(process_util.psutil, "Process", return_value=fake_process):
+ assert process_util.pid_is_running(42) is True
+ fake_process.is_running.assert_called_once()
+
+ def test_with_psutil_zombie_is_not_running(self):
+ """``is_running()`` can stay True for Linux zombies; we treat them as stopped for lifecycle waits."""
+ fake_process = mock.Mock()
+ fake_process.status.return_value = process_util.psutil.STATUS_ZOMBIE
+ fake_process.is_running.return_value = True
+ with mock.patch.object(process_util.psutil, "Process", return_value=fake_process):
+ assert process_util.pid_is_running(42) is False
+ fake_process.is_running.assert_not_called()
+
+ def test_with_psutil_zombie_process_exception_from_status(self):
+ fake_process = mock.Mock()
+ fake_process.status.side_effect = process_util.psutil.ZombieProcess(42)
+ with mock.patch.object(process_util.psutil, "Process", return_value=fake_process):
+ assert process_util.pid_is_running(42) is False
+
+ def test_with_psutil_no_such_process(self):
+ fake_process_constructor = mock.Mock(
+ side_effect=process_util.psutil.NoSuchProcess(42),
+ )
+ with mock.patch.object(process_util.psutil, "Process", fake_process_constructor):
+ assert process_util.pid_is_running(42) is False
+
+ def test_with_psutil_no_such_process_from_status_after_process_ctor(self):
+ """Process() succeeds but status() raises (race: exited between ctor and probe, common on Windows)."""
+ fake_process = mock.Mock()
+ fake_process.status.side_effect = process_util.psutil.NoSuchProcess(42)
+ with mock.patch.object(process_util.psutil, "Process", return_value=fake_process):
+ assert process_util.pid_is_running(42) is False
+
+ def test_with_psutil_no_such_process_from_is_running(self):
+ fake_process = mock.Mock()
+ fake_process.status.return_value = process_util.psutil.STATUS_RUNNING
+ fake_process.is_running.side_effect = process_util.psutil.NoSuchProcess(42)
+ with mock.patch.object(process_util.psutil, "Process", return_value=fake_process):
+ assert process_util.pid_is_running(42) is False
+
+
+class TestRequestGracefulStopViaSigterm:
+ def test_invalid_pid_raises(self):
+ with pytest.raises(commons_errors.DSLInterpreterError, match="Invalid pid"):
+ process_util.request_graceful_stop_via_sigterm(0)
+
+ def test_raises_when_sigterm_unavailable(self):
+ sentinel_signal_module = mock.Mock()
+ sentinel_signal_module.SIGTERM = None
+ with mock.patch.object(process_util, "signal", sentinel_signal_module):
+ with pytest.raises(commons_errors.DSLInterpreterError, match="SIGTERM is not available"):
+ process_util.request_graceful_stop_via_sigterm(10)
+
+ def test_returns_already_stopped_when_pid_not_running(self):
+ with mock.patch.object(process_util, "pid_is_running", return_value=False):
+ result = process_util.request_graceful_stop_via_sigterm(55)
+ assert result["status"] == "already_stopped"
+
+ def test_os_kill_after_signal_when_process_exits_raises_oserror_returns_already_stopped(self):
+ with (
+ mock.patch.object(process_util, "pid_is_running", side_effect=[True, False]),
+ mock.patch.object(process_util.os, "kill", side_effect=OSError("send failed")),
+ ):
+ result = process_util.request_graceful_stop_via_sigterm(77)
+ assert result["status"] == "already_stopped"
+
+ def test_os_kill_failure_when_still_running_wraps_error(self):
+ with (
+ mock.patch.object(process_util, "pid_is_running", return_value=True),
+ mock.patch.object(process_util.os, "kill", side_effect=OSError("perm denied")),
+ ):
+ with pytest.raises(
+ commons_errors.DSLInterpreterError,
+ match=r"Failed to send stop signal to pid=88",
+ ):
+ process_util.request_graceful_stop_via_sigterm(88)
+
+
+class TestSpawnManagedSubprocessGracefulStopIntegration:
+ def test_spawned_sleeping_child_can_be_stopped_by_request_graceful_stop_via_sigterm(
+ self,
+ tmp_path,
+ ):
+ child = process_util.spawn_managed_subprocess(
+ [
+ sys.executable,
+ "-c",
+ "import time; time.sleep(30)",
+ ],
+ working_directory=str(tmp_path),
+ )
+ try:
+ assert process_util.pid_is_running(child.pid), "child process should be running"
+ result = process_util.request_graceful_stop_via_sigterm(child.pid)
+ assert result["status"] == "stopped"
+ assert result.get("signal") == "sigterm"
+ deadline = time.monotonic() + 25.0
+ while child.poll() is None and time.monotonic() < deadline:
+ time.sleep(0.05)
+ assert child.poll() is not None, "child should exit after graceful stop"
+ child.wait(timeout=5)
+ assert not process_util.pid_is_running(child.pid)
+ finally:
+ if child.poll() is None:
+ child.kill()
+ child.wait(timeout=10)
diff --git a/packages/flow/octobot_flow/entities/accounts/__init__.py b/packages/flow/octobot_flow/entities/accounts/__init__.py
index 3cb525c719..6bae35fc16 100644
--- a/packages/flow/octobot_flow/entities/accounts/__init__.py
+++ b/packages/flow/octobot_flow/entities/accounts/__init__.py
@@ -2,6 +2,7 @@
from octobot_flow.entities.accounts.exchange_account_elements import ExchangeAccountElements
from octobot_flow.entities.accounts.exchange_account_details import ExchangeAccountDetails, ExchangeAccountPortfolio
from octobot_flow.entities.accounts.account_elements import AccountElements
+from octobot_flow.entities.accounts.process_bot_state import Metadata, ProcessBotState
__all__ = [
"PortfolioAssetHolding",
@@ -9,4 +10,6 @@
"ExchangeAccountDetails",
"ExchangeAccountPortfolio",
"AccountElements",
+ "Metadata",
+ "ProcessBotState",
]
diff --git a/packages/flow/octobot_flow/entities/accounts/account_elements.py b/packages/flow/octobot_flow/entities/accounts/account_elements.py
index 78755cf396..8ba9e54ae9 100644
--- a/packages/flow/octobot_flow/entities/accounts/account_elements.py
+++ b/packages/flow/octobot_flow/entities/accounts/account_elements.py
@@ -26,16 +26,27 @@ def sync_from_transactions(self, transactions: list[dict]) -> list[octobot_flow.
if self._sync_transactions(transactions):
changed_elements.append(octobot_flow.enums.ChangedElements.TRANSACTIONS)
return changed_elements
-
- def _sync_transactions(self, transactions: list[dict]) -> bool:
- previous_transactions_ids = {
- transaction[octobot_trading.enums.ExchangeConstantsTransactionColumns.TXID.value]
+
+ def append_new_transactions_deduped(self, transactions: list[dict]) -> bool:
+ tx_id_key = octobot_trading.enums.ExchangeConstantsTransactionColumns.TXID.value
+ known_txids = {
+ transaction[tx_id_key]
for transaction in self.transactions
+ if tx_id_key in transaction
}
- added_transactions = [
- transaction
- for transaction in transactions
- if transaction[octobot_trading.enums.ExchangeConstantsTransactionColumns.TXID.value] not in previous_transactions_ids
- ]
- self.transactions.extend(added_transactions)
- return bool(added_transactions)
+ added = False
+ for transaction in transactions:
+ tx_id = transaction.get(tx_id_key)
+ if tx_id is None or tx_id in known_txids:
+ continue
+ known_txids.add(tx_id)
+ self.transactions.append(dict(transaction))
+ added = True
+ return added
+
+ def merge_transactions_from_account_elements(self, other: "AccountElements") -> bool:
+ """Append transactions from ``other`` excluding tx ids already on ``self``."""
+ return self.append_new_transactions_deduped(other.transactions)
+
+ def _sync_transactions(self, transactions: list[dict]) -> bool:
+ return self.append_new_transactions_deduped(transactions)
diff --git a/packages/flow/octobot_flow/entities/accounts/exchange_account_elements.py b/packages/flow/octobot_flow/entities/accounts/exchange_account_elements.py
index 3f189e0f02..7d21a9fa79 100644
--- a/packages/flow/octobot_flow/entities/accounts/exchange_account_elements.py
+++ b/packages/flow/octobot_flow/entities/accounts/exchange_account_elements.py
@@ -63,6 +63,80 @@ def sync_from_exchange_manager(
changed_elements.append(octobot_flow.enums.ChangedElements.TRADES)
return changed_elements
+ def append_new_trades_deduped(self, trades: list[dict]) -> bool:
+ exchange_trade_id_key = octobot_trading.enums.ExchangeConstantsOrderColumns.EXCHANGE_TRADE_ID.value
+ exchange_order_exchange_id_key = octobot_trading.enums.ExchangeConstantsOrderColumns.EXCHANGE_ID.value
+
+ def _identity_key(trade: dict) -> typing.Optional[tuple[str, typing.Hashable]]:
+ exchange_trade_id = trade.get(exchange_trade_id_key)
+ if exchange_trade_id is not None:
+ return exchange_trade_id_key, exchange_trade_id
+ exchange_order_exchange_id = trade.get(exchange_order_exchange_id_key)
+ if exchange_order_exchange_id is not None:
+ return exchange_order_exchange_id_key, exchange_order_exchange_id
+ return None
+
+ known_trade_keys: set[tuple[str, typing.Hashable]] = set()
+ for trade in self.trades:
+ key = _identity_key(trade)
+ if key is not None:
+ known_trade_keys.add(key)
+ added = False
+ for trade in trades:
+ key = _identity_key(trade)
+ if key is None or key in known_trade_keys:
+ continue
+ known_trade_keys.add(key)
+ self.trades.append(dict(trade))
+ added = True
+ return added
+
+ def merge_trades_from_exchange_account_elements(
+ self,
+ other: "ExchangeAccountElements",
+ ) -> bool:
+ """Append trades from ``other`` excluding exchange trade ids already on ``self``."""
+ return self.append_new_trades_deduped(other.trades)
+
+ def merge_synchronized_snapshots(
+ self,
+ snapshots: list["ExchangeAccountElements"],
+ ) -> list[octobot_flow.enums.ChangedElements]:
+ """
+ Merge ordered external snapshots into this account: upsert trades/transactions by stable id,
+ then replace orders/portfolio/positions (and name when set) from the last snapshot.
+ """
+ if not snapshots:
+ return []
+ trades_changed = False
+ transactions_changed = False
+ for snapshot in snapshots:
+ if self.merge_trades_from_exchange_account_elements(snapshot):
+ trades_changed = True
+ if self.merge_transactions_from_account_elements(snapshot):
+ transactions_changed = True
+ last_snapshot = snapshots[-1]
+ orders_changed = self.orders != last_snapshot.orders
+ portfolio_changed = self.portfolio != last_snapshot.portfolio
+ positions_changed = self.positions != last_snapshot.positions
+ self.orders = last_snapshot.orders
+ self.portfolio = last_snapshot.portfolio
+ self.positions = list(last_snapshot.positions)
+ if last_snapshot.name is not None:
+ self.name = last_snapshot.name
+ changed: list[octobot_flow.enums.ChangedElements] = []
+ if trades_changed:
+ changed.append(octobot_flow.enums.ChangedElements.TRADES)
+ if transactions_changed:
+ changed.append(octobot_flow.enums.ChangedElements.TRANSACTIONS)
+ if orders_changed:
+ changed.append(octobot_flow.enums.ChangedElements.ORDERS)
+ if portfolio_changed:
+ changed.append(octobot_flow.enums.ChangedElements.PORTFOLIO)
+ if positions_changed:
+ changed.append(octobot_flow.enums.ChangedElements.POSITIONS)
+ return changed
+
def sync_orders_from_exchange_manager(self, exchange_manager: octobot_trading.exchanges.ExchangeManager) -> bool:
previous_orders = self.orders
updated_open_orders_exchange_ids = set()
@@ -108,14 +182,5 @@ def sync_positions_from_exchange_manager(self, exchange_manager: octobot_trading
def _sync_trades_from_exchange_manager(self, exchange_manager: octobot_trading.exchanges.ExchangeManager) -> bool:
previous_trades_count = len(self.trades)
if update_trades := octobot_trading.api.get_trade_history(exchange_manager, as_dict=True):
- current_trade_ids = {
- trade[octobot_trading.enums.ExchangeConstantsOrderColumns.EXCHANGE_TRADE_ID.value]
- for trade in self.trades
- }
- if new_trades := tuple(
- trade
- for trade in update_trades
- if trade[octobot_trading.enums.ExchangeConstantsOrderColumns.EXCHANGE_TRADE_ID.value] not in current_trade_ids
- ):
- self.trades.extend(new_trades)
+ self.append_new_trades_deduped(update_trades)
return previous_trades_count != len(self.trades)
diff --git a/packages/flow/octobot_flow/entities/accounts/process_bot_state.py b/packages/flow/octobot_flow/entities/accounts/process_bot_state.py
new file mode 100644
index 0000000000..8128b1716a
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/accounts/process_bot_state.py
@@ -0,0 +1,41 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+import dataclasses
+
+import octobot_commons.dataclasses
+
+import octobot_flow.entities.accounts.exchange_account_elements as exchange_account_elements_import
+
+
+@dataclasses.dataclass
+class Metadata(octobot_commons.dataclasses.MinimizableDataclass):
+ """
+ Timestamps written with process bot state dumps; used for file-based liveness checks.
+ """
+
+ updated_at: float = 0.0
+ next_updated_at: float = 0.0
+
+
+@dataclasses.dataclass
+class ProcessBotState(octobot_commons.dataclasses.MinimizableDataclass):
+ """
+ Serialized JSON next to the user config when --dump-state is enabled. Liveness is driven by
+ metadata.updated_at / metadata.next_updated_at only. exchange_account_elements is a single
+ snapshot for the dumped trading exchange (see process_bot_state_dumper).
+ """
+
+ metadata: Metadata = dataclasses.field(default_factory=Metadata)
+ exchange_account_elements: exchange_account_elements_import.ExchangeAccountElements = (
+ dataclasses.field(default_factory=exchange_account_elements_import.ExchangeAccountElements)
+ )
+
+ def __post_init__(self) -> None:
+ if isinstance(self.metadata, dict):
+ self.metadata = Metadata.from_dict(self.metadata)
+ if isinstance(self.exchange_account_elements, dict):
+ self.exchange_account_elements = (
+ exchange_account_elements_import.ExchangeAccountElements.from_dict(
+ self.exchange_account_elements
+ )
+ )
diff --git a/packages/flow/octobot_flow/entities/automations/automation_state.py b/packages/flow/octobot_flow/entities/automations/automation_state.py
index b86fd2f00c..0edf7860ba 100644
--- a/packages/flow/octobot_flow/entities/automations/automation_state.py
+++ b/packages/flow/octobot_flow/entities/automations/automation_state.py
@@ -92,3 +92,10 @@ def __post_init__(self):
self.automation = automation_details_import.AutomationDetails.from_dict(self.automation)
if self.exchange_account_details and isinstance(self.exchange_account_details, dict):
self.exchange_account_details = exchange_account_details_import.ExchangeAccountDetails.from_dict(self.exchange_account_details)
+ if self.priority_actions:
+ self.priority_actions = [
+ action_details_import.parse_action_details(action)
+ if isinstance(action, dict)
+ else action
+ for action in self.priority_actions
+ ]
diff --git a/packages/flow/octobot_flow/entities/automations/post_iteration_actions_details.py b/packages/flow/octobot_flow/entities/automations/post_iteration_actions_details.py
index 6e02669101..a5a3fd6400 100644
--- a/packages/flow/octobot_flow/entities/automations/post_iteration_actions_details.py
+++ b/packages/flow/octobot_flow/entities/automations/post_iteration_actions_details.py
@@ -24,6 +24,8 @@ class NextIterationDetails(octobot_commons.dataclasses.FlexibleDataclass):
@dataclasses.dataclass
class PostIterationActionsDetails(octobot_commons.dataclasses.MinimizableDataclass):
stop_automation: bool = False
+ configuration_update: typing.Optional[str] = None
+ updated_exchange_account_elements: typing.Optional[dict] = None
postpone_execution: bool = False
postpone_reason: typing.Optional[str] = None
raisable_error: typing.Optional[str] = None
diff --git a/packages/flow/octobot_flow/errors.py b/packages/flow/octobot_flow/errors.py
index e616ea9e6d..82644daeb8 100644
--- a/packages/flow/octobot_flow/errors.py
+++ b/packages/flow/octobot_flow/errors.py
@@ -77,6 +77,10 @@ class AutomationDAGResetError(AutomationActionError):
"""raise when a DAG reset fails"""
+class SynchronizedExchangeConflictError(AutomationActionError):
+ """raise when external exchange snapshots and a live exchange manager would both apply in one pass"""
+
+
class ActionDependencyNotFoundError(ActionDependencyError):
"""raise when an action dependency is not found"""
diff --git a/packages/flow/octobot_flow/jobs/automation_job.py b/packages/flow/octobot_flow/jobs/automation_job.py
index 97bee6adbf..47a1a04b64 100644
--- a/packages/flow/octobot_flow/jobs/automation_job.py
+++ b/packages/flow/octobot_flow/jobs/automation_job.py
@@ -76,13 +76,10 @@ async def run(self) -> list[octobot_flow.entities.AbstractActionDetails]:
# fetch the actions and signals if any
await self._fetch_actions(maybe_authenticator)
# resolve the DSL scripts in case it has dependencies on other actions
- self._resolve_dsl_scripts(
- self.automation_state.automation.actions_dag.get_executable_actions(),
- True
- )
+ self._resolve_dsl_scripts(to_execute_actions, True)
# fetch the dependencies of the automation environment
fetched_dependencies = await self._fetch_dependencies(
- maybe_community_repository, to_execute_actions
+ maybe_community_repository, to_execute_actions,
)
# Align on the previous scheduled time when possible when running priority actions
# to keep sleep cycles consistency when a priority action is processed.
@@ -218,6 +215,18 @@ async def _fetch_dependencies(
self.automation_state.automation.metadata.automation_id,
set()
)
+ dag_actions = self.automation_state.automation.actions_dag.get_executable_actions()
+        # evaluate are_all_actions_process_bound_only against the DAG's executable actions only
+ if octobot_flow.logic.dsl.are_all_actions_process_bound_only(
+ minimal_profile_data, dag_actions
+ ):
+ self._logger.info(
+ "Skipping copy-trading and exchange dependency initialization (process-bound DSL actions only)."
+ )
+ return octobot_flow.entities.FetchedDependencies(
+ fetched_exchange_data=None,
+ fetched_copy_trading_data=None,
+ )
if fetched_copy_trading_data := await self._init_all_required_copy_trading_data(
maybe_community_repository, to_execute_actions, minimal_profile_data,
):
diff --git a/packages/flow/octobot_flow/jobs/automation_runner_job.py b/packages/flow/octobot_flow/jobs/automation_runner_job.py
index a27273e72c..c9d6e36bce 100644
--- a/packages/flow/octobot_flow/jobs/automation_runner_job.py
+++ b/packages/flow/octobot_flow/jobs/automation_runner_job.py
@@ -115,7 +115,8 @@ def init_predictive_orders_exchange_data(self, exchange_data: exchange_data_impo
exchange_account_elements = self.automation_state.automation.exchange_account_elements
if exchange_account_elements is None:
return
- exchange_data.markets = self.fetched_dependencies.fetched_exchange_data.public_data.markets
+ if self.fetched_dependencies.fetched_exchange_data:
+ exchange_data.markets = self.fetched_dependencies.fetched_exchange_data.public_data.markets
exchange_data.portfolio_details.content = exchange_account_elements.portfolio.content
exchange_data.orders_details.open_orders = exchange_account_elements.orders.open_orders
exchange_data.trades = exchange_account_elements.trades
diff --git a/packages/flow/octobot_flow/logic/actions/actions_executor.py b/packages/flow/octobot_flow/logic/actions/actions_executor.py
index 4be98beebd..fa4c15a056 100644
--- a/packages/flow/octobot_flow/logic/actions/actions_executor.py
+++ b/packages/flow/octobot_flow/logic/actions/actions_executor.py
@@ -1,8 +1,8 @@
import typing
import octobot_commons.logging
-import octobot_commons.dsl_interpreter
import octobot_commons.profiles
+import octobot_commons.dsl_interpreter
import octobot_trading.exchanges
import octobot.community
@@ -10,7 +10,7 @@
import octobot_flow.entities
import octobot_flow.repositories.community
import octobot_flow.logic.dsl
-import octobot_flow.enums
+import octobot_flow.enums as octobot_flow_enums_import
import octobot_flow.errors
import tentacles.Meta.DSL_operators.exchange_operators as exchange_operators
@@ -27,7 +27,7 @@ def __init__(
actions: list[octobot_flow.entities.AbstractActionDetails],
update_execution_details: bool,
):
- self.changed_elements: list[octobot_flow.enums.ChangedElements] = []
+ self.changed_elements: list[octobot_flow_enums_import.ChangedElements] = []
self.next_execution_scheduled_to: float = 0
self._maybe_community_repository: typing.Optional[
@@ -48,14 +48,17 @@ async def execute(self):
if self._exchange_manager:
await octobot_trading.exchanges.create_exchange_channels(self._exchange_manager)
recall_dag_details: typing.Optional[octobot_commons.dsl_interpreter.ReCallingOperatorResult] = None
+ synchronized_exchange_account_elements: list[octobot_flow.entities.ExchangeAccountElements] = []
async with dsl_executor.dependencies_context(self._actions):
for index, action in enumerate(self._actions):
await self._execute_action(dsl_executor, action)
if self._update_execution_details:
- recall_dag_details, should_stop_processing = self._handle_execution_result(action, index)
+ recall_dag_details, should_stop_processing = await self._handle_execution_result(
+ dsl_executor, action, index, synchronized_exchange_account_elements
+ )
if should_stop_processing:
break
- self._sync_after_execution()
+ self._sync_after_execution(synchronized_exchange_account_elements)
if self._update_execution_details:
await self._update_actions_history()
await self._insert_execution_bot_logs(dsl_executor.pending_bot_logs)
@@ -69,55 +72,243 @@ async def execute(self):
# no reset: schedule immediately
self.next_execution_scheduled_to = 0
- def _handle_execution_result(
- self, action: octobot_flow.entities.AbstractActionDetails, index: int
+ async def _handle_execution_result(
+ self,
+ dsl_executor: "octobot_flow.logic.dsl.DSLExecutor",
+ action: octobot_flow.entities.AbstractActionDetails,
+ index: int,
+ synchronized_exchange_account_elements: list[octobot_flow.entities.ExchangeAccountElements],
) -> tuple[typing.Optional[octobot_commons.dsl_interpreter.ReCallingOperatorResult], bool]:
if not isinstance(action.result, dict):
return None, False
- if octobot_flow.entities.PostIterationActionsDetails.__name__ in action.result:
- post_iteration_actions_details = octobot_flow.entities.PostIterationActionsDetails.from_dict(
- action.result[octobot_flow.entities.PostIterationActionsDetails.__name__]
- )
- if post_iteration_actions_details.stop_automation:
- self._get_logger().info(f"Stopping automation: {self._automation.metadata.automation_id}")
- self._automation.post_actions.stop_automation = True
- # todo cancel open orders and sell assets if required in action config
- return None, True
+ if post_iteration_actions_details := self._get_post_iteration_details(action.result):
+ if early_return := await self._execute_post_iteration_actions(
+ dsl_executor,
+ post_iteration_actions_details,
+ synchronized_exchange_account_elements,
+ ):
+ return early_return
+ return self._create_recall_dag_details_if_necessary(
+ action.id, action.result, index, self._actions
+ )
+
+ def _get_post_iteration_details(
+ self,
+ action_result: dict,
+ ) -> typing.Optional[octobot_flow.entities.PostIterationActionsDetails]:
+ """
+ If ``PostIterationActionsDetails`` is present on ``action_result`` or inside the recall
+ wrapper's ``last_execution_result``, return the dict that contains that key; else None.
+ """
+ post_iter_name = octobot_flow.entities.PostIterationActionsDetails.__name__
+ post_iteration_source: typing.Optional[dict] = None
+ if post_iter_name in action_result:
+ post_iteration_source = action_result
+ elif octobot_commons.dsl_interpreter.ReCallingOperatorResult.is_re_calling_operator_result(
+ action_result
+ ):
+ recall_wrapper = octobot_commons.dsl_interpreter.ReCallingOperatorResult.from_dict(
+ action_result[octobot_commons.dsl_interpreter.ReCallingOperatorResult.__name__]
+ )
+ inner_last = recall_wrapper.last_execution_result
+ if isinstance(inner_last, dict) and post_iter_name in inner_last:
+ post_iteration_source = inner_last
+ return octobot_flow.entities.PostIterationActionsDetails.from_dict(
+ post_iteration_source[post_iter_name]
+ ) if post_iteration_source else None
+
+ async def _execute_post_iteration_actions(
+ self,
+ dsl_executor: "octobot_flow.logic.dsl.DSLExecutor",
+ post_iteration_actions_details: octobot_flow.entities.PostIterationActionsDetails,
+ synchronized_exchange_account_elements: list[octobot_flow.entities.ExchangeAccountElements],
+ ) -> typing.Optional[
+ tuple[
+ typing.Optional[octobot_commons.dsl_interpreter.ReCallingOperatorResult],
+ bool,
+ ]
+ ]:
+ if post_iteration_actions_details.stop_automation:
+ self._get_logger().info(f"Stopping automation: {self._automation.metadata.automation_id}")
+ self._automation.post_actions.stop_automation = True
+ # todo cancel open orders and sell assets if required in action config
+ await self._await_recallable_operator_signal(
+ dsl_executor,
+ octobot_commons.dsl_interpreter.OperatorSignal.STOP.value,
+ )
+ return None, True
+ if post_iteration_actions_details.configuration_update is not None:
+ if not post_iteration_actions_details.configuration_update:
+ raise octobot_flow.errors.AutomationActionError(
+ "configuration_update must be a non-empty DSL string."
+ )
+ self._get_logger().info(
+ "Automation configuration update requested for automation: %s",
+ self._automation.metadata.automation_id,
+ )
+ executable_actions = self._automation.actions_dag.get_executable_actions()
+ if len(executable_actions) != 1:
+ raise octobot_flow.errors.AutomationActionError(
+ "update_automation_configuration requires exactly one executable DAG action; "
+ f"found {len(executable_actions)}: {[a.id for a in executable_actions]}"
+ )
+ target_action = executable_actions[0]
+ if not isinstance(target_action, octobot_flow.entities.DSLScriptActionDetails):
+ raise octobot_flow.errors.AutomationActionError(
+ "update_automation_configuration requires a DSL script action; "
+ f"got {type(target_action).__name__} for action {target_action.id!r}."
+ )
+ target_action.dsl_script = post_iteration_actions_details.configuration_update
+ executed_dag_action, dag_action_result, dag_action_index = await self._await_recallable_operator_signal(
+ dsl_executor,
+ octobot_commons.dsl_interpreter.OperatorSignal.UPDATE_CONFIG.value,
+ )
+ if executed_dag_action is None or dag_action_result is None:
+ raise octobot_flow.errors.AutomationActionError(
+ "update_automation_configuration did not receive a result from the signaled DAG action."
+ )
+ if not dag_action_result.succeeded():
+ raise octobot_flow.errors.AutomationActionError(
+ f"update_automation_configuration failed: {dag_action_result.error}"
+ )
+ return self._create_recall_dag_details_if_necessary(
+ executed_dag_action.id,
+ dag_action_result.result,
+ dag_action_index,
+ self._automation.actions_dag.actions,
+ )
+ if post_iteration_actions_details.updated_exchange_account_elements is not None:
+ synchronized_exchange_account_elements.append(
+ octobot_flow.entities.ExchangeAccountElements.from_dict(
+ post_iteration_actions_details.updated_exchange_account_elements
+ )
+ )
+ # return via the default path, build recall DAG details if necessary
+ return None
+
+ def _create_recall_dag_details_if_necessary(
+ self,
+ action_id: str,
+ action_result: typing.Optional[typing.Any],
+ action_index: int,
+ actions: list[octobot_flow.entities.AbstractActionDetails],
+ ) -> tuple[typing.Optional[octobot_commons.dsl_interpreter.ReCallingOperatorResult], bool]:
+ """
+ Create recall DAG details if necessary.
+ returns:
+ - recall_dag_details: the recall DAG details if necessary (or None)
+ - reset_to_other_action: True if the reset to other action is necessary, False otherwise
+ """
+ if not octobot_commons.dsl_interpreter.ReCallingOperatorResult.is_re_calling_operator_result(action_result):
return None, False
- if octobot_commons.dsl_interpreter.ReCallingOperatorResult.is_re_calling_operator_result(action.result):
- recall_dag_details = octobot_commons.dsl_interpreter.ReCallingOperatorResult.from_dict(
- action.result[octobot_commons.dsl_interpreter.ReCallingOperatorResult.__name__]
- )
- if not recall_dag_details.reset_to_id:
- # reset to the current action if no specific id is provided (loop on this action)
- recall_dag_details.reset_to_id = action.id
- if recall_dag_details.reset_to_id == action.id:
- # Keep executing other selected actions if any: those are not affected by the reset
- # as they don't depend on the reset action
- return recall_dag_details, False
- # Reset to a past action: interrupt execution of the following actions
- # as they might depend on the reset action
- if index < len(self._actions) - 1:
- interrupted_action = self._actions[index + 1: ]
- self._get_logger().info(
- f"DAG reset required. Interrupting execution of "
- f"{len(interrupted_action)} actions: "
- f"{', '.join([action.id for action in interrupted_action])}"
+ recall_dag_details = octobot_commons.dsl_interpreter.ReCallingOperatorResult.from_dict(
+ action_result[octobot_commons.dsl_interpreter.ReCallingOperatorResult.__name__] # type: ignore
+ )
+ if not recall_dag_details.reset_to_id:
+ # reset to the current action if no specific id is provided (loop on this action)
+ recall_dag_details.reset_to_id = action_id
+ if recall_dag_details.reset_to_id == action_id:
+ # Keep executing other selected actions if any: those are not affected by the reset
+ # as they don't depend on the reset action
+ return recall_dag_details, False
+
+ # Reset to a past action: interrupt execution of the following actions
+ # as they might depend on the reset action
+ if action_index < len(actions) - 1:
+ interrupted_action = actions[action_index + 1:]
+ self._get_logger().info(
+ f"DAG reset required. Interrupting execution of "
+ f"{len(interrupted_action)} actions: "
+ f"{', '.join([action.id for action in interrupted_action])}"
+ )
+ return recall_dag_details, True
+
+ @staticmethod
+ def _re_calling_payload_for_execution_stop(
+ action: octobot_flow.entities.DSLScriptActionDetails,
+ ) -> typing.Optional[dict]:
+ for candidate in (action.previous_execution_result, action.result):
+ if not candidate or not isinstance(candidate, dict):
+ continue
+ if octobot_commons.dsl_interpreter.ReCallingOperatorResult.is_re_calling_operator_result(
+ candidate
+ ):
+ return candidate
+ return None
+
+ async def _await_recallable_operator_signal(
+ self,
+ dsl_executor: "octobot_flow.logic.dsl.DSLExecutor",
+ signal: str,
+ ) -> tuple[typing.Optional[octobot_flow.entities.DSLScriptActionDetails], typing.Optional[octobot_commons.dsl_interpreter.DSLCallResult], int]:
+ self._automation.actions_dag.resolve_dsl_scripts(self._automation.actions_dag.actions)
+ operators_by_name: typing.Optional[
+ dict[str, typing.Type[octobot_commons.dsl_interpreter.Operator]]
+ ] = None
+ for index, dag_action in enumerate(self._automation.actions_dag.actions):
+ if not isinstance(dag_action, octobot_flow.entities.DSLScriptActionDetails):
+ continue
+ re_payload = self._re_calling_payload_for_execution_stop(dag_action)
+ if re_payload is None or not isinstance(re_payload, dict):
+ continue
+ try:
+ keyword = octobot_commons.dsl_interpreter.ReCallingOperatorResult.get_keyword(
+ re_payload
)
- return recall_dag_details, True
- return None, False
+ except KeyError:
+ continue
+ if keyword is None:
+ continue
+ if operators_by_name is None:
+ operators_by_name = {
+ operator_class.get_name(): operator_class
+ for operator_class in dsl_executor.get_flow_operator_classes()
+ }
+ operator_class = operators_by_name.get(keyword)
+ if operator_class is None or not issubclass(
+ operator_class,
+ octobot_commons.dsl_interpreter.SignalableOperatorMixin,
+ ):
+ continue
+ if not operator_class.should_dispatch_operator_signal_for_result(
+ signal,
+ re_payload,
+ ):
+ continue
+ dag_action_result = await self._execute_signaled_action(
+ dsl_executor, dag_action, operator_class, signal
+ )
+ return dag_action, dag_action_result, index
+ return None, None, -1
async def _execute_action(
self,
dsl_executor: "octobot_flow.logic.dsl.DSLExecutor",
action: octobot_flow.entities.AbstractActionDetails
- ):
+ ) -> octobot_commons.dsl_interpreter.DSLCallResult:
if isinstance(action, octobot_flow.entities.DSLScriptActionDetails):
return await dsl_executor.execute_action(action)
raise octobot_flow.errors.UnsupportedActionTypeError(
f"{self.__class__.__name__} does not support action type: {type(action)}"
) from None
+ async def _execute_signaled_action(
+ self,
+ dsl_executor: "octobot_flow.logic.dsl.DSLExecutor",
+ action: octobot_flow.entities.AbstractActionDetails,
+ operator_class: typing.Type[octobot_commons.dsl_interpreter.SignalableOperatorMixin],
+ signal: str,
+ ) -> octobot_commons.dsl_interpreter.DSLCallResult:
+ return await dsl_executor.execute_action(
+ action,
+ operator_signals=[
+ (
+ operator_class,
+ signal,
+ ),
+ ],
+ )
+
def _reset_dag_to(
self, recall_dag_details: octobot_commons.dsl_interpreter.ReCallingOperatorResult
):
@@ -152,9 +343,27 @@ async def _insert_execution_bot_logs(self, log_data: list[octobot.community.BotL
"No available community repository: bot logs upload is skipped"
)
- def _sync_after_execution(self):
+ def _sync_after_execution(
+ self,
+ synchronized_exchange_account_elements: list[octobot_flow.entities.ExchangeAccountElements],
+ ):
+ if synchronized_exchange_account_elements:
+ self._get_logger().info(
+ f"Exchange account elements are being updated from {len(synchronized_exchange_account_elements)}"
+ f"synchronized exchange account elements on {[s.name for s in synchronized_exchange_account_elements]}"
+ f"returned by actions; this iteration does not apply sync_from_exchange_manager from the "
+ f"local exchange_manager.",
+ )
+ if self._automation.exchange_account_elements is None:
+ self._automation.exchange_account_elements = octobot_flow.entities.ExchangeAccountElements()
+ self.changed_elements = self._automation.exchange_account_elements.merge_synchronized_snapshots(
+ synchronized_exchange_account_elements
+ )
+ return
if exchange_account_elements := self._automation.exchange_account_elements:
- new_transactions = self._get_new_transactions_from_actions_results(exchange_account_elements)
+ new_transactions = self._get_new_transactions_from_actions_results(
+ exchange_account_elements
+ )
self._sync_exchange_account_elements(exchange_account_elements, new_transactions)
def _get_new_transactions_from_actions_results(
@@ -178,7 +387,9 @@ def _sync_exchange_account_elements(
new_transactions: list[dict],
):
if self._exchange_manager or new_transactions:
- self.changed_elements = exchange_account_elements.sync_from_exchange_manager(self._exchange_manager, new_transactions)
+ self.changed_elements = exchange_account_elements.sync_from_exchange_manager(
+ self._exchange_manager, new_transactions
+ )
def _get_logger(self) -> octobot_commons.logging.BotLogger:
return octobot_commons.logging.get_logger(self.__class__.__name__)
diff --git a/packages/flow/octobot_flow/logic/dsl/__init__.py b/packages/flow/octobot_flow/logic/dsl/__init__.py
index 04a014f694..6323e931a0 100644
--- a/packages/flow/octobot_flow/logic/dsl/__init__.py
+++ b/packages/flow/octobot_flow/logic/dsl/__init__.py
@@ -3,11 +3,14 @@
get_actions_time_frames_dependencies,
get_copy_trading_dependencies,
)
+from octobot_flow.logic.dsl.dsl_actions_util import (
+ are_all_actions_process_bound_only,
+)
from octobot_flow.logic.dsl.dsl_executor import DSLExecutor
-
from octobot_flow.logic.dsl.dsl_action_execution_context import dsl_action_execution
__all__ = [
+ "are_all_actions_process_bound_only",
"get_actions_symbol_dependencies",
"get_actions_time_frames_dependencies",
"get_copy_trading_dependencies",
diff --git a/packages/flow/octobot_flow/logic/dsl/dsl_action_execution_context.py b/packages/flow/octobot_flow/logic/dsl/dsl_action_execution_context.py
index d85abcb285..2268ace2c2 100644
--- a/packages/flow/octobot_flow/logic/dsl/dsl_action_execution_context.py
+++ b/packages/flow/octobot_flow/logic/dsl/dsl_action_execution_context.py
@@ -9,39 +9,73 @@
import octobot_flow.enums
def _dsl_action_error_call_result(
    action: octobot_flow.entities.DSLScriptActionDetails,
    error_status,
) -> octobot_commons.dsl_interpreter.DSLCallResult:
    """Complete *action* with *error_status* and build the matching error DSLCallResult."""
    action.complete(error_status=error_status)
    resolved_statement = action.get_resolved_dsl_script()
    return octobot_commons.dsl_interpreter.DSLCallResult(
        statement=resolved_statement,
        error=error_status,
    )
+
+
def dsl_action_execution(func):
async def _action_execution_error_handler_wrapper(
- self, action: octobot_flow.entities.DSLScriptActionDetails
+ self, action: octobot_flow.entities.DSLScriptActionDetails, **kwargs
):
"""
Handle the error of the DSL script execution.
action.result should only be a value of octobot_flow.enums.ActionErrorStatus.
"""
try:
- call_result: octobot_commons.dsl_interpreter.DSLCallResult = await func(self, action)
+ call_result: octobot_commons.dsl_interpreter.DSLCallResult = await func(
+ self, action, **kwargs
+ )
if call_result.succeeded():
action.complete(result=call_result.result)
else:
action.complete(error_status=call_result.error)
- except octobot_trading.errors.DisabledFundsTransferError as err:
- action.complete(error_status=octobot_flow.enums.ActionErrorStatus.DISABLED_FUNDS_TRANSFER_ERROR.value)
+ return call_result
+ except octobot_trading.errors.DisabledFundsTransferError:
+ return _dsl_action_error_call_result(
+ action,
+ octobot_flow.enums.ActionErrorStatus.DISABLED_FUNDS_TRANSFER_ERROR.value,
+ )
except octobot_trading.errors.MissingMinimalExchangeTradeVolume as err:
octobot_commons.logging.get_logger("action_execution").exception(err, True, f"Missing minimal exchange trade volume error: {err}")
- action.complete(error_status=octobot_flow.enums.ActionErrorStatus.INVALID_ORDER.value)
- except (octobot_trading.errors.UnsupportedHedgeContractError, octobot_trading.errors.InvalidPositionSide) as err:
- action.complete(error_status=octobot_flow.enums.ActionErrorStatus.UNSUPPORTED_HEDGE_POSITION.value)
- except octobot_trading.errors.ExchangeAccountSymbolPermissionError as err:
- action.complete(error_status=octobot_flow.enums.ActionErrorStatus.SYMBOL_INCOMPATIBLE_WITH_ACCOUNT.value)
- except octobot_commons.errors.InvalidParameterFormatError as err:
- action.complete(error_status=octobot_flow.enums.ActionErrorStatus.INVALID_SIGNAL_FORMAT.value)
+ return _dsl_action_error_call_result(
+ action,
+ octobot_flow.enums.ActionErrorStatus.INVALID_ORDER.value,
+ )
+ except (octobot_trading.errors.UnsupportedHedgeContractError, octobot_trading.errors.InvalidPositionSide):
+ return _dsl_action_error_call_result(
+ action,
+ octobot_flow.enums.ActionErrorStatus.UNSUPPORTED_HEDGE_POSITION.value,
+ )
+ except octobot_trading.errors.ExchangeAccountSymbolPermissionError:
+ return _dsl_action_error_call_result(
+ action,
+ octobot_flow.enums.ActionErrorStatus.SYMBOL_INCOMPATIBLE_WITH_ACCOUNT.value,
+ )
+ except octobot_commons.errors.InvalidParameterFormatError:
+ return _dsl_action_error_call_result(
+ action,
+ octobot_flow.enums.ActionErrorStatus.INVALID_SIGNAL_FORMAT.value,
+ )
except octobot_trading.errors.NotSupportedOrderTypeError as err:
- if err.order_type == octobot_trading.enums.TraderOrderType.STOP_LOSS:
- action.complete(error_status=octobot_flow.enums.ActionErrorStatus.UNSUPPORTED_STOP_ORDER.value)
- else:
- action.complete(error_status=octobot_flow.enums.ActionErrorStatus.INVALID_ORDER.value)
+ error_status_value = (
+ octobot_flow.enums.ActionErrorStatus.UNSUPPORTED_STOP_ORDER.value
+ if err.order_type == octobot_trading.enums.TraderOrderType.STOP_LOSS
+ else octobot_flow.enums.ActionErrorStatus.INVALID_ORDER.value
+ )
+ return _dsl_action_error_call_result(action, error_status_value)
except octobot_trading.errors.BlockchainWalletError as err:
octobot_commons.logging.get_logger("action_execution").exception(err, True, f"Blockchain wallet error: {err}")
- action.complete(error_status=octobot_flow.enums.ActionErrorStatus.BLOCKCHAIN_WALLET_ERROR.value)
+ return _dsl_action_error_call_result(
+ action,
+ octobot_flow.enums.ActionErrorStatus.BLOCKCHAIN_WALLET_ERROR.value,
+ )
except Exception as err:
octobot_commons.logging.get_logger("action_execution").exception(
err,
@@ -49,5 +83,8 @@ async def _action_execution_error_handler_wrapper(
f"Failed to interpret DSL script '{action.get_summary(not octobot_commons.constants.ALLOW_PRIVATE_DATA_LOGS)}' "
f"for action: {action.id}: {err}"
)
- action.complete(error_status=octobot_flow.enums.ActionErrorStatus.INTERNAL_ERROR.value)
+ return _dsl_action_error_call_result(
+ action,
+ octobot_flow.enums.ActionErrorStatus.INTERNAL_ERROR.value,
+ )
return _action_execution_error_handler_wrapper
diff --git a/packages/flow/octobot_flow/logic/dsl/dsl_actions_util.py b/packages/flow/octobot_flow/logic/dsl/dsl_actions_util.py
new file mode 100644
index 0000000000..bc1f3b6df1
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/dsl/dsl_actions_util.py
@@ -0,0 +1,46 @@
+# Drakkar-Software OctoBot
+# Copyright (c) Drakkar-Software, All rights reserved.
+
+import octobot_commons.dsl_interpreter as dsl_interpreter_import
+import octobot_commons.dsl_interpreter.operator as dsl_interpreter_operator
+import octobot_commons.errors as commons_errors
+import octobot_commons.logging as common_logging
+import octobot_commons.profiles.profile_data as profile_data_import
+
+import octobot_flow.entities
+import octobot_flow.errors
+import octobot_flow.logic.dsl.dsl_executor as dsl_executor_module
+
+
def are_all_actions_process_bound_only(
    profile_data: profile_data_import.ProfileData,
    actions: list[octobot_flow.entities.AbstractActionDetails],
) -> bool:
    """
    Return True when every action is a DSL script whose top-level operator is process-bound
    (e.g. run_octobot_process).

    Non-DSL actions and empty action lists are not eligible to skip exchange/copy-trading
    dependency fetches, so they yield False.
    """
    if not actions:
        return False
    dsl_executor_instance = dsl_executor_module.DSLExecutor(
        profile_data, None, None
    )
    for action in actions:
        if isinstance(action, octobot_flow.entities.ConfiguredActionDetails):
            return False
        if not isinstance(action, octobot_flow.entities.DSLScriptActionDetails):
            return False
        try:
            dsl_executor_instance._interpreter.prepare(action.resolved_dsl_script or action.dsl_script)
        except commons_errors.DSLInterpreterError as err:
            # Fix: the original passed three %-style args ({action.id}, {action.dsl_script}, err)
            # for a message with only two "%s" placeholders; use an eager f-string as elsewhere
            # in this codebase.
            common_logging.get_logger(__name__).info(
                f"Process-bound check: DSL script skipped for action {action.id}: "
                f"{action.dsl_script}: {err}"
            )
            # NOTE(review): an unparseable script is skipped instead of returning False,
            # so it does not disqualify process-bound-only status — confirm this is intended.
            continue
        top_operator = dsl_executor_instance.get_top_operator()
        if not isinstance(top_operator, dsl_interpreter_operator.Operator):
            return False
        if not dsl_interpreter_import.is_process_bound(top_operator):
            return False
    return True
diff --git a/packages/flow/octobot_flow/logic/dsl/dsl_executor.py b/packages/flow/octobot_flow/logic/dsl/dsl_executor.py
index 8f2638d7ca..35f53a1d0d 100644
--- a/packages/flow/octobot_flow/logic/dsl/dsl_executor.py
+++ b/packages/flow/octobot_flow/logic/dsl/dsl_executor.py
@@ -11,6 +11,7 @@
import octobot_trading.modes as trading_modes
import tentacles.Meta.DSL_operators as dsl_operators
+import tentacles.Meta.DSL_operators.octobot_process_operators.octobot_process_ops as octobot_process_ops
import octobot_flow.entities
import octobot_flow.errors
@@ -34,14 +35,15 @@ def __init__(
self._exchange_manager = exchange_manager
self._dependencies = dependencies
self._dependencies_config: dict = profile_data.to_profile("").config
+ self._interpreter_signals: octobot_commons.dsl_interpreter.OperatorSignals = None # type: ignore (reset when interpreter is created)
self._interpreter: octobot_commons.dsl_interpreter.Interpreter = self._create_interpreter(None)
if dsl_script:
self._interpreter.prepare(dsl_script)
- def _create_interpreter(
- self, previous_execution_result: typing.Optional[dict]
- ):
- return octobot_commons.dsl_interpreter.Interpreter(
+ def get_flow_operator_classes(
+ self,
+ ) -> list[typing.Type[octobot_commons.dsl_interpreter.Operator]]:
+ return (
octobot_commons.dsl_interpreter.get_all_operators()
+ dsl_operators.create_ohlcv_operators(self._exchange_manager, None, None)
+ dsl_operators.create_portfolio_operators(self._exchange_manager)
@@ -60,6 +62,17 @@ def _create_interpreter(
copier_exchange_manager=self._exchange_manager,
copier_trading_mode=None,
)
+ + octobot_process_ops.create_octobot_process_operators(
+ self._interpreter_signals
+ )
+ ) # type: ignore (list[type[Operator]])
+
+ def _create_interpreter(
+ self, previous_execution_result: typing.Optional[dict]
+ ) -> octobot_commons.dsl_interpreter.Interpreter:
+ self._interpreter_signals = octobot_commons.dsl_interpreter.OperatorSignals()
+ return octobot_commons.dsl_interpreter.Interpreter(
+ self.get_flow_operator_classes()
)
def get_dependencies(self) -> list[
@@ -74,15 +87,35 @@ def get_top_operator(self) -> typing.Union[
return self._interpreter.get_top_operator()
@dsl_action_execution
- async def execute_action(self, action: octobot_flow.entities.DSLScriptActionDetails) -> typing.Any:
+ async def execute_action(
+ self,
+ action: octobot_flow.entities.DSLScriptActionDetails,
+ *,
+ operator_signals: typing.Optional[
+ list[tuple[
+ typing.Type[octobot_commons.dsl_interpreter.SignalableOperatorMixin],
+ str
+ ]]
+ ] = None,
+ ) -> octobot_commons.dsl_interpreter.DSLCallResult:
self._interpreter = self._create_interpreter(
action.previous_execution_result
)
expression = action.get_resolved_dsl_script()
try:
+ if operator_signals:
+ signals_update = {
+ operator_class.get_name(): signal # type: ignore
+ for operator_class, signal in operator_signals
+ }
+ self._logger().info(f"Executing action with operator signals: {signals_update}")
+ else:
+ signals_update = {}
+ self._interpreter_signals.sync(signals_update)
+ interpretation = await self._interpreter.interprete(expression)
return octobot_commons.dsl_interpreter.DSLCallResult(
statement=expression,
- result=await self._interpreter.interprete(expression),
+ result=interpretation,
)
except octobot_commons.errors.MaxAttemptsExceededError as err:
self._logger().error(f"Max attempts exceeded: {err}")
diff --git a/packages/flow/tests/functionnal_tests/octobot_process_actions/__init__.py b/packages/flow/tests/functionnal_tests/octobot_process_actions/__init__.py
new file mode 100644
index 0000000000..00af8348f4
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/octobot_process_actions/__init__.py
@@ -0,0 +1 @@
+# Subpackage for octobot_process functional tests.
diff --git a/packages/flow/tests/functionnal_tests/octobot_process_actions/octobot_process_functional_shared.py b/packages/flow/tests/functionnal_tests/octobot_process_actions/octobot_process_functional_shared.py
new file mode 100644
index 0000000000..503027a986
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/octobot_process_actions/octobot_process_functional_shared.py
@@ -0,0 +1,262 @@
+# Drakkar-Software OctoBot
+# Shared helpers/constants for octobot process functional tests (run_octobot_process, GridTradingMode).
+
+import copy
+import decimal
+import typing
+
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_trading.constants as trading_constants
+import octobot_trading.enums as trading_enums
+import pytest
+
+import octobot_flow
+import octobot_flow.entities
+import octobot_flow.enums
+import tentacles.Trading.Mode.grid_trading_mode.grid_trading as grid_trading
+import tests.functionnal_tests as functionnal_tests
+
+pytestmark = pytest.mark.asyncio
+
+# --- Timeouts and grid geometry (must match pair_settings spread / increment below) ---
+GLOBAL_START_TIMEOUT_SEC = 30.0
+SLEEP_BETWEEN_JOB_POLLS_SEC = 2.0
+# Grid orders may land after init_state_ok; bounded wait for ≥4 opens in merged automation EAE (job.dump).
+GRID_ORDERS_TIMEOUT_SEC = 15.0
+GRID_ORDERS_POLL_SEC = 1
+
+GRID_INCREMENT = 200
+GRID_SPREAD = 600
+D_INCREMENT = decimal.Decimal(str(GRID_INCREMENT))
+D_SPREAD = decimal.Decimal(str(GRID_SPREAD))
+# After SIGTERM, services may take time to stop; assert on PID instead.
+CHILD_STOP_WAIT_SEC = 15.0
+
+# Child dump interval for this test (set via monkeypatch before Popen). Do not use
+# octobot.constants.PROCESS_BOT_STATE_DUMP_INTERVAL_SECONDS in the parent for assertions;
+# it is fixed at import time and stays 30 unless the interpreter reloads constants.
+EXPECTED_PROCESS_BOT_DUMP_INTERVAL_SEC = 5.0
+
+# Same as `waiting_time=` in run_octobot_process(...) DSL for this file's tests.
+WAITING_TIME_RUN_OCTOBOT_PROCESS_SEC = 2.0
+RECALL_SCHEDULE_TOLERANCE_SEC = 1.5
+
+EXCHANGE_BINANCEUS = "binanceus"
+
+# --- DSL / DAG action ids (fixtures, dependencies, _get_action_by_id) ---
+ACTION_ID_INIT = "action_init"
+ACTION_ID_RUN_OCTOBOT = "action_run_octobot"
+ACTION_ID_STOP_AUTOMATION = "action_stop_automation"
+
+# --- Child profile for run_octobot_process: simulator (trader.enabled False) + GridTradingMode BTC/USDT 2Ă—2 ---
+GRID_BINANCEUS_PROFILE_DATA = {
+ "profile_details": {"name": "func_test_grid_octoprocess", "id": "func_test_grid_octoprocess"},
+ "crypto_currencies": [
+ {"trading_pairs": ["BTC/USDT"], "name": "BTC", "enabled": True},
+ ],
+ "exchanges": [
+ {"internal_name": EXCHANGE_BINANCEUS, "exchange_type": "spot"},
+ ],
+ "trader": {"enabled": False, "load_trade_history": True},
+ "trader_simulator": {
+ "enabled": True,
+ "starting_portfolio": {"USDT": 1000.0, "BTC": 0.01},
+ "maker_fees": 0.0,
+ "taker_fees": 0.0,
+ },
+ "trading": {"reference_market": "USDT", "risk": 1.0, "paused": False},
+ "tentacles": [
+ {
+ "name": "GridTradingMode",
+ "config": {
+ "pair_settings": [
+ grid_trading.GridTradingMode.get_default_pair_config(
+ "BTC/USDT",
+ float(GRID_SPREAD),
+ float(GRID_INCREMENT),
+ 2,
+ 2,
+ False,
+ False,
+ False,
+ )
+ ]
+ },
+ },
+ ],
+ "options": {},
+ "distribution": "default",
+}
+
+
+# --- Helpers: order ladder checks and ReCallingOperatorResult payload access ---
+
def _open_orders_origins(open_orders: list[dict]) -> list[dict]:
    """Extract the stored origin payload from each wrapped open order."""
    origin_key = trading_constants.STORAGE_ORIGIN_VALUE
    return [wrapped_order[origin_key] for wrapped_order in open_orders]
+
+
def _assert_two_by_two_grid_ladder_orders(orders_wrapped: list[dict]) -> None:
    """
    Assert *orders_wrapped* holds exactly a 2x2 BTC/USDT grid: two buys and two
    sells, each side ladder-spaced by the grid increment, with the configured
    spread between the highest buy and the lowest sell.
    """
    side_col = trading_enums.ExchangeConstantsOrderColumns.SIDE.value
    price_col = trading_enums.ExchangeConstantsOrderColumns.PRICE.value
    symbol_col = trading_enums.ExchangeConstantsOrderColumns.SYMBOL.value
    origin_values = _open_orders_origins(orders_wrapped)

    def _sorted_side(side_value: str) -> list[dict]:
        # Orders of one side, sorted ascending by price.
        return sorted(
            (order for order in origin_values if order[side_col] == side_value),
            key=lambda order: order[price_col],
        )

    buy_orders = _sorted_side(trading_enums.TradeOrderSide.BUY.value)
    sell_orders = _sorted_side(trading_enums.TradeOrderSide.SELL.value)
    assert len(buy_orders) == len(sell_orders) == 2
    for order_dict in buy_orders + sell_orders:
        assert order_dict.get(symbol_col) == "BTC/USDT"
    lowest_buy_price = functionnal_tests.d_order_price(buy_orders[0][price_col])
    assert functionnal_tests.d_order_price(buy_orders[1][price_col]) == lowest_buy_price + D_INCREMENT
    assert functionnal_tests.d_order_price(sell_orders[0][price_col]) == lowest_buy_price + D_INCREMENT + D_SPREAD
    assert functionnal_tests.d_order_price(sell_orders[1][price_col]) == lowest_buy_price + D_INCREMENT + D_SPREAD + D_INCREMENT
+
+
def _grid_binanceus_profile_data(buy_orders: int, sell_orders: int) -> dict:
    """
    Return a deep copy of the grid simulator profile with GridTradingMode
    configured for *buy_orders* / *sell_orders* per side.
    """
    profile = copy.deepcopy(GRID_BINANCEUS_PROFILE_DATA)
    pair_config = grid_trading.GridTradingMode.get_default_pair_config(
        "BTC/USDT",
        float(GRID_SPREAD),
        float(GRID_INCREMENT),
        buy_orders,
        sell_orders,
        False,
        False,
        False,
    )
    profile["tentacles"] = [
        {"name": "GridTradingMode", "config": {"pair_settings": [pair_config]}},
    ]
    return profile
+
+
def _recall_inner_state(run_result: typing.Optional[dict]) -> typing.Optional[dict]:
    """Return the nested ``last_execution_result`` dict of a re-calling result, if present."""
    if isinstance(run_result, dict):
        recall_payload = run_result.get(dsl_interpreter.ReCallingOperatorResult.__name__)
        if isinstance(recall_payload, dict):
            inner_state = recall_payload.get("last_execution_result")
            if isinstance(inner_state, dict):
                return inner_state
    return None
+
+
def _recall_inner_from_dsl_action(
    action: octobot_flow.entities.AbstractActionDetails,
) -> typing.Optional[dict]:
    """
    Read the re-calling inner state from either `result` or `previous_execution_result`.

    After a re-calling operator finishes, `ActionsExecutor._reset_dag_to` calls
    `action.reset()`, which moves the result dict to `previous_execution_result`
    and clears `result`, so both attributes must be inspected.
    """
    for run_result in (action.result, action.previous_execution_result):
        if run_result is None:
            continue
        inner_state = _recall_inner_state(run_result)
        if inner_state is not None:
            return inner_state
    return None
+
+
def _assert_run_octobot_process_recall_scheduled_to_in_dump(
    job_dump: dict[str, typing.Any],
    *,
    expected_waiting_time_sec: float = WAITING_TIME_RUN_OCTOBOT_PROCESS_SEC,
    schedule_tolerance_sec: float = RECALL_SCHEDULE_TOLERANCE_SEC,
    assert_delay_matches_waiting_time: bool = True,
) -> None:
    """
    Assert the merged automation state exposes the next `run_octobot_process`
    re-call time at execution.current_execution.scheduled_to, and (optionally)
    that the delay since the previous trigger matches the operator's
    waiting_time within *schedule_tolerance_sec*.
    """
    execution = job_dump["automation"]["execution"]
    current_execution = execution["current_execution"]
    previous_execution = execution["previous_execution"]
    scheduled_to = current_execution["scheduled_to"]
    assert isinstance(scheduled_to, (int, float))
    assert scheduled_to > 0, f"next iteration was not scheduled (scheduled_to={scheduled_to})"
    triggered_at = previous_execution["triggered_at"]
    assert isinstance(triggered_at, (int, float))
    assert triggered_at > 0
    if not assert_delay_matches_waiting_time:
        return
    delay_sec = float(scheduled_to) - float(triggered_at)
    lower_bound = expected_waiting_time_sec - schedule_tolerance_sec
    upper_bound = expected_waiting_time_sec + schedule_tolerance_sec
    assert lower_bound < delay_sec < upper_bound, (
        f"recall scheduled_to should be ~triggered_at+{expected_waiting_time_sec}s: "
        f"delay={delay_sec}s scheduled_to={scheduled_to} triggered_at={triggered_at}"
    )
+
+
def _get_action_by_id(
    job: octobot_flow.AutomationJob, action_id: str
) -> typing.Optional[octobot_flow.entities.AbstractActionDetails]:
    """Return the DAG action of *job* whose id equals *action_id*, or None when absent."""
    matching_actions = (
        dag_action
        for dag_action in job.automation_state.automation.actions_dag.actions
        if dag_action.id == action_id
    )
    return next(matching_actions, None)
+
+
+def _make_tracked_spawn_managed_with_forward_terminal_output(
+ real_spawn_managed: typing.Callable[..., typing.Any],
+ popen_calls: dict[str, int],
+) -> typing.Callable[..., typing.Any]:
+ def _tracked(*args: typing.Any, **kwargs: typing.Any) -> typing.Any:
+ popen_calls["count"] += 1
+ merged_kwargs = dict(kwargs)
+ merged_kwargs["forward_terminal_output"] = True
+ return real_spawn_managed(*args, **merged_kwargs)
+
+ return _tracked
+
+
@pytest.fixture
def init_action():
    """Apply_configuration action seeding automation state with the expected exchange and portfolio."""
    seeded_portfolio_content = {
        "USDT": {"available": 1000.0, "total": 1000.0},
        "BTC": {"available": 0.01, "total": 0.01},
    }
    automation_config = {
        "metadata": {"automation_id": "automation_1"},
        "exchange_account_elements": {
            "portfolio": {"content": seeded_portfolio_content},
        },
    }
    exchange_account_details = {
        "exchange_details": {"internal_name": EXCHANGE_BINANCEUS},
        "auth_details": {},
        "portfolio": {},
    }
    return {
        "id": ACTION_ID_INIT,
        "action": octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value,
        "config": {
            "automation": automation_config,
            "exchange_account_details": exchange_account_details,
        },
    }
diff --git a/packages/flow/tests/functionnal_tests/octobot_process_actions/test_octobot_process_edit_config.py b/packages/flow/tests/functionnal_tests/octobot_process_actions/test_octobot_process_edit_config.py
new file mode 100644
index 0000000000..1fd5705e62
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/octobot_process_actions/test_octobot_process_edit_config.py
@@ -0,0 +1,337 @@
+# Drakkar-Software OctoBot
+# Functional test: update_automation_configuration / grid refresh (GridTradingMode, binanceus simulator)
+
+import asyncio
+import os
+import shutil
+import time
+import typing
+import uuid
+
+import mock
+import octobot.constants as octobot_app_constants
+import octobot_commons.constants as common_constants
+import octobot_commons.process_util as process_util
+import octobot_node.constants as octobot_node_constants
+import octobot_trading.enums as trading_enums
+import pytest
+
+import octobot_flow
+import tests.functionnal_tests as functionnal_tests
+import tests.functionnal_tests.octobot_process_actions.octobot_process_functional_shared as octobot_process_functional_shared
+
+import octobot_flow.entities.accounts.exchange_account_elements as exchange_account_elements_import
+
+pytestmark = octobot_process_functional_shared.pytestmark
+pytest_plugins = (octobot_process_functional_shared.__name__,)
+
+ACTION_ID_UPDATE_AUTOMATION_CONFIGURATION = "action_update_automation_configuration"
+
+
def _assert_three_by_three_grid_ladder_orders(orders_wrapped: list[dict]) -> None:
    """
    Assert a 3x3 BTC/USDT grid: three buys and three sells, each side ladder-spaced
    by the grid increment, with every buy priced below every sell. After UPDATE_CONFIG
    the grid may re-anchor to a new reference price, so only relative spacing and the
    buy/sell gap are checked.
    """
    side_col = trading_enums.ExchangeConstantsOrderColumns.SIDE.value
    price_col = trading_enums.ExchangeConstantsOrderColumns.PRICE.value
    symbol_col = trading_enums.ExchangeConstantsOrderColumns.SYMBOL.value
    origin_values = octobot_process_functional_shared._open_orders_origins(orders_wrapped)

    def _sorted_side(side_value: str) -> list[dict]:
        # Orders of one side, sorted ascending by price.
        return sorted(
            (order for order in origin_values if order[side_col] == side_value),
            key=lambda order: order[price_col],
        )

    buy_orders = _sorted_side(trading_enums.TradeOrderSide.BUY.value)
    sell_orders = _sorted_side(trading_enums.TradeOrderSide.SELL.value)
    assert len(buy_orders) == len(sell_orders) == 3
    for order_dict in buy_orders + sell_orders:
        assert order_dict.get(symbol_col) == "BTC/USDT"
    increment = octobot_process_functional_shared.D_INCREMENT
    for step_index in range(1, 3):
        assert functionnal_tests.d_order_price(
            buy_orders[step_index][price_col]
        ) == functionnal_tests.d_order_price(buy_orders[step_index - 1][price_col]) + increment
        assert functionnal_tests.d_order_price(
            sell_orders[step_index][price_col]
        ) == functionnal_tests.d_order_price(sell_orders[step_index - 1][price_col]) + increment
    assert functionnal_tests.d_order_price(buy_orders[-1][price_col]) < functionnal_tests.d_order_price(
        sell_orders[0][price_col]
    )
+
+
+async def test_run_octobot_process_grid_refresh_four_to_six_orders(
+ init_action: dict,
+ monkeypatch: pytest.MonkeyPatch,
+):
+ """2×2 grid (4 orders) → priority `update_automation_configuration(new_run_dsl)` → 3×3 grid (6 orders) + stop."""
+ # Preamble: unique user folder, DSL actions, spawn counter, predicted cleanup paths.
+ if not os.path.isfile(os.path.join(os.getcwd(), "start.py")):
+ pytest.skip("start.py missing: run pytest with cwd set to the OctoBot project root")
+
+ monkeypatch.setenv(octobot_app_constants.ENV_PROCESS_BOT_STATE_DUMP_INTERVAL_SECONDS, "5")
+
+ user_folder = f"functionnal_tests/octocfg_{uuid.uuid4().hex[:12]}"
+ profile_2x2 = octobot_process_functional_shared._grid_binanceus_profile_data(2, 2)
+ run_dsl = (
+ "run_octobot_process("
+ f"{user_folder!r}, {repr(profile_2x2)}, "
+ "waiting_time=2.0, ping_timeout=30.0)"
+ )
+ run_action = {
+ "id": octobot_process_functional_shared.ACTION_ID_RUN_OCTOBOT,
+ "dsl_script": run_dsl,
+ "dependencies": [{"action_id": octobot_process_functional_shared.ACTION_ID_INIT}],
+ }
+ stop_automation_action = {
+ "id": octobot_process_functional_shared.ACTION_ID_STOP_AUTOMATION,
+ "dsl_script": "stop_automation()",
+ "dependencies": [{"action_id": octobot_process_functional_shared.ACTION_ID_INIT}],
+ }
+
+ popen_calls = {"count": 0}
+ tracked_spawn_managed = (
+ octobot_process_functional_shared._make_tracked_spawn_managed_with_forward_terminal_output(
+ process_util.spawn_managed_subprocess,
+ popen_calls,
+ )
+ )
+
+ user_root_guess = os.path.normpath(
+ os.path.join(
+ os.getcwd(),
+ *common_constants.USER_AUTOMATIONS_FOLDER.split("/"),
+ *user_folder.replace("\\", "/").split("/"),
+ )
+ )
+ log_folder_guess = os.path.normpath(
+ os.path.join(
+ os.getcwd(),
+ *octobot_node_constants.AUTOMATION_LOGS_FOLDER.split("/"),
+ *[segment for segment in user_folder.replace("\\", "/").split("/") if segment],
+ )
+ )
+
+ try:
+ # Real child via patched spawn_managed_subprocess (spawn count + forward_terminal_output).
+ with (
+ functionnal_tests.mocked_community_authentication(),
+ functionnal_tests.mocked_community_repository(),
+ mock.patch.object(
+ process_util,
+ "spawn_managed_subprocess",
+ side_effect=tracked_spawn_managed,
+ ),
+ ):
+ # 1) Bootstrap automation state and register run_octobot_process with a 2Ă—2 grid profile.
+ state = functionnal_tests.automation_state_dict(
+ functionnal_tests.resolved_actions([init_action])
+ )
+ async with octobot_flow.AutomationJob(state, [], [], {}) as init_job:
+ await init_job.run()
+ state = init_job.dump()
+
+ async with octobot_flow.AutomationJob(state, [], [], {}) as job:
+ job.automation_state.upsert_automation_actions(
+ functionnal_tests.resolved_actions([run_action])
+ )
+ state = job.dump()
+
+ deadline = time.monotonic() + octobot_process_functional_shared.GLOBAL_START_TIMEOUT_SEC
+ inner: typing.Optional[dict] = None
+ # 2) First automation pass, then poll until the child reports init_state_ok (ready to query).
+ async with octobot_flow.AutomationJob(state, [], [], {}) as first_poll:
+ await first_poll.run()
+ octobot_process_functional_shared._assert_run_octobot_process_recall_scheduled_to_in_dump(
+ first_poll.dump()
+ )
+ first_run = octobot_process_functional_shared._get_action_by_id(
+ first_poll, octobot_process_functional_shared.ACTION_ID_RUN_OCTOBOT
+ )
+ assert first_run is not None
+ inner = octobot_process_functional_shared._recall_inner_from_dsl_action(first_run)
+ state = first_poll.dump()
+ if not (inner and inner.get("init_state_ok") is True):
+ while time.monotonic() < deadline:
+ await asyncio.sleep(octobot_process_functional_shared.SLEEP_BETWEEN_JOB_POLLS_SEC)
+ async with octobot_flow.AutomationJob(state, [], [], {}) as poll_job:
+ await poll_job.run()
+ octobot_process_functional_shared._assert_run_octobot_process_recall_scheduled_to_in_dump(
+ poll_job.dump()
+ )
+ run_details = octobot_process_functional_shared._get_action_by_id(
+ poll_job, octobot_process_functional_shared.ACTION_ID_RUN_OCTOBOT
+ )
+ assert run_details is not None
+ inner = octobot_process_functional_shared._recall_inner_from_dsl_action(run_details)
+ if inner and inner.get("init_state_ok") is True:
+ state = poll_job.dump()
+ break
+ state = poll_job.dump()
+ else:
+ pytest.fail(
+ f"OctoBot did not become ready (init_state_ok) within "
+ f"{octobot_process_functional_shared.GLOBAL_START_TIMEOUT_SEC}s"
+ )
+
+ assert inner is not None
+ assert inner.get("pid")
+ initial_spawn_count = popen_calls["count"]
+ assert initial_spawn_count >= 1
+
+ # 3) Wait until at least four open ladder orders exist, then assert a 2Ă—2 grid pattern.
+ orders_deadline = time.monotonic() + octobot_process_functional_shared.GRID_ORDERS_TIMEOUT_SEC
+ exchange_account_snapshot: typing.Optional[
+ exchange_account_elements_import.ExchangeAccountElements
+ ] = None
+ last_open_order_count = 0
+ while time.monotonic() < orders_deadline:
+ async with octobot_flow.AutomationJob(state, [], [], {}) as grid_poll_job:
+ await grid_poll_job.run()
+ job_dump_payload = grid_poll_job.dump()
+ octobot_process_functional_shared._assert_run_octobot_process_recall_scheduled_to_in_dump(
+ job_dump_payload
+ )
+ automation_dump = job_dump_payload.get("automation")
+ exchange_account_snapshot_dict = (
+ automation_dump.get("exchange_account_elements")
+ if isinstance(automation_dump, dict)
+ else None
+ )
+ state = job_dump_payload
+ if exchange_account_snapshot_dict is not None:
+ exchange_account_snapshot = (
+ exchange_account_elements_import.ExchangeAccountElements.from_dict(
+ exchange_account_snapshot_dict
+ )
+ )
+ last_open_order_count = len(
+ exchange_account_snapshot.orders.open_orders
+ )
+ if last_open_order_count >= 4:
+ break
+ await asyncio.sleep(octobot_process_functional_shared.GRID_ORDERS_POLL_SEC)
+ else:
+ pytest.fail(
+ f"Timed out waiting for at least four open orders (last count={last_open_order_count}) "
+ f"within {octobot_process_functional_shared.GRID_ORDERS_TIMEOUT_SEC}s"
+ )
+ assert exchange_account_snapshot is not None
+ octobot_process_functional_shared._assert_two_by_two_grid_ladder_orders(
+ exchange_account_snapshot.orders.open_orders,
+ )
+
+ profile_3x3 = octobot_process_functional_shared._grid_binanceus_profile_data(3, 3)
+ new_run_dsl = (
+ "run_octobot_process("
+ f"{user_folder!r}, {repr(profile_3x3)}, "
+ "waiting_time=2.0, ping_timeout=30.0)"
+ )
+ update_config_priority_action = {
+ "id": ACTION_ID_UPDATE_AUTOMATION_CONFIGURATION,
+ "dsl_script": f"update_automation_configuration({new_run_dsl!r})",
+ "dependencies": [{"action_id": octobot_process_functional_shared.ACTION_ID_INIT}],
+ }
+
+ spawn_before_refresh = popen_calls["count"]
+ priority_actions = functionnal_tests.resolved_actions([update_config_priority_action])
+ async with octobot_flow.AutomationJob(state, priority_actions, [], {}) as refresh_phase:
+ await refresh_phase.run()
+ octobot_process_functional_shared._assert_run_octobot_process_recall_scheduled_to_in_dump(
+ refresh_phase.dump()
+ )
+ assert popen_calls["count"] == spawn_before_refresh + 1
+ state = refresh_phase.dump()
+
+ # 6) Poll until six open ladder orders exist, then assert a 3Ă—3 grid pattern.
+ six_orders_deadline = (
+ time.monotonic() + octobot_process_functional_shared.GRID_ORDERS_TIMEOUT_SEC * 3
+ )
+ exchange_account_after: typing.Optional[
+ exchange_account_elements_import.ExchangeAccountElements
+ ] = None
+ last_six_count = 0
+ inner_after: typing.Optional[dict] = None
+ while time.monotonic() < six_orders_deadline:
+ async with octobot_flow.AutomationJob(state, [], [], {}) as six_poll:
+ await six_poll.run()
+ dump_payload = six_poll.dump()
+ octobot_process_functional_shared._assert_run_octobot_process_recall_scheduled_to_in_dump(
+ dump_payload
+ )
+ automation_dump = dump_payload.get("automation")
+ eae_dict = (
+ automation_dump.get("exchange_account_elements")
+ if isinstance(automation_dump, dict)
+ else None
+ )
+ state = dump_payload
+ run_action_details = octobot_process_functional_shared._get_action_by_id(
+ six_poll, octobot_process_functional_shared.ACTION_ID_RUN_OCTOBOT
+ )
+ if run_action_details is not None:
+ inner_after = octobot_process_functional_shared._recall_inner_from_dsl_action(
+ run_action_details
+ )
+ if eae_dict is not None:
+ exchange_account_after = (
+ exchange_account_elements_import.ExchangeAccountElements.from_dict(eae_dict)
+ )
+ last_six_count = len(exchange_account_after.orders.open_orders)
+ if last_six_count >= 6:
+ break
+ await asyncio.sleep(octobot_process_functional_shared.GRID_ORDERS_POLL_SEC)
+ else:
+ pytest.fail(
+ f"Timed out waiting for six open orders after config refresh "
+ f"(last count={last_six_count})"
+ )
+ assert exchange_account_after is not None
+ assert inner_after is not None
+    octobot_process_functional_shared._assert_three_by_three_grid_ladder_orders(
+ exchange_account_after.orders.open_orders,
+ )
+
+ # After refresh, expect a new managed child PID (extra spawn in step 5).
+ refreshed_pid = int(inner_after["pid"])
+ assert process_util.pid_is_running(refreshed_pid)
+
+ # 7) stop_automation (execution stop on run_octobot), then wait until the child PID is gone.
+ priority_stop = functionnal_tests.resolved_actions([stop_automation_action])
+ async with octobot_flow.AutomationJob(state, priority_stop, [], {}) as stop_phase:
+ await stop_phase.run()
+ octobot_process_functional_shared._assert_run_octobot_process_recall_scheduled_to_in_dump(
+ stop_phase.dump(),
+ assert_delay_matches_waiting_time=False,
+ )
+ assert stop_phase.automation_state.automation.post_actions.stop_automation is True
+ run_stopped = octobot_process_functional_shared._get_action_by_id(
+ stop_phase, octobot_process_functional_shared.ACTION_ID_RUN_OCTOBOT
+ )
+ assert run_stopped is not None
+ assert isinstance(run_stopped.result, dict)
+ assert run_stopped.result.get("status") in ("stopped", "already_stopped")
+
+ process_deadline = time.monotonic() + octobot_process_functional_shared.CHILD_STOP_WAIT_SEC
+ while time.monotonic() < process_deadline:
+ if not process_util.pid_is_running(refreshed_pid):
+ break
+ await asyncio.sleep(0.5)
+ else:
+ pytest.fail(
+ f"expected child pid {refreshed_pid} to be stopped after stop_automation "
+ f"within {octobot_process_functional_shared.CHILD_STOP_WAIT_SEC}s"
+ )
+
+ finally:
+ # Tear down dirs created under the project root for this test run.
+ if os.path.isdir(user_root_guess):
+ shutil.rmtree(user_root_guess, ignore_errors=True)
+ if os.path.isdir(log_folder_guess):
+ shutil.rmtree(log_folder_guess, ignore_errors=True)
diff --git a/packages/flow/tests/functionnal_tests/octobot_process_actions/test_octobot_process_start.py b/packages/flow/tests/functionnal_tests/octobot_process_actions/test_octobot_process_start.py
new file mode 100644
index 0000000000..d254894cdc
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/octobot_process_actions/test_octobot_process_start.py
@@ -0,0 +1,300 @@
+# Drakkar-Software OctoBot
+# Functional test: run_octobot_process lifecycle + stop_automation (GridTradingMode, binanceus simulator)
+
+import asyncio
+import json
+import os
+import shutil
+import time
+import typing
+import uuid
+
+import mock
+import octobot.constants as octobot_app_constants
+import octobot_commons.constants as common_constants
+import octobot_commons.process_util as process_util
+import octobot_node.constants as octobot_node_constants
+import pytest
+
+import octobot_flow
+import tests.functionnal_tests as functionnal_tests
+import tests.functionnal_tests.octobot_process_actions.octobot_process_functional_shared as octobot_process_functional_shared
+
+import octobot_flow.entities.accounts.exchange_account_elements as exchange_account_elements_import
+import octobot_flow.entities.accounts.process_bot_state as process_bot_state_import # Metadata only (hybrid: EAE from job.dump)
+
+pytestmark = octobot_process_functional_shared.pytestmark
+pytest_plugins = (octobot_process_functional_shared.__name__,)
+
+
+# --- Main lifecycle: spawn child OctoBot; EAE from job.dump() after merge; metadata from file; recall, stop ---
+
+async def test_run_octobot_process_lifecycle_grid_trading(
+ init_action: dict,
+ monkeypatch: pytest.MonkeyPatch,
+):
+ if not os.path.isfile(os.path.join(os.getcwd(), "start.py")):
+ pytest.skip("start.py missing: run pytest with cwd set to the OctoBot project root")
+
+ monkeypatch.setenv(octobot_app_constants.ENV_PROCESS_BOT_STATE_DUMP_INTERVAL_SECONDS, "5")
+
+ # --- User folder, DSL scripts, and action DAG wiring ---
+ user_folder = f"functionnal_tests/octlife_{uuid.uuid4().hex[:12]}"
+ run_dsl = (
+ "run_octobot_process("
+ f"{user_folder!r}, {repr(octobot_process_functional_shared.GRID_BINANCEUS_PROFILE_DATA)}, "
+ "waiting_time=2.0, ping_timeout=30.0)"
+ )
+ run_action = {
+ "id": octobot_process_functional_shared.ACTION_ID_RUN_OCTOBOT,
+ "dsl_script": run_dsl,
+ "dependencies": [{"action_id": octobot_process_functional_shared.ACTION_ID_INIT}],
+ }
+ # Depends only on init so it can run in the same ActionsExecutor pass after run_octobot re-calls;
+ # stop_automation() triggers _await_recallable_operator_signal(STOP) → run_octobot_process(execution_stop).
+ stop_automation_action = {
+ "id": octobot_process_functional_shared.ACTION_ID_STOP_AUTOMATION,
+ "dsl_script": "stop_automation()",
+ "dependencies": [{"action_id": octobot_process_functional_shared.ACTION_ID_INIT}],
+ }
+
+ popen_calls = {"count": 0}
+ tracked_spawn_managed = (
+ octobot_process_functional_shared._make_tracked_spawn_managed_with_forward_terminal_output(
+ process_util.spawn_managed_subprocess,
+ popen_calls,
+ )
+ )
+
+ # Paths used only for teardown (child may create these under cwd).
+ user_root_guess = os.path.normpath(
+ os.path.join(
+ os.getcwd(),
+ *common_constants.USER_AUTOMATIONS_FOLDER.split("/"),
+ *user_folder.replace("\\", "/").split("/"),
+ )
+ )
+ log_folder_guess = os.path.normpath(
+ os.path.join(
+ os.getcwd(),
+ *octobot_node_constants.AUTOMATION_LOGS_FOLDER.split("/"),
+ *[segment for segment in user_folder.replace("\\", "/").split("/") if segment],
+ )
+ )
+
+ try:
+ # Mock community + wrap process_util.spawn_managed_subprocess: count spawns and force
+ # forward_terminal_output so child stdout/stderr reach the pytest terminal.
+ with (
+ functionnal_tests.mocked_community_authentication(),
+ functionnal_tests.mocked_community_repository(),
+ mock.patch.object(
+ process_util,
+ "spawn_managed_subprocess",
+ side_effect=tracked_spawn_managed,
+ ),
+ ):
+ # 1) Apply init configuration (automation + exchange account seed).
+ state = functionnal_tests.automation_state_dict(
+ functionnal_tests.resolved_actions([init_action])
+ )
+ async with octobot_flow.AutomationJob(state, [], [], {}) as init_job:
+ await init_job.run()
+ state = init_job.dump()
+
+ # 2) Register run_octobot_process; poll job until the child reports init_state_ok (live process_bot_state).
+ async with octobot_flow.AutomationJob(state, [], [], {}) as job:
+ job.automation_state.upsert_automation_actions(
+ functionnal_tests.resolved_actions([run_action])
+ )
+ state = job.dump()
+
+ deadline = time.monotonic() + octobot_process_functional_shared.GLOBAL_START_TIMEOUT_SEC
+ inner: typing.Optional[dict] = None
+ # Run DSL job once, then optionally poll until recall payload shows init_state_ok.
+ async with octobot_flow.AutomationJob(state, [], [], {}) as first_poll:
+ await first_poll.run()
+ octobot_process_functional_shared._assert_run_octobot_process_recall_scheduled_to_in_dump(
+ first_poll.dump()
+ )
+ first_run = octobot_process_functional_shared._get_action_by_id(
+ first_poll, octobot_process_functional_shared.ACTION_ID_RUN_OCTOBOT
+ )
+ assert first_run is not None
+ inner = octobot_process_functional_shared._recall_inner_from_dsl_action(first_run)
+ state = first_poll.dump()
+ if not (inner and inner.get("init_state_ok") is True):
+ while time.monotonic() < deadline:
+ await asyncio.sleep(octobot_process_functional_shared.SLEEP_BETWEEN_JOB_POLLS_SEC)
+ async with octobot_flow.AutomationJob(state, [], [], {}) as poll_job:
+ await poll_job.run()
+ octobot_process_functional_shared._assert_run_octobot_process_recall_scheduled_to_in_dump(
+ poll_job.dump()
+ )
+ run_details = octobot_process_functional_shared._get_action_by_id(
+ poll_job, octobot_process_functional_shared.ACTION_ID_RUN_OCTOBOT
+ )
+ assert run_details is not None
+ inner = octobot_process_functional_shared._recall_inner_from_dsl_action(run_details)
+ if inner and inner.get("init_state_ok") is True:
+ state = poll_job.dump()
+ break
+ state = poll_job.dump()
+ else:
+ pytest.fail(
+ f"OctoBot did not become ready (init_state_ok) within "
+ f"{octobot_process_functional_shared.GLOBAL_START_TIMEOUT_SEC}s"
+ )
+
+ assert inner is not None
+ assert inner.get("pid"), "expected child pid in ensure state"
+ assert popen_calls["count"] >= 1
+
+ # --- process_bot_state path: must exist before poll (child wrote at least one dump) ---
+ state_path = os.path.normpath(
+ os.path.join(
+ inner["user_root"],
+ octobot_app_constants.PROCESS_BOT_STATE_FILE_NAME,
+ )
+ )
+ assert os.path.isfile(state_path)
+
+            # 3) Poll AutomationJob + dump() until merge yields ≥4 open orders (EAE from automation snapshot,
+ # not from parsing full process_bot_state on disk).
+ orders_deadline = time.monotonic() + octobot_process_functional_shared.GRID_ORDERS_TIMEOUT_SEC
+ exchange_account_snapshot: typing.Optional[
+ exchange_account_elements_import.ExchangeAccountElements
+ ] = None
+ last_open_order_count = 0
+ while time.monotonic() < orders_deadline:
+ async with octobot_flow.AutomationJob(state, [], [], {}) as grid_poll_job:
+ await grid_poll_job.run()
+ job_dump_payload = grid_poll_job.dump()
+ octobot_process_functional_shared._assert_run_octobot_process_recall_scheduled_to_in_dump(
+ job_dump_payload
+ )
+ automation_dump = job_dump_payload.get("automation")
+ exchange_account_snapshot_dict = (
+ automation_dump.get("exchange_account_elements")
+ if isinstance(automation_dump, dict)
+ else None
+ )
+ state = job_dump_payload
+ if exchange_account_snapshot_dict is not None:
+ exchange_account_snapshot = (
+ exchange_account_elements_import.ExchangeAccountElements.from_dict(
+ exchange_account_snapshot_dict
+ )
+ )
+ last_open_order_count = len(
+ exchange_account_snapshot.orders.open_orders
+ )
+ if last_open_order_count >= 4:
+ break
+ await asyncio.sleep(octobot_process_functional_shared.GRID_ORDERS_POLL_SEC)
+ else:
+ pytest.fail(
+ f"Timed out waiting for at least four open orders after merge in automation dump "
+ f"(last count={last_open_order_count}) within "
+ f"{octobot_process_functional_shared.GRID_ORDERS_TIMEOUT_SEC}s"
+ )
+
+ assert exchange_account_snapshot is not None
+
+            # 4) One minimal read of process_bot_state.json for Metadata only (timestamps + dump interval).
+ with open(state_path, encoding="utf-8") as process_state_file:
+ file_metadata_payload = json.load(process_state_file)
+ process_metadata = process_bot_state_import.Metadata.from_dict(
+ file_metadata_payload["metadata"]
+ )
+ # Hybrid intent: business assertions use job.dump() EAE; file is not the source of truth for orders.
+ assert "exchange_account_elements" in file_metadata_payload
+
+ # --- Assertions: metadata liveness (file), exchange name, portfolio, grid ladder (dump EAE) ---
+ assert isinstance(process_metadata, process_bot_state_import.Metadata)
+ assert isinstance(process_metadata.updated_at, (int, float))
+ assert isinstance(process_metadata.next_updated_at, (int, float))
+ assert process_metadata.updated_at <= time.time()
+ assert process_metadata.next_updated_at >= process_metadata.updated_at
+ assert abs(
+ (process_metadata.next_updated_at - process_metadata.updated_at)
+ - octobot_process_functional_shared.EXPECTED_PROCESS_BOT_DUMP_INTERVAL_SEC
+ ) < 1.0
+
+ dumped_name = (exchange_account_snapshot.name or "").lower()
+ assert dumped_name and "binance" in dumped_name
+
+ portfolio_content = exchange_account_snapshot.portfolio.content
+ assert "USDT" in portfolio_content and "BTC" in portfolio_content
+ total_key = common_constants.PORTFOLIO_TOTAL
+ avail_key = common_constants.PORTFOLIO_AVAILABLE
+ usdt_c = portfolio_content["USDT"]
+ btc_c = portfolio_content["BTC"]
+ total_usdt = float(usdt_c[total_key])
+ total_btc = float(btc_c[total_key])
+ assert 800.0 <= total_usdt <= 1050.0
+ assert 0.009 <= total_btc <= 0.011
+ assert float(usdt_c[avail_key]) <= total_usdt
+ assert float(btc_c[avail_key]) <= total_btc
+ assert float(usdt_c[avail_key]) < total_usdt or float(btc_c[avail_key]) < total_btc
+
+ octobot_process_functional_shared._assert_two_by_two_grid_ladder_orders(
+ exchange_account_snapshot.orders.open_orders,
+ )
+
+ child_pid = int(inner["pid"])
+ assert process_util.pid_is_running(child_pid)
+
+            # 5) Second automation run: re-call path only (no second Popen; same child pid).
+ before = popen_calls["count"]
+ async with octobot_flow.AutomationJob(state, [], [], {}) as idem_job:
+ await idem_job.run()
+ octobot_process_functional_shared._assert_run_octobot_process_recall_scheduled_to_in_dump(
+ idem_job.dump()
+ )
+ assert popen_calls["count"] == before
+ idem_run = octobot_process_functional_shared._get_action_by_id(
+ idem_job, octobot_process_functional_shared.ACTION_ID_RUN_OCTOBOT
+ )
+ assert idem_run is not None
+ idem_inner = octobot_process_functional_shared._recall_inner_from_dsl_action(idem_run)
+ assert idem_inner is not None
+ assert idem_inner.get("pid") == child_pid
+
+ state = idem_job.dump()
+
+            # 6) stop_automation + execution_stop on run_octobot (SIGTERM to child), then wait for exit.
+ priority_actions = functionnal_tests.resolved_actions([stop_automation_action])
+ async with octobot_flow.AutomationJob(state, priority_actions, [], {}) as stop_phase:
+ await stop_phase.run()
+ octobot_process_functional_shared._assert_run_octobot_process_recall_scheduled_to_in_dump(
+ stop_phase.dump(),
+ assert_delay_matches_waiting_time=False,
+ )
+ assert stop_phase.automation_state.automation.post_actions.stop_automation is True
+ run_stopped = octobot_process_functional_shared._get_action_by_id(
+ stop_phase, octobot_process_functional_shared.ACTION_ID_RUN_OCTOBOT
+ )
+ assert run_stopped is not None
+ assert isinstance(run_stopped.result, dict)
+ assert run_stopped.result.get("status") in ("stopped", "already_stopped")
+
+ # SIGTERM triggers graceful stop; the HTTP server can keep returning 200
+ # until late in shutdown, so wait for the child PID to be gone.
+ process_deadline = time.monotonic() + octobot_process_functional_shared.CHILD_STOP_WAIT_SEC
+ while time.monotonic() < process_deadline:
+ if not process_util.pid_is_running(child_pid):
+ break
+ await asyncio.sleep(0.5)
+ else:
+ pytest.fail(
+ f"expected child pid {child_pid} to be stopped after stop_automation/execution_stop "
+ f"within {octobot_process_functional_shared.CHILD_STOP_WAIT_SEC}s"
+ )
+
+ finally:
+ # Remove user data and automation logs created under the project root for this run.
+ if os.path.isdir(user_root_guess):
+ shutil.rmtree(user_root_guess, ignore_errors=True)
+ if os.path.isdir(log_folder_guess):
+ shutil.rmtree(log_folder_guess, ignore_errors=True)
diff --git a/packages/flow/tests/logic/actions/test_actions_executor_merge.py b/packages/flow/tests/logic/actions/test_actions_executor_merge.py
new file mode 100644
index 0000000000..18b2ff299f
--- /dev/null
+++ b/packages/flow/tests/logic/actions/test_actions_executor_merge.py
@@ -0,0 +1,86 @@
+import mock
+
+import octobot_commons.profiles as commons_profiles
+import octobot_trading.enums as octobot_trading_enums_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+import octobot_flow.entities as octobot_flow_entities
+import octobot_flow.enums as octobot_flow_enums_import
+import octobot_flow.logic.actions.actions_executor as actions_executor_import
+
+
+def _trade_stub(trade_id: str) -> dict:
+ trade_id_key = octobot_trading_enums_import.ExchangeConstantsOrderColumns.EXCHANGE_TRADE_ID.value
+ return {trade_id_key: trade_id}
+
+
+def _tx_stub(txid: str) -> dict:
+ txid_key = octobot_trading_enums_import.ExchangeConstantsTransactionColumns.TXID.value
+ return {txid_key: txid}
+
+
+class TestSyncAfterExecutionWithManagerAndSnapshots:
+ def test_applies_snapshots_without_conflict_when_exchange_manager_set(self):
+ automation = octobot_flow_entities.AutomationDetails(
+ metadata=octobot_flow_entities.AutomationMetadata(automation_id="aid"),
+ exchange_account_elements=octobot_flow_entities.ExchangeAccountElements(),
+ )
+ executor_action = actions_executor_import.ActionsExecutor(
+ None,
+ mock.Mock(name="exchange_manager"),
+ commons_profiles.ProfileData(),
+ automation,
+ [],
+ False,
+ )
+ snap = octobot_flow_entities.ExchangeAccountElements(
+ orders=exchange_data_import.OrdersDetails(open_orders=[{"id": "from-snap"}]),
+ )
+ executor_action._sync_after_execution([snap])
+ assert automation.exchange_account_elements.orders.open_orders == [{"id": "from-snap"}]
+
+
+class TestMergeSynchronizedSnapshotsUpsertsTradesAndUsesLastOrders:
+ def test_merges_trades_and_orders(self):
+ snap1 = octobot_flow_entities.ExchangeAccountElements(
+ orders=exchange_data_import.OrdersDetails(open_orders=[{"id": "keep"}]),
+ trades=[_trade_stub("t-first")],
+ positions=[exchange_data_import.PositionDetails()],
+ )
+ snap2 = octobot_flow_entities.ExchangeAccountElements(
+ orders=exchange_data_import.OrdersDetails(open_orders=[{"id": "last-wins"}]),
+ trades=[_trade_stub("t-second")],
+ positions=[exchange_data_import.PositionDetails()],
+ )
+ automation = octobot_flow_entities.AutomationDetails(
+ metadata=octobot_flow_entities.AutomationMetadata(automation_id="aid"),
+ exchange_account_elements=octobot_flow_entities.ExchangeAccountElements(),
+ )
+ target = automation.exchange_account_elements
+ assert target is not None
+ target.trades.append(_trade_stub("t-existing"))
+ changed = target.merge_synchronized_snapshots([snap1, snap2])
+ assert octobot_flow_enums_import.ChangedElements.TRADES in changed
+ tid = octobot_trading_enums_import.ExchangeConstantsOrderColumns.EXCHANGE_TRADE_ID.value
+ trade_ids = [trade[tid] for trade in target.trades]
+ assert trade_ids == ["t-existing", "t-first", "t-second"]
+ assert target.orders.open_orders == [{"id": "last-wins"}]
+
+
+class TestMergeSynchronizedSnapshotsPreservesTransactions:
+ def test_merges_transactions_only_once_per_txid(self):
+ snap1 = octobot_flow_entities.ExchangeAccountElements(
+ transactions=[_tx_stub("tx-a")],
+ )
+ snap2 = octobot_flow_entities.ExchangeAccountElements(
+ transactions=[_tx_stub("tx-a"), _tx_stub("tx-b")],
+ )
+ automation = octobot_flow_entities.AutomationDetails(
+ metadata=octobot_flow_entities.AutomationMetadata(automation_id="aid"),
+ exchange_account_elements=octobot_flow_entities.ExchangeAccountElements(),
+ )
+ tx_key = octobot_trading_enums_import.ExchangeConstantsTransactionColumns.TXID.value
+ automation.exchange_account_elements.transactions.append(_tx_stub("tx-existing"))
+ automation.exchange_account_elements.merge_synchronized_snapshots([snap1, snap2])
+ txs = automation.exchange_account_elements.transactions
+ assert [t[tx_key] for t in txs] == ["tx-existing", "tx-a", "tx-b"]
diff --git a/packages/tentacles/Meta/DSL_operators/automation_operators/__init__.py b/packages/tentacles/Meta/DSL_operators/automation_operators/__init__.py
index 0516a710f1..3f6ba0d766 100644
--- a/packages/tentacles/Meta/DSL_operators/automation_operators/__init__.py
+++ b/packages/tentacles/Meta/DSL_operators/automation_operators/__init__.py
@@ -18,7 +18,9 @@
import tentacles.Meta.DSL_operators.automation_operators.automation_management
from tentacles.Meta.DSL_operators.automation_operators.automation_management import (
StopAutomationOperator,
+ UpdateAutomationConfigurationOperator,
)
__all__ = [
"StopAutomationOperator",
+ "UpdateAutomationConfigurationOperator",
]
\ No newline at end of file
diff --git a/packages/tentacles/Meta/DSL_operators/automation_operators/automation_management.py b/packages/tentacles/Meta/DSL_operators/automation_operators/automation_management.py
index 22c85551b0..00b4881e77 100644
--- a/packages/tentacles/Meta/DSL_operators/automation_operators/automation_management.py
+++ b/packages/tentacles/Meta/DSL_operators/automation_operators/automation_management.py
@@ -14,6 +14,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.dsl_interpreter.operator_parameter as operator_parameter
import octobot_flow.entities
@@ -35,3 +36,44 @@ def compute(self) -> dict:
stop_automation=True
).to_dict(include_default_values=False)
}
+
+
+class UpdateAutomationConfigurationOperator(dsl_interpreter.CallOperator):
+ DESCRIPTION = (
+ "Requests a configuration refresh for the automation. Pass the full replacement DSL "
+ "script for the DAG action that must be the only executable action at this point; the "
+ "executor sets that action's `dsl_script` and then runs the refresh signal (e.g. restart "
+ "for process-bound operators). This is not limited to `run_octobot_process`—any "
+ "executable DSL action can be retargeted."
+ )
+ EXAMPLE = 'update_automation_configuration("your_dsl_call(...)")'
+
+ @staticmethod
+ def get_name() -> str:
+ return "update_automation_configuration"
+
+ @classmethod
+ def get_parameters(cls) -> list[operator_parameter.OperatorParameter]:
+ return [
+ operator_parameter.OperatorParameter(
+ name="configuration_update",
+ description=(
+ "Full replacement DSL for the single currently executable DAG script action "
+ "(becomes that action's `dsl_script`). Any operator form is valid as long as "
+ "it matches the action being updated (e.g. `run_octobot_process(...)`, "
+ "exchange calls, etc.)."
+ ),
+ required=True,
+ type=str,
+ default=None,
+ ),
+ ]
+
+ def compute(self) -> dict:
+ configuration_update = self.get_computed_value_by_parameter()["configuration_update"]
+ return {
+ octobot_flow.entities.PostIterationActionsDetails.__name__:
+ octobot_flow.entities.PostIterationActionsDetails(
+ configuration_update=configuration_update,
+ ).to_dict(include_default_values=False)
+ }
diff --git a/packages/tentacles/Meta/DSL_operators/automation_operators/tests/test_automation_management.py b/packages/tentacles/Meta/DSL_operators/automation_operators/tests/test_automation_management.py
index f222f098a8..192102161c 100644
--- a/packages/tentacles/Meta/DSL_operators/automation_operators/tests/test_automation_management.py
+++ b/packages/tentacles/Meta/DSL_operators/automation_operators/tests/test_automation_management.py
@@ -16,6 +16,7 @@
import pytest
import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.dsl_interpreter.dictionnaries as dsl_dictionaries
import octobot_commons.errors
import octobot_flow.entities
@@ -24,6 +25,7 @@
@pytest.fixture
def interpreter():
+ dsl_dictionaries.clear_get_all_operators_cache()
return dsl_interpreter.Interpreter(
dsl_interpreter.get_all_operators()
)
@@ -38,6 +40,18 @@ def _assert_stop_automation_result(result):
assert details.stop_automation is True
+_SAMPLE_CONFIGURATION_UPDATE_DSL = 'run_octobot_process("u", {})'
+
+
+def _assert_update_automation_configuration_result(result, expected_configuration_update: str):
+ assert isinstance(result, dict)
+ assert octobot_flow.entities.PostIterationActionsDetails.__name__ in result
+ details = octobot_flow.entities.PostIterationActionsDetails.from_dict(
+ result[octobot_flow.entities.PostIterationActionsDetails.__name__]
+ )
+ assert details.configuration_update == expected_configuration_update
+
+
@pytest.mark.asyncio
async def test_stop_automation_call_as_dsl(interpreter):
assert "stop_automation" in interpreter.operators_by_name
@@ -66,3 +80,43 @@ def test_stop_automation_operator_docs():
assert docs.name == "stop_automation"
assert "stop" in docs.description.lower()
assert docs.example == "stop_automation()"
+
+
+@pytest.mark.asyncio
+async def test_update_automation_configuration_call_as_dsl(interpreter):
+ assert "update_automation_configuration" in interpreter.operators_by_name
+
+ result = await interpreter.interprete(
+ f"update_automation_configuration({_SAMPLE_CONFIGURATION_UPDATE_DSL!r})"
+ )
+ _assert_update_automation_configuration_result(result, _SAMPLE_CONFIGURATION_UPDATE_DSL)
+
+
+def test_update_automation_configuration_operator_compute():
+ operator = automation_management.UpdateAutomationConfigurationOperator(
+ _SAMPLE_CONFIGURATION_UPDATE_DSL,
+ )
+ result = operator.compute()
+ _assert_update_automation_configuration_result(result, _SAMPLE_CONFIGURATION_UPDATE_DSL)
+
+
+@pytest.mark.asyncio
+async def test_update_automation_configuration_operator_invalid_parameters(interpreter):
+ with pytest.raises(
+ octobot_commons.errors.InvalidParametersError,
+ match="requires at least 1",
+ ):
+ await interpreter.interprete("update_automation_configuration()")
+ with pytest.raises(
+ octobot_commons.errors.InvalidParametersError,
+ match="supports up to 1",
+ ):
+ await interpreter.interprete(
+ f"update_automation_configuration({_SAMPLE_CONFIGURATION_UPDATE_DSL!r}, 1)"
+ )
+
+
+def test_update_automation_configuration_operator_docs():
+ docs = automation_management.UpdateAutomationConfigurationOperator.get_docs()
+ assert docs.name == "update_automation_configuration"
+ assert docs.example == 'update_automation_configuration("your_dsl_call(...)")'
diff --git a/packages/tentacles/Meta/DSL_operators/octobot_process_operators/__init__.py b/packages/tentacles/Meta/DSL_operators/octobot_process_operators/__init__.py
new file mode 100644
index 0000000000..60006ef9b1
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/octobot_process_operators/__init__.py
@@ -0,0 +1,26 @@
+# pylint: disable=R0801
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+
+import tentacles.Meta.DSL_operators.octobot_process_operators.octobot_process_ops
+from tentacles.Meta.DSL_operators.octobot_process_operators.octobot_process_ops import (
+ create_octobot_process_operators,
+ ensure_user_profile_and_layout,
+)
+
+__all__ = [
+ "create_octobot_process_operators",
+ "ensure_user_profile_and_layout",
+]
diff --git a/packages/tentacles/Meta/DSL_operators/octobot_process_operators/metadata.json b/packages/tentacles/Meta/DSL_operators/octobot_process_operators/metadata.json
new file mode 100644
index 0000000000..319240c835
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/octobot_process_operators/metadata.json
@@ -0,0 +1,6 @@
+{
+ "version": "1.2.0",
+ "origin_package": "OctoBot-Default-Tentacles",
+ "tentacles": [],
+ "tentacles-requirements": []
+}
\ No newline at end of file
diff --git a/packages/tentacles/Meta/DSL_operators/octobot_process_operators/octobot_process_ops.py b/packages/tentacles/Meta/DSL_operators/octobot_process_operators/octobot_process_ops.py
new file mode 100644
index 0000000000..b73a28752f
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/octobot_process_operators/octobot_process_ops.py
@@ -0,0 +1,846 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <https://www.gnu.org/licenses/>.
+# pylint: disable=missing-class-docstring,missing-function-docstring
+import asyncio
+import json
+import os
+import shutil
+import sys
+import time
+import types
+import typing
+import uuid
+import aiofiles
+import pydantic
+
+import octobot_commons.constants as commons_constants
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.errors as commons_errors
+import octobot_commons.json_util as json_util
+import octobot_commons.logging as commons_logging
+import octobot_commons.profiles.profile_data as profile_data_module
+import octobot_commons.profiles.profile_data_import as profile_data_import
+import octobot_commons.profiles.exchange_auth_data as exchange_auth_data_module
+import octobot_commons.profiles.tentacles_profile_data_translator as tentacles_profile_data_translator
+import octobot_commons.enums as commons_enums
+import octobot_commons.configuration
+
+import octobot.constants as octobot_constants
+import octobot_flow.entities as octobot_flow_entities
+import octobot_flow.entities.accounts.process_bot_state as process_bot_state_import
+import octobot_node.constants as octobot_node_constants
+import octobot_services.constants as services_constants
+
+# Written only after a successful full init so re-runs can detect an existing per-bot tree.
+DSL_PREPARED_MARKER = ".octobot_dsl_prepared"
+DEFAULT_PING_WAITING_TIME = 2.0
+DEFAULT_ENSURE_TIMEOUT = 120.0
+
+
class EnsureOctobotProcessState(pydantic.BaseModel):
    """Serializable snapshot of a spawned OctoBot child, carried between
    re-calls of run_octobot_process inside last_execution_result."""

    # validate_assignment re-checks values on model_copy(update=...);
    # extra="ignore" drops unknown keys from older serialized payloads.
    model_config = pydantic.ConfigDict(validate_assignment=True, extra="ignore")
    http_base_url: str  # e.g. "http://<bind_host>:<web_port>"
    web_port: int
    node_port: int
    user_root: str  # absolute per-bot user directory
    user_folder: str  # raw user_folder parameter (path segments)
    log_folder: str  # absolute log directory for this bot
    profile_id: str | None
    pid: int  # child process id (0 when unknown)
    state_file_path: str = ""
    # Omitted in ensure success `self.value` (stop command); 0.0 is unused there.
    started_waiting_at: float = 0.0
    # Set after process_bot_state.json liveness passes; disables the init `ping_timeout` cap (re-calls only use `waiting_time`).
    init_state_ok: bool = False
+
+
# Keys on `last_result` that `create_re_callable_result_dict` takes as top-level args (not state).
_RECALL_OVERRIDABLE_KEYS = frozenset(
    {
        dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value,
        dsl_interpreter.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value,
    }
)

# Encryption of the empty string, computed once at import time; used to
# backfill missing exchange key/secret fields in generated config.json files.
_DEFAULT_ENCRYPTED_VALUE = octobot_commons.configuration.encrypt("").decode()
+
+
def _resolve_state_file_path(recall_state: EnsureOctobotProcessState) -> str:
    """Return the state file path stored on the re-call state, or the default
    process_bot_state.json location under its user_root."""
    explicit_path = recall_state.state_file_path
    if explicit_path:
        return explicit_path
    default_path = os.path.join(
        recall_state.user_root,
        octobot_constants.PROCESS_BOT_STATE_FILE_NAME,
    )
    return os.path.normpath(default_path)
+
+
def _is_process_state_alive(state: process_bot_state_import.ProcessBotState) -> bool:
    """True when the dumped state's timestamps look fresh: updated_at is not in
    the future and is younger than twice the announced dump period."""
    current_time = time.time()
    metadata = state.metadata
    # A future timestamp means clock skew or a corrupt dump: treat as not live.
    if metadata.updated_at > current_time:
        return False
    dump_interval = octobot_constants.PROCESS_BOT_STATE_DUMP_INTERVAL_SECONDS
    # Floor the period so a bogus next_updated_at cannot make liveness impossible.
    minimum_period = max(0.1 * dump_interval, 1e-6)
    announced_period = metadata.next_updated_at - metadata.updated_at
    effective_period = max(announced_period, minimum_period)
    state_age = current_time - metadata.updated_at
    return state_age < 2 * effective_period
+
+
async def _load_process_bot_state(
    state_file_path: str,
) -> typing.Optional[process_bot_state_import.ProcessBotState]:
    """
    Best-effort async read of process_bot_state.json.

    Returns the parsed ProcessBotState, or None when the file is missing,
    unreadable, or does not parse into the expected structure. Callers treat
    None as "child not live yet".
    """
    try:
        async with aiofiles.open(state_file_path, mode="r", encoding="utf-8") as state_file:
            raw = await state_file.read()
            data = json.loads(raw)
            return process_bot_state_import.ProcessBotState.from_dict(data)
    except (OSError, json.JSONDecodeError, TypeError, ValueError, KeyError):
        # Any I/O or parsing failure is folded into "no live state".
        return None
+
+
def _parse_ensure_recall_state(raw: dict) -> typing.Optional[EnsureOctobotProcessState]:
    """Validate a re-call payload into EnsureOctobotProcessState; None when
    the payload is empty or does not validate."""
    if not raw:
        return None
    try:
        parsed_state = EnsureOctobotProcessState.model_validate(raw)
    except pydantic.ValidationError:
        return None
    return parsed_state
+
+
def _remove_path_for_fresh_start(path: str, *, logger: typing.Any) -> None:
    """
    Best-effort removal of ``path`` before re-materializing a bot layout.

    Never raises: an empty path, a missing path, or a removal failure is
    logged and skipped so a config refresh can proceed.
    """
    if not path or not str(path).strip():
        logger.info("configuration update: skip remove (empty path)")
        return
    if not os.path.exists(path):
        logger.info("configuration update: skip remove (path missing): %s", path)
        return
    logger.info("configuration update: removing path for fresh start: %s", path)
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=True)
    else:
        # Bug fix: shutil.rmtree(ignore_errors=True) silently no-ops on a
        # regular file, leaving stale state behind despite the log line above.
        try:
            os.remove(path)
        except OSError:
            logger.info("configuration update: could not remove file: %s", path)
+
+
async def _convert_profile_data_to_profile_directory(
    profile_data: profile_data_module.ProfileData,
    temp_profile_path: str,
) -> None:
    """
    Write ``profile_data`` out as an OctoBot profile directory at
    ``temp_profile_path``.

    Tentacles are detached from the profile first and run through the
    tentacles translator; a KeyError from the translator lookup means no
    translator applies, so the original tentacles list is restored before
    the directory conversion.
    """
    tentacles_snapshot = list(profile_data.tentacles)
    if tentacles_snapshot:
        profile_data.tentacles = []
        try:
            # in case a translator is enabled on the given tentacles,
            # apply it to the profile data
            await tentacles_profile_data_translator.TentaclesProfileDataTranslator(
                profile_data, []
            ).translate(tentacles_snapshot, {}, None, None)
        except KeyError:
            # no translator found, restore tentacles
            profile_data.tentacles = tentacles_snapshot
    await profile_data_import.convert_profile_data_to_profile_directory(
        profile_data,
        temp_profile_path,
        description=profile_data.profile_details.name or "",
        risk=commons_enums.ProfileRisk.MODERATE,
        auto_update=False,
        slug=None,
        avatar_url=None,
        force_simulator=False,
    )
+
+
def _write_user_root_config_json(
    config_path: str,
    profile_id: str,
    profile_data: typing.Optional[profile_data_module.ProfileData] = None,
    exchange_auth_data: typing.Optional[
        list[exchange_auth_data_module.ExchangeAuthData]
    ] = None,
) -> None:
    """
    Writes user-root ``config.json``: selected profile, disabled web auto-open for DSL-spawned
    processes, optional exchange stubs from ``profile_data``, then credentials from
    ``exchange_auth_data`` (merged into ``exchanges``).
    """
    # Load packaged defaults; pin profile and disable browser auto-open for headless DSL children.
    default_cfg = json_util.read_file(octobot_constants.DEFAULT_CONFIG_FILE)
    default_cfg[commons_constants.CONFIG_PROFILE] = profile_id
    default_cfg[commons_constants.CONFIG_ACCEPTED_TERMS] = True
    services_cfg = default_cfg.setdefault(services_constants.CONFIG_CATEGORY_SERVICES, {})
    web_cfg = services_cfg.setdefault(services_constants.CONFIG_WEB, {})
    web_cfg[services_constants.CONFIG_AUTO_OPEN_IN_WEB_BROWSER] = False
    # Seed top-level exchanges so partially-managed merge targets exist before applying secrets.
    if profile_data is not None:
        exchanges_cfg = default_cfg.setdefault(commons_constants.CONFIG_EXCHANGES, {})
        for exchange_details in profile_data.exchanges:
            internal_exchange_name = exchange_details.internal_name
            # Exchanges without an internal name cannot be addressed in config.
            if not internal_exchange_name:
                continue
            exchange_entry = exchanges_cfg.setdefault(internal_exchange_name, {})
            exchange_entry.setdefault(commons_constants.CONFIG_ENABLED_OPTION, True)
            exchange_entry.setdefault(
                commons_constants.CONFIG_EXCHANGE_TYPE,
                exchange_details.exchange_type or commons_constants.DEFAULT_EXCHANGE_TYPE,
            )
    # Overlay credentials onto matching exchange entries (adds exchange if missing).
    if exchange_auth_data:
        exchange_config_holder = types.SimpleNamespace(config=default_cfg)
        for auth_element in exchange_auth_data:
            auth_element.apply_to_exchange_config(exchange_config_holder)
    # Backfill encrypted-empty key/secret so every exchange entry has both
    # credential fields; NOTE(review): presumably required by the config
    # schema / decryption path — confirm against the config validator.
    exchanges_cfg = default_cfg.get(commons_constants.CONFIG_EXCHANGES) or {}
    for exchange_cfg in exchanges_cfg.values():
        if isinstance(exchange_cfg, dict):
            exchange_cfg.setdefault(commons_constants.CONFIG_EXCHANGE_KEY, _DEFAULT_ENCRYPTED_VALUE)
            exchange_cfg.setdefault(commons_constants.CONFIG_EXCHANGE_SECRET, _DEFAULT_ENCRYPTED_VALUE)
    json_util.safe_dump(default_cfg, config_path)
+
+
async def ensure_user_profile_and_layout(
    user_folder: str,
    working_directory: str,
    profile_data_dict: dict,
    source_reference_tentacles_config: str | None,
    exchange_auth_data: typing.Optional[
        list[exchange_auth_data_module.ExchangeAuthData]
    ] = None,
) -> dict[str, typing.Any]:
    """
    One-time layout under user_root (<working_directory>/user/automations/<user_folder>/):
    profile tree, top-level config.json, reference_tentacles_config copy.
    Idempotent when config.json + marker both exist.

    :param user_folder: path segment(s) naming this bot's folder (unsafe segments rejected)
    :param working_directory: OctoBot project root
    :param profile_data_dict: dict compatible with ProfileData.from_dict
    :param source_reference_tentacles_config: optional source directory to copy
        as reference_tentacles_config; defaults to the user folder's copy
    :param exchange_auth_data: optional exchange credentials merged into config.json
    :return: dict with "user_root", "profile_id" and "already_prepared"
    """
    dsl_interpreter.ProcessBoundOperatorMixin.reject_user_path_segment(user_folder)
    user_folder_leaf_segments = [
        segment for segment in str(user_folder).replace("\\", "/").split("/") if segment
    ]
    user_root = os.path.normpath(
        os.path.join(
            working_directory,
            *commons_constants.USER_AUTOMATIONS_FOLDER.split("/"),
            *user_folder_leaf_segments,
        )
    )
    config_path = os.path.join(user_root, commons_constants.CONFIG_FILE)
    marker_path = os.path.join(user_root, DSL_PREPARED_MARKER)
    # Already prepared: do not rewrite files (host may have re-used this folder).
    if os.path.isfile(config_path) and os.path.isfile(marker_path):
        profile_id = _read_top_level_profile_id(config_path)
        return {
            "user_root": user_root,
            "profile_id": profile_id,
            "already_prepared": True,
        }

    os.makedirs(user_root, exist_ok=True)
    # Import writes to a throwaway folder first: the real profile id is assigned during import (see rename below).
    temp_profile_path = os.path.join(
        user_root,
        commons_constants.PROFILES_FOLDER,
        f"_dsl_tmp_{uuid.uuid4().hex}",
    )
    os.makedirs(os.path.dirname(temp_profile_path), exist_ok=True)

    profile_data = profile_data_module.ProfileData.from_dict(profile_data_dict)
    await _convert_profile_data_to_profile_directory(
        profile_data, temp_profile_path
    )

    profile_file = os.path.join(temp_profile_path, commons_constants.PROFILE_CONFIG_FILE)
    profile_on_disk = json_util.read_file(profile_file)
    profile_id = profile_on_disk[commons_constants.CONFIG_PROFILE][commons_constants.CONFIG_ID]
    # OctoBot expects each profile under profiles/<profile_id>/; move the temp tree to that name.
    final_profile_path = os.path.join(
        user_root, commons_constants.PROFILES_FOLDER, profile_id
    )
    if os.path.normpath(temp_profile_path) != os.path.normpath(final_profile_path):
        if os.path.exists(final_profile_path):
            shutil.rmtree(final_profile_path)
        os.replace(temp_profile_path, final_profile_path)

    _write_user_root_config_json(config_path, profile_id, profile_data, exchange_auth_data)

    # Mirror default reference tentacles layout expected by the child.
    ref_src = source_reference_tentacles_config or os.path.join(
        working_directory, commons_constants.USER_FOLDER, "reference_tentacles_config"
    )
    ref_src = os.path.normpath(ref_src)
    ref_dst = os.path.join(user_root, "reference_tentacles_config")
    if os.path.isdir(ref_src):
        if os.path.exists(ref_dst):
            shutil.rmtree(ref_dst)
        # copytree can be slow on large trees: keep the event loop responsive.
        await asyncio.to_thread(shutil.copytree, ref_src, ref_dst)
    else:
        os.makedirs(ref_dst, exist_ok=True)

    # Marker last: if anything above failed, a partial tree will not look "prepared".
    with open(marker_path, "w", encoding="utf-8") as marker_file:
        marker_file.write("1")

    return {
        "user_root": user_root,
        "profile_id": profile_id,
        "already_prepared": False,
    }
+
+
def _read_top_level_profile_id(config_path: str) -> str | None:
    """Selected profile id from user root config.json (``"profile"`` key). None if unreadable."""
    if not os.path.isfile(config_path):
        return None
    try:
        # Deliberately broad: any read/parse failure means "no known profile".
        return json_util.read_file(config_path).get(commons_constants.CONFIG_PROFILE)
    except Exception:
        return None
+
+
def _ensure_log_folder_path(working_directory: str, user_folder: str) -> str:
    """Absolute log directory for this `user_folder` (matches ensure_state.log_folder)."""
    normalized_folder = str(user_folder).replace("\\", "/")
    leaf_segments = [part for part in normalized_folder.split("/") if part]
    base_segments = octobot_node_constants.AUTOMATION_LOGS_FOLDER.split("/")
    return os.path.normpath(
        os.path.join(working_directory, *base_segments, *leaf_segments)
    )
+
+
def _ensure_child_environ(web_port: int, node_port: int, bind_host: str) -> dict:
    """Environment passed to the OctoBot child (ports and bind addresses)."""
    environment = dict(os.environ)
    environment.update(
        {
            services_constants.ENV_WEB_PORT: str(web_port),
            services_constants.ENV_WEB_ADDRESS: bind_host,
            services_constants.ENV_NODE_API_PORT: str(node_port),
            services_constants.ENV_NODE_API_ADDRESS: bind_host,
            commons_constants.ENV_USE_MINIMAL_LIBS: "false",
        }
    )
    return environment
+
+
def _ensure_start_cmd(
    start_script: str,
    rel_user: str,
    rel_log: str,
    no_telegram: bool,
    state_file_path: str,
) -> list[str]:
    """Argv for `python start.py --user-folder … --log-folder …` (+ optional -nt, --dump-state)."""
    telegram_args = ["-nt"] if no_telegram else []
    return [
        sys.executable,
        start_script,
        "--user-folder",
        rel_user,
        "--log-folder",
        rel_log,
        *telegram_args,
        "--dump-state",
        state_file_path,
    ]
+
+
def _listen_port_pair_with_shared_scan_offset(
    probe_host: str,
    primary_listen_port_base: int,
    secondary_listen_port_base: int,
    *,
    max_offset: int = 256,
) -> tuple[int, int]:
    """
    Find one free listen port per base via
    ``find_first_free_listen_port_after_base``.

    Two sequential scans: the primary port is found first and blocklisted for
    the secondary scan, so the two results never collide even when both bases
    are equal.
    """
    mixin = dsl_interpreter.ProcessBoundOperatorMixin
    primary_listen_port = mixin.find_first_free_listen_port_after_base(
        probe_host,
        primary_listen_port_base,
        max_offset=max_offset,
    )
    secondary_listen_port = mixin.find_first_free_listen_port_after_base(
        probe_host,
        secondary_listen_port_base,
        max_offset=max_offset,
        blocklist=[primary_listen_port],
    )
    return primary_listen_port, secondary_listen_port
+
+
def create_octobot_process_operators(
    signals: typing.Optional[dsl_interpreter.OperatorSignals] = None
) -> list[type[dsl_interpreter.Operator]]:
    """
    Build the DSL operator classes bound to the optional ``signals`` channel.

    Returns a single operator class, run_octobot_process: it prepares a
    per-bot user layout, spawns an OctoBot child and tracks its liveness
    through process_bot_state.json (re-callable).
    """
    # Child process: user layout, ports, process_bot_state.json liveness (re-callable).
    class EnsureOctobotProcessOperator(
        dsl_interpreter.PreComputingCallOperator,
        dsl_interpreter.ReCallableOperatorMixin,
        dsl_interpreter.SignalableOperatorMixin,
        dsl_interpreter.ProcessBoundOperatorMixin,
    ):
        DESCRIPTION = (
            "Prepares a per-bot user directory (profile + config + reference_tentacles_config), "
            "spawns an OctoBot child with unique WEB/NODE ports and --dump-state for process_bot_state.json. "
            "Always re-callable: each fresh state file (updated_at within twice the dump interval) schedules the next check (see waiting_time). "
            "If the state file never becomes live before ping_timeout from the first spawn, the keyword fails and the child is killed."
        )
        EXAMPLE = (
            "run_octobot_process(user_folder='bots/b1', profile_data={...}, "
            "exchange_auth_data=[{'internal_name': 'binance', 'api_key': '...', 'api_secret': '...'}], "
            "last_execution_result=None)"
        )

        def __init__(self, *args, **kwargs):
            # Explicit per-base init (no super() chain): each base keeps its own state.
            # NOTE(review): ReCallableOperatorMixin.__init__ is not invoked — confirm it needs none.
            dsl_interpreter.PreComputingCallOperator.__init__(self, *args, **kwargs)
            dsl_interpreter.ProcessBoundOperatorMixin.__init__(self)
            dsl_interpreter.SignalableOperatorMixin.__init__(self, signals)

        @staticmethod
        def get_library() -> str:
            """Library bucket this operator is registered under."""
            return commons_constants.CONTEXTUAL_OPERATORS_LIBRARY

        @staticmethod
        def get_name() -> str:
            """DSL keyword name."""
            return "run_octobot_process"

        @classmethod
        def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
            """Declared keyword parameters, plus the inherited re-callable ones."""
            return [
                dsl_interpreter.OperatorParameter(
                    name="user_folder",
                    description=(
                        "Path segment(s) under /user/automations/ for this bot."
                    ),
                    required=True,
                    type=str,
                ),
                dsl_interpreter.OperatorParameter(
                    name="profile_data",
                    description="Object compatible with octobot_commons.profiles.profile_data.ProfileData.",
                    required=True,
                    type=dict,
                ),
                dsl_interpreter.OperatorParameter(
                    name="exchange_auth_data",
                    description=(
                        "Optional list of dicts compatible with "
                        "octobot_commons.profiles.exchange_auth_data.ExchangeAuthData "
                        "(e.g. internal_name, api_key, api_secret, api_password, exchange_type, sandboxed)."
                    ),
                    required=False,
                    type=list[dict],
                    default=None,
                ),
                dsl_interpreter.OperatorParameter(
                    name="web_port_base",
                    description="Base port for the web interface (uses base+offset; default from services constants).",
                    required=False,
                    type=int,
                    default=services_constants.DEFAULT_SERVER_PORT,
                ),
                dsl_interpreter.OperatorParameter(
                    name="node_port_base",
                    description="Base port for the node API (uses base+offset).",
                    required=False,
                    type=int,
                    default=services_constants.DEFAULT_NODE_API_PORT,
                ),
                dsl_interpreter.OperatorParameter(
                    name="bind_host",
                    description="Host used for free-port checks and WEB_ADDRESS / NODE_API_ADDRESS for the child.",
                    required=False,
                    type=str,
                    default="127.0.0.1",
                ),
                dsl_interpreter.OperatorParameter(
                    name="http_scheme",
                    description="Scheme for http_base_url (default http).",
                    required=False,
                    type=str,
                    default="http",
                ),
                dsl_interpreter.OperatorParameter(
                    name="no_telegram",
                    description="If true, spawns with -nt (default true).",
                    required=False,
                    type=bool,
                    default=True,
                ),
                dsl_interpreter.OperatorParameter(
                    name="ping_timeout",
                    description=(
                        "Init-only: max seconds from the first spawn until process_bot_state.json is first considered live "
                        "(fresh updated_at / next_updated_at). After that, the child is killed and the keyword fails. "
                        "Does not cap liveness re-calls once up."
                    ),
                    required=False,
                    type=float,
                    default=DEFAULT_ENSURE_TIMEOUT,
                ),
                dsl_interpreter.OperatorParameter(
                    name="waiting_time",
                    description=(
                        "Fixed interval in seconds before each re-call (init polling and ongoing liveness while the state file is live)."
                    ),
                    required=False,
                    type=float,
                    default=DEFAULT_PING_WAITING_TIME,
                ),
            ] + super().get_re_callable_parameters()

        @classmethod
        def _re_calling_result_dispatches_this_ensure(
            cls,
            re_calling_result: typing.Optional[dict],
        ) -> bool:
            """True when ``re_calling_result`` is a re-calling payload produced
            by THIS keyword and carries a parseable EnsureOctobotProcessState."""
            if not re_calling_result or not dsl_interpreter.ReCallingOperatorResult.is_re_calling_operator_result(
                re_calling_result
            ):
                return False
            try:
                keyword = dsl_interpreter.ReCallingOperatorResult.get_keyword(re_calling_result)
            except (KeyError, TypeError, AttributeError):
                return False
            if keyword != cls.get_name():
                return False
            rec = re_calling_result.get(dsl_interpreter.ReCallingOperatorResult.__name__)
            if not isinstance(rec, dict):
                return False
            inner = rec.get("last_execution_result")
            if not isinstance(inner, dict):
                return False
            return _parse_ensure_recall_state(inner) is not None

        @classmethod
        def should_dispatch_operator_signal_for_result(
            cls,
            signal: str,
            re_calling_result: typing.Optional[dict],
        ) -> bool:
            """Only STOP / UPDATE_CONFIG signals are handled, and only for
            results that came from this keyword."""
            if signal not in (
                dsl_interpreter.OperatorSignal.STOP.value,
                dsl_interpreter.OperatorSignal.UPDATE_CONFIG.value,
            ):
                return False
            return cls._re_calling_result_dispatches_this_ensure(re_calling_result)

        def _emit_ensure_recall(
            self,
            *,
            state: EnsureOctobotProcessState,
            last_result: dict,
            start_time: float,
            recall_interval: float,
            parsed_process_bot_state: typing.Optional[process_bot_state_import.ProcessBotState] = None,
        ) -> None:
            """Set ``self.value`` to a re-callable result carrying ``state``,
            pass-through keys from ``last_result`` and, when available, the
            post-iteration account details parsed from the state file."""
            re_call_payload = {**state.model_dump()}
            # Keep unknown last_result keys, but never shadow state fields or
            # the keys create_re_callable_result_dict takes as arguments.
            for payload_key, payload_value in last_result.items():
                if payload_key in re_call_payload or payload_key in _RECALL_OVERRIDABLE_KEYS:
                    continue
                re_call_payload[payload_key] = payload_value
            if parsed_process_bot_state is not None:
                re_call_payload[octobot_flow_entities.PostIterationActionsDetails.__name__] = (
                    octobot_flow_entities.PostIterationActionsDetails(
                        updated_exchange_account_elements=(
                            parsed_process_bot_state.exchange_account_elements.to_dict(
                                include_default_values=True
                            )
                        ),
                    ).to_dict(include_default_values=False)
                )
            self.value = self.create_re_callable_result_dict(
                keyword=self.get_name(),
                waiting_time=recall_interval,
                last_execution_time=start_time,
                **re_call_payload,
            )

        async def _pre_compute_recall_path(
            self,
            recall_state: EnsureOctobotProcessState,
            last_result: dict,
            *,
            start_time: float,
            recall_interval: float,
            ping_timeout: float,
        ) -> None:
            """Re-call path: check state-file liveness for an already spawned
            child, enforce the init timeout, and schedule the next check."""
            state_path = _resolve_state_file_path(recall_state)
            # Init window: fail and kill the child if the state file never became live in time.
            if (
                not recall_state.init_state_ok
                and time.time() - recall_state.started_waiting_at > ping_timeout
            ):
                self.value = self.request_graceful_stop(logger=_get_logger())
                raise commons_errors.DSLInterpreterError(
                    "Timed out waiting for OctoBot process_bot_state.json during init (see ping_timeout).",
                )
            _get_logger().info("process state path (re-call path): %s", state_path)
            loaded = await _load_process_bot_state(state_path)
            is_live = loaded is not None and _is_process_state_alive(loaded)
            if is_live:
                _get_logger().info(
                    "OctoBot is running (re-call path): user_folder=%r base_url=%r pid=%s",
                    recall_state.user_folder,
                    recall_state.http_base_url,
                    recall_state.pid,
                )
                # init_state_ok disables the init timeout cap on later re-calls.
                updated = recall_state.model_copy(
                    update={"init_state_ok": True, "state_file_path": state_path}
                )
                self._emit_ensure_recall(
                    state=updated,
                    last_result=last_result,
                    start_time=start_time,
                    recall_interval=recall_interval,
                    parsed_process_bot_state=loaded,
                )
                return
            _get_logger().info(
                "OctoBot is still starting (re-call path, process state not live): user_folder=%r "
                "base_url=%r pid=%s state_path=%s",
                recall_state.user_folder,
                recall_state.http_base_url,
                recall_state.pid,
                state_path,
            )
            self._emit_ensure_recall(
                state=recall_state.model_copy(update={"state_file_path": state_path}),
                last_result=last_result,
                start_time=start_time,
                recall_interval=recall_interval,
                parsed_process_bot_state=loaded,
            )

        async def _pre_compute_first_spawn(
            self,
            user_folder: str,
            working_directory: str,
            params: dict,
            last_result: dict,
            *,
            start_time: float,
            recall_interval: float,
        ) -> None:
            """First-spawn path: materialize the per-bot layout, pick free
            ports, spawn the child, then emit the first liveness re-call."""
            # One-time (or re-) materialization, free ports, env, and `Popen` at project root.
            raw_exchange_auth = params.get("exchange_auth_data")
            exchange_auth: typing.Optional[
                list[exchange_auth_data_module.ExchangeAuthData]
            ] = None
            if raw_exchange_auth:
                exchange_auth = [
                    exchange_auth_data_module.ExchangeAuthData.from_dict(entry)
                    if isinstance(entry, dict)
                    else entry
                    for entry in raw_exchange_auth
                ]
            init_info = await ensure_user_profile_and_layout(
                user_folder,
                working_directory,
                params["profile_data"],
                None,
                exchange_auth,
            )
            user_root = init_info["user_root"]
            log_folder = _ensure_log_folder_path(working_directory, user_folder)
            bind_host, probe_host = (
                dsl_interpreter.ProcessBoundOperatorMixin.bind_address_for_env_and_probe_hosts(
                    params
                )
            )
            web_b = int(params.get("web_port_base") or services_constants.DEFAULT_SERVER_PORT)
            node_b = int(params.get("node_port_base") or services_constants.DEFAULT_NODE_API_PORT)
            web_port, node_port = _listen_port_pair_with_shared_scan_offset(
                probe_host, web_b, node_b
            )
            start_script = os.path.join(working_directory, "start.py")
            if not os.path.isfile(start_script):
                raise commons_errors.DSLInterpreterError(
                    f"start.py not found at {start_script} (current working directory must be the OctoBot project root)."
                )
            child_env = _ensure_child_environ(web_port, node_port, bind_host)
            # Relative paths keep the child's CLI args stable across mounts.
            rel_user = os.path.relpath(user_root, working_directory)
            rel_log = os.path.relpath(log_folder, working_directory)
            state_file_path = os.path.normpath(
                os.path.join(user_root, octobot_constants.PROCESS_BOT_STATE_FILE_NAME)
            )
            cmd = _ensure_start_cmd(
                start_script,
                rel_user,
                rel_log,
                bool(params.get("no_telegram", True)),
                state_file_path,
            )
            self.spawn_subprocess(
                cmd,
                working_directory=working_directory,
                environment=child_env,
                hide_console_window=True,
            )
            scheme = str(params.get("http_scheme") or "http").rstrip(":/")
            http_base_url = f"{scheme}://{bind_host}:{web_port}"
            state = EnsureOctobotProcessState(
                http_base_url=http_base_url,
                web_port=web_port,
                node_port=node_port,
                user_root=user_root,
                user_folder=str(user_folder),
                log_folder=log_folder,
                profile_id=init_info.get("profile_id"),
                pid=self.pid or 0,
                state_file_path=state_file_path,
                started_waiting_at=start_time,
            )
            # First process state check after spawn (init cap still uses `state.started_waiting_at`).
            loaded = await _load_process_bot_state(state_file_path)
            is_live = loaded is not None and _is_process_state_alive(loaded)
            if is_live:
                _get_logger().info(
                    "OctoBot is running (first-spawn path): user_folder=%r base_url=%r pid=%s",
                    user_folder,
                    http_base_url,
                    self.pid,
                )
                ready = state.model_copy(update={"init_state_ok": True})
                self._emit_ensure_recall(
                    state=ready,
                    last_result=last_result,
                    start_time=start_time,
                    recall_interval=recall_interval,
                    parsed_process_bot_state=loaded,
                )
                return
            _get_logger().info(
                "OctoBot is still starting (first-spawn path, process state not live): user_folder=%r base_url=%r "
                "pid=%s state_path=%s",
                user_folder,
                http_base_url,
                self.pid,
                state_file_path,
            )
            self._emit_ensure_recall(
                state=state,
                last_result=last_result,
                start_time=start_time,
                recall_interval=recall_interval,
                parsed_process_bot_state=loaded,
            )

        async def _pre_compute_update_config_refresh(
            self,
            last_result: dict,
            user_folder: str,
            working_directory: str,
            params: dict,
            *,
            start_time: float,
            recall_interval: float,
            ping_timeout: float,
        ) -> None:
            """UPDATE_CONFIG path: stop the prior child, wipe its user and log
            trees, then spawn a fresh one from the current parameters."""
            # Resolve prior child layout from re-call payload; required for stop, wait, and paths to remove.
            recall_state = self._try_parse_ensure_recall_state(last_result)
            if recall_state is None:
                raise commons_errors.DSLInterpreterError(
                    "run_octobot_process(UPDATE_CONFIG) requires last_execution_result from a prior "
                    "run_octobot_process call.",
                )
            process_logger = _get_logger()
            process_logger.info(
                "configuration update: begin refresh user_folder=%r user_root=%r log_folder=%r pid=%s",
                user_folder,
                recall_state.user_root,
                recall_state.log_folder,
                recall_state.pid,
            )
            stop_outcome = self.request_graceful_stop(logger=process_logger)
            process_logger.info("configuration update: graceful stop outcome: %s", stop_outcome)
            await self.wait_until_pid_stopped(
                recall_state.pid,
                logger=process_logger,
                timeout_seconds=ping_timeout,
            )
            process_logger.info("configuration update: removing automation user and log directories")
            _remove_path_for_fresh_start(recall_state.user_root, logger=process_logger)
            _remove_path_for_fresh_start(recall_state.log_folder, logger=process_logger)
            process_logger.info("configuration update: spawning new OctoBot process from current parameters")
            # Fresh start: empty last_result so stale payload keys do not leak into the new state.
            await self._pre_compute_first_spawn(
                user_folder,
                working_directory,
                params,
                {},
                start_time=start_time,
                recall_interval=recall_interval,
            )

        async def pre_compute(self) -> None:
            """Entry point: dispatch to stop, config-refresh, liveness re-call
            or first-spawn depending on signals and last_execution_result."""
            await super().pre_compute()
            # Resolve params, project root, and a fixed re-call interval for this run.
            params = self.get_computed_value_by_parameter()
            if self.matches_operator_signal(dsl_interpreter.OperatorSignal.STOP.value):
                last_result = self.get_last_execution_result(params) or {}
                recall_state = self._try_parse_ensure_recall_state(last_result)
                if recall_state is None:
                    raise commons_errors.DSLInterpreterError(
                        "run_octobot_process(execution_stop) requires last_execution_result from a prior run_octobot_process call.",
                    )
                if not self.is_process_running():
                    self.value = {"status": "already_stopped", "reason": "not_running"}
                    return
                self.value = self.request_graceful_stop(logger=_get_logger())
                return
            working_directory = os.path.normpath(os.getcwd())
            user_folder = params["user_folder"]
            if not user_folder or not str(user_folder).strip():
                raise commons_errors.DSLInterpreterError("user_folder is required")
            dsl_interpreter.ProcessBoundOperatorMixin.reject_user_path_segment(user_folder)
            last_result = self.get_last_execution_result(params) or {}
            start_time = time.time()
            ping_timeout = float(params.get("ping_timeout") or DEFAULT_ENSURE_TIMEOUT)
            recall_interval = float(params.get("waiting_time") or DEFAULT_PING_WAITING_TIME)
            if self.matches_operator_signal(dsl_interpreter.OperatorSignal.UPDATE_CONFIG.value):
                await self._pre_compute_update_config_refresh(
                    last_result,
                    user_folder,
                    working_directory,
                    params,
                    start_time=start_time,
                    recall_interval=recall_interval,
                    ping_timeout=ping_timeout,
                )
                return
            recall_state = self._try_parse_ensure_recall_state(last_result)
            if recall_state is not None and self.is_process_running():
                await self._pre_compute_recall_path(
                    recall_state,
                    last_result,
                    start_time=start_time,
                    recall_interval=recall_interval,
                    ping_timeout=ping_timeout,
                )
                return
            await self._pre_compute_first_spawn(
                user_folder,
                working_directory,
                params,
                last_result,
                start_time=start_time,
                recall_interval=recall_interval,
            )

        # Fix: this helper is invoked as self._try_parse_ensure_recall_state(...)
        # in three places, so it must live on the class; it previously sat at
        # factory scope after the class body, which would raise AttributeError.
        def _try_parse_ensure_recall_state(self, raw: dict) -> typing.Optional[EnsureOctobotProcessState]:
            """Parse a prior run's payload; also adopt its pid so process checks target the child."""
            if state := _parse_ensure_recall_state(raw):
                if state.pid:
                    # Re-bind this operator instance to the previously spawned child.
                    self.pid = state.pid
                return state
            return None

    return [EnsureOctobotProcessOperator]
+
+
+
def _get_logger():
    """Shared logger for the octobot process operator helpers."""
    logger_name = "OctoBotProcessOperators"
    return commons_logging.get_logger(logger_name)
diff --git a/packages/tentacles/Meta/DSL_operators/octobot_process_operators/tests/test_octobot_process_ops.py b/packages/tentacles/Meta/DSL_operators/octobot_process_operators/tests/test_octobot_process_ops.py
new file mode 100644
index 0000000000..cc09539045
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/octobot_process_operators/tests/test_octobot_process_ops.py
@@ -0,0 +1,1301 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import json
+import os
+import pathlib
+import shutil
+import sys
+
+import mock
+import pytest
+
+import octobot.constants as octobot_constants
+import octobot_commons.constants as commons_constants
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.errors as commons_errors
+import octobot_commons.process_util as process_util
+import octobot_node.constants as octobot_node_constants
+import octobot_services.constants as services_constants
+
+import octobot_commons.profiles.profile_data as profile_data_module
+import octobot_commons.profiles.exchange_auth_data as exchange_auth_data_module
+import octobot_flow.entities as octobot_flow_entities
+import octobot_flow.entities.accounts.process_bot_state as process_bot_state_import
+import octobot_tentacles_manager.constants as tentacles_manager_constants
+
+import tentacles.Meta.DSL_operators.octobot_process_operators.octobot_process_ops as octobot_process_ops
+import tentacles.Trading.Mode.grid_trading_mode.grid_trading as grid_trading_module
+import tentacles.Trading.Mode.simple_market_making_trading_mode.simple_market_making_trading as simple_market_making_trading
+
+# Nested class from factory (not exposed on ``octobot_process_ops``).
+EnsureOctobotProcessOperator = octobot_process_ops.create_octobot_process_operators(None)[0]
+
+pytestmark = pytest.mark.asyncio
+
+
+async def _async_return_none_mock(*_unused):
+ return None
+
+
+async def _async_live_process_bot_state_mock(*_unused):
+ now = octobot_process_ops.time.time()
+ interval = float(octobot_constants.PROCESS_BOT_STATE_DUMP_INTERVAL_SECONDS)
+ return process_bot_state_import.ProcessBotState(
+ metadata=process_bot_state_import.Metadata(
+ updated_at=now - 0.1,
+ next_updated_at=now + interval,
+ ),
+ exchange_account_elements=octobot_flow_entities.ExchangeAccountElements(),
+ )
+def _stop_test_ensure_state_dict(http_base_url: str) -> dict:
+ return octobot_process_ops.EnsureOctobotProcessState(
+ http_base_url=http_base_url,
+ web_port=1,
+ node_port=1,
+ user_root="/x",
+ user_folder="u",
+ log_folder="/x/l",
+ profile_id=None,
+ pid=1,
+ state_file_path=os.path.normpath(
+ os.path.join("/x", octobot_constants.PROCESS_BOT_STATE_FILE_NAME)
+ ),
+ ).model_dump()
+
+
+def _re_calling_ensure_value(last_execution_result: dict) -> dict:
+ return {
+ dsl_interpreter.ReCallingOperatorResult.__name__: {
+ "keyword": "run_octobot_process",
+ "last_execution_result": last_execution_result,
+ }
+ }
+
+
+_MINIMAL_PROFILE_DATA = {
+ "profile_details": {"name": "dsl_test", "id": "fixed_profile_id"},
+ "crypto_currencies": [],
+ "exchanges": [],
+ "tentacles": [],
+ "trader": {
+ "enabled": True,
+ "load_trade_history": True,
+ },
+ "trader_simulator": {
+ "enabled": True,
+ "starting_portfolio": {"USDT": 1000},
+ "maker_fees": 0.0,
+ "taker_fees": 0.0,
+ },
+ "trading": {
+ "reference_market": "USDT",
+ "risk": 1.0,
+ "paused": False,
+ },
+ "options": {},
+ "distribution": "default",
+}
+
+# NOTE: no list literals here: the DSL interpreter cannot parse ``ast.List`` nodes inside dict literals unless a List operator is registered.
+_MINIMAL_PROFILE_DATA_DSL_LITERAL = {
+ "profile_details": {"name": "dsl_test", "id": "fixed_profile_id"},
+ "trader": {
+ "enabled": True,
+ "load_trade_history": True,
+ },
+ "trader_simulator": {
+ "enabled": True,
+ "starting_portfolio": {"USDT": 1000},
+ "maker_fees": 0.0,
+ "taker_fees": 0.0,
+ },
+ "trading": {
+ "reference_market": "USDT",
+ "risk": 1.0,
+ "paused": False,
+ },
+ "options": {},
+ "distribution": "default",
+}
+
+
+def _fresh_default_like_cfg_template():
+ """Minimal dict shaped like packaged ``default_config.json`` for isolated ``read_file`` mocks."""
+ return {
+ commons_constants.CONFIG_EXCHANGES: {},
+ services_constants.CONFIG_CATEGORY_SERVICES: {
+ services_constants.CONFIG_WEB: {
+ services_constants.CONFIG_AUTO_OPEN_IN_WEB_BROWSER: True,
+ },
+ },
+ commons_constants.CONFIG_PROFILE: "default",
+ }
+
+
+class TestWriteUserRootConfigJson:
+ def test_sets_profile_and_disables_browser_auto_open(self, tmp_path):
+ config_path = str(tmp_path / commons_constants.CONFIG_FILE)
+ profile_id = "dsl_profile_abc"
+ with mock.patch.object(
+ octobot_process_ops.json_util,
+ "read_file",
+ side_effect=lambda *_unused: _fresh_default_like_cfg_template(),
+ ):
+ octobot_process_ops._write_user_root_config_json(
+ config_path, profile_id, None, None
+ )
+ written = json.loads(pathlib.Path(config_path).read_text(encoding="utf-8"))
+ assert written[commons_constants.CONFIG_PROFILE] == profile_id
+ assert written[services_constants.CONFIG_CATEGORY_SERVICES][services_constants.CONFIG_WEB][
+ services_constants.CONFIG_AUTO_OPEN_IN_WEB_BROWSER
+ ] is False
+ assert written[commons_constants.CONFIG_EXCHANGES] == {}
+
+ def test_seeds_exchanges_from_profile_data(self, tmp_path):
+ config_path = str(tmp_path / commons_constants.CONFIG_FILE)
+ profile_dict = {
+ **_MINIMAL_PROFILE_DATA,
+ "exchanges": [
+ {"internal_name": "seed_exchange", "exchange_type": "future"},
+ {"internal_name": "", "exchange_type": "spot"},
+ ],
+ }
+ profile_data = profile_data_module.ProfileData.from_dict(profile_dict)
+ with mock.patch.object(
+ octobot_process_ops.json_util,
+ "read_file",
+ side_effect=lambda *_unused: _fresh_default_like_cfg_template(),
+ ):
+ octobot_process_ops._write_user_root_config_json(
+ config_path, "p1", profile_data, None
+ )
+ written = json.loads(pathlib.Path(config_path).read_text(encoding="utf-8"))
+ exchanges_cfg = written[commons_constants.CONFIG_EXCHANGES]
+ assert set(exchanges_cfg) == {"seed_exchange"}
+ seeded = exchanges_cfg["seed_exchange"]
+ assert seeded[commons_constants.CONFIG_ENABLED_OPTION] is True
+ assert seeded[commons_constants.CONFIG_EXCHANGE_TYPE] == "future"
+ assert seeded[commons_constants.CONFIG_EXCHANGE_KEY] == octobot_process_ops._DEFAULT_ENCRYPTED_VALUE
+ assert seeded[commons_constants.CONFIG_EXCHANGE_SECRET] == octobot_process_ops._DEFAULT_ENCRYPTED_VALUE
+
+ def test_presets_encrypted_empty_credentials_when_default_config_exchange_has_no_api_fields(
+ self, tmp_path
+ ):
+        """Mirrors packaged ``default_config.json`` rows that omit api-key/api-secret until ``setdefault`` backfills them."""
+ config_path = str(tmp_path / commons_constants.CONFIG_FILE)
+ template = _fresh_default_like_cfg_template()
+ template[commons_constants.CONFIG_EXCHANGES] = {
+ "prefilled_exchange": {
+ commons_constants.CONFIG_ENABLED_OPTION: True,
+ commons_constants.CONFIG_EXCHANGE_TYPE: commons_constants.CONFIG_EXCHANGE_SPOT,
+ }
+ }
+ with mock.patch.object(
+ octobot_process_ops.json_util,
+ "read_file",
+ side_effect=lambda *_unused: template,
+ ):
+ octobot_process_ops._write_user_root_config_json(config_path, "p0", None, None)
+ written = json.loads(pathlib.Path(config_path).read_text(encoding="utf-8"))
+ exch = written[commons_constants.CONFIG_EXCHANGES]["prefilled_exchange"]
+ assert exch[commons_constants.CONFIG_EXCHANGE_KEY] == octobot_process_ops._DEFAULT_ENCRYPTED_VALUE
+ assert exch[commons_constants.CONFIG_EXCHANGE_SECRET] == octobot_process_ops._DEFAULT_ENCRYPTED_VALUE
+
+ def test_applies_exchange_auth_credentials(self, tmp_path):
+ config_path = str(tmp_path / commons_constants.CONFIG_FILE)
+ auth_list = [
+ exchange_auth_data_module.ExchangeAuthData(
+ internal_name="binance_test",
+ api_key="key-a",
+ api_secret="secret-b",
+ api_password="pwd-c",
+ exchange_type="spot",
+ sandboxed=True,
+ )
+ ]
+ with mock.patch.object(
+ octobot_process_ops.json_util,
+ "read_file",
+ side_effect=lambda *_unused: _fresh_default_like_cfg_template(),
+ ):
+ octobot_process_ops._write_user_root_config_json(config_path, "p2", None, auth_list)
+ written = json.loads(pathlib.Path(config_path).read_text(encoding="utf-8"))
+ exch = written[commons_constants.CONFIG_EXCHANGES]["binance_test"]
+ assert exch[commons_constants.CONFIG_EXCHANGE_KEY] == "key-a"
+ assert exch[commons_constants.CONFIG_EXCHANGE_SECRET] == "secret-b"
+ assert exch[commons_constants.CONFIG_EXCHANGE_PASSWORD] == "pwd-c"
+ assert exch[commons_constants.CONFIG_EXCHANGE_TYPE] == "spot"
+ assert exch[commons_constants.CONFIG_EXCHANGE_SANDBOXED] is True
+
+ def test_profile_seed_then_auth_overlay(self, tmp_path):
+ config_path = str(tmp_path / commons_constants.CONFIG_FILE)
+ exchange_internal_name = "overlay_exchange"
+ profile_dict = {
+ **_MINIMAL_PROFILE_DATA,
+ "exchanges": [{"internal_name": exchange_internal_name, "exchange_type": "spot"}],
+ }
+ profile_data = profile_data_module.ProfileData.from_dict(profile_dict)
+ auth_list = [
+ exchange_auth_data_module.ExchangeAuthData(
+ internal_name=exchange_internal_name,
+ api_key="overlay-key",
+ api_secret="overlay-secret",
+ exchange_type="spot",
+ )
+ ]
+ with mock.patch.object(
+ octobot_process_ops.json_util,
+ "read_file",
+ side_effect=lambda *_unused: _fresh_default_like_cfg_template(),
+ ):
+ octobot_process_ops._write_user_root_config_json(
+ config_path, "p3", profile_data, auth_list
+ )
+ written = json.loads(pathlib.Path(config_path).read_text(encoding="utf-8"))
+ exch = written[commons_constants.CONFIG_EXCHANGES][exchange_internal_name]
+ assert exch[commons_constants.CONFIG_ENABLED_OPTION] is True
+ assert exch[commons_constants.CONFIG_EXCHANGE_TYPE] == "spot"
+ assert exch[commons_constants.CONFIG_EXCHANGE_KEY] == "overlay-key"
+ assert exch[commons_constants.CONFIG_EXCHANGE_SECRET] == "overlay-secret"
+
+
+class TestEnsureUserProfileAndLayout:
+ async def test_marked_prepared_is_skipped(self, tmp_path):
+ user = tmp_path / commons_constants.USER_FOLDER / commons_constants.AUTOMATIONS_FOLDER / "u1"
+ user.mkdir(parents=True)
+ config_path = user / commons_constants.CONFIG_FILE
+ config_path.write_text(
+ json.dumps({commons_constants.CONFIG_PROFILE: "p1"}),
+ encoding="utf-8",
+ )
+ (user / octobot_process_ops.DSL_PREPARED_MARKER).write_text("1", encoding="utf-8")
+ res = await octobot_process_ops.ensure_user_profile_and_layout(
+ "u1",
+ str(tmp_path),
+ _MINIMAL_PROFILE_DATA,
+ None,
+ )
+ assert res["already_prepared"] is True
+ assert res["profile_id"] == "p1"
+
+
+class TestEnsureUserProfileAndLayoutFunctional:
+ async def test_writes_profile_tree_top_level_config_and_exchange_credentials(self, tmp_path):
+ exchange_internal_name = "functional_exchange_okx"
+ fake_api_key = "functional-test-api-key"
+ fake_api_secret = "functional-test-api-secret"
+ fake_api_password = "functional-test-api-password"
+ user_leaf = "functional_layout_user"
+ profile_dict = {
+ **_MINIMAL_PROFILE_DATA,
+ "exchanges": [
+ {
+ "internal_name": exchange_internal_name,
+ "exchange_type": commons_constants.CONFIG_EXCHANGE_SPOT,
+ }
+ ],
+ }
+ exchange_auth_list = [
+ exchange_auth_data_module.ExchangeAuthData(
+ internal_name=exchange_internal_name,
+ api_key=fake_api_key,
+ api_secret=fake_api_secret,
+ api_password=fake_api_password,
+ exchange_type=commons_constants.CONFIG_EXCHANGE_SPOT,
+ sandboxed=True,
+ )
+ ]
+
+ result = await octobot_process_ops.ensure_user_profile_and_layout(
+ user_leaf,
+ str(tmp_path),
+ profile_dict,
+ None,
+ exchange_auth_list,
+ )
+
+ assert result["already_prepared"] is False
+ profile_id = result["profile_id"]
+ assert profile_id
+ user_root = pathlib.Path(result["user_root"])
+ assert user_root == (
+ tmp_path / commons_constants.USER_FOLDER / commons_constants.AUTOMATIONS_FOLDER / user_leaf
+ )
+
+ marker_path = user_root / octobot_process_ops.DSL_PREPARED_MARKER
+ root_config_path = user_root / commons_constants.CONFIG_FILE
+ profile_dir = user_root / commons_constants.PROFILES_FOLDER / profile_id
+ profile_json_path = profile_dir / commons_constants.PROFILE_CONFIG_FILE
+ tentacles_setup_path = profile_dir / commons_constants.CONFIG_TENTACLES_FILE
+
+ assert marker_path.is_file()
+ assert root_config_path.is_file()
+ assert profile_json_path.is_file()
+ assert tentacles_setup_path.is_file()
+ reference_layout = user_root / "reference_tentacles_config"
+ assert reference_layout.is_dir()
+
+ root_cfg = json.loads(root_config_path.read_text(encoding="utf-8"))
+ assert root_cfg[commons_constants.CONFIG_PROFILE] == profile_id
+ assert (
+ root_cfg[services_constants.CONFIG_CATEGORY_SERVICES][services_constants.CONFIG_WEB][
+ services_constants.CONFIG_AUTO_OPEN_IN_WEB_BROWSER
+ ]
+ is False
+ )
+ exchange_root = root_cfg[commons_constants.CONFIG_EXCHANGES][exchange_internal_name]
+ assert exchange_root[commons_constants.CONFIG_ENABLED_OPTION] is True
+ assert exchange_root[commons_constants.CONFIG_EXCHANGE_TYPE] == commons_constants.CONFIG_EXCHANGE_SPOT
+ assert exchange_root[commons_constants.CONFIG_EXCHANGE_KEY] == fake_api_key
+ assert exchange_root[commons_constants.CONFIG_EXCHANGE_SECRET] == fake_api_secret
+ assert exchange_root[commons_constants.CONFIG_EXCHANGE_PASSWORD] == fake_api_password
+ assert exchange_root[commons_constants.CONFIG_EXCHANGE_SANDBOXED] is True
+
+ profile_payload = json.loads(profile_json_path.read_text(encoding="utf-8"))
+ profile_inner = profile_payload[commons_constants.PROFILE_CONFIG]
+ profile_exchanges = profile_inner[commons_constants.CONFIG_EXCHANGES][exchange_internal_name]
+ assert profile_exchanges[commons_constants.CONFIG_ENABLED_OPTION] is True
+ assert profile_exchanges[commons_constants.CONFIG_EXCHANGE_TYPE] == commons_constants.CONFIG_EXCHANGE_SPOT
+
+
+class TestConvertProfileDataToProfileDirectory:
+ async def test_omits_translator_when_profile_has_no_tentacles(self, tmp_path):
+ profile_data = profile_data_module.ProfileData.from_dict(_MINIMAL_PROFILE_DATA)
+ output_dir = tmp_path / "profile_out"
+ output_dir.mkdir()
+ convert_mock = mock.AsyncMock()
+ with mock.patch.object(
+ octobot_process_ops.tentacles_profile_data_translator,
+ "TentaclesProfileDataTranslator",
+ ) as translator_class_mock, mock.patch.object(
+ octobot_process_ops.profile_data_import,
+ "convert_profile_data_to_profile_directory",
+ new=convert_mock,
+ ):
+ await octobot_process_ops._convert_profile_data_to_profile_directory(
+ profile_data, str(output_dir)
+ )
+ translator_class_mock.assert_not_called()
+ convert_mock.assert_awaited_once()
+
+ async def test_restores_tentacles_when_translator_raises_key_error(self, tmp_path):
+ profile_with_grid = {
+ **_MINIMAL_PROFILE_DATA,
+ "tentacles": [
+ {
+ "name": grid_trading_module.GridTradingMode.get_name(),
+ "config": {"pair_settings": []},
+ }
+ ],
+ }
+ profile_data = profile_data_module.ProfileData.from_dict(profile_with_grid)
+ expected_tentacles_snapshot = list(profile_data.tentacles)
+ output_dir = tmp_path / "profile_out"
+ output_dir.mkdir()
+ convert_mock = mock.AsyncMock()
+ with mock.patch.object(
+ octobot_process_ops.profile_data_import,
+ "convert_profile_data_to_profile_directory",
+ new=convert_mock,
+ ):
+ await octobot_process_ops._convert_profile_data_to_profile_directory(
+ profile_data, str(output_dir)
+ )
+ assert len(profile_data.tentacles) == len(expected_tentacles_snapshot)
+ assert [tentacle.name for tentacle in profile_data.tentacles] == [
+ tentacle.name for tentacle in expected_tentacles_snapshot
+ ]
+ convert_mock.assert_awaited_once()
+
+ async def test_calls_translator_then_convert_when_tentacles_present(self, tmp_path):
+ profile_with_grid = {
+ **_MINIMAL_PROFILE_DATA,
+ "tentacles": [
+ {
+ "name": grid_trading_module.GridTradingMode.get_name(),
+ "config": {"pair_settings": []},
+ }
+ ],
+ }
+ profile_data = profile_data_module.ProfileData.from_dict(profile_with_grid)
+ expected_snapshot = list(profile_data.tentacles)
+ output_dir = tmp_path / "profile_out"
+ output_dir.mkdir()
+ mock_translator = mock.Mock()
+ mock_translator.translate = mock.AsyncMock()
+ convert_mock = mock.AsyncMock()
+ with mock.patch.object(
+ octobot_process_ops.tentacles_profile_data_translator,
+ "TentaclesProfileDataTranslator",
+ return_value=mock_translator,
+ ) as translator_class_mock, mock.patch.object(
+ octobot_process_ops.profile_data_import,
+ "convert_profile_data_to_profile_directory",
+ new=convert_mock,
+ ):
+ await octobot_process_ops._convert_profile_data_to_profile_directory(
+ profile_data, str(output_dir)
+ )
+ translator_class_mock.assert_called_once_with(profile_data, [])
+ mock_translator.translate.assert_awaited_once_with(
+ expected_snapshot, {}, None, None
+ )
+ convert_mock.assert_awaited_once()
+
+
+class TestConvertProfileDataToProfileDirectorySimpleMarketMakingFunctional:
+ async def test_writes_disk_profile_with_adapter_augmented_mm_config(self, tmp_path):
+ exchange_internal_name = "binanceus"
+ traded_pair = "BTC/USDT"
+ _SMM = simple_market_making_trading.SimpleMarketMakingTradingMode
+ profile_dict = {
+ **_MINIMAL_PROFILE_DATA,
+ "crypto_currencies": [],
+ "exchanges": [
+ {
+ "internal_name": exchange_internal_name,
+ "exchange_type": "spot",
+ }
+ ],
+ "tentacles": [
+ {
+ "name": _SMM.get_name(),
+ "config": {
+ _SMM.CONFIG_PAIR_SETTINGS: [
+ {
+ _SMM.CONFIG_PAIR: traded_pair,
+ _SMM.REFERENCE_PRICE: [
+ {
+ _SMM.EXCHANGE: exchange_internal_name,
+ _SMM.PAIR: traded_pair,
+ _SMM.WEIGHT: 1,
+ }
+ ],
+ _SMM.MAX_BASE_BUDGET: 0.1,
+ _SMM.MAX_QUOTE_BUDGET: 3000,
+ }
+ ],
+ },
+ }
+ ],
+ }
+ profile_data = profile_data_module.ProfileData.from_dict(profile_dict)
+ output_dir = tmp_path / "mm_profile"
+ output_dir.mkdir()
+ await octobot_process_ops._convert_profile_data_to_profile_directory(
+ profile_data, str(output_dir)
+ )
+
+ profile_json_path = output_dir / commons_constants.PROFILE_CONFIG_FILE
+ assert profile_json_path.is_file()
+ profile_payload = json.loads(profile_json_path.read_text(encoding="utf-8"))
+ profile_config = profile_payload[commons_constants.PROFILE_CONFIG]
+ assert traded_pair in profile_config[commons_constants.CONFIG_CRYPTO_CURRENCIES]
+ assert exchange_internal_name in profile_config[commons_constants.CONFIG_EXCHANGES]
+
+ mm_specific = output_dir / tentacles_manager_constants.TENTACLES_SPECIFIC_CONFIG_FOLDER / (
+ f"{_SMM.get_name()}{tentacles_manager_constants.CONFIG_EXT}"
+ )
+ assert mm_specific.is_file()
+ mm_cfg = json.loads(mm_specific.read_text(encoding="utf-8"))
+ pair_settings = mm_cfg[_SMM.CONFIG_PAIR_SETTINGS]
+ assert len(pair_settings) >= 1
+ ref_prices = pair_settings[0][_SMM.REFERENCE_PRICE]
+ assert any(
+ ref[_SMM.EXCHANGE] == exchange_internal_name for ref in ref_prices
+ )
+
+ tentacles_cfg_path = output_dir / commons_constants.CONFIG_TENTACLES_FILE
+ assert tentacles_cfg_path.is_file()
+ tentacles_raw = tentacles_cfg_path.read_text(encoding="utf-8")
+ assert _SMM.get_name() in tentacles_raw
+
+
+class TestListenPortPair:
+ def test_finds_sequential_ports(self):
+ web_port, node_port = octobot_process_ops._listen_port_pair_with_shared_scan_offset(
+ "127.0.0.1", 20000, 30000, max_offset=100
+ )
+ mixin = dsl_interpreter.ProcessBoundOperatorMixin
+ assert mixin._tcp_port_is_free("127.0.0.1", web_port)
+ assert mixin._tcp_port_is_free("127.0.0.1", node_port)
+
+
+class TestEnsureOctobotProcessOperatorExchangeAuthData:
+ def test_declares_optional_exchange_auth_parameter(self):
+ params = EnsureOctobotProcessOperator.get_parameters()
+ auth_parameter = next(
+ (parameter for parameter in params if parameter.name == "exchange_auth_data"),
+ None,
+ )
+ assert auth_parameter is not None
+ assert auth_parameter.required is False
+ assert auth_parameter.default is None
+ assert auth_parameter.type == list[dict]
+
+ async def test_pre_compute_passes_dict_exchange_auth_into_ensure_layout(self, tmp_path):
+ exchange_auth_dicts = [
+ {
+ "internal_name": "dsl_exchange_okx",
+ "api_key": "dsl-precompute-key",
+ "api_secret": "dsl-precompute-secret",
+ "exchange_type": commons_constants.CONFIG_EXCHANGE_SPOT,
+ }
+ ]
+ ensure_layout_mock = mock.AsyncMock(
+ return_value={
+ "user_root": str(
+ tmp_path / commons_constants.USER_FOLDER / commons_constants.AUTOMATIONS_FOLDER / "ub"
+ ),
+ "profile_id": "profile-from-mock",
+ "already_prepared": True,
+ }
+ )
+ start_script = tmp_path / "start.py"
+ start_script.write_text("#", encoding="utf-8")
+ operator_instance = EnsureOctobotProcessOperator(
+ user_folder="ub",
+ profile_data=_MINIMAL_PROFILE_DATA,
+ exchange_auth_data=exchange_auth_dicts,
+ last_execution_result=None,
+ )
+ with mock.patch.object(
+ octobot_process_ops.os,
+ "getcwd",
+ return_value=str(tmp_path),
+ ), mock.patch.object(
+ octobot_process_ops,
+ "ensure_user_profile_and_layout",
+ new=ensure_layout_mock,
+ ), mock.patch.object(
+ octobot_process_ops,
+ "_listen_port_pair_with_shared_scan_offset",
+ return_value=(20050, 30050),
+ ), mock.patch.object(
+ process_util,
+ "spawn_managed_subprocess",
+ ) as spawn_managed_mock:
+ spawn_managed_mock.return_value.pid = 424242
+ await operator_instance.pre_compute()
+
+ ensure_layout_mock.assert_awaited_once()
+ await_arguments = ensure_layout_mock.await_args.args
+ assert len(await_arguments) >= 5
+ parsed_exchange_auth = await_arguments[4]
+ assert parsed_exchange_auth is not None
+ assert len(parsed_exchange_auth) == 1
+ assert isinstance(parsed_exchange_auth[0], exchange_auth_data_module.ExchangeAuthData)
+ assert parsed_exchange_auth[0].internal_name == "dsl_exchange_okx"
+ assert parsed_exchange_auth[0].api_key == "dsl-precompute-key"
+ assert parsed_exchange_auth[0].api_secret == "dsl-precompute-secret"
+ assert parsed_exchange_auth[0].exchange_type == commons_constants.CONFIG_EXCHANGE_SPOT
+
+
+class TestEnsureOctobotProcessOperatorPrecompute:
+ async def test_returns_recallable_when_process_bot_state_not_live(self, tmp_path):
+ start_script = tmp_path / "start.py"
+ start_script.write_text("#", encoding="utf-8")
+ op = EnsureOctobotProcessOperator(
+ user_folder="ub",
+ profile_data=_MINIMAL_PROFILE_DATA,
+ last_execution_result=None,
+ )
+ with mock.patch.object(
+ octobot_process_ops.os,
+ "getcwd",
+ return_value=str(tmp_path),
+ ), mock.patch.object(
+ octobot_process_ops,
+ "ensure_user_profile_and_layout",
+ new=mock.AsyncMock(
+ return_value={
+ "user_root": str(
+ tmp_path / commons_constants.USER_FOLDER / commons_constants.AUTOMATIONS_FOLDER / "ub"
+ ),
+ "profile_id": "x",
+ "already_prepared": True,
+ }
+ ),
+ ), mock.patch.object(
+ octobot_process_ops,
+ "_listen_port_pair_with_shared_scan_offset",
+ return_value=(20050, 30050),
+ ), mock.patch.object(
+ process_util,
+ "spawn_managed_subprocess",
+ ) as spawn_mock, mock.patch.object(
+ octobot_process_ops,
+ "_load_process_bot_state",
+ new=mock.AsyncMock(side_effect=_async_return_none_mock),
+ ):
+ spawn_mock.return_value.pid = 99999
+ await op.pre_compute()
+ assert isinstance(op.value, dict)
+ assert dsl_interpreter.ReCallingOperatorResult.__name__ in op.value
+ rec = op.value[dsl_interpreter.ReCallingOperatorResult.__name__]
+ le = rec["last_execution_result"]
+ assert le.get("init_state_ok") is False
+
+
+class TestEnsureOctobotProcessPrecomputeWhenProcessStateLiveAfterFirstSpawn:
+ async def test_returns_recallable_with_init_state_ok_after_first_spawn(self, tmp_path):
+ start_script = tmp_path / "start.py"
+ start_script.write_text("#", encoding="utf-8")
+ op = EnsureOctobotProcessOperator(
+ user_folder="ub",
+ profile_data=_MINIMAL_PROFILE_DATA,
+ last_execution_result=None,
+ )
+ with mock.patch.object(
+ octobot_process_ops.os,
+ "getcwd",
+ return_value=str(tmp_path),
+ ), mock.patch.object(
+ octobot_process_ops,
+ "ensure_user_profile_and_layout",
+ new=mock.AsyncMock(
+ return_value={
+ "user_root": str(
+ tmp_path / commons_constants.USER_FOLDER / commons_constants.AUTOMATIONS_FOLDER / "ub"
+ ),
+ "profile_id": "x",
+ "already_prepared": True,
+ }
+ ),
+ ), mock.patch.object(
+ octobot_process_ops,
+ "_listen_port_pair_with_shared_scan_offset",
+ return_value=(20050, 30050),
+ ), mock.patch.object(
+ process_util,
+ "spawn_managed_subprocess",
+ ) as spawn_mock, mock.patch.object(
+ octobot_process_ops,
+ "_load_process_bot_state",
+ new=mock.AsyncMock(side_effect=_async_live_process_bot_state_mock),
+ ):
+ spawn_mock.return_value.pid = 10001
+ await op.pre_compute()
+ assert isinstance(op.value, dict)
+ assert dsl_interpreter.ReCallingOperatorResult.__name__ in op.value
+ le = op.value[dsl_interpreter.ReCallingOperatorResult.__name__]["last_execution_result"]
+ assert isinstance(le, dict)
+ assert le.get("init_state_ok") is True
+ assert le.get("http_base_url", "").startswith("http://")
+ assert le.get("pid") == 10001
+ assert le.get("waiting_time") == octobot_process_ops.DEFAULT_PING_WAITING_TIME
+ assert octobot_flow_entities.PostIterationActionsDetails.__name__ in le
+ post = octobot_flow_entities.PostIterationActionsDetails.from_dict(
+ le[octobot_flow_entities.PostIterationActionsDetails.__name__]
+ )
+ assert post.updated_exchange_account_elements is not None
+
+
+class TestEnsureOctobotProcessPrecomputeRecallPathWhenProcessStateLive:
+ async def test_returns_recallable_with_init_state_ok_on_recall_path(self, tmp_path):
+ start_script = tmp_path / "start.py"
+ start_script.write_text("#", encoding="utf-8")
+ op1 = EnsureOctobotProcessOperator(
+ user_folder="ub",
+ profile_data=_MINIMAL_PROFILE_DATA,
+ last_execution_result=None,
+ )
+ with mock.patch.object(
+ octobot_process_ops.os,
+ "getcwd",
+ return_value=str(tmp_path),
+ ), mock.patch.object(
+ octobot_process_ops,
+ "ensure_user_profile_and_layout",
+ new=mock.AsyncMock(
+ return_value={
+ "user_root": str(
+ tmp_path / commons_constants.USER_FOLDER / commons_constants.AUTOMATIONS_FOLDER / "ub"
+ ),
+ "profile_id": "x",
+ "already_prepared": True,
+ }
+ ),
+ ), mock.patch.object(
+ octobot_process_ops,
+ "_listen_port_pair_with_shared_scan_offset",
+ return_value=(20050, 30050),
+ ), mock.patch.object(
+ process_util,
+ "spawn_managed_subprocess",
+ ) as spawn_mock, mock.patch.object(
+ octobot_process_ops,
+ "_load_process_bot_state",
+ new=mock.AsyncMock(side_effect=_async_return_none_mock),
+ ):
+ spawn_mock.return_value.pid = 10002
+ await op1.pre_compute()
+ first_value = op1.value
+ assert isinstance(first_value, dict)
+ first_le = first_value[dsl_interpreter.ReCallingOperatorResult.__name__]["last_execution_result"]
+ assert isinstance(first_le, dict)
+ anchor = first_le["started_waiting_at"]
+ op2 = EnsureOctobotProcessOperator(
+ user_folder="ub",
+ profile_data=_MINIMAL_PROFILE_DATA,
+ last_execution_result=first_value,
+ )
+ with mock.patch.object(
+ octobot_process_ops.os,
+ "getcwd",
+ return_value=str(tmp_path),
+ ), mock.patch.object(
+ dsl_interpreter.ProcessBoundOperatorMixin,
+ "is_process_running",
+ return_value=True,
+ ), mock.patch.object(
+ octobot_process_ops,
+ "_load_process_bot_state",
+ new=mock.AsyncMock(side_effect=_async_live_process_bot_state_mock),
+ ):
+ await op2.pre_compute()
+ assert isinstance(op2.value, dict)
+ assert dsl_interpreter.ReCallingOperatorResult.__name__ in op2.value
+ le2 = op2.value[dsl_interpreter.ReCallingOperatorResult.__name__]["last_execution_result"]
+ assert isinstance(le2, dict)
+ assert le2.get("init_state_ok") is True
+ assert le2["started_waiting_at"] == anchor
+ assert octobot_flow_entities.PostIterationActionsDetails.__name__ in le2
+ post_after_recall = octobot_flow_entities.PostIterationActionsDetails.from_dict(
+ le2[octobot_flow_entities.PostIterationActionsDetails.__name__]
+ )
+ assert post_after_recall.updated_exchange_account_elements is not None
+
+
+class TestEnsureOctobotProcessInitTimeoutRaisesAndKills:
+ async def test_init_timeout_kills_and_raises_dsl_error(self, tmp_path):
+ user_root = str(
+ tmp_path / commons_constants.USER_FOLDER / commons_constants.AUTOMATIONS_FOLDER / "ub"
+ )
+ state_fn = os.path.join(user_root, octobot_constants.PROCESS_BOT_STATE_FILE_NAME)
+ inner = {
+ "waiting_time": octobot_process_ops.DEFAULT_PING_WAITING_TIME,
+ "last_execution_time": 0.0,
+ "http_base_url": "http://127.0.0.1:20050",
+ "web_port": 20050,
+ "node_port": 30050,
+ "user_root": user_root,
+ "user_folder": "ub",
+ "log_folder": str(tmp_path / "logs" / "a" / "ub"),
+ "profile_id": "p",
+ "pid": 88001,
+ "port_offset": 0,
+ "state_file_path": state_fn,
+ "started_waiting_at": 0.0,
+ "init_state_ok": False,
+ }
+ op = EnsureOctobotProcessOperator(
+ user_folder="ub",
+ profile_data=_MINIMAL_PROFILE_DATA,
+ last_execution_result=_re_calling_ensure_value(inner),
+ )
+ stop_mock = mock.Mock()
+ st_time = mock.MagicMock()
+ st_time.time = mock.Mock(return_value=500.0)
+ with mock.patch.object(
+ octobot_process_ops.os,
+ "getcwd",
+ return_value=str(tmp_path),
+ ), mock.patch.object(
+ dsl_interpreter.ProcessBoundOperatorMixin,
+ "is_process_running",
+ return_value=True,
+ ), mock.patch.object(octobot_process_ops, "time", st_time), mock.patch.object(
+ dsl_interpreter.ProcessBoundOperatorMixin,
+ "request_graceful_stop",
+ new=stop_mock,
+ ), mock.patch.object(
+ octobot_process_ops,
+ "_load_process_bot_state",
+ ) as load_mock, mock.patch.object(
+ octobot_process_ops,
+ "_listen_port_pair_with_shared_scan_offset",
+ ) as ffp:
+ with pytest.raises(commons_errors.DSLInterpreterError, match="Timed out waiting"):
+ await op.pre_compute()
+ stop_mock.assert_called_once_with(logger=mock.ANY)
+ load_mock.assert_not_called()
+ ffp.assert_not_called()
+
+
+class TestEnsureOctobotProcessLivenessNotBlockedByInitTimeout:
+ async def test_does_not_apply_init_timeout_after_init_state_ok(self, tmp_path):
+ user_root = str(
+ tmp_path / commons_constants.USER_FOLDER / commons_constants.AUTOMATIONS_FOLDER / "ub"
+ )
+ state_fn2 = os.path.join(user_root, octobot_constants.PROCESS_BOT_STATE_FILE_NAME)
+ inner = {
+ "waiting_time": octobot_process_ops.DEFAULT_PING_WAITING_TIME,
+ "last_execution_time": 0.0,
+ "http_base_url": "http://127.0.0.1:20050",
+ "web_port": 20050,
+ "node_port": 30050,
+ "user_root": user_root,
+ "user_folder": "ub",
+ "log_folder": str(tmp_path / "logs" / "a" / "ub"),
+ "profile_id": "p",
+ "pid": 88002,
+ "port_offset": 0,
+ "state_file_path": state_fn2,
+ "started_waiting_at": 0.0,
+ "init_state_ok": True,
+ }
+ op = EnsureOctobotProcessOperator(
+ user_folder="ub",
+ profile_data=_MINIMAL_PROFILE_DATA,
+ last_execution_result=_re_calling_ensure_value(inner),
+ )
+ st_time = mock.MagicMock()
+ st_time.time = mock.Mock(return_value=1_000_000.0)
+ with mock.patch.object(
+ octobot_process_ops.os,
+ "getcwd",
+ return_value=str(tmp_path),
+ ), mock.patch.object(
+ dsl_interpreter.ProcessBoundOperatorMixin,
+ "is_process_running",
+ return_value=True,
+ ), mock.patch.object(octobot_process_ops, "time", st_time), mock.patch.object(
+ octobot_process_ops,
+ "_load_process_bot_state",
+ new=mock.AsyncMock(side_effect=_async_live_process_bot_state_mock),
+ ):
+ await op.pre_compute()
+ assert isinstance(op.value, dict)
+ rec = op.value[dsl_interpreter.ReCallingOperatorResult.__name__]
+ assert isinstance(rec, dict)
+ le = rec["last_execution_result"]
+ assert isinstance(le, dict)
+ assert le.get("init_state_ok") is True
+ assert le["started_waiting_at"] == 0.0
+
+
+class TestEnsureOctobotProcessWaitingTimeConstantInPayload:
+ async def test_waiting_time_uses_parameter_for_recall_emissions(self, tmp_path):
+ start_script = tmp_path / "start.py"
+ start_script.write_text("#", encoding="utf-8")
+ op = EnsureOctobotProcessOperator(
+ user_folder="ub",
+ profile_data=_MINIMAL_PROFILE_DATA,
+ last_execution_result=None,
+ waiting_time=7.0,
+ )
+ with mock.patch.object(
+ octobot_process_ops.os,
+ "getcwd",
+ return_value=str(tmp_path),
+ ), mock.patch.object(
+ octobot_process_ops,
+ "ensure_user_profile_and_layout",
+ new=mock.AsyncMock(
+ return_value={
+ "user_root": str(
+ tmp_path / commons_constants.USER_FOLDER / commons_constants.AUTOMATIONS_FOLDER / "ub"
+ ),
+ "profile_id": "x",
+ "already_prepared": True,
+ }
+ ),
+ ), mock.patch.object(
+ octobot_process_ops,
+ "_listen_port_pair_with_shared_scan_offset",
+ return_value=(20050, 30050),
+ ), mock.patch.object(
+ process_util,
+ "spawn_managed_subprocess",
+ ) as spawn_mock, mock.patch.object(
+ octobot_process_ops,
+ "_load_process_bot_state",
+ new=mock.AsyncMock(side_effect=_async_return_none_mock),
+ ):
+ spawn_mock.return_value.pid = 99999
+ await op.pre_compute()
+ assert isinstance(op.value, dict)
+ le = op.value[dsl_interpreter.ReCallingOperatorResult.__name__]["last_execution_result"]
+ assert isinstance(le, dict)
+ assert le.get("waiting_time") == 7.0
+
+
+class TestEnsureOctobotProcessDslIntegration:
+ async def test_run_octobot_process_via_dsl(self, tmp_path, monkeypatch):
+ """
+ End-to-end: `run_octobot_process` is registered only on this interpreter; cwd is a fake
+ project root. `spawn_managed_subprocess` is mocked so profile import, free ports, and
+ process state liveness still run. With no state file, the operator returns a re-call;
+ we then assert on-disk layout, re-call `log_folder`, and the single spawn invocation
+ (argv, working_directory, env, hide_console_window).
+ """
+ # Minimal OctoBot project: `getcwd` must resolve `start.py` where `pre_compute` expects it.
+ monkeypatch.chdir(tmp_path)
+ (tmp_path / "start.py").write_text("#", encoding="utf-8")
+ user_folder = "integration_dsl_bot"
+ expression = f"run_octobot_process({user_folder!r}, {repr(_MINIMAL_PROFILE_DATA_DSL_LITERAL)})"
+ # Contextual operator is excluded from get_all_operators(); append it explicitly.
+ interpreter = dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators()
+ + [EnsureOctobotProcessOperator],
+ )
+ try:
+ with mock.patch.object(
+ octobot_process_ops,
+ "_load_process_bot_state",
+ new=mock.AsyncMock(side_effect=_async_return_none_mock),
+ ), mock.patch.object(
+ process_util,
+ "spawn_managed_subprocess",
+ ) as spawn_mock:
+ # Fake child pid; real spawn would run `start.py` with the env below.
+ spawn_mock.return_value = mock.Mock(spec=["pid"], pid=12345)
+ result = await interpreter.interprete(expression)
+ assert isinstance(result, dict)
+ # Re-call path: process state not live yet, interpreter should schedule another step.
+ assert dsl_interpreter.ReCallingOperatorResult.__name__ in result
+ user_data_root = (
+ tmp_path
+ / commons_constants.USER_FOLDER
+ / commons_constants.AUTOMATIONS_FOLDER
+ / user_folder
+ )
+ assert (user_data_root / commons_constants.CONFIG_FILE).is_file()
+ assert (user_data_root / octobot_process_ops.DSL_PREPARED_MARKER).is_file()
+ # Same normpath as ensure uses for the computed absolute log path (dir may not exist until the child runs).
+ expected_log_folder = os.path.normpath(
+ os.path.join(
+ str(tmp_path),
+ *octobot_node_constants.AUTOMATION_LOGS_FOLDER.split("/"),
+ user_folder,
+ )
+ )
+ recalling = result[dsl_interpreter.ReCallingOperatorResult.__name__]
+ assert isinstance(recalling, dict)
+ last_execution = recalling["last_execution_result"]
+ assert isinstance(last_execution, dict)
+ assert last_execution.get("init_state_ok") is False
+ assert last_execution["log_folder"] == expected_log_folder
+
+ # spawn_managed_subprocess: project-root cwd, argv to `start.py` with relative --user-folder / --log-folder, and env
+ # carrying chosen ports and bind address (see `EnsureOctobotProcessOperator.pre_compute`).
+ spawn_mock.assert_called_once()
+ spawn_argv = spawn_mock.call_args.args[0]
+ spawn_kwargs = spawn_mock.call_args.kwargs
+ assert spawn_kwargs["working_directory"] == str(tmp_path)
+ expected_start_script = os.path.join(str(tmp_path), "start.py")
+ rel_user = os.path.relpath(last_execution["user_root"], str(tmp_path))
+ rel_log = os.path.relpath(last_execution["log_folder"], str(tmp_path))
+ expected_state_path = os.path.normpath(
+ os.path.join(
+ last_execution["user_root"],
+ octobot_constants.PROCESS_BOT_STATE_FILE_NAME,
+ )
+ )
+ assert spawn_argv == [
+ octobot_process_ops.sys.executable,
+ expected_start_script,
+ "--user-folder",
+ rel_user,
+ "--log-folder",
+ rel_log,
+ "-nt",
+ "--dump-state",
+ expected_state_path,
+ ]
+ child_env = spawn_kwargs["environment"]
+ assert child_env[services_constants.ENV_WEB_PORT] == str(last_execution["web_port"])
+ assert child_env[services_constants.ENV_WEB_ADDRESS] == "127.0.0.1"
+ assert child_env[services_constants.ENV_NODE_API_PORT] == str(last_execution["node_port"])
+ assert child_env[services_constants.ENV_NODE_API_ADDRESS] == "127.0.0.1"
+ assert spawn_kwargs.get("hide_console_window") is True
+ finally:
+ # Redundant with pytest’s tmp_path teardown; makes intent obvious if the test is copied elsewhere.
+ shutil.rmtree(tmp_path / commons_constants.USER_FOLDER, ignore_errors=True)
+ if (tmp_path / "logs").exists():
+ shutil.rmtree(tmp_path / "logs", ignore_errors=True)
+
+ async def test_run_octobot_process_via_dsl_writes_exchange_auth_into_user_config(
+ self, tmp_path, monkeypatch
+ ):
+ """
+ Same pipeline as ``test_run_octobot_process_via_dsl``, plus positional
+ ``exchange_auth_data`` (list of dicts). Verifies API fields land under
+ ``exchanges`` in the user-root ``config.json`` written during layout.
+ """
+ monkeypatch.chdir(tmp_path)
+ (tmp_path / "start.py").write_text("#", encoding="utf-8")
+ user_folder = "integration_dsl_exchange_auth_bot"
+ exchange_internal_name = "dsl_integration_cred_exchange"
+ fake_api_key = "dsl-integration-api-key"
+ fake_api_secret = "dsl-integration-api-secret"
+ fake_api_password = "dsl-integration-api-password"
+ exchange_auth_list = [
+ {
+ "internal_name": exchange_internal_name,
+ "api_key": fake_api_key,
+ "api_secret": fake_api_secret,
+ "api_password": fake_api_password,
+ "exchange_type": commons_constants.CONFIG_EXCHANGE_SPOT,
+ "sandboxed": True,
+ }
+ ]
+ expression = (
+ f"run_octobot_process({user_folder!r}, {repr(_MINIMAL_PROFILE_DATA_DSL_LITERAL)}, "
+ f"{repr(exchange_auth_list)})"
+ )
+ interpreter = dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators()
+ + [EnsureOctobotProcessOperator],
+ )
+ try:
+ with mock.patch.object(
+ octobot_process_ops,
+ "_load_process_bot_state",
+ new=mock.AsyncMock(side_effect=_async_return_none_mock),
+ ), mock.patch.object(
+ process_util,
+ "spawn_managed_subprocess",
+ ) as spawn_mock:
+ spawn_mock.return_value = mock.Mock(spec=["pid"], pid=12345)
+ result = await interpreter.interprete(expression)
+ assert isinstance(result, dict)
+ assert dsl_interpreter.ReCallingOperatorResult.__name__ in result
+ user_data_root = (
+ tmp_path
+ / commons_constants.USER_FOLDER
+ / commons_constants.AUTOMATIONS_FOLDER
+ / user_folder
+ )
+ root_config_path = user_data_root / commons_constants.CONFIG_FILE
+ assert root_config_path.is_file()
+ written_root_cfg = json.loads(root_config_path.read_text(encoding="utf-8"))
+ exchange_cfg = written_root_cfg[commons_constants.CONFIG_EXCHANGES][exchange_internal_name]
+ assert exchange_cfg[commons_constants.CONFIG_EXCHANGE_KEY] == fake_api_key
+ assert exchange_cfg[commons_constants.CONFIG_EXCHANGE_SECRET] == fake_api_secret
+ assert exchange_cfg[commons_constants.CONFIG_EXCHANGE_PASSWORD] == fake_api_password
+ assert exchange_cfg[commons_constants.CONFIG_EXCHANGE_TYPE] == commons_constants.CONFIG_EXCHANGE_SPOT
+ assert exchange_cfg[commons_constants.CONFIG_EXCHANGE_SANDBOXED] is True
+ assert exchange_cfg[commons_constants.CONFIG_ENABLED_OPTION] is True
+ finally:
+ shutil.rmtree(tmp_path / commons_constants.USER_FOLDER, ignore_errors=True)
+ if (tmp_path / "logs").exists():
+ shutil.rmtree(tmp_path / "logs", ignore_errors=True)
+
+
+class TestEnsureOctobotProcessOperatorExecutionStop:
+ async def test_execution_stop_dead_child_is_already_stopped(self):
+ inner = _stop_test_ensure_state_dict("http://127.0.0.1:7")
+ operator_signals_holder = dsl_interpreter.OperatorSignals()
+ operator_under_test = octobot_process_ops.create_octobot_process_operators(
+ operator_signals_holder
+ )[0]
+ operator_signals_holder.sync({
+ operator_under_test.get_name(): dsl_interpreter.OperatorSignal.STOP.value,
+ })
+ op = operator_under_test(
+ user_folder="u1",
+ profile_data=_MINIMAL_PROFILE_DATA,
+ last_execution_result=_re_calling_ensure_value(inner),
+ )
+ with (
+ mock.patch.object(
+ dsl_interpreter.ProcessBoundOperatorMixin,
+ "is_process_running",
+ return_value=False,
+ ),
+ ):
+ await op.pre_compute()
+ assert isinstance(op.value, dict)
+ assert op.value["status"] == "already_stopped"
+
+ async def test_execution_stop_short_circuits_without_sigterm_when_not_running(self):
+ """STOP branch returns already_stopped before ``request_graceful_stop`` when ``is_process_running`` is false."""
+ inner = _stop_test_ensure_state_dict("http://127.0.0.1:7")
+ operator_signals_holder = dsl_interpreter.OperatorSignals()
+ operator_under_test = octobot_process_ops.create_octobot_process_operators(
+ operator_signals_holder
+ )[0]
+ operator_signals_holder.sync({
+ operator_under_test.get_name(): dsl_interpreter.OperatorSignal.STOP.value,
+ })
+ op = operator_under_test(
+ user_folder="u1",
+ profile_data=_MINIMAL_PROFILE_DATA,
+ last_execution_result=_re_calling_ensure_value(inner),
+ )
+ graceful_stop_mock = mock.Mock()
+ with (
+ mock.patch.object(
+ dsl_interpreter.ProcessBoundOperatorMixin,
+ "is_process_running",
+ return_value=False,
+ ),
+ mock.patch.object(
+ operator_under_test,
+ "request_graceful_stop",
+ new=graceful_stop_mock,
+ ),
+ ):
+ await op.pre_compute()
+ graceful_stop_mock.assert_not_called()
+ assert op.value == {"status": "already_stopped", "reason": "not_running"}
+
+ async def test_execution_stop_os_kill_failure_raises(self):
+ inner = _stop_test_ensure_state_dict("http://127.0.0.1:7")
+ operator_signals_holder = dsl_interpreter.OperatorSignals()
+ operator_under_test = octobot_process_ops.create_octobot_process_operators(
+ operator_signals_holder
+ )[0]
+ operator_signals_holder.sync({
+ operator_under_test.get_name(): dsl_interpreter.OperatorSignal.STOP.value,
+ })
+ op = operator_under_test(
+ user_folder="u1",
+ profile_data=_MINIMAL_PROFILE_DATA,
+ last_execution_result=_re_calling_ensure_value(inner),
+ )
+
+ def _kill_failed(_pid, _sig):
+ raise OSError("simulated os.kill failure")
+
+ with (
+ mock.patch.object(
+ process_util,
+ "pid_is_running",
+ return_value=True,
+ ),
+ mock.patch(
+ "octobot_commons.process_util.os.kill",
+ side_effect=_kill_failed,
+ ),
+ ):
+ with pytest.raises(commons_errors.DSLInterpreterError, match="simulated"):
+ await op.pre_compute()
+
+
+class TestEnsureOctobotProcessOperatorSignalDispatch:
+ def test_should_dispatch_stop_and_update_config_for_valid_ensure_payload(self):
+ inner = _stop_test_ensure_state_dict("http://127.0.0.1:7")
+ payload = _re_calling_ensure_value(inner)
+ op_cls = EnsureOctobotProcessOperator
+ assert op_cls.should_dispatch_operator_signal_for_result(
+ dsl_interpreter.OperatorSignal.STOP.value,
+ payload,
+ )
+ assert op_cls.should_dispatch_operator_signal_for_result(
+ dsl_interpreter.OperatorSignal.UPDATE_CONFIG.value,
+ payload,
+ )
+
+ def test_should_dispatch_false_for_unsupported_signal(self):
+ inner = _stop_test_ensure_state_dict("http://127.0.0.1:7")
+ payload = _re_calling_ensure_value(inner)
+ assert not EnsureOctobotProcessOperator.should_dispatch_operator_signal_for_result(
+ "OTHER_SIGNAL",
+ payload,
+ )
+
+ def test_should_dispatch_false_when_inner_not_ensure_state(self):
+ payload = _re_calling_ensure_value({"invalid": "not_ensure"})
+ assert not EnsureOctobotProcessOperator.should_dispatch_operator_signal_for_result(
+ dsl_interpreter.OperatorSignal.UPDATE_CONFIG.value,
+ payload,
+ )
+
+
+class TestEnsureOctobotProcessOperatorUpdateConfig:
+ async def test_update_config_triggers_respawn_and_recallable_result(self, tmp_path, monkeypatch):
+ monkeypatch.chdir(tmp_path)
+ (tmp_path / "start.py").write_text("#", encoding="utf-8")
+ user_automation = (
+ tmp_path
+ / commons_constants.USER_FOLDER
+ / commons_constants.AUTOMATIONS_FOLDER
+ / "nested"
+ / "upd_bot"
+ )
+ user_automation.mkdir(parents=True)
+ log_dir = (
+ tmp_path.joinpath(*octobot_node_constants.AUTOMATION_LOGS_FOLDER.split("/")).joinpath(
+ "nested", "upd_bot"
+ )
+ )
+ log_dir.mkdir(parents=True)
+ (user_automation / "stale_marker.txt").write_text("x", encoding="utf-8")
+ inner = octobot_process_ops.EnsureOctobotProcessState(
+ http_base_url="http://127.0.0.1:5001",
+ web_port=5001,
+ node_port=5002,
+ user_root=str(user_automation),
+ user_folder="nested/upd_bot",
+ log_folder=str(log_dir),
+ profile_id="p1",
+ pid=4242,
+ state_file_path=os.path.join(
+ str(user_automation),
+ octobot_constants.PROCESS_BOT_STATE_FILE_NAME,
+ ),
+ init_state_ok=True,
+ ).model_dump()
+ operator_signals_holder = dsl_interpreter.OperatorSignals()
+ operator_under_test = octobot_process_ops.create_octobot_process_operators(
+ operator_signals_holder
+ )[0]
+ operator_signals_holder.sync({
+ operator_under_test.get_name(): dsl_interpreter.OperatorSignal.UPDATE_CONFIG.value,
+ })
+ op = operator_under_test(
+ user_folder="nested/upd_bot",
+ profile_data=_MINIMAL_PROFILE_DATA,
+ last_execution_result=_re_calling_ensure_value(inner),
+ )
+ op.pid = 4242
+ try:
+ with (
+ mock.patch.object(
+ operator_under_test,
+ "wait_until_pid_stopped",
+ new=mock.AsyncMock(),
+ ) as wait_mock,
+ mock.patch.object(
+ dsl_interpreter.ProcessBoundOperatorMixin,
+ "request_graceful_stop",
+ return_value={"status": "stopped", "signal": "sigterm"},
+ ) as stop_mock,
+ mock.patch.object(
+ octobot_process_ops,
+ "_load_process_bot_state",
+ new=mock.AsyncMock(side_effect=_async_return_none_mock),
+ ),
+ mock.patch.object(
+ process_util,
+ "spawn_managed_subprocess",
+ ) as spawn_mock,
+ ):
+ spawn_mock.return_value = mock.Mock(spec=["pid"], pid=5151)
+ await op.pre_compute()
+ stop_mock.assert_called_once()
+ wait_mock.assert_awaited_once()
+ spawn_mock.assert_called_once()
+ assert not (user_automation / "stale_marker.txt").exists()
+ assert isinstance(op.value, dict)
+ assert dsl_interpreter.ReCallingOperatorResult.__name__ in op.value
+ finally:
+ shutil.rmtree(tmp_path / commons_constants.USER_FOLDER, ignore_errors=True)
+ if (tmp_path / "logs").exists():
+ shutil.rmtree(tmp_path / "logs", ignore_errors=True)
diff --git a/packages/tentacles/Services/Interfaces/web_interface/models/medias.py b/packages/tentacles/Services/Interfaces/web_interface/models/medias.py
index e44a0a3f5d..e2f25f1b51 100644
--- a/packages/tentacles/Services/Interfaces/web_interface/models/medias.py
+++ b/packages/tentacles/Services/Interfaces/web_interface/models/medias.py
@@ -14,7 +14,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import octobot_tentacles_manager.constants as tentacles_manager_constants
-import octobot_commons.constants as commons_constants
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
ALLOWED_IMAGE_FORMATS = ["png", "jpg", "jpeg", "gif", "svg"]
ALLOWED_SOUNDS_FORMATS = ["mp3"]
@@ -31,7 +31,9 @@ def is_valid_tentacle_image_path(path):
def is_valid_profile_image_path(path):
path_ending = path.split(".")[-1].lower()
- return path_ending in ALLOWED_IMAGE_FORMATS and _is_valid_path(path, commons_constants.USER_PROFILES_FOLDER)
+ return path_ending in ALLOWED_IMAGE_FORMATS and _is_valid_path(
+ path, user_root_folder_provider.get_user_profiles_folder()
+ )
def is_valid_audio_path(path):
diff --git a/packages/tentacles/Trading/Mode/simple_market_making_trading_mode/simple_market_making_profile_data_adapter.py b/packages/tentacles/Trading/Mode/simple_market_making_trading_mode/simple_market_making_profile_data_adapter.py
index 7499bdbf90..1cbba7e94d 100644
--- a/packages/tentacles/Trading/Mode/simple_market_making_trading_mode/simple_market_making_profile_data_adapter.py
+++ b/packages/tentacles/Trading/Mode/simple_market_making_trading_mode/simple_market_making_profile_data_adapter.py
@@ -54,14 +54,26 @@ async def adapt(
profile_data: octobot_commons.profiles.ProfileData,
auth_data: list[octobot_commons.profiles.exchange_auth_data.ExchangeAuthData]
) -> None:
- exchange_configs = self.additional_data.get(community_enums.BotConfigKeys.EXCHANGES.value)
- if not exchange_configs:
- # legacy exchange configs were stored in options
- exchange_configs = self.additional_data[community_enums.BotConfigKeys.OPTIONS.value][EXCHANGE_CONFIGS]
- commons_logging.get_logger(self.__class__.__name__).warning(
- f"Using legacy exchange configs stored in options: {exchange_configs}"
+ exchange_configs: typing.Optional[list] = None
+ if self.additional_data:
+ exchange_configs = self.additional_data.get(
+ community_enums.BotConfigKeys.EXCHANGES.value
)
- is_simulated = bool(self.additional_data.get(community_enums.BotConfigKeys.IS_SIMULATED.value, False))
+ if not exchange_configs:
+ if options := self.additional_data.get(
+ community_enums.BotConfigKeys.OPTIONS.value
+ ):
+ if legacy_exchange_configs := options.get(EXCHANGE_CONFIGS):
+ exchange_configs = legacy_exchange_configs
+ commons_logging.get_logger(self.__class__.__name__).warning(
+ f"Using legacy exchange configs stored in options: {exchange_configs}"
+ )
+ is_simulated = bool(
+ self.additional_data.get(
+ community_enums.BotConfigKeys.IS_SIMULATED.value,
+ False,
+ )
+ ) if self.additional_data else False
can_trade = True # init at True to allow predicted book calls
if profile_data.profile_details.bot_id:
# this is a running bot, check if it can trade
@@ -101,37 +113,45 @@ async def adapt(
exchange_type=exchange_config.get(EXCHANGE_TYPE, octobot_commons.constants.DEFAULT_EXCHANGE_TYPE),
)
for exchange_config in exchange_configs
- }
+ } if exchange_configs else {}
exchange_account_id_by_name = {
exchange_config[NAME]: exchange_config.get(EXCHANGE_ACCOUNT_ID)
for exchange_config in exchange_configs
- }
- profile_data.exchanges = [
- octobot_commons.profiles.profile_data.ExchangeData(
- internal_name=auth_data.internal_name,
- exchange_account_id=exchange_account_id_by_name.get(auth_data.internal_name),
+ } if exchange_configs else {}
+ if exchange_configs is None:
+ if not profile_data.exchanges:
+ raise ValueError(
+ "No exchanges found in profile data and no exchange "
+ "configs provided in additional data"
+ )
+ else:
+ # configure exchanges from exchange auth data and reference price exchanges
+ profile_data.exchanges = [
+ octobot_commons.profiles.profile_data.ExchangeData(
+ internal_name=auth_data.internal_name,
+ exchange_account_id=exchange_account_id_by_name.get(auth_data.internal_name),
+ )
+ for auth_data in exchange_auth_data_by_name.values()
+ ]
+ reference_price_exchanges = set(
+ ref_exchange[simple_market_making_trading_mode.SimpleMarketMakingTradingMode.EXCHANGE]
+ for pair_config in pair_configs
+ for ref_exchange in pair_config[
+ simple_market_making_trading_mode.SimpleMarketMakingTradingMode.REFERENCE_PRICE
+ ]
+ if ref_exchange[simple_market_making_trading_mode.SimpleMarketMakingTradingMode.EXCHANGE] !=
+ simple_market_making_trading_mode.SimpleMarketMakingTradingMode.LOCAL_EXCHANGE_PRICE
)
- for auth_data in exchange_auth_data_by_name.values()
- ]
- reference_price_exchanges = set(
- ref_exchange[simple_market_making_trading_mode.SimpleMarketMakingTradingMode.EXCHANGE]
- for pair_config in pair_configs
- for ref_exchange in pair_config[
- simple_market_making_trading_mode.SimpleMarketMakingTradingMode.REFERENCE_PRICE
+ profile_data.exchanges += [
+ octobot_commons.profiles.profile_data.ExchangeData(internal_name=exchange)
+ for exchange in reference_price_exchanges
+ if exchange not in exchange_auth_data_by_name
]
- if ref_exchange[simple_market_making_trading_mode.SimpleMarketMakingTradingMode.EXCHANGE] !=
- simple_market_making_trading_mode.SimpleMarketMakingTradingMode.LOCAL_EXCHANGE_PRICE
- )
- profile_data.exchanges += [
- octobot_commons.profiles.profile_data.ExchangeData(internal_name=exchange)
- for exchange in reference_price_exchanges
- if exchange not in exchange_auth_data_by_name
- ]
- # register auto-filled exchange config if any
- self._register_exchange_configs(profile_data, exchange_configs)
+ # register auto-filled exchange config if any
+ self._register_exchange_configs(profile_data, exchange_configs)
# register automations config
- self._register_automations_config(profile_data, exchange_configs, pair_configs)
+ self._register_automations_config(profile_data, pair_configs)
# ensure all required fields are present
for tentacle_config in self.tentacles_data:
@@ -216,10 +236,10 @@ def _register_exchange_configs(
)
@staticmethod
- def _get_pair_exchange_name(pair_config: dict, exchange_configs: typing.Iterable[dict]) -> str:
- for exchange_config in exchange_configs:
- exchange_name = exchange_config.get(NAME, "")
- if simple_market_making_trading_mode.SimpleMarketMakingTradingMode.is_exchange_compatible_pair_setting(
+ def _get_pair_exchange_name(pair_config: dict, profile_data: octobot_commons.profiles.ProfileData) -> str:
+ for exchange_config in profile_data.exchanges:
+ exchange_name = exchange_config.internal_name
+ if exchange_name and simple_market_making_trading_mode.SimpleMarketMakingTradingMode.is_exchange_compatible_pair_setting(
pair_config, exchange_name
):
return exchange_name
@@ -228,14 +248,13 @@ def _get_pair_exchange_name(pair_config: dict, exchange_configs: typing.Iterable
def _register_automations_config(
self,
profile_data: octobot_commons.profiles.ProfileData,
- exchange_configs: typing.Iterable[dict],
pair_configs: typing.Iterable[dict]
):
automations = {}
for pair_config in pair_configs:
exchange_name = self._get_pair_exchange_name(
- pair_config, exchange_configs
+ pair_config, profile_data
)
conditions_configs = pair_config.get("stop_conditions")
if not conditions_configs:
diff --git a/packages/tentacles/Trading/Mode/simple_market_making_trading_mode/tests/test_simple_market_making_trading_adapter.py b/packages/tentacles/Trading/Mode/simple_market_making_trading_mode/tests/test_simple_market_making_trading_adapter.py
index 40314372e3..29c7f453e4 100644
--- a/packages/tentacles/Trading/Mode/simple_market_making_trading_mode/tests/test_simple_market_making_trading_adapter.py
+++ b/packages/tentacles/Trading/Mode/simple_market_making_trading_mode/tests/test_simple_market_making_trading_adapter.py
@@ -151,7 +151,7 @@ async def test_adapt_uses_direct_exchange_configs_and_registers_helpers(
assert ["binance", "kucoin"] == sorted([e.internal_name for e in profile_data.exchanges])
register_exchange_configs_mock.assert_called_once_with(profile_data, exchange_configs)
- register_automations_config_mock.assert_called_once_with(profile_data, exchange_configs, pair_configs)
+ register_automations_config_mock.assert_called_once_with(profile_data, pair_configs)
should_fill_exchange_auth_mock.assert_called_once_with()
async def test_adapt_uses_legacy_exchange_configs_when_exchanges_missing(
@@ -183,7 +183,81 @@ async def test_adapt_uses_legacy_exchange_configs_when_exchanges_missing(
await adapter.adapt(profile_data, auth_data)
register_exchange_configs_mock.assert_called_once_with(profile_data, exchange_configs)
- register_automations_config_mock.assert_called_once_with(profile_data, exchange_configs, pair_configs)
+ register_automations_config_mock.assert_called_once_with(profile_data, pair_configs)
+
+ async def test_adapt_with_empty_additional_data_uses_profile_exchanges(
+ self, adapter, profile_data, auth_data
+ ):
+ profile_data.profile_details.bot_id = None
+ adapter.additional_data.clear()
+ pair_configs = self._build_pair_configs(reference_exchange="binance")
+ mm_tentacle_config = {SimpleMarketMakingTradingMode.CONFIG_PAIR_SETTINGS: pair_configs}
+ adapter.tentacles_data = [
+ octobot_commons.profiles.profile_data.TentaclesData(
+ name=SimpleMarketMakingTradingMode.get_name(),
+ config=mm_tentacle_config,
+ )
+ ]
+ register_exchange_configs_mock = mock.Mock()
+ register_automations_config_mock = mock.Mock()
+ adapter._register_exchange_configs = register_exchange_configs_mock # type: ignore
+ adapter._register_automations_config = register_automations_config_mock # type: ignore
+ adapter._should_fill_exchange_auth = mock.Mock(return_value=False) # type: ignore
+
+ expected_exchange_names = [exchange.internal_name for exchange in profile_data.exchanges]
+
+ with mock.patch(
+ "tentacles.Trading.Mode.simple_market_making_trading_mode.simple_market_making_profile_data_adapter.symbols_util.get_most_common_usd_like_symbol",
+ mock.Mock(return_value="USDT"),
+ ):
+ await adapter.adapt(profile_data, auth_data)
+
+ assert [exchange.internal_name for exchange in profile_data.exchanges] == expected_exchange_names
+ traded_pairs = {currency.name for currency in profile_data.crypto_currencies}
+ assert traded_pairs == {"BTC/USDT"}
+ register_exchange_configs_mock.assert_not_called()
+ register_automations_config_mock.assert_called_once_with(profile_data, pair_configs)
+
+ mm_tentacle_configs = [
+ tentacle.config
+ for tentacle in profile_data.tentacles
+ if tentacle.name == SimpleMarketMakingTradingMode.get_name()
+ ]
+ assert len(mm_tentacle_configs) == 1
+ for pair_setting in mm_tentacle_configs[0][SimpleMarketMakingTradingMode.CONFIG_PAIR_SETTINGS]:
+ for ref in pair_setting[SimpleMarketMakingTradingMode.REFERENCE_PRICE]:
+ assert (
+ ref[SimpleMarketMakingTradingMode.EXCHANGE]
+ in expected_exchange_names
+ )
+
+ async def test_adapt_with_empty_additional_data_raises_when_profile_has_no_exchanges(
+ self, adapter, profile_data, auth_data
+ ):
+ profile_data.profile_details.bot_id = None
+ profile_data.exchanges = []
+ adapter.additional_data.clear()
+ pair_configs = self._build_pair_configs(reference_exchange="binance")
+ mm_tentacle_config = {SimpleMarketMakingTradingMode.CONFIG_PAIR_SETTINGS: pair_configs}
+ adapter.tentacles_data = [
+ octobot_commons.profiles.profile_data.TentaclesData(
+ name=SimpleMarketMakingTradingMode.get_name(),
+ config=mm_tentacle_config,
+ )
+ ]
+ adapter._register_exchange_configs = mock.Mock() # type: ignore
+ adapter._register_automations_config = mock.Mock() # type: ignore
+ adapter._should_fill_exchange_auth = mock.Mock(return_value=False) # type: ignore
+
+ with mock.patch(
+ "tentacles.Trading.Mode.simple_market_making_trading_mode.simple_market_making_profile_data_adapter.symbols_util.get_most_common_usd_like_symbol",
+ mock.Mock(return_value="USDT"),
+ ):
+ with pytest.raises(
+ ValueError,
+ match="No exchanges found in profile data and no exchange",
+ ):
+ await adapter.adapt(profile_data, auth_data)
async def test_adapt_sets_reference_market_from_usd_like_symbol(
self, adapter, profile_data, auth_data
@@ -473,11 +547,6 @@ def test_register_exchange_configs_ignores_config_without_url_or_auth(self, adap
class TestRegisterAutomationsConfig:
def test_register_automations_config_adds_volatility_automation(self, adapter, profile_data):
- exchange_configs = [
- {
- simple_market_making_profile_data_adapter.NAME: "binance",
- }
- ]
pair_configs = [
{
SimpleMarketMakingTradingMode.CONFIG_PAIR: "BTC/USDT",
@@ -490,7 +559,7 @@ def test_register_automations_config_adds_volatility_automation(self, adapter, p
}
]
- adapter._register_automations_config(profile_data, exchange_configs, pair_configs) # type: ignore
+ adapter._register_automations_config(profile_data, pair_configs)
# one automation tentacle added
assert len(profile_data.tentacles) == 1
@@ -520,11 +589,6 @@ def test_register_automations_config_adds_volatility_automation(self, adapter, p
def test_register_automations_config_volatility_still_parses_legacy_average_prive_key(
self, adapter, profile_data
):
- exchange_configs = [
- {
- simple_market_making_profile_data_adapter.NAME: "binance",
- }
- ]
pair_configs = [
{
SimpleMarketMakingTradingMode.CONFIG_PAIR: "BTC/USDT",
@@ -537,7 +601,7 @@ def test_register_automations_config_volatility_still_parses_legacy_average_priv
}
]
- adapter._register_automations_config(profile_data, exchange_configs, pair_configs) # type: ignore
+ adapter._register_automations_config(profile_data, pair_configs)
assert len(profile_data.tentacles) == 1
tentacle = profile_data.tentacles[0]
@@ -548,11 +612,6 @@ def test_register_automations_config_volatility_still_parses_legacy_average_priv
def test_register_automations_config_adds_holding_threshold_automations_for_base_and_quote(
self, adapter, profile_data
):
- exchange_configs = [
- {
- simple_market_making_profile_data_adapter.NAME: "binance",
- }
- ]
pair_configs = [
{
SimpleMarketMakingTradingMode.CONFIG_PAIR: "BTC/USDT",
@@ -564,7 +623,7 @@ def test_register_automations_config_adds_holding_threshold_automations_for_base
}
]
- adapter._register_automations_config(profile_data, exchange_configs, pair_configs) # type: ignore
+ adapter._register_automations_config(profile_data, pair_configs)
# expecting a single automations tentacle
assert len(profile_data.tentacles) == 1
@@ -604,11 +663,6 @@ def test_register_automations_config_adds_holding_threshold_automations_for_base
}
def test_register_automations_config_adds_holding_threshold_automations_for_base_and_quote_and_volatility_threshold(self, adapter, profile_data):
- exchange_configs = [
- {
- simple_market_making_profile_data_adapter.NAME: "binance",
- }
- ]
pair_configs = [
{
SimpleMarketMakingTradingMode.CONFIG_PAIR: "BTC/USDT",
@@ -623,7 +677,7 @@ def test_register_automations_config_adds_holding_threshold_automations_for_base
}
]
- adapter._register_automations_config(profile_data, exchange_configs, pair_configs) # type: ignore
+ adapter._register_automations_config(profile_data, pair_configs)
# expecting a single automations tentacle
assert len(profile_data.tentacles) == 1
@@ -676,11 +730,6 @@ def test_register_automations_config_adds_holding_threshold_automations_for_base
}
def test_register_automations_config_ignores_pairs_without_stop_conditions(self, adapter, profile_data):
- exchange_configs = [
- {
- simple_market_making_profile_data_adapter.NAME: "binance",
- }
- ]
pair_configs = [
{
SimpleMarketMakingTradingMode.CONFIG_PAIR: "BTC/USDT",
@@ -689,7 +738,7 @@ def test_register_automations_config_ignores_pairs_without_stop_conditions(self,
}
]
- adapter._register_automations_config(profile_data, exchange_configs, pair_configs) # type: ignore
+ adapter._register_automations_config(profile_data, pair_configs)
# no automations => no tentacles added
assert profile_data.tentacles == []
diff --git a/packages/tentacles_manager/octobot_tentacles_manager/api/configurator.py b/packages/tentacles_manager/octobot_tentacles_manager/api/configurator.py
index f9b10d5aff..f821b0defe 100644
--- a/packages/tentacles_manager/octobot_tentacles_manager/api/configurator.py
+++ b/packages/tentacles_manager/octobot_tentacles_manager/api/configurator.py
@@ -18,6 +18,7 @@
from typing import TYPE_CHECKING
import octobot_commons.tentacles_management as tentacles_management
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
import octobot_tentacles_manager.api as api
import octobot_tentacles_manager.configuration as configuration
@@ -31,7 +32,9 @@
async def ensure_setup_configuration(tentacle_path=constants.TENTACLES_PATH, bot_path=constants.DEFAULT_BOT_PATH,
bot_install_dir=constants.DEFAULT_BOT_INSTALL_DIR) -> None:
- if not path.exists(path.join(bot_path, constants.USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH)):
+ if not path.exists(
+ path.join(bot_path, user_root_folder_provider.get_user_reference_tentacle_config_file_path())
+ ):
await api.repair_installation(tentacle_path, bot_path, bot_install_dir, verbose=False)
@@ -61,16 +64,20 @@ def refresh_all_tentacles_setup_configs(
def get_tentacles_setup_config(
- config_path=constants.USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH
+ config_path: str = None
) -> "TentaclesSetupConfiguration":
+ if config_path is None:
+ config_path = user_root_folder_provider.get_user_reference_tentacle_config_file_path()
setup_config = configuration.TentaclesSetupConfiguration(config_path=config_path)
setup_config.read_config()
return setup_config
def create_tentacles_setup_config_with_tentacles(
- *tentacles_classes, config_path=constants.USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH
+ *tentacles_classes, config_path: str = None
) -> "TentaclesSetupConfiguration":
+ if config_path is None:
+ config_path = user_root_folder_provider.get_user_reference_tentacle_config_file_path()
setup_config = configuration.TentaclesSetupConfiguration(config_path=config_path)
setup_config.from_activated_tentacles_classes(*tentacles_classes)
return setup_config
@@ -107,7 +114,7 @@ def _apply_reference_tentacles_config_registered_tentacles(
tentacles_setup_config: "TentaclesSetupConfiguration"
):
reference_tentacles_setup_config = get_tentacles_setup_config(
- constants.USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH
+ user_root_folder_provider.get_user_reference_tentacle_config_file_path()
)
tentacles_setup_config.registered_tentacles = reference_tentacles_setup_config.registered_tentacles
diff --git a/packages/tentacles_manager/octobot_tentacles_manager/configuration/tentacles_setup_configuration.py b/packages/tentacles_manager/octobot_tentacles_manager/configuration/tentacles_setup_configuration.py
index aa95050971..a04d4c3646 100644
--- a/packages/tentacles_manager/octobot_tentacles_manager/configuration/tentacles_setup_configuration.py
+++ b/packages/tentacles_manager/octobot_tentacles_manager/configuration/tentacles_setup_configuration.py
@@ -18,6 +18,7 @@
import octobot_commons.logging as logging
import octobot_commons.constants as commons_constants
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
import octobot_commons.profiles as commons_profiles
import octobot_tentacles_manager.constants as constants
@@ -39,10 +40,11 @@ class TentaclesSetupConfiguration:
constants.TENTACLES_TRADING_SUPERVISOR_PATH,
}
- def __init__(self, bot_installation_path=constants.DEFAULT_BOT_PATH,
- config_path=constants.USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH):
+ def __init__(self, bot_installation_path=constants.DEFAULT_BOT_PATH, config_path=None):
self.logger = logging.get_logger(self.__class__.__name__)
self.bot_installation_path = bot_installation_path
+ if config_path is None:
+ config_path = user_root_folder_provider.get_user_reference_tentacle_config_file_path()
self.config_path = path.join(bot_installation_path, config_path)
self.tentacles_activation = {}
self.registered_tentacles = {}
@@ -98,10 +100,12 @@ def is_imported_profile(profile_folder):
def refresh_profiles_tentacles_config(self,
tentacles,
- profiles_path=commons_constants.USER_PROFILES_FOLDER,
+ profiles_path=None,
newly_installed_tentacles=None,
uninstalled_tentacles=None
):
+ if profiles_path is None:
+ profiles_path = user_root_folder_provider.get_user_profiles_folder()
bot_profiles_path = os.path.join(self.bot_installation_path, profiles_path)
if not path.isdir(bot_profiles_path):
return
@@ -205,7 +209,7 @@ def _deactivate_tentacle_if_evaluator(self, element_name, element_type):
def _apply_default_profile_activation(self):
default_profile = commons_profiles.Profile.load_profile(
- commons_constants.USER_PROFILES_FOLDER,
+ user_root_folder_provider.get_user_profiles_folder(),
commons_constants.DEFAULT_PROFILE
)
profile_setup_config = configuration.TentaclesSetupConfiguration(
@@ -220,7 +224,9 @@ def _apply_reference_tentacles_config(self, read_activation_config):
self._apply_default_profile_activation()
else:
self._from_dict(
- configuration.read_config(constants.USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH),
+ configuration.read_config(
+ user_root_folder_provider.get_user_reference_tentacle_config_file_path()
+ ),
read_activation_config
)
except Exception as err:
diff --git a/packages/tentacles_manager/octobot_tentacles_manager/constants.py b/packages/tentacles_manager/octobot_tentacles_manager/constants.py
index cdf29090c1..1a6daf79e8 100644
--- a/packages/tentacles_manager/octobot_tentacles_manager/constants.py
+++ b/packages/tentacles_manager/octobot_tentacles_manager/constants.py
@@ -70,12 +70,7 @@
TENTACLE_REQUIREMENT_VERSION_EQUALS = "=="
# Tentacle user config files and folders
-USER_REFERENCE_TENTACLE_CONFIG_PATH = path.join(constants.USER_FOLDER, "reference_tentacles_config")
-USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH = path.join(USER_REFERENCE_TENTACLE_CONFIG_PATH,
- constants.CONFIG_TENTACLES_FILE)
TENTACLES_SPECIFIC_CONFIG_FOLDER = "specific_config"
-USER_REFERENCE_TENTACLE_SPECIFIC_CONFIG_PATH = path.join(USER_REFERENCE_TENTACLE_CONFIG_PATH,
- TENTACLES_SPECIFIC_CONFIG_FOLDER)
# Current minimum default tentacles version
TENTACLE_CURRENT_MINIMUM_DEFAULT_TENTACLES_VERSION = "1.2.0"
diff --git a/packages/tentacles_manager/octobot_tentacles_manager/loaders/tentacle_loading.py b/packages/tentacles_manager/octobot_tentacles_manager/loaders/tentacle_loading.py
index 86d4fa7564..9758419e1e 100644
--- a/packages/tentacles_manager/octobot_tentacles_manager/loaders/tentacle_loading.py
+++ b/packages/tentacles_manager/octobot_tentacles_manager/loaders/tentacle_loading.py
@@ -87,7 +87,10 @@ def get_documentation(klass) -> str:
doc_content = ""
if path.isfile(doc_file):
with open(doc_file, "r") as doc_file:
- doc_content = doc_file.read()
+ try:
+ doc_content = doc_file.read()
+ except UnicodeDecodeError:
+ doc_content = ""
_tentacle_documentation_by_class_name[doc_key] = doc_content
return doc_content
diff --git a/packages/tentacles_manager/octobot_tentacles_manager/managers/tentacles_setup_manager.py b/packages/tentacles_manager/octobot_tentacles_manager/managers/tentacles_setup_manager.py
index 75308aec41..81a6c95bab 100644
--- a/packages/tentacles_manager/octobot_tentacles_manager/managers/tentacles_setup_manager.py
+++ b/packages/tentacles_manager/octobot_tentacles_manager/managers/tentacles_setup_manager.py
@@ -19,6 +19,7 @@
import shutil
import octobot_commons.logging as logging
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
import octobot_tentacles_manager.configuration as configuration
import octobot_tentacles_manager.constants as constants
@@ -88,7 +89,12 @@ def refresh_profile_tentacles_config(self, profile_folder):
async def create_missing_tentacles_arch(self):
# tentacle user config folder
- await util.find_or_create(path.join(self.bot_installation_path, constants.USER_REFERENCE_TENTACLE_SPECIFIC_CONFIG_PATH))
+ await util.find_or_create(
+ path.join(
+ self.bot_installation_path,
+ user_root_folder_provider.get_user_reference_tentacle_specific_config_path(),
+ )
+ )
# tentacles folder
found_existing_installation = not await util.find_or_create(self.tentacle_setup_root_path)
# tentacle main python init file
@@ -152,8 +158,9 @@ def delete_tentacles_arch(
if TentaclesSetupManager.is_tentacles_arch_valid(verbose=False, raises=raises) \
or (force and path.exists(path.join(bot_installation_path, tentacles_folder_name))):
shutil.rmtree(path.join(bot_installation_path, tentacles_folder_name))
- if with_user_config and path.exists(path.join(bot_installation_path, constants.USER_REFERENCE_TENTACLE_CONFIG_PATH)):
- shutil.rmtree(path.join(bot_installation_path, constants.USER_REFERENCE_TENTACLE_CONFIG_PATH))
+ ref_cfg = user_root_folder_provider.get_user_reference_tentacle_config_path()
+ if with_user_config and path.exists(path.join(bot_installation_path, ref_cfg)):
+ shutil.rmtree(path.join(bot_installation_path, ref_cfg))
@staticmethod
def get_available_tentacles_repos():
diff --git a/packages/tentacles_manager/requirements.txt b/packages/tentacles_manager/requirements.txt
index 5ff59bb917..055ca8d82d 100644
--- a/packages/tentacles_manager/requirements.txt
+++ b/packages/tentacles_manager/requirements.txt
@@ -2,7 +2,7 @@
setuptools
# async files management
-aiofiles==25.1.0
+aiofiles
# async http requests
aiohttp>=3.9.5
diff --git a/packages/tentacles_manager/tests/__init__.py b/packages/tentacles_manager/tests/__init__.py
index 0ec8508ee6..f705075f7d 100644
--- a/packages/tentacles_manager/tests/__init__.py
+++ b/packages/tentacles_manager/tests/__init__.py
@@ -22,6 +22,7 @@
import octobot_commons.asyncio_tools as asyncio_tools
import octobot_commons.constants as commons_constants
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
import octobot_tentacles_manager.constants as constants
@@ -66,8 +67,9 @@ def clean():
@pytest.fixture
def fake_profiles():
- default_profile = path.join(commons_constants.USER_PROFILES_FOLDER, commons_constants.DEFAULT_PROFILE)
- other_profile = path.join(commons_constants.USER_PROFILES_FOLDER, OTHER_PROFILE)
+ user_profiles = user_root_folder_provider.get_user_profiles_folder()
+ default_profile = path.join(user_profiles, commons_constants.DEFAULT_PROFILE)
+ other_profile = path.join(user_profiles, OTHER_PROFILE)
_reset_profile(default_profile)
_reset_profile(other_profile)
yield
@@ -80,10 +82,12 @@ def _cleanup():
shutil.rmtree(TEMP_DIR)
if path.exists(constants.TENTACLES_PATH):
shutil.rmtree(constants.TENTACLES_PATH)
- if path.exists(constants.USER_REFERENCE_TENTACLE_CONFIG_PATH):
- shutil.rmtree(constants.USER_REFERENCE_TENTACLE_CONFIG_PATH)
- if os.path.isdir(commons_constants.USER_PROFILES_FOLDER):
- shutil.rmtree(commons_constants.USER_PROFILES_FOLDER)
+ ref_config = user_root_folder_provider.get_user_reference_tentacle_config_path()
+ if path.exists(ref_config):
+ shutil.rmtree(ref_config)
+ user_profiles = user_root_folder_provider.get_user_profiles_folder()
+ if os.path.isdir(user_profiles):
+ shutil.rmtree(user_profiles)
def _reset_profile(profile_path, re_create=True):
diff --git a/packages/tentacles_manager/tests/api/test_installer.py b/packages/tentacles_manager/tests/api/test_installer.py
index c99f2401db..94ef9ab582 100644
--- a/packages/tentacles_manager/tests/api/test_installer.py
+++ b/packages/tentacles_manager/tests/api/test_installer.py
@@ -23,12 +23,13 @@
from os import path, walk
import octobot_commons.constants as commons_constants
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
from octobot_tentacles_manager.api.installer import install_all_tentacles, install_tentacles, install_single_tentacle, \
repair_installation
from octobot_tentacles_manager.configuration.tentacles_setup_configuration import TentaclesSetupConfiguration
+import octobot_tentacles_manager.constants as tentacles_manager_constants
from octobot_tentacles_manager.constants import TENTACLES_PATH, TENTACLES_REQUIREMENTS_INSTALL_TEMP_DIR, \
- PYTHON_INIT_FILE, TENTACLES_NOTIFIERS_PATH, USER_REFERENCE_TENTACLE_CONFIG_PATH, \
- USER_REFERENCE_TENTACLE_SPECIFIC_CONFIG_PATH, TENTACLES_SERVICES_PATH, TENTACLES_BACKTESTING_PATH, TENTACLES_EVALUATOR_PATH
+ PYTHON_INIT_FILE, TENTACLES_NOTIFIERS_PATH, TENTACLES_SERVICES_PATH, TENTACLES_BACKTESTING_PATH, TENTACLES_EVALUATOR_PATH
from octobot_tentacles_manager.managers.tentacles_setup_manager import TentaclesSetupManager
from tests import event_loop, CLEAN_TENTACLES_ARCHITECTURE_FILES_FOLDERS_COUNT
@@ -104,7 +105,9 @@ async def test_repair_installation():
"OtherInstantFluctuationsEvaluator, SecondOtherInstantFluctuationsEvaluator" in f.readlines()
# restore tentacles_config.json validity and content
- user_config_path = path.join(broken_install, USER_REFERENCE_TENTACLE_CONFIG_PATH)
+ user_config_path = path.join(
+ broken_install, user_root_folder_provider.get_user_reference_tentacle_config_path()
+ )
with open(path.join(user_config_path, commons_constants.CONFIG_TENTACLES_FILE)) as f:
activations = json.load(f)[TentaclesSetupConfiguration.TENTACLE_ACTIVATION_KEY]
# Evaluators are disabled by default by DEFAULT_DEACTIVATABLE_TENTACLE_SUB_TYPES
diff --git a/packages/tentacles_manager/tests/configuration/test_tentacle_configuration.py b/packages/tentacles_manager/tests/configuration/test_tentacle_configuration.py
index 53f5f5aabc..6c7f6162aa 100644
--- a/packages/tentacles_manager/tests/configuration/test_tentacle_configuration.py
+++ b/packages/tentacles_manager/tests/configuration/test_tentacle_configuration.py
@@ -27,6 +27,7 @@
from octobot_tentacles_manager.configuration.tentacle_configuration import get_config, update_config, \
factory_reset_config, get_config_schema_path
import octobot_tentacles_manager.util as util
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
import octobot_tentacles_manager.constants as constants
from octobot_tentacles_manager.loaders.tentacle_loading import reload_tentacle_by_tentacle_class
@@ -286,5 +287,6 @@ def _cleanup():
rmtree(constants.TENTACLES_PATH)
if path.exists(constants.TENTACLE_CONFIG_FILE_NAME):
os.remove(constants.TENTACLE_CONFIG_FILE_NAME)
- if path.exists(constants.USER_REFERENCE_TENTACLE_CONFIG_PATH):
- rmtree(constants.USER_REFERENCE_TENTACLE_CONFIG_PATH)
+ ref_tent = user_root_folder_provider.get_user_reference_tentacle_config_path()
+ if path.exists(ref_tent):
+ rmtree(ref_tent)
diff --git a/packages/tentacles_manager/tests/configuration/test_tentacles_setup_configuration.py b/packages/tentacles_manager/tests/configuration/test_tentacles_setup_configuration.py
index 707d7aba08..30208e5bff 100644
--- a/packages/tentacles_manager/tests/configuration/test_tentacles_setup_configuration.py
+++ b/packages/tentacles_manager/tests/configuration/test_tentacles_setup_configuration.py
@@ -25,6 +25,7 @@
import octobot_tentacles_manager.util as util
import octobot_tentacles_manager.api as api
from octobot_tentacles_manager.configuration import TentaclesSetupConfiguration
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
import octobot_tentacles_manager.constants as constants
# All test coroutines will be treated as marked.
@@ -132,5 +133,6 @@ def _cleanup():
rmtree(constants.TENTACLES_PATH)
if path.exists(constants.TENTACLE_CONFIG_FILE_NAME):
os.remove(constants.TENTACLE_CONFIG_FILE_NAME)
- if path.exists(constants.USER_REFERENCE_TENTACLE_CONFIG_PATH):
- rmtree(constants.USER_REFERENCE_TENTACLE_CONFIG_PATH)
+ ref_tent = user_root_folder_provider.get_user_reference_tentacle_config_path()
+ if path.exists(ref_tent):
+ rmtree(ref_tent)
diff --git a/packages/tentacles_manager/tests/managers/test_tentacles_setup_manager.py b/packages/tentacles_manager/tests/managers/test_tentacles_setup_manager.py
index 3e504521d7..774fdbe98e 100644
--- a/packages/tentacles_manager/tests/managers/test_tentacles_setup_manager.py
+++ b/packages/tentacles_manager/tests/managers/test_tentacles_setup_manager.py
@@ -17,8 +17,9 @@
from shutil import rmtree
from os import walk, path
-from octobot_tentacles_manager.constants import USER_REFERENCE_TENTACLE_CONFIG_PATH, \
- TENTACLES_REQUIREMENTS_INSTALL_TEMP_DIR, TENTACLES_PATH
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
+import octobot_tentacles_manager.constants as tm_constants
+from octobot_tentacles_manager.constants import TENTACLES_REQUIREMENTS_INSTALL_TEMP_DIR, TENTACLES_PATH
from octobot_tentacles_manager.managers.tentacles_setup_manager import TentaclesSetupManager
import tests
@@ -34,7 +35,7 @@ async def test_create_missing_tentacles_arch():
await tentacles_setup_manager.create_missing_tentacles_arch()
trading_mode_files_count = sum(1 for _ in walk(TENTACLES_PATH))
assert trading_mode_files_count == tests.CLEAN_TENTACLES_ARCHITECTURE_FILES_FOLDERS_COUNT
- assert path.exists(USER_REFERENCE_TENTACLE_CONFIG_PATH)
+ assert path.exists(user_root_folder_provider.get_user_reference_tentacle_config_path())
_cleanup()
@@ -45,5 +46,6 @@ def _cleanup():
rmtree(TENTACLES_REQUIREMENTS_INSTALL_TEMP_DIR)
if path.exists(TENTACLES_PATH):
rmtree(TENTACLES_PATH)
- if path.exists(USER_REFERENCE_TENTACLE_CONFIG_PATH):
- rmtree(USER_REFERENCE_TENTACLE_CONFIG_PATH)
+ ref = user_root_folder_provider.get_user_reference_tentacle_config_path()
+ if path.exists(ref):
+ rmtree(ref)
diff --git a/packages/tentacles_manager/tests/workers/test_install_worker.py b/packages/tentacles_manager/tests/workers/test_install_worker.py
index b889876845..f103654d1d 100644
--- a/packages/tentacles_manager/tests/workers/test_install_worker.py
+++ b/packages/tentacles_manager/tests/workers/test_install_worker.py
@@ -20,9 +20,11 @@
from logging import INFO
import octobot_commons.constants as commons_constants
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
+import octobot_tentacles_manager.constants as tm_constants
from octobot_commons.logging.logging_util import set_logging_level
-from octobot_tentacles_manager.constants import USER_REFERENCE_TENTACLE_SPECIFIC_CONFIG_PATH, \
- TENTACLES_REQUIREMENTS_INSTALL_TEMP_DIR, USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH, TENTACLES_PATH, DEFAULT_BOT_PATH, \
+from octobot_tentacles_manager.constants import \
+ TENTACLES_REQUIREMENTS_INSTALL_TEMP_DIR, TENTACLES_PATH, DEFAULT_BOT_PATH, \
UNKNOWN_TENTACLES_PACKAGE_LOCATION, TENTACLES_SPECIFIC_CONFIG_FOLDER
from octobot_tentacles_manager.workers.install_worker import InstallWorker
from octobot_tentacles_manager.models.tentacle import Tentacle
@@ -48,12 +50,12 @@ async def test_install_two_tentacles(clean):
assert trading_mode_files_count == 1
backtesting_mode_files_count = sum(1 for _ in os.walk(os.path.join(TENTACLES_PATH, "Backtesting", "importers")))
assert backtesting_mode_files_count == 7
- config_files = [f for f in os.walk(USER_REFERENCE_TENTACLE_SPECIFIC_CONFIG_PATH)]
+ config_files = [f for f in os.walk(user_root_folder_provider.get_user_reference_tentacle_specific_config_path())]
config_files_count = len(config_files)
assert config_files_count == 1
# test tentacles config
- with open(USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH, "r") as config_f:
+ with open(user_root_folder_provider.get_user_reference_tentacle_config_file_path(), "r") as config_f:
ref_profile_config = json.load(config_f)
assert ref_profile_config == {
'installation_context': {
@@ -88,12 +90,12 @@ async def test_install_one_tentacle_with_requirement(clean):
# test installed files
trading_mode_files_count = sum(1 for _ in os.walk(os.path.join(TENTACLES_PATH, "Trading", "Mode")))
assert trading_mode_files_count == 1
- config_files = [f for f in os.walk(USER_REFERENCE_TENTACLE_SPECIFIC_CONFIG_PATH)]
+ config_files = [f for f in os.walk(user_root_folder_provider.get_user_reference_tentacle_specific_config_path())]
assert len(config_files) == 1
assert len(config_files[0][2]) == 0
# test tentacles config
- with open(USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH, "r") as config_f:
+ with open(user_root_folder_provider.get_user_reference_tentacle_config_file_path(), "r") as config_f:
assert json.load(config_f) == {
'installation_context': {
'octobot_version': 'unknown'
@@ -125,12 +127,12 @@ async def test_install_all_tentacles(clean):
# test installed files
trading_mode_files_count = sum(1 for _ in os.walk(os.path.join(TENTACLES_PATH, "Trading", "Mode")))
assert trading_mode_files_count == 5
- config_files = [f for f in os.walk(USER_REFERENCE_TENTACLE_SPECIFIC_CONFIG_PATH)]
+ config_files = [f for f in os.walk(user_root_folder_provider.get_user_reference_tentacle_specific_config_path())]
config_files_count = len(config_files)
assert config_files_count == 1
# test tentacles config
- with open(USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH, "r") as config_f:
+ with open(user_root_folder_provider.get_user_reference_tentacle_config_file_path(), "r") as config_f:
assert json.load(config_f) == {
'installation_context': {
'octobot_version': 'unknown'
@@ -165,7 +167,7 @@ async def test_install_all_tentacles(clean):
async def test_install_all_tentacles_with_profile(clean):
_enable_loggers()
- profile_path = os.path.join(commons_constants.USER_PROFILES_FOLDER, "many_traded_elements")
+ profile_path = os.path.join(user_root_folder_provider.get_user_profiles_folder(), "many_traded_elements")
assert not os.path.isfile(os.path.join(profile_path, commons_constants.PROFILE_CONFIG_FILE))
tentacles_path = os.path.join("tests", "static", "tentacles_with_profile.zip")
await fetch_and_extract_tentacles(TEMP_DIR, tentacles_path, None)
@@ -204,28 +206,29 @@ async def test_profiles_update(clean, fake_profiles):
assert await worker.process() == 0
# test tentacles setup config
- with open(USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH) as config_f:
+ with open(user_root_folder_provider.get_user_reference_tentacle_config_file_path()) as config_f:
ref_profile_config = json.load(config_f)
-
+ user_profiles = user_root_folder_provider.get_user_profiles_folder()
# test profiles tentacles config
- with open(os.path.join(commons_constants.USER_PROFILES_FOLDER,
+ with open(os.path.join(user_profiles,
commons_constants.DEFAULT_PROFILE,
commons_constants.CONFIG_TENTACLES_FILE)) as default_c:
assert ref_profile_config == json.load(default_c)
- with open(os.path.join(commons_constants.USER_PROFILES_FOLDER,
+ with open(os.path.join(user_profiles,
OTHER_PROFILE,
commons_constants.CONFIG_TENTACLES_FILE)) as other_c:
assert ref_profile_config == json.load(other_c)
# test specific tentacles config
- default_profile_tentacles_config = os.path.join(commons_constants.USER_PROFILES_FOLDER,
+ default_profile_tentacles_config = os.path.join(user_profiles,
commons_constants.DEFAULT_PROFILE,
TENTACLES_SPECIFIC_CONFIG_FOLDER)
- other_profile_tentacles_config = os.path.join(commons_constants.USER_PROFILES_FOLDER,
+ other_profile_tentacles_config = os.path.join(user_profiles,
OTHER_PROFILE,
TENTACLES_SPECIFIC_CONFIG_FOLDER)
- for tentacle_config in os.scandir(os.path.join(os.path.split(USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH)[0],
- TENTACLES_SPECIFIC_CONFIG_FOLDER)):
+ for tentacle_config in os.scandir(os.path.join(os.path.dirname(
+ user_root_folder_provider.get_user_reference_tentacle_config_file_path()),
+ TENTACLES_SPECIFIC_CONFIG_FOLDER)):
with open(tentacle_config) as ref_config_file:
ref_config = json.load(ref_config_file)
with open(os.path.join(default_profile_tentacles_config, tentacle_config.name)) as default_profile_config_file:
@@ -256,7 +259,7 @@ async def test_install_all_tentacles_fetching_requirements(clean):
trading_mode_files_count = sum(1 for _ in os.walk(os.path.join(TENTACLES_PATH, "Trading", "Mode")))
assert trading_mode_files_count == 5
- config_files = [f for f in os.walk(USER_REFERENCE_TENTACLE_SPECIFIC_CONFIG_PATH)]
+ config_files = [f for f in os.walk(user_root_folder_provider.get_user_reference_tentacle_specific_config_path())]
config_files_count = len(config_files)
assert config_files_count == 1
# ensure fetched InstantFluctuationsEvaluator requirement
diff --git a/packages/tentacles_manager/tests/workers/test_uninstall_worker.py b/packages/tentacles_manager/tests/workers/test_uninstall_worker.py
index ccc4cf725f..56530979f5 100644
--- a/packages/tentacles_manager/tests/workers/test_uninstall_worker.py
+++ b/packages/tentacles_manager/tests/workers/test_uninstall_worker.py
@@ -19,9 +19,11 @@
import os
import octobot_commons.constants as commons_constants
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
+import octobot_tentacles_manager.constants as tm_constants
from octobot_commons.logging.logging_util import set_logging_level
from octobot_tentacles_manager.constants import TENTACLES_PATH, \
- USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH, DEFAULT_BOT_PATH, TENTACLE_CONFIG, TENTACLES_EVALUATOR_PATH, \
+ DEFAULT_BOT_PATH, TENTACLE_CONFIG, TENTACLES_EVALUATOR_PATH, \
TENTACLES_EVALUATOR_REALTIME_PATH
from octobot_tentacles_manager.workers.install_worker import InstallWorker
@@ -54,7 +56,7 @@ async def test_uninstall_two_tentacles(clean):
tentacles_files_count_after_uninstall = sum(1 for _ in os.walk(TENTACLES_PATH))
# After uninstalling 2 tentacles, there should be fewer directories than after full install
assert tentacles_files_count_after_uninstall < tentacles_files_count_after_install
- with open(USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH, "r") as config_f:
+ with open(user_root_folder_provider.get_user_reference_tentacle_config_file_path(), "r") as config_f:
assert json.load(config_f) == {
'installation_context': {
'octobot_version': 'unknown'
@@ -111,15 +113,15 @@ async def test_profiles_update(clean, fake_profiles):
assert await uninstall_worker.process(["instant_fluctuations_evaluator", "generic_exchange_importer"]) == 0
# test tentacles setup config
- with open(USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH) as config_f:
+ with open(user_root_folder_provider.get_user_reference_tentacle_config_file_path()) as config_f:
ref_profile_config = json.load(config_f)
-
+ user_profiles = user_root_folder_provider.get_user_profiles_folder()
# test profiles tentacles config
- with open(os.path.join(commons_constants.USER_PROFILES_FOLDER,
+ with open(os.path.join(user_profiles,
commons_constants.DEFAULT_PROFILE,
commons_constants.CONFIG_TENTACLES_FILE)) as default_c:
assert ref_profile_config == json.load(default_c)
- with open(os.path.join(commons_constants.USER_PROFILES_FOLDER,
+ with open(os.path.join(user_profiles,
OTHER_PROFILE,
commons_constants.CONFIG_TENTACLES_FILE)) as other_c:
assert ref_profile_config == json.load(other_c)
@@ -143,7 +145,7 @@ async def test_uninstall_all_tentacles(clean):
assert await uninstall_worker.process() == 0
tentacles_files_count = sum(1 for _ in os.walk(TENTACLES_PATH))
assert tentacles_files_count == CLEAN_TENTACLES_ARCHITECTURE_FILES_FOLDERS_COUNT
- with open(USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH, "r") as config_f:
+ with open(user_root_folder_provider.get_user_reference_tentacle_config_file_path(), "r") as config_f:
assert json.load(config_f) == {
'installation_context': {
'octobot_version': 'unknown'
diff --git a/packages/tentacles_manager/tests/workers/test_update_worker.py b/packages/tentacles_manager/tests/workers/test_update_worker.py
index a632ae8fc0..4a6c948966 100644
--- a/packages/tentacles_manager/tests/workers/test_update_worker.py
+++ b/packages/tentacles_manager/tests/workers/test_update_worker.py
@@ -21,9 +21,10 @@
from os import walk, path
import octobot_commons.constants as commons_constants
+import octobot_commons.user_root_folder_provider as user_root_folder_provider
from octobot_commons.logging.logging_util import set_logging_level
-from octobot_tentacles_manager.constants import USER_REFERENCE_TENTACLE_SPECIFIC_CONFIG_PATH, \
- USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH, TENTACLES_PATH, DEFAULT_BOT_PATH, UNKNOWN_TENTACLES_PACKAGE_LOCATION, \
+import octobot_tentacles_manager.constants as tm_constants
+from octobot_tentacles_manager.constants import TENTACLES_PATH, DEFAULT_BOT_PATH, UNKNOWN_TENTACLES_PACKAGE_LOCATION, \
TENTACLE_CONFIG, TENTACLES_SPECIFIC_CONFIG_FOLDER
from octobot_tentacles_manager.workers.install_worker import InstallWorker
from octobot_tentacles_manager.models.tentacle_factory import TentacleFactory
@@ -58,12 +59,12 @@ async def test_update_two_tentacles(clean):
assert trading_mode_files_count == 1
backtesting_mode_files_count = sum(1 for _ in walk(path.join(TENTACLES_PATH, "Backtesting", "importers")))
assert backtesting_mode_files_count == 7
- config_files = [f for f in walk(USER_REFERENCE_TENTACLE_SPECIFIC_CONFIG_PATH)]
+ config_files = [f for f in walk(user_root_folder_provider.get_user_reference_tentacle_specific_config_path())]
config_files_count = len(config_files)
assert config_files_count == 1
# test tentacles config
- with open(USER_REFERENCE_TENTACLE_CONFIG_FILE_PATH, "r") as config_f:
+ with open(user_root_folder_provider.get_user_reference_tentacle_config_file_path(), "r") as config_f:
assert json.load(config_f) == {
'installation_context': {
'octobot_version': 'unknown'
diff --git a/requirements.txt b/requirements.txt
index 7f617e7a30..1be5f36b03 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -23,6 +23,7 @@ websockets==15.0.1 # used by supabase, a recent version is required, see https:/
# async http requests
aiohttp==3.13.3
+aiofiles==25.1.0
# updating to aiodns==3.2.0 is incompatible (and failing CI)
# raises RuntimeError: aiodns needs a SelectorEventLoop on Windows. See more: https://github.com/saghul/aiodns/issues/86
aiodns==3.1.1 # used by aiohttp