Skip to content

Commit fde039e

Browse files
Xavier-Dod-fence
authored and committed
[IMP] runbot: allow to filter on dependencies
This commit adds the possibility to filter modules based on their dependencies and dependants in the build configuration. This makes it easy to trigger tests on modules that are impacted by changes, even if they are not directly modified. To make it work, the module listing is now done directly in the git repository, without exporting sources; this should give faster builds when the only task is to create builds based on modified modules. It should also — and mostly — help to test a dynamic config without running it.
1 parent 336b66f commit fde039e

13 files changed

Lines changed: 483 additions & 129 deletions

File tree

runbot/container.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -259,7 +259,10 @@ def _docker_run(cmd=False, log_path=False, build_dir=False, container_name=False
259259
else:
260260
run_cmd = cmd
261261
run_cmd = f'cd /data/build;touch start-{container_name};{run_cmd};cd /data/build;touch end-{container_name}'
262-
_logger.info('Docker run command: %s', run_cmd)
262+
run_cmd_repr = str(run_cmd)
263+
if len(run_cmd_repr) > 250:
264+
run_cmd_repr = run_cmd_repr[:250] + '...'
265+
_logger.info('Docker run command: %s', run_cmd_repr)
263266
docker_clear_state(container_name, build_dir) # ensure that no state are remaining
264267
build_dir = file_path(build_dir)
265268

runbot/controllers/frontend.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -323,7 +323,8 @@ def build(self, build_id, search=None, from_batch=None, **post):
323323
@route([
324324
'/runbot/build/search',
325325
], website=True, auth='public', type='http', sitemap=False)
326-
def builds(self, **kwargs):
326+
def builds(self, limit=100, **kwargs):
327+
limit = min(int(limit), 1000)
327328
domain = []
328329
for key in ('config_id', 'version_id', 'project_id', 'trigger_id', 'create_batch_id.bundle_id', 'create_batch_id'): # allowed params
329330
value = kwargs.get(key)
@@ -337,10 +338,12 @@ def builds(self, **kwargs):
337338

338339
for key in ('description',):
339340
if key in kwargs:
340-
domain.append((f'{key}', 'ilike', kwargs.get(key)))
341+
value = kwargs.get(key)
342+
operator = 'ilike' if '%' in value else '='
343+
domain.append((f'{key}', operator, value))
341344

342345
context = {
343-
'builds': request.env['runbot.build'].search(domain, limit=100),
346+
'builds': request.env['runbot.build'].search(domain, limit=limit),
344347
}
345348

346349
return request.render('runbot.build_search', context)

runbot/models/batch.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ class Batch(models.Model):
1515

1616
last_update = fields.Datetime('Last ref update')
1717
bundle_id = fields.Many2one('runbot.bundle', required=True, index=True, ondelete='cascade')
18+
build_all = fields.Boolean('Force all triggers')
1819
commit_link_ids = fields.Many2many('runbot.commit.link')
1920
commit_ids = fields.Many2many('runbot.commit', compute='_compute_commit_ids')
2021
slot_ids = fields.One2many('runbot.batch.slot', 'batch_id')
@@ -187,6 +188,7 @@ def _prepare(self, auto_rebase=False, use_base_commits=False):
187188
priority_offset = self.bundle_id.priority_offset
188189
if not priority_offset and self.bundle_id.branch_ids.forwardport_of_id and self.bundle_id.last_batchs == self: # this is the only batch of a forwardported pr.
189190
priority_offset = - 3600 * 5
191+
self.build_all = True # for normal pr, mergebot will request all ci on r+ if needed, for forward port, we need to ensure they are all created or the chain could be blocked
190192
self.priority_level = int(self.create_date.timestamp() - priority_offset)
191193
if use_base_commits:
192194
self._warning('This batch will use base commits instead of bundle commits')
@@ -383,7 +385,7 @@ def _fill_missing(branch_commits, match_type):
383385
continue
384386
# in any case, search for an existing build
385387
config = trigger.config_id
386-
if not trigger_custom and trigger.light_config_id and not bundle.build_all and not bundle.is_staging and not bundle.is_base:
388+
if not trigger_custom and trigger.light_config_id and not bundle.build_all and not self.build_all and not bundle.is_staging and not bundle.is_base:
387389
if (project.use_light_default
388390
or
389391
project.use_light_draft and any(branch.draft for branch in self.bundle_id.branch_ids)
@@ -455,7 +457,10 @@ def _start_builds(self):
455457
is_dev = not bundle.is_staging and not bundle.is_base
456458
for trigger in self.slot_ids.trigger_id:
457459
enable_on_bundle = (trigger.on_staging and bundle.is_staging) or (trigger.on_base and bundle.is_base) or (trigger.on_dev and is_dev)
458-
if ((trigger.repo_ids & bundle_repos) or bundle.build_all or bundle.sticky) and enable_on_bundle:
460+
common_repo = (trigger.repo_ids & bundle_repos)
461+
if self.build_all and not common_repo:
462+
common_repo = (trigger.dependency_ids & bundle_repos)
463+
if (common_repo or bundle.build_all or bundle.sticky) and enable_on_bundle:
459464
should_start_triggers_ids.add(trigger.id)
460465

461466
disabled_triggers = self.bundle_id.all_trigger_custom_ids.filtered(lambda tc: tc.start_mode == 'disabled').trigger_id

runbot/models/build.py

Lines changed: 89 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
# -*- coding: utf-8 -*-
2-
2+
import ast
33
import datetime
44
import getpass
55
import hashlib
@@ -10,23 +10,36 @@
1010
import shutil
1111
import time
1212
import uuid
13-
1413
from collections import defaultdict
15-
from dateutil import parser
1614
from pathlib import Path
15+
16+
from dateutil import parser
1717
from psycopg2 import sql
1818
from psycopg2.extensions import TransactionRollbackError
1919

20-
from ..common import dt2time, now, grep, local_pgadmin_cursor, dest_reg, os, list_local_dbs, pseudo_markdown, RunbotException, findall, sanitize, markdown_escape, tail
21-
from ..container import docker_stop, docker_state, Command, docker_run, docker_pull
22-
from ..fields import JsonDictField
23-
24-
from odoo import models, fields, api
25-
20+
from odoo import api, fields, models
2621
from odoo.exceptions import ValidationError
2722
from odoo.tools import file_open, file_path
2823
from odoo.tools.safe_eval import safe_eval
2924

25+
from ..common import (
26+
RunbotException,
27+
dest_reg,
28+
dt2time,
29+
findall,
30+
grep,
31+
list_local_dbs,
32+
local_pgadmin_cursor,
33+
markdown_escape,
34+
now,
35+
os,
36+
pseudo_markdown,
37+
sanitize,
38+
tail,
39+
transactioncache,
40+
)
41+
from ..container import Command, docker_pull, docker_run, docker_state, docker_stop
42+
from ..fields import JsonDictField
3043

3144
_logger = logging.getLogger(__name__)
3245

@@ -61,7 +74,6 @@ def remove_readonly(func, path_str, exinfo):
6174
def make_selection(array):
6275
return [(elem, elem.replace('_', ' ').capitalize()) if isinstance(elem, str) else elem for elem in array]
6376

64-
6577
class BuildParameters(models.Model):
6678
_name = 'runbot.build.params'
6779
_description = "Build parameters"
@@ -1091,25 +1103,28 @@ def _checkout(self):
10911103

10921104
return exports
10931105

1106+
def _list_available_modules(self):
    """Yield ``(commit, addons_path, module, manifest_file_name)`` for every
    module reachable from this build's commits.

    Commits come from the ``defined_commit_ids`` context key when present
    (allows probing a configuration without a real build), otherwise from
    the build parameters.
    """
    commits = self.env.context.get('defined_commit_ids') or self.params_id.commit_ids
    for commit in commits:
        for addons_path, module, manifest_file_name in commit._list_available_modules():
            yield (commit, addons_path, module, manifest_file_name)
1110+
10941111
def _get_available_modules(self):
    """Group the available module names by repository.

    :return: a ``defaultdict(list)`` mapping ``commit.repo_id`` to the list
        of module names found in that commit. When the same module name is
        found in more than one place, only the first occurrence is kept and
        a WARNING is logged on the build.
    """
    # module name -> (commit, addons_path, manifest_file_name) of the FIRST
    # occurrence, so the duplicate warning can show the correct original path
    # (previously only the commit was stored and the duplicate's addons_path
    # was reused, producing a wrong "already defined in" path).
    all_modules = {}
    available_modules = defaultdict(list)
    for commit, addons_path, module, manifest_file_name in self._list_available_modules():
        if module in all_modules:
            first_commit, first_addons_path, first_manifest = all_modules[module]
            self._log(
                'Building environment',
                '%s is a duplicated module (found in "%s", already defined in %s)' % (
                    module,
                    commit._source_path(addons_path, module, manifest_file_name),
                    first_commit._source_path(first_addons_path, module, first_manifest)),
                level='WARNING',
            )
        else:
            available_modules[commit.repo_id].append(module)
            all_modules[module] = (commit, addons_path, manifest_file_name)
    return available_modules
11141129

11151130
def _get_modules_to_test(self, modules_patterns=''):
@@ -1120,6 +1135,49 @@ def _get_modules_to_test(self, modules_patterns=''):
11201135
modules_patterns = (modules_patterns or '').split(',')
11211136
return trigger._filter_modules_to_test(modules, params_patterns + modules_patterns) # we may switch params_patterns and modules_patterns order
11221137

1138+
@transactioncache
def _dependency_graph(self):
    """Build the module dependency graphs from the manifests in git.

    Manifests are read with ``git show`` directly from the repository (no
    source export needed), so this works without checking out the build.

    :return: a ``(dependency_graph, dependant_graph)`` pair of
        ``defaultdict(set)``: ``dependency_graph[m]`` is the set of modules
        ``m`` depends on, ``dependant_graph[m]`` the set of modules
        depending on ``m``.
    """
    dependency_graph = defaultdict(set)
    dependant_graph = defaultdict(set)
    for commit in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids:
        file_paths = []
        modules = []
        for (addons_path, module, manifest_file_name) in commit._list_available_modules():
            file_paths.append(os.path.join(addons_path, module, manifest_file_name))
            modules.append(module)
        # batch the reads: a single git call per commit for all manifests
        contents = commit._git_show_files(file_paths)
        for module, manifest in zip(modules, contents):
            try:
                manifest_content = ast.literal_eval(manifest)
            except (ValueError, SyntaxError):
                # one malformed manifest should not abort the whole graph
                # computation (e.g. when probing a dynamic config)
                _logger.warning('Invalid manifest for module %s, skipping', module)
                continue
            depends = manifest_content.get('depends', [])
            if not depends and module != 'base':
                # every module implicitly depends on base
                depends = ['base']
            for dep in depends:
                dependency_graph[module].add(dep)
                dependant_graph[dep].add(module)
    return dependency_graph, dependant_graph
1158+
1159+
def search_modules_graph(self, modules, graph, depth=None):
    """Return the sorted transitive closure of *modules* in *graph*.

    :param modules: iterable of starting module names (included in result)
    :param graph: mapping module -> iterable of neighbour modules
    :param depth: maximum number of edges to follow; ``None`` means
        unlimited, ``0`` returns only the starting modules
    :return: sorted list of reached module names
    """
    def _walk(frontier, remaining, visited):
        frontier = set(frontier) - visited
        visited |= frontier  # in-place: the shared set marks each node expanded once
        reached = set(frontier)
        if remaining == 0 or not frontier:
            return reached
        next_depth = None if remaining is None else remaining - 1
        for node in frontier:
            reached |= _walk(graph[node], next_depth, visited)
        return reached

    return sorted(_walk(modules, depth, set()))
1171+
1172+
def _get_modules_dependencies(self, modules, depth=None):
    """Return the sorted set of modules that *modules* (transitively) depend on."""
    self.ensure_one()
    graph = self._dependency_graph()[0]
    return self.search_modules_graph(modules, graph, depth)
1176+
1177+
def _get_dependant_modules(self, modules, depth=None):
    """Return the sorted set of modules that (transitively) depend on *modules*.

    Mirror of :meth:`_get_modules_dependencies`, walking the dependency
    graph in the opposite direction.
    """
    self.ensure_one()  # consistency with _get_modules_dependencies: the graph is per-build
    _, dependant_graph = self._dependency_graph()
    return self.search_modules_graph(modules, dependant_graph, depth)
1180+
11231181
def _local_pg_dropdb(self, dbname):
11241182
msg = ''
11251183
try:
@@ -1249,13 +1307,17 @@ def _modified_files(self, commit_link_links=None):
12491307
modified_files[commit_link] = files
12501308
return modified_files
12511309

1252-
def _modified_modules(self, commit_link_links=None):
1310+
def _modified_modules(self, commit_link_links=None, defaults=None):
    """Return the set of module names touched by the modified files.

    Files that cannot be mapped to a module (``repo._get_module`` returns a
    falsy value) contribute the *defaults* modules instead, when given.
    """
    default_modules = set(defaults) if defaults else set()
    modified_modules = set()
    for commit_link, files in self._modified_files(commit_link_links).items():
        repo = commit_link.commit_id.repo_id
        for file in files:
            module = repo._get_module(file)
            if module:
                modified_modules.add(module)
            elif default_modules:
                modified_modules |= default_modules
    return modified_modules
12601322

12611323
def _get_upgrade_path(self):

0 commit comments

Comments
 (0)