Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion runbot/container.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,7 +259,10 @@ def _docker_run(cmd=False, log_path=False, build_dir=False, container_name=False
else:
run_cmd = cmd
run_cmd = f'cd /data/build;touch start-{container_name};{run_cmd};cd /data/build;touch end-{container_name}'
_logger.info('Docker run command: %s', run_cmd)
run_cmd_repr = str(run_cmd)
if len(run_cmd_repr) > 250:
run_cmd_repr = run_cmd_repr[:250] + '...'
_logger.info('Docker run command: %s', run_cmd_repr)
docker_clear_state(container_name, build_dir) # ensure that no state are remaining
build_dir = file_path(build_dir)

Expand Down
9 changes: 6 additions & 3 deletions runbot/controllers/frontend.py
Original file line number Diff line number Diff line change
Expand Up @@ -323,7 +323,8 @@ def build(self, build_id, search=None, from_batch=None, **post):
@route([
'/runbot/build/search',
], website=True, auth='public', type='http', sitemap=False)
def builds(self, **kwargs):
def builds(self, limit=100, **kwargs):
limit = min(int(limit), 1000)
domain = []
for key in ('config_id', 'version_id', 'project_id', 'trigger_id', 'create_batch_id.bundle_id', 'create_batch_id'): # allowed params
value = kwargs.get(key)
Expand All @@ -337,10 +338,12 @@ def builds(self, **kwargs):

for key in ('description',):
if key in kwargs:
domain.append((f'{key}', 'ilike', kwargs.get(key)))
value = kwargs.get(key)
operator = 'ilike' if '%' in value else '='
domain.append((f'{key}', operator, value))

context = {
'builds': request.env['runbot.build'].search(domain, limit=100),
'builds': request.env['runbot.build'].search(domain, limit=limit),
}

return request.render('runbot.build_search', context)
Expand Down
9 changes: 7 additions & 2 deletions runbot/models/batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ class Batch(models.Model):

last_update = fields.Datetime('Last ref update')
bundle_id = fields.Many2one('runbot.bundle', required=True, index=True, ondelete='cascade')
build_all = fields.Boolean('Force all triggers')
commit_link_ids = fields.Many2many('runbot.commit.link')
commit_ids = fields.Many2many('runbot.commit', compute='_compute_commit_ids')
slot_ids = fields.One2many('runbot.batch.slot', 'batch_id')
Expand Down Expand Up @@ -187,6 +188,7 @@ def _prepare(self, auto_rebase=False, use_base_commits=False):
priority_offset = self.bundle_id.priority_offset
if not priority_offset and self.bundle_id.branch_ids.forwardport_of_id and self.bundle_id.last_batchs == self: # this is the only batch of a forwardported pr.
priority_offset = - 3600 * 5
self.build_all = True # for normal pr, mergebot will request all ci on r+ if needed, for forward port, we need to ensure they are all created or the chain could be blocked
self.priority_level = int(self.create_date.timestamp() - priority_offset)
if use_base_commits:
self._warning('This batch will use base commits instead of bundle commits')
Expand Down Expand Up @@ -383,7 +385,7 @@ def _fill_missing(branch_commits, match_type):
continue
# in any case, search for an existing build
config = trigger.config_id
if not trigger_custom and trigger.light_config_id and not bundle.build_all and not bundle.is_staging and not bundle.is_base:
if not trigger_custom and trigger.light_config_id and not bundle.build_all and not self.build_all and not bundle.is_staging and not bundle.is_base:
if (project.use_light_default
or
project.use_light_draft and any(branch.draft for branch in self.bundle_id.branch_ids)
Expand Down Expand Up @@ -455,7 +457,10 @@ def _start_builds(self):
is_dev = not bundle.is_staging and not bundle.is_base
for trigger in self.slot_ids.trigger_id:
enable_on_bundle = (trigger.on_staging and bundle.is_staging) or (trigger.on_base and bundle.is_base) or (trigger.on_dev and is_dev)
if ((trigger.repo_ids & bundle_repos) or bundle.build_all or bundle.sticky) and enable_on_bundle:
common_repo = (trigger.repo_ids & bundle_repos)
if self.build_all and not common_repo:
common_repo = (trigger.dependency_ids & bundle_repos)
if (common_repo or bundle.build_all or bundle.sticky) and enable_on_bundle:
should_start_triggers_ids.add(trigger.id)

disabled_triggers = self.bundle_id.all_trigger_custom_ids.filtered(lambda tc: tc.start_mode == 'disabled').trigger_id
Expand Down
116 changes: 89 additions & 27 deletions runbot/models/build.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-

import ast
import datetime
import getpass
import hashlib
Expand All @@ -10,23 +10,36 @@
import shutil
import time
import uuid

from collections import defaultdict
from dateutil import parser
from pathlib import Path

from dateutil import parser
from psycopg2 import sql
from psycopg2.extensions import TransactionRollbackError

from ..common import dt2time, now, grep, local_pgadmin_cursor, dest_reg, os, list_local_dbs, pseudo_markdown, RunbotException, findall, sanitize, markdown_escape, tail
from ..container import docker_stop, docker_state, Command, docker_run, docker_pull
from ..fields import JsonDictField

from odoo import models, fields, api

from odoo import api, fields, models
from odoo.exceptions import ValidationError
from odoo.tools import file_open, file_path
from odoo.tools.safe_eval import safe_eval

from ..common import (
RunbotException,
dest_reg,
dt2time,
findall,
grep,
list_local_dbs,
local_pgadmin_cursor,
markdown_escape,
now,
os,
pseudo_markdown,
sanitize,
tail,
transactioncache,
)
from ..container import Command, docker_pull, docker_run, docker_state, docker_stop
from ..fields import JsonDictField

_logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -61,7 +74,6 @@ def remove_readonly(func, path_str, exinfo):
def make_selection(array):
    """Normalize a selection list: bare strings become (value, label) pairs.

    A string element ``'some_value'`` is turned into
    ``('some_value', 'Some value')``; any other element (already a pair) is
    kept untouched.
    """
    def _as_pair(elem):
        if isinstance(elem, str):
            return (elem, elem.replace('_', ' ').capitalize())
        return elem

    return [_as_pair(elem) for elem in array]


class BuildParameters(models.Model):
_name = 'runbot.build.params'
_description = "Build parameters"
Expand Down Expand Up @@ -1091,25 +1103,28 @@ def _checkout(self):

return exports

def _list_available_modules(self):
for commit in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids:
for (addons_path, module, manifest_file_name) in commit._list_available_modules():
yield commit, addons_path, module, manifest_file_name

def _get_available_modules(self):
all_modules = dict()
available_modules = defaultdict(list)
# repo_modules = []
for commit in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids:
for (addons_path, module, manifest_file_name) in commit._get_available_modules():
if module in all_modules:
self._log(
'Building environment',
'%s is a duplicated modules (found in "%s", already defined in %s)' % (
module,
commit._source_path(addons_path, module, manifest_file_name),
all_modules[module]._source_path(addons_path, module, manifest_file_name)),
level='WARNING',
)
else:
available_modules[commit.repo_id].append(module)
all_modules[module] = commit
# return repo_modules, available_modules
for commit, addons_path, module, manifest_file_name in self._list_available_modules():
if module in all_modules:
self._log(
'Building environment',
'%s is a duplicated modules (found in "%s", already defined in %s)' % (
module,
commit._source_path(addons_path, module, manifest_file_name),
all_modules[module]._source_path(addons_path, module, manifest_file_name)),
level='WARNING',
)
else:
available_modules[commit.repo_id].append(module)
all_modules[module] = commit
return available_modules

def _get_modules_to_test(self, modules_patterns=''):
Expand All @@ -1120,6 +1135,49 @@ def _get_modules_to_test(self, modules_patterns=''):
modules_patterns = (modules_patterns or '').split(',')
return trigger._filter_modules_to_test(modules, params_patterns + modules_patterns) # we may switch params_patterns and modules_patterns order

    @transactioncache
    def _dependency_graph(self):
        """Build the module dependency graphs from the commits' manifests.

        Results are cached for the current transaction by ``transactioncache``.

        :return: a ``(dependency_graph, dependant_graph)`` pair of
            ``defaultdict(set)``:
            - ``dependency_graph``: module name -> set of modules it depends on
            - ``dependant_graph``: module name -> set of modules depending on it
        """
        dependency_graph = defaultdict(set)
        dependant_graph = defaultdict(set)
        for commit in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids:
            file_paths = []
            modules = []
            for (addons_path, module, manifest_file_name) in commit._list_available_modules():
                file_paths.append(os.path.join(addons_path, module, manifest_file_name))
                modules.append(module)
            # Fetch all manifest contents in a single git call.
            # NOTE(review): zip() assumes _git_show_files returns contents in
            # the same order as file_paths — confirm against the helper.
            contents = commit._git_show_files(file_paths)
            for module, manifest in zip(modules, contents):
                # Manifests are python literals; literal_eval parses them
                # without executing code.
                manifest_content = ast.literal_eval(manifest)
                depends = manifest_content.get('depends', [])
                if not depends and module != 'base':
                    # every module (except base itself) implicitly depends on base
                    depends = ['base']
                for dep in depends:
                    dependency_graph[module].add(dep)
                    dependant_graph[dep].add(module)
        return dependency_graph, dependant_graph

def search_modules_graph(self, modules, graph, depth=None):
def search(modules, depth=None, visited=None):
visited = visited or set()
modules = set(modules) - visited
visited |= modules
dependencies = set(modules)
if depth == 0 or not modules:
return dependencies
for module in modules:
dependencies |= search(graph[module], depth - 1 if depth is not None else None, visited)
return dependencies
return sorted(search(modules, depth))

def _get_modules_dependencies(self, modules, depth=None):
self.ensure_one()
dependency_graph, _ = self._dependency_graph()
return self.search_modules_graph(modules, dependency_graph, depth)

def _get_dependant_modules(self, modules, depth=None):
_, dependant_graph = self._dependency_graph()
return self.search_modules_graph(modules, dependant_graph, depth)

def _local_pg_dropdb(self, dbname):
msg = ''
try:
Expand Down Expand Up @@ -1249,13 +1307,17 @@ def _modified_files(self, commit_link_links=None):
modified_files[commit_link] = files
return modified_files

def _modified_modules(self, commit_link_links=None):
def _modified_modules(self, commit_link_links=None, defaults=None):
modified_files = self._modified_files(commit_link_links)
modified_modules = set()
for commit_link, files in modified_files.items():
commit = commit_link.commit_id
for file in files:
modified_modules.add(commit.repo_id._get_module(file))
module = commit.repo_id._get_module(file)
if module:
modified_modules.add(module)
elif defaults:
modified_modules |= set(defaults)
return modified_modules

def _get_upgrade_path(self):
Expand Down
Loading