diff --git a/runbot/container.py b/runbot/container.py index 679ea35f1..f52456c1e 100644 --- a/runbot/container.py +++ b/runbot/container.py @@ -259,7 +259,10 @@ def _docker_run(cmd=False, log_path=False, build_dir=False, container_name=False else: run_cmd = cmd run_cmd = f'cd /data/build;touch start-{container_name};{run_cmd};cd /data/build;touch end-{container_name}' - _logger.info('Docker run command: %s', run_cmd) + run_cmd_repr = str(run_cmd) + if len(run_cmd_repr) > 250: + run_cmd_repr = run_cmd_repr[:250] + '...' + _logger.info('Docker run command: %s', run_cmd_repr) docker_clear_state(container_name, build_dir) # ensure that no state are remaining build_dir = file_path(build_dir) diff --git a/runbot/controllers/frontend.py b/runbot/controllers/frontend.py index 3110aadd4..a8f15e44a 100644 --- a/runbot/controllers/frontend.py +++ b/runbot/controllers/frontend.py @@ -323,7 +323,8 @@ def build(self, build_id, search=None, from_batch=None, **post): @route([ '/runbot/build/search', ], website=True, auth='public', type='http', sitemap=False) - def builds(self, **kwargs): + def builds(self, limit=100, **kwargs): + limit = min(int(limit), 1000) domain = [] for key in ('config_id', 'version_id', 'project_id', 'trigger_id', 'create_batch_id.bundle_id', 'create_batch_id'): # allowed params value = kwargs.get(key) @@ -337,10 +338,12 @@ def builds(self, **kwargs): for key in ('description',): if key in kwargs: - domain.append((f'{key}', 'ilike', kwargs.get(key))) + value = kwargs.get(key) + operator = 'ilike' if '%' in value else '=' + domain.append((f'{key}', operator, value)) context = { - 'builds': request.env['runbot.build'].search(domain, limit=100), + 'builds': request.env['runbot.build'].search(domain, limit=limit), } return request.render('runbot.build_search', context) diff --git a/runbot/models/batch.py b/runbot/models/batch.py index 331f44b51..6cd395827 100644 --- a/runbot/models/batch.py +++ b/runbot/models/batch.py @@ -15,6 +15,7 @@ class 
Batch(models.Model): last_update = fields.Datetime('Last ref update') bundle_id = fields.Many2one('runbot.bundle', required=True, index=True, ondelete='cascade') + build_all = fields.Boolean('Force all triggers') commit_link_ids = fields.Many2many('runbot.commit.link') commit_ids = fields.Many2many('runbot.commit', compute='_compute_commit_ids') slot_ids = fields.One2many('runbot.batch.slot', 'batch_id') @@ -187,6 +188,7 @@ def _prepare(self, auto_rebase=False, use_base_commits=False): priority_offset = self.bundle_id.priority_offset if not priority_offset and self.bundle_id.branch_ids.forwardport_of_id and self.bundle_id.last_batchs == self: # this is the only batch of a forwardported pr. priority_offset = - 3600 * 5 + self.build_all = True # for normal pr, mergebot will request all ci on r+ if needed, for forward port, we need to ensure they are all created or the chain could be blocked self.priority_level = int(self.create_date.timestamp() - priority_offset) if use_base_commits: self._warning('This batch will use base commits instead of bundle commits') @@ -383,7 +385,7 @@ def _fill_missing(branch_commits, match_type): continue # in any case, search for an existing build config = trigger.config_id - if not trigger_custom and trigger.light_config_id and not bundle.build_all and not bundle.is_staging and not bundle.is_base: + if not trigger_custom and trigger.light_config_id and not bundle.build_all and not self.build_all and not bundle.is_staging and not bundle.is_base: if (project.use_light_default or project.use_light_draft and any(branch.draft for branch in self.bundle_id.branch_ids) @@ -455,7 +457,10 @@ def _start_builds(self): is_dev = not bundle.is_staging and not bundle.is_base for trigger in self.slot_ids.trigger_id: enable_on_bundle = (trigger.on_staging and bundle.is_staging) or (trigger.on_base and bundle.is_base) or (trigger.on_dev and is_dev) - if ((trigger.repo_ids & bundle_repos) or bundle.build_all or bundle.sticky) and enable_on_bundle: + 
common_repo = (trigger.repo_ids & bundle_repos) + if self.build_all and not common_repo: + common_repo = (trigger.dependency_ids & bundle_repos) + if (common_repo or bundle.build_all or bundle.sticky) and enable_on_bundle: should_start_triggers_ids.add(trigger.id) disabled_triggers = self.bundle_id.all_trigger_custom_ids.filtered(lambda tc: tc.start_mode == 'disabled').trigger_id diff --git a/runbot/models/build.py b/runbot/models/build.py index 2a5ab26d6..340aec464 100644 --- a/runbot/models/build.py +++ b/runbot/models/build.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- - +import ast import datetime import getpass import hashlib @@ -10,23 +10,36 @@ import shutil import time import uuid - from collections import defaultdict -from dateutil import parser from pathlib import Path + +from dateutil import parser from psycopg2 import sql from psycopg2.extensions import TransactionRollbackError -from ..common import dt2time, now, grep, local_pgadmin_cursor, dest_reg, os, list_local_dbs, pseudo_markdown, RunbotException, findall, sanitize, markdown_escape, tail -from ..container import docker_stop, docker_state, Command, docker_run, docker_pull -from ..fields import JsonDictField - -from odoo import models, fields, api - +from odoo import api, fields, models from odoo.exceptions import ValidationError from odoo.tools import file_open, file_path from odoo.tools.safe_eval import safe_eval +from ..common import ( + RunbotException, + dest_reg, + dt2time, + findall, + grep, + list_local_dbs, + local_pgadmin_cursor, + markdown_escape, + now, + os, + pseudo_markdown, + sanitize, + tail, + transactioncache, +) +from ..container import Command, docker_pull, docker_run, docker_state, docker_stop +from ..fields import JsonDictField _logger = logging.getLogger(__name__) @@ -61,7 +74,6 @@ def remove_readonly(func, path_str, exinfo): def make_selection(array): return [(elem, elem.replace('_', ' ').capitalize()) if isinstance(elem, str) else elem for elem in array] - class 
BuildParameters(models.Model): _name = 'runbot.build.params' _description = "Build parameters" @@ -1091,25 +1103,28 @@ def _checkout(self): return exports + def _list_available_modules(self): + for commit in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids: + for (addons_path, module, manifest_file_name) in commit._list_available_modules(): + yield commit, addons_path, module, manifest_file_name + def _get_available_modules(self): all_modules = dict() available_modules = defaultdict(list) # repo_modules = [] - for commit in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids: - for (addons_path, module, manifest_file_name) in commit._get_available_modules(): - if module in all_modules: - self._log( - 'Building environment', - '%s is a duplicated modules (found in "%s", already defined in %s)' % ( - module, - commit._source_path(addons_path, module, manifest_file_name), - all_modules[module]._source_path(addons_path, module, manifest_file_name)), - level='WARNING', - ) - else: - available_modules[commit.repo_id].append(module) - all_modules[module] = commit - # return repo_modules, available_modules + for commit, addons_path, module, manifest_file_name in self._list_available_modules(): + if module in all_modules: + self._log( + 'Building environment', + '%s is a duplicated modules (found in "%s", already defined in %s)' % ( + module, + commit._source_path(addons_path, module, manifest_file_name), + all_modules[module]._source_path(addons_path, module, manifest_file_name)), + level='WARNING', + ) + else: + available_modules[commit.repo_id].append(module) + all_modules[module] = commit return available_modules def _get_modules_to_test(self, modules_patterns=''): @@ -1120,6 +1135,49 @@ def _get_modules_to_test(self, modules_patterns=''): modules_patterns = (modules_patterns or '').split(',') return trigger._filter_modules_to_test(modules, params_patterns + modules_patterns) # we may switch params_patterns and modules_patterns 
order + @transactioncache + def _dependency_graph(self): + dependency_graph = defaultdict(set) + dependant_graph = defaultdict(set) + for commit in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids: + file_paths = [] + modules = [] + for (addons_path, module, manifest_file_name) in commit._list_available_modules(): + file_paths.append(os.path.join(addons_path, module, manifest_file_name)) + modules.append(module) + contents = commit._git_show_files(file_paths) + for module, manifest in zip(modules, contents): + manifest_content = ast.literal_eval(manifest) + depends = manifest_content.get('depends', []) + if not depends and module != 'base': + depends = ['base'] + for dep in depends: + dependency_graph[module].add(dep) + dependant_graph[dep].add(module) + return dependency_graph, dependant_graph + + def search_modules_graph(self, modules, graph, depth=None): + def search(modules, depth=None, visited=None): + visited = visited or set() + modules = set(modules) - visited + visited |= modules + dependencies = set(modules) + if depth == 0 or not modules: + return dependencies + for module in modules: + dependencies |= search(graph[module], depth - 1 if depth is not None else None, visited) + return dependencies + return sorted(search(modules, depth)) + + def _get_modules_dependencies(self, modules, depth=None): + self.ensure_one() + dependency_graph, _ = self._dependency_graph() + return self.search_modules_graph(modules, dependency_graph, depth) + + def _get_dependant_modules(self, modules, depth=None): + _, dependant_graph = self._dependency_graph() + return self.search_modules_graph(modules, dependant_graph, depth) + def _local_pg_dropdb(self, dbname): msg = '' try: @@ -1249,13 +1307,17 @@ def _modified_files(self, commit_link_links=None): modified_files[commit_link] = files return modified_files - def _modified_modules(self, commit_link_links=None): + def _modified_modules(self, commit_link_links=None, defaults=None): modified_files = 
self._modified_files(commit_link_links) modified_modules = set() for commit_link, files in modified_files.items(): commit = commit_link.commit_id for file in files: - modified_modules.add(commit.repo_id._get_module(file)) + module = commit.repo_id._get_module(file) + if module: + modified_modules.add(module) + elif defaults: + modified_modules |= set(defaults) return modified_modules def _get_upgrade_path(self): diff --git a/runbot/models/build_config.py b/runbot/models/build_config.py index 19f8c908a..0c86e5eb8 100644 --- a/runbot/models/build_config.py +++ b/runbot/models/build_config.py @@ -46,8 +46,21 @@ def filter_all_modules(selector, build, dynamic_vars): return filter_default_modules(selector, build, dynamic_vars) +def get_dependencies(modules, build, dynamic_vars, depth=None): + depth = int(depth) if depth else None + modules = modules.split(',') + dependant = set(build._get_modules_dependencies(modules, depth)) - set(modules) + return ','.join(sorted(dependant)) + + +def get_dependant(modules, build, dynamic_vars, depth=None): + depth = int(depth) if depth else None + modules = modules.split(',') + dependant = set(build._get_dependant_modules(modules, depth)) - set(modules) + return ','.join(sorted(dependant)) + + def filter_default_modules(selector, build, dynamic_vars): - build._checkout() # we need to ensure source are exported before _get_modules_to_test modules = build._get_modules_to_test(selector) return ','.join(modules) @@ -57,22 +70,17 @@ def select_existing_modules(selector, build, dynamic_vars): return filter_default_modules(selector, build, dynamic_vars) -def keep_modified_modules(modules, build, dynamic_vars): +def keep_modified_modules(modules, build, dynamic_vars, *defaults): if build.params_id.config_data.get('skip_modified_modules_filter', False): return modules - modified_modules = build._modified_modules() + if defaults: + defaults = [d[1:-1] if re.match(r'^[\'"].*[\'"]$', d) else d for d in defaults] + modified_modules = 
build._modified_modules(defaults=defaults) modules = modules.split(',') filtered_modules = [module for module in modules if module in modified_modules] return ','.join(filtered_modules) -def keep_modified_modules_or_base(modules, build, dynamic_vars): - bundle = build.params_id.create_batch_id.bundle_id - if bundle.is_base or bundle.is_staging: - return modules - return keep_modified_modules(modules, build, dynamic_vars) - - def make_module_test_tags(modules, build, dynamic_vars): return ','.join([f'/{module}' for module in modules.split(',')]) @@ -93,6 +101,17 @@ def append_string(modules, build, dynamic_vars, element): return ','.join([f'{module}{element}' for module in modules.split(',')]) +def union(modules, build, dynamic_vars, element): + if re.match(r'^[\'"].*[\'"]$', element): + element = element[1:-1] + else: + element = dynamic_vars.get(element, element) + element = element.strip() + modules = set(modules.split(',')) if modules else set() + new_modules = set(element.split(',')) if element else set() + return ','.join(sorted(modules | new_modules)) + + class Config(models.Model): _name = 'runbot.build.config' _description = "Build config" @@ -227,11 +246,15 @@ def wrapper(value, path): return wrapper def VARS(vars, path): - if not isinstance(vars, dict): - raise ValidationError(f'{path} ({vars}) should be a dict') - for key, val in vars.items(): - TECHNICAL_NAME(key, f'{path}.{key}') - STR(val, f'{path}.{key}') + if isinstance(vars, list): + for item in vars: + VARS(item, path) + else: + if not isinstance(vars, dict): + raise ValidationError(f'{path} ({vars}) should be a dict') + for key, val in vars.items(): + TECHNICAL_NAME(key, f'{path}.{key}') + STR(val, f'{path}.{key}') NAME = str_checker(r'^[\w \-]+$') STR = str_checker(r'.*') @@ -246,6 +269,7 @@ def VARS(vars, path): 'vars': OPTIONAL(VARS), 'steps': REQUIRED(LIST(STEP)), 'description': OPTIONAL(DYNAMIC_VALUE), + 'log': OPTIONAL(DYNAMIC_VALUE), } valid_steps['odoo'] = { 'name': REQUIRED(NAME), @@ 
-260,6 +284,7 @@ def VARS(vars, path): 'cpu_limit': OPTIONAL(INT), 'export_database': OPTIONAL(BOOL), 'make_stats': OPTIONAL(BOOL), + 'log': OPTIONAL(DYNAMIC_VALUE), } valid_steps['create_build'] = { 'name': REQUIRED(NAME), @@ -268,6 +293,8 @@ def VARS(vars, path): 'for_each_vars': OPTIONAL(LIST(VARS)), 'for_each_module': OPTIONAL(DYNAMIC_VALUE), 'max_builds': OPTIONAL(INT), + 'if': OPTIONAL(DYNAMIC_VALUE), + 'log': OPTIONAL(DYNAMIC_VALUE), } valid_steps['restore'] = { 'name': REQUIRED(NAME), @@ -277,6 +304,7 @@ def VARS(vars, path): 'trigger_id': OPTIONAL(INT), 'use_current_batch': OPTIONAL(BOOL), 'zip_url': OPTIONAL(STR), + 'log': OPTIONAL(DYNAMIC_VALUE), } valid_steps['command'] = { 'name': REQUIRED(NAME), @@ -289,6 +317,7 @@ def VARS(vars, path): 'check_logs': OPTIONAL(LIST(STR)), 'expected_logs': OPTIONAL(LIST(STR)), 'make_stats': OPTIONAL(BOOL), + 'log': OPTIONAL(DYNAMIC_VALUE), } validate(config_schema, config, 'config') @@ -1192,7 +1221,7 @@ def _coverage_params(self, build, modules_to_install): docker_source_folder = build._docker_source_folder(commit) for manifest_file in commit.repo_id.manifest_files.split(','): pattern_to_omit.add('*%s' % manifest_file) - for (addons_path, module, _) in commit._get_available_modules(): + for (addons_path, module, _) in commit._list_available_modules(): if module not in modules_to_install: # we want to omit docker_source_folder/[addons/path/]module/* module_path_in_docker = os.sep.join([docker_source_folder, addons_path, module]) @@ -1505,35 +1534,70 @@ def _run_dynamic(self, build): raise RunbotException('Too many ancestors builds, possible cyclic dynamic build creation') if build.parent_id and build.dynamic_config == build.parent_id.dynamic_config: raise RunbotException('A child build cannot load the same dynamic config if parent, recursion detected') + + config_vars_list = build.dynamic_config.get('vars', {}) + if not isinstance(config_vars_list, list): + config_vars_list = [config_vars_list] + raw_vars = {} + for 
config_vars in config_vars_list: + raw_vars.update(config_vars) + + raw_vars.update(build.params_id.config_data.get('dynamic_vars', {})) + dynamic_vars = {} + # dynamic_vars can either be raw value like 'account', value to evaluate lazily in another dynamic value like 'account->!mail' + # or dynamic value that we want to evaluate early like '{{*|filter_all_modules|modified_modules}}' (between {{}}) + # this loop will evaluate the third category + # this allows to evaluate only once an expression that could be expensive to use it in multiple dynamic values + # this also allows to clarify the config by chaining vars definition + # TODO check ordering + for key, value in raw_vars.items(): + dynamic_vars[key] = self._parse_dynamic_entry(value, build, dynamic_vars=dynamic_vars) + current_step = self._get_dynamic_step(build) if not current_step: build._log('Dynamic Step', 'No dynamic config or steps found, skipping', level="WARNING") return + if current_step.get('log'): + text = self._parse_dynamic_entry(current_step['log'], build, dynamic_vars=dynamic_vars) + build._log('_run_dynamic', text) if current_step['job_type'] == 'create_build': for_each_vars_list = current_step.get('for_each_vars', [{}]) if 'for_each_module' in current_step: modules_vars = [] for for_each_vars in for_each_vars_list: - modules_entry = self._parse_dynamic_entry(current_step['for_each_module'], build, additional_dynamic_vars=for_each_vars) + modules_entry = self._parse_dynamic_entry(current_step['for_each_module'], build, dynamic_vars={**dynamic_vars, **for_each_vars}) modules = [m.strip() for m in modules_entry.split(',') if m.strip()] for module in modules: module_vars = {**for_each_vars, 'module': module} modules_vars.append(module_vars) for_each_vars_list = modules_vars - parent_vars = {**build.dynamic_config.get('vars', {}), **build.params_id.config_data.get('dynamic_vars', {})} + child_data_list = [] for child_index, child in enumerate(current_step.get('children', [])): child_vars = 
child.get('vars', {}) for for_each_vars in for_each_vars_list: config_name = child.get('name', build.params_id.config_id.name) - dynamic_vars = {**parent_vars, **child_vars, **for_each_vars} + raw_dynamic_vars = {**dynamic_vars, **for_each_vars, **child_vars} + child_dynamic_vars = {} + # evaluate for_each_vars + for key, value in raw_dynamic_vars.items(): + child_dynamic_vars[key] = self._parse_dynamic_entry(value, build, dynamic_vars=child_dynamic_vars) + if 'if' in current_step: + condition = self._parse_dynamic_entry(current_step['if'], build, dynamic_vars=child_dynamic_vars) + if not condition: + continue if 'description' in child: - description = self._parse_dynamic_entry(child['description'], build, additional_dynamic_vars=dynamic_vars) + description = self._parse_dynamic_entry(child['description'], build, dynamic_vars=child_dynamic_vars) # note: we mainly need to provide additional_dynamic_vars because the child is not created yet at this point else: description = config_name + # filter vars not prefixed with _ to simplify child values + if child.get('log'): + text = self._parse_dynamic_entry(child['log'], build, dynamic_vars=child_dynamic_vars) + build._log('_run_dynamic', text) + public_child_dynamic_vars = {key: value for key, value in child_dynamic_vars.items() if not key.startswith('_')} child_data = { - 'config_data': {**build.params_id.config_data.dict, "dynamic_vars": dynamic_vars}, + 'config_data': {**build.params_id.config_data.dict, "dynamic_vars": public_child_dynamic_vars}, 'config_id': build.params_id.config_id.id, 'dynamic_active_step_index': 0, 'dynamic_config_position': f'{build.params_id.dynamic_config_position or ""}/{build.dynamic_active_step_index}.{child_index}', @@ -1564,14 +1628,14 @@ def _run_dynamic(self, build): install_modules_pattern = current_step.get('install_modules', '') if install_modules_pattern.split(',', 1)[0] not in ('*', '-*'): install_modules_pattern = '-*,' + install_modules_pattern - 
config_data['install_module_pattern'] = self._parse_dynamic_entry(install_modules_pattern, build) + config_data['install_module_pattern'] = self._parse_dynamic_entry(install_modules_pattern, build, dynamic_vars) if 'test_tags' in current_step: - config_data['test_tags'] = self._parse_dynamic_entry(current_step.get('test_tags'), build) + config_data['test_tags'] = self._parse_dynamic_entry(current_step.get('test_tags'), build, dynamic_vars) config_data['test_enable'] = bool(current_step.get('test_enable') or current_step.get('test_tags')) if 'extra_params' in current_step: - config_data['extra_params'] = self._parse_dynamic_entry(current_step.get('extra_params'), build) + config_data['extra_params'] = self._parse_dynamic_entry(current_step.get('extra_params'), build, dynamic_vars) for key in ('screencast', 'demo_mode', 'enable_auto_tags'): if key in current_step: @@ -1593,6 +1657,7 @@ def _run_dynamic(self, build): 'addons_path': ",".join(build._get_addons_path()), 'exports': ",".join(exports.keys()), 'exports_paths': ",".join(exports.values()), + **dynamic_vars, } command = [shlex.quote(self._parse_dynamic_entry(part, build, values)) for part in command] pres = [] @@ -1614,23 +1679,23 @@ def _get_dynamic_db_suffix(self, step): db_suffix = re.sub(r'[^a-z0-9_\-]', '_', db_suffix.lower()) return db_suffix - def _parse_dynamic_entry(self, entry, build, additional_dynamic_vars=None): + def _parse_dynamic_entry(self, entry, build, dynamic_vars): """ transforms a module/test-tags entry dynamically """ - dynamic_config = build.dynamic_config - expression_filters = { 'filter_all_modules': filter_all_modules, 'filter_default_modules': filter_default_modules, 'make_module_test_tags': make_module_test_tags, 'select_existing_modules': select_existing_modules, + 'get_dependencies': get_dependencies, + 'get_dependant': get_dependant, 'prepend': prepend_string, 'append': append_string, 'modified_modules': keep_modified_modules, - 'modified_modules_or_base': 
keep_modified_modules_or_base, + 'union': union, } - dynamic_vars = {**dynamic_config.get('vars', {}), **build.params_id.config_data.get('dynamic_vars', {}), **(additional_dynamic_vars or {})} + dynamic_vars = dynamic_vars or {} def parse_expression(match): # inspired by jinja but with limited features diff --git a/runbot/models/commit.py b/runbot/models/commit.py index b7423f494..18d6922d8 100644 --- a/runbot/models/commit.py +++ b/runbot/models/commit.py @@ -3,7 +3,6 @@ import subprocess from ..common import os, RunbotException, make_github_session, transactioncache -import glob import shutil from odoo import models, fields, api @@ -66,22 +65,14 @@ def _rebase_on(self, commit): return self return self._get(self.name, self.repo_id.id, self.read()[0], commit.id) - def _get_available_modules(self): - for manifest_file_name in self.repo_id.manifest_files.split(','): # '__manifest__.py' '__openerp__.py' - for addons_path in (self.repo_id.addons_paths or '').split(','): # '' 'addons' 'odoo/addons' - sep = os.path.join(addons_path, '*') - for manifest_path in glob.glob(self._source_path(sep, manifest_file_name)): - module = os.path.basename(os.path.dirname(manifest_path)) - yield (addons_path, module, manifest_file_name) - def _list_files(self, patterns): #example: git ls-files --with-tree=abcf390f90dbdd39fd61abc53f8516e7278e0931 ':(glob)addons/*/*.py' ':(glob)odoo/addons/*/*.py' # note that glob is needed to avoid the star matching ** self.ensure_one() + self._fetch() return self.repo_id._git(['ls-files', '--with-tree', self.name, *patterns]).split('\n') def _list_available_modules(self): - # beta version, may replace _get_available_modules latter addons_paths = (self.repo_id.addons_paths or '').split(',') patterns = [] for manifest_file_name in self.repo_id.manifest_files.split(','): # '__manifest__.py' '__openerp__.py' @@ -98,6 +89,7 @@ def _list_available_modules(self): module, manifest_file_name = elems yield (addons_path, module, manifest_file_name) + 
@transactioncache # hack to avoid fetching the same commit twice inside the same transaction def _fetch(self): self.repo_id._fetch(self.name) if not self.repo_id._hash_exists(self.name): @@ -170,12 +162,43 @@ def _read_source(self, file, mode='r'): @transactioncache def _git_show_file(self, file): + return self._git_show_files([file])[0] + + def _git_show_files(self, files): self.ensure_one() + if not files: + return [] + self.repo_id._fetch(self.name) + + queries = "\n".join([f"{self.name}:{f}" for f in files]) + "\n" + try: - return self.repo_id._git(['show', '%s:%s' % (self.name, file)]) + buffer = self.repo_id._git( + ['cat-file', '--batch'], + input_data=queries, + raw=True, + ) except subprocess.CalledProcessError: - return False + return [False] * len(files) + + results = [] + offset = 0 + buffer_len = len(buffer) + while offset < buffer_len: + newline_idx = buffer.find(b'\n', offset) + if newline_idx == -1: + break + header = buffer[offset:newline_idx].decode('utf-8') + offset = newline_idx + 1 + try: + size_in_bytes = int(header.rsplit(' ', 1)[-1]) + except ValueError: # most likely missing + results.append(False) + continue + results.append(buffer[offset : offset + size_in_bytes].decode('utf-8', errors='replace')) + offset += size_in_bytes + 1 + return results def _source_path(self, *paths): if not self.tree_hash: diff --git a/runbot/models/repo.py b/runbot/models/repo.py index db7543a9d..175721603 100644 --- a/runbot/models/repo.py +++ b/runbot/models/repo.py @@ -503,11 +503,19 @@ def _get_git_command(self, cmd, errors='strict'): cmd = ['git', '-C', self.path] + config_args + cmd return cmd - def _git(self, cmd, errors='strict', quiet=False): + def _git(self, cmd, errors='strict', quiet=False, input_data=None, raw=False): cmd = self._get_git_command(cmd, errors) if not quiet: _logger.info("git command: %s", shlex.join(cmd)) - return subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode(errors=errors) + kwargs = {'stderr': subprocess.STDOUT} + 
if input_data is not None: + if isinstance(input_data, str): + input_data = input_data.encode('utf-8') + kwargs['input'] = input_data + output = subprocess.check_output(cmd, **kwargs) + if raw: + return output + return output.decode(errors=errors) def _fetch(self, sha): if not self._hash_exists(sha): diff --git a/runbot/templates/build.xml b/runbot/templates/build.xml index 0e50b7ce4..3c701df09 100644 --- a/runbot/templates/build.xml +++ b/runbot/templates/build.xml @@ -44,7 +44,7 @@
  • - +
  • @@ -68,7 +68,7 @@
    -
    +
    This build is referenced in bundles
    @@ -225,7 +225,7 @@
    - + Build @@ -294,7 +294,7 @@ - + diff --git a/runbot/templates/bundle.xml b/runbot/templates/bundle.xml index af1036a40..49b4a5efe 100644 --- a/runbot/templates/bundle.xml +++ b/runbot/templates/bundle.xml @@ -91,14 +91,9 @@ Default - - - - Force all - - + Apply -
    +
    diff --git a/runbot/tests/common.py b/runbot/tests/common.py index 9f6ba86c8..81cc8e784 100644 --- a/runbot/tests/common.py +++ b/runbot/tests/common.py @@ -12,7 +12,7 @@ class RunbotCase(TransactionCase): - def mock_git_helper(self, repo, cmd): + def mock_git_helper(self, repo, cmd, input_data=None, raw=False): """Helper that returns a mock for repo._git()""" if cmd[:2] == ['show', '-s'] or cmd[:3] == ['show', '--pretty="%H -- %s"', '-s']: return 'commit message for %s' % cmd[-1] @@ -82,7 +82,9 @@ def setUp(self): self.repo_odoo: [ ('odoo/addons', 'base', '__manifest__.py'), ('odoo/addons', 'test_lint', '__manifest__.py'), + ('addons', 'account', '__manifest__.py'), ('addons', 'mail', '__manifest__.py'), + ('addons', 'test_mail', '__manifest__.py'), ('addons', 'web', '__manifest__.py'), ('addons', 'crm', '__manifest__.py'), ('addons', 'project', '__manifest__.py'), @@ -194,8 +196,8 @@ def setUp(self): self.docker_run_calls = [] self.diff = '' - def mock_git(repo, cmd, quiet=False): - return self.mock_git_helper(repo, cmd) + def mock_git(repo, cmd, quiet=False, input_data=None, raw=False): + return self.mock_git_helper(repo, cmd, input_data=input_data, raw=raw) self.start_patcher('git_patcher', 'odoo.addons.runbot.models.repo.Repo._git', new=mock_git) self.start_patcher('hostname_patcher', 'odoo.addons.runbot.common.socket.gethostname', 'host.runbot.com') @@ -232,10 +234,10 @@ def mock_git(repo, cmd, quiet=False): self.start_patcher('_write_file', 'odoo.addons.runbot.models.build.BuildResult._write_file', None) self.start_patcher('_parse_config', 'odoo.addons.runbot.models.build.BuildResult._parse_config', {'--test-enable', '--test-tags', '--with-demo'}) - def get_available_modules(self_commit): + def _list_available_modules(self_commit): return self.addons_per_repo.get(self_commit.repo_id, []) - self.start_patcher('_get_available_modules', 'odoo.addons.runbot.models.commit.Commit._get_available_modules', new=get_available_modules) + 
self.start_patcher('_list_available_modules', 'odoo.addons.runbot.models.commit.Commit._list_available_modules', new=_list_available_modules) def no_commit(*_args, **_kwargs): _logger.info('Skipping commit') diff --git a/runbot/tests/test_build_config_step.py b/runbot/tests/test_build_config_step.py index 063c318ec..2e52171a3 100644 --- a/runbot/tests/test_build_config_step.py +++ b/runbot/tests/test_build_config_step.py @@ -439,25 +439,58 @@ def setUp(self): }).id, 'local_result': 'ok', }) + self.module_dependencies = { + "test_mail": ["mail"], + "mail": ["web"], + "account": ["web"], + "crm": ["web"], + "project": ["web"], + "test_l10n": ["l10n_be", "l10n_in"], + "l10n_be": ["account"], + "l10n_in": ["account"], + "web_enterprise": ["web"], + } + + def mock_git_helper(self, repo, cmd, input_data=None, raw=False): + def make_catfile_output(commit, content): + content_bytes = content.encode('utf-8') + header = f"{commit} blob {len(content_bytes)}\n".encode() + result = header + content_bytes + b"\n" + return result + + if cmd == ['cat-file', '--batch']: + if repo == self.repo_odoo and input_data == 'dfdfcfcf0000ffffffffffffffffffffffffffff:odoo/tests/.runbot/parallel_testing.json\n': + return make_catfile_output('dfdfcfcf0000ffffffffffffffffffffffffffff', self.config_file) + if repo == self.repo_odoo and input_data == 'dfdfcfcf0000ffffffffffffffffffffffffffff:odoo/tests/.runbot/l10n_standalone_testing.json\n': + return make_catfile_output('dfdfcfcf0000ffffffffffffffffffffffffffff', self.l10n_standalone_testing_file) + + if "__manifest__.py" in input_data: + modules_info = [ + (line, line.split(':')[-1].split('/')[-2]) + for line in input_data.splitlines() + if line.endswith('__manifest__.py') + ] + result = b"" + for original_query, module in modules_info: + content = '''{'name': '%s', 'depends': %s}''' % (module, self.module_dependencies.get(module, [])) + result += make_catfile_output(original_query.split(':')[0], content) + return result - def 
mock_git_helper(self, repo, cmd): - if repo == self.repo_odoo and cmd == ['show', 'dfdfcfcf0000ffffffffffffffffffffffffffff:odoo/tests/.runbot/parallel_testing.json']: - return self.config_file - elif repo == self.repo_odoo and cmd == ['show', 'dfdfcfcf0000ffffffffffffffffffffffffffff:odoo/tests/.runbot/l10n_standalone_testing.json']: - return self.l10n_standalone_testing_file - elif 'show' in cmd: + if cmd == ['cat-file', '--batch']: raise subprocess.CalledProcessError(cmd, 128) - return super().mock_git_helper(repo, cmd) + elif 'diff' in cmd: + return 'odoo/addons/crm/some/file.py\nodoo/addons/project/some/file.py' + return super().mock_git_helper(repo, cmd, input_data, raw) def test_module_filters(self): - self.assertEqual(self.build._get_modules_to_test('-> !mail'), ['base', 'crm', 'documents']) - self.assertEqual(self.build._get_modules_to_test('mail -> !web'), ['mail', 'project', 'test_l10n', 'test_lint']) + self.assertEqual(self.build._get_modules_to_test('-> !mail'), ['account', 'base', 'crm', 'documents']) + self.assertEqual(self.build._get_modules_to_test('mail -> !web'), ['mail', 'project', 'test_l10n', 'test_lint', 'test_mail']) self.assertEqual(self.build._get_modules_to_test('web -> web'), ['web']) self.assertEqual(self.build._get_modules_to_test('!web ->'), ['web_enterprise']) - self.assertEqual(self.build._get_modules_to_test('-> !mail, -crm'), ['base', 'documents']) - self.assertEqual(self.build._get_modules_to_test('mail -> !web, !project'), ['mail', 'test_l10n', 'test_lint']) - self.assertEqual(self.build._get_modules_to_test('-*,odoo/*'), ['base', 'crm', 'hw_drivers', 'mail', 'project', 'test_l10n', 'test_lint', 'web']) - self.assertEqual(self.build._get_modules_to_test('-*,odoo/test_*'), ['test_l10n', 'test_lint']) + self.assertEqual(self.build._get_modules_to_test('-> !mail, -crm'), ['account', 'base', 'documents']) + self.assertEqual(self.build._get_modules_to_test('mail -> !web, !project'), ['mail', 'test_l10n', 'test_lint', 'test_mail']) + 
self.assertEqual(self.build._get_modules_to_test('-*,odoo/*'), ['account', 'base', 'crm', 'hw_drivers', 'mail', 'project', 'test_l10n', 'test_lint', 'test_mail', 'web']) + self.assertEqual(self.build._get_modules_to_test('-*,odoo/test_*'), ['test_l10n', 'test_lint', 'test_mail']) self.assertEqual(self.build._get_modules_to_test('-*,enterprise/*'), ['documents', 'l10n_be', 'l10n_in', 'web_enterprise']) self.assertEqual(self.build._get_modules_to_test('-*,web*'), ['web', 'web_enterprise']) self.assertEqual(self.build._get_modules_to_test('-*,web*,-enterprise/web*'), ['web']) @@ -467,6 +500,35 @@ def test_config_extension(self): self.assertEqual(json.loads(self.config.default_dynamic_config)['vars']['module_filter'], '*,-hw_*') self.assertEqual(self.build.dynamic_config['vars']['module_filter'], '*,-hw_*,-l10n_*') + def test_parse_dynamic_entry(self): + Step = self.env['runbot.build.config.step'] + + def check_parse(entry, expected): + res = Step._parse_dynamic_entry(entry, self.build, {'key': 'value', 'test_method': '.test_method'}) + self.assertEqual(res, expected) + check_parse('{{-test_*|filter_all_modules}}', 'account,base,crm,documents,hw_drivers,l10n_be,l10n_in,mail,project,web,web_enterprise') + check_parse('{{-*,web*|filter_all_modules}}', 'web,web_enterprise') + check_parse('{{-*,web*|filter_all_modules|make_module_test_tags}}', '/web,/web_enterprise') + check_parse('{{-*,web*|filter_all_modules|make_module_test_tags|prepend("some_tag")}}', 'some_tag/web,some_tag/web_enterprise') + check_parse('{{-*,web*|filter_all_modules|make_module_test_tags|prepend(key)}}', 'value/web,value/web_enterprise') + check_parse('{{-*,web*|filter_all_modules|make_module_test_tags|append(".test_method")}}', '/web.test_method,/web_enterprise.test_method') + check_parse('{{-*,web*|filter_all_modules|make_module_test_tags|append(test_method)}}', '/web.test_method,/web_enterprise.test_method') + + self.patch(type(self.build), '_modified_modules', lambda cl, defaults=None: {'crm'}) + 
+ check_parse('{{*|filter_all_modules|modified_modules}}', 'crm') + + def test_modules_dependencies(self): + self.assertEqual(self.build._get_modules_dependencies(['test_mail'], 1), ['mail', 'test_mail']) + self.assertEqual(self.build._get_modules_dependencies(['test_mail']), ['base', 'mail', 'test_mail', 'web']) + self.assertEqual(self.build._get_modules_dependencies(['test_l10n']), ['account', 'base', 'l10n_be', 'l10n_in', 'test_l10n', 'web']) + self.assertEqual(self.build._get_modules_dependencies(['test_mail', 'test_l10n']), ['account', 'base', 'l10n_be', 'l10n_in', 'mail', 'test_l10n', 'test_mail', 'web']) + self.assertEqual(self.build._get_modules_dependencies(['test_mail', 'test_l10n'], 1), ['l10n_be', 'l10n_in', 'mail', 'test_l10n', 'test_mail']) + + self.assertEqual(self.build._get_dependant_modules(['account'], 1), ['account', 'l10n_be', 'l10n_in']) + self.assertEqual(self.build._get_dependant_modules(['account']), ['account', 'l10n_be', 'l10n_in', 'test_l10n']) + self.assertEqual(self.build._get_dependant_modules(['base']), ['account', 'base', 'crm', 'documents', 'hw_drivers', 'l10n_be', 'l10n_in', 'mail', 'project', 'test_l10n', 'test_lint', 'test_mail', 'web', 'web_enterprise']) + def check_server_cmd(self, cmd, install, test_enable, test_tags, db=None): self.assertIn('odoo/server.py', cmd) if install: @@ -537,7 +599,7 @@ def test_dynamic_step_parallel_testing(self): cmd = self.docker_run_calls[0][0] odoo_cmd = cmd.cmd self.check_server_cmd(odoo_cmd, - install=['base', 'crm', 'documents', 'mail', 'project', 'test_l10n', 'test_lint', 'web', 'web_enterprise'], + install=['account', 'base', 'crm', 'documents', 'mail', 'project', 'test_l10n', 'test_lint', 'test_mail', 'web', 'web_enterprise'], test_enable=False, test_tags=None, db=f'{build.dest}-all', @@ -572,7 +634,7 @@ def test_dynamic_step_parallel_testing(self): cmd = self.docker_run_calls[0][0] odoo_cmd = cmd.cmd self.check_server_cmd(odoo_cmd, - install=['base', 'crm', 'documents', 'mail', 'project', 
'test_l10n', 'test_lint', 'web', 'web_enterprise'], + install=['account', 'base', 'crm', 'documents', 'mail', 'project', 'test_l10n', 'test_lint', 'test_mail', 'web', 'web_enterprise'], test_enable=True, test_tags='-post_install,-/test_lint', ) @@ -589,8 +651,8 @@ def test_dynamic_step_parallel_testing(self): ) for post_install, expected_tags in [ - (post_install_1, '-at_install,/base,/crm,/documents,/hw_drivers,/l10n_be,/l10n_in'), # we need the blacklisted modules here - (post_install_2, '-at_install,/mail,/project,/test_l10n,/test_lint'), + (post_install_1, '-at_install,/account,/base,/crm,/documents,/hw_drivers,/l10n_be,/l10n_in'), # we need the blacklisted modules here + (post_install_2, '-at_install,/mail,/project,/test_l10n,/test_lint,/test_mail'), (post_install_3, '-at_install,/web'), (post_install_4, '-at_install,/web_enterprise'), ]: @@ -694,7 +756,7 @@ def test_dynamic_step_l10n_standalone(self): (post_install_1, '-external,-external_l10n,post_install_l10n/l10n_hr_payroll_be,post_install_l10n/l10n_hr_payroll_in'), # we need the blacklisted modules here (post_install_2, '-external,-external_l10n,post_install_l10n/l10n_edi_be,post_install_l10n/l10n_edi_in'), (post_install_3, '-external,-external_l10n,post_install_l10n/l10n_reports_be,post_install_l10n/l10n_reports_in'), - (post_install_4, Like('-external,-external_l10n,post_install_l10n/base,post_install_l10n/crm,...')), + (post_install_4, Like('-external,-external_l10n,post_install_l10n/account,post_install_l10n/base,post_install_l10n/crm,...')), ]: with self.subTest(post_install=expected_tags): # 4.1 post install restore @@ -738,6 +800,7 @@ def test_foreach_module(self): self.config.step_ids[0]._run_dynamic(self.build) self.assertEqual(self.build.children_ids.mapped('description'), [ + 'Post install tests for **account**', 'Post install tests for **base**', 'Post install tests for **crm**', 'Post install tests for **documents**', @@ -767,31 +830,154 @@ def test_foreach_modified_module(self): }] }''' - 
self.patch(type(self.build), '_modified_modules', lambda cl: {'crm'}) + self.patch(type(self.build), '_modified_modules', lambda cl, defaults=None: {'crm'}) self.config.default_dynamic_config = dynamic_config self.config.step_ids[0]._run_dynamic(self.build) self.assertEqual(self.build.children_ids.mapped('description'), - [ - 'Post install tests for **crm**', + [ + 'Post install tests for **crm**', ]) - def test_parse_dynamic_entry(self): - Step = self.env['runbot.build.config.step'] + def test_modified_existing_module(self): + dynamic_config = '''{ + "vars": { + "modified_modules": "{{*|filter_all_modules|modified_modules}}", + "test_modules": "{{modified_modules|prepend('test_')|select_existing_modules}}", + "modules_to_test": "{{modified_modules|union(test_modules)}}" + }, + "name": "Foreach module testing", + "steps": [{ + "name": "Create module builds", + "job_type": "create_build", + "children": [{ + "name": "Test single module", + "description": "Post install tests for **{{modules_to_test}}**", + "steps": [{ + "name": "Start single module test", + "job_type": "odoo", + "install_modules": "{{modules_to_test}}", + "test_tags": "{{modules_to_test|make_module_test_tags}}" + }] + }] + }] + }''' - def check_parse(entry, expected): - res = Step._parse_dynamic_entry(entry, self.build, {'key': 'value', 'test_method': '.test_method'}) - self.assertEqual(res, expected) - check_parse('{{-test_*|filter_all_modules}}', 'base,crm,documents,hw_drivers,l10n_be,l10n_in,mail,project,web,web_enterprise') - check_parse('{{-*,web*|filter_all_modules}}', 'web,web_enterprise') - check_parse('{{-*,web*|filter_all_modules|make_module_test_tags}}', '/web,/web_enterprise') - check_parse('{{-*,web*|filter_all_modules|make_module_test_tags|prepend("some_tag")}}', 'some_tag/web,some_tag/web_enterprise') - check_parse('{{-*,web*|filter_all_modules|make_module_test_tags|prepend(key)}}', 'value/web,value/web_enterprise') - 
check_parse('{{-*,web*|filter_all_modules|make_module_test_tags|append(".test_method")}}', '/web.test_method,/web_enterprise.test_method') - check_parse('{{-*,web*|filter_all_modules|make_module_test_tags|append(test_method)}}', '/web.test_method,/web_enterprise.test_method') + self.patch(type(self.build), '_modified_modules', lambda cl, defaults=None: {'crm', 'mail'}) + self.config.default_dynamic_config = dynamic_config + self.config.step_ids[0]._run_dynamic(self.build) + self.assertEqual(self.build.children_ids.mapped('description'), + [ + 'Post install tests for **crm,mail,test_mail**', + ]) + child_dynamic_vars = self.build.children_ids.params_id.config_data['dynamic_vars'] + self.assertEqual(child_dynamic_vars, { + 'modified_modules': 'crm,mail', + 'test_modules': 'test_mail', + 'modules_to_test': 'crm,mail,test_mail', + }) - self.patch(type(self.build), '_modified_modules', lambda cl: {'crm'}) + def test_modified_existing_module_parallel(self): + dynamic_config = '''{ + "vars": { + "modified_modules": "{{*|filter_all_modules|modified_modules}}", + "modules_to_test": "{{modified_modules|prepend('test_')|select_existing_modules|union(modified_modules)}}" + }, + "name": "Parallel split modified", + "steps": [{ + "name": "Create module builds", + "job_type": "create_build", + "for_each_vars": [{ + "test_module_filter": "{{modules_to_test}},->!mail" + }, + { + "test_module_filter": "{{modules_to_test}},mail->!website" + }, + { + "test_module_filter": "{{modules_to_test}},website->" + } + ], + "if": "{{child_modules_to_test}}", + "children": [{ + "vars": { + "child_modules_to_test": "{{test_module_filter|select_existing_modules}}" + }, + "name": "Test single module", + "description": "Post install tests for **{{child_modules_to_test}}**", + "steps": [{ + "name": "Start single module test", + "job_type": "odoo", + "install_modules": "{{child_modules_to_test}}", + "test_tags": "{{child_modules_to_test|make_module_test_tags}}" + }] + }] + }] + }''' - 
check_parse('{{*|filter_all_modules|modified_modules}}', 'crm') + self.patch(type(self.build), '_modified_modules', lambda cl, defaults=None: {'crm', 'mail'}) + self.config.default_dynamic_config = dynamic_config + self.config.step_ids[0]._run_dynamic(self.build) + self.assertEqual(self.build.children_ids.mapped('description'), + [ + 'Post install tests for **crm**', + 'Post install tests for **mail,test_mail**', + ]) + + self.assertEqual(self.build.children_ids[0].params_id.config_data['dynamic_vars']['child_modules_to_test'], 'crm') + self.assertEqual(self.build.children_ids[1].params_id.config_data['dynamic_vars']['child_modules_to_test'], 'mail,test_mail') + + def test_modified_existing_module_parallel_relations(self): + dynamic_config = '''{ + "vars": [ + {"module_filter": "*,-hw_*,-*l10n_*,-theme_*,-account_bacs,-account_reports_cash_basis,-auth_ldap,-base_gengo,-document_ftp,-iot_drivers,-note_pad,-odoo_referral,-odoo_referral_portal,-pad,-pad_project,-pos_blackbox_be,-pos_cache,-pos_six,-social_demo,-website_gengo,-website_instantclick,test_l10n_be_hr_payroll_account,test_l10n_us_hr_payroll_account"}, + {"_modified_modules": "{{module_filter|filter_all_modules|modified_modules}}"}, + {"_modules_dependencies": "{{_modified_modules|get_dependencies(1)}}"}, + {"_dependant_modules": "{{_modified_modules|get_dependant(1)}}"}, + {"_test_modules": "{{_modified_modules|prepend('test_')|select_existing_modules}}"}, + {"_modules_to_test": "{{_modified_modules|union(_test_modules)|union(_dependant_modules)|union(_modules_dependencies)}}"} + ], + "name": "Parallel split modified", + "steps": [{ + "name": "Create module builds", + "job_type": "create_build", + "for_each_vars": [{ + "_test_module_filter": "{{_modules_to_test}},->!mail" + }, + { + "_test_module_filter": "{{_modules_to_test}},mail->!website" + }, + { + "_test_module_filter": "{{_modules_to_test}},website->" + } + ], + "if": "{{child_modules_to_test}}", + "log": "Modified modules: 
{{_modified_modules}}\\nDepenencies: {{_modules_dependencies}}\\nDependant: {{_dependant_modules}}\\nTest modules: {{_test_modules}}", + "children": [{ + "vars": { + "child_modules_to_test": "{{_test_module_filter|select_existing_modules}}" + }, + "name": "Test single module", + "description": "Post install tests for **{{child_modules_to_test}}**", + "steps": [{ + "name": "Start single module test", + "job_type": "odoo", + "install_modules": "{{child_modules_to_test}}", + "test_tags": "{{child_modules_to_test|make_module_test_tags}}" + }] + }] + }] + }''' + + self.patch(type(self.build), '_modified_modules', lambda cl, defaults=None: {'crm', 'mail'}) + self.config.default_dynamic_config = dynamic_config + self.config.step_ids[0]._run_dynamic(self.build) + self.assertEqual(self.build.children_ids.mapped('description'), + [ + 'Post install tests for **crm**', + 'Post install tests for **mail,test_mail,web**', + ]) + self.assertEqual(self.build.children_ids[0].params_id.config_data['dynamic_vars']['child_modules_to_test'], 'crm') + self.assertEqual(self.build.children_ids[1].params_id.config_data['dynamic_vars']['child_modules_to_test'], 'mail,test_mail,web') + self.assertEqual(list(self.build.children_ids[0].params_id.config_data['dynamic_vars'].keys()), ['module_filter', 'child_modules_to_test']) class TestBuildConfigStep(TestBuildConfigStepCommon): diff --git a/runbot/tests/test_repo.py b/runbot/tests/test_repo.py index 81c940ab0..cb8195a05 100644 --- a/runbot/tests/test_repo.py +++ b/runbot/tests/test_repo.py @@ -372,7 +372,7 @@ def setUp(self): self.fetch_count = 0 self.force_failure = False - def mock_git_helper(self, repo, cmd): + def mock_git_helper(self, repo, cmd, input_data=None, raw=False): self.assertIn('fetch', cmd) self.fetch_count += 1 if self.fetch_count < 3 or self.force_failure: @@ -457,7 +457,7 @@ def setUp(self): super().setUp() self.test_refs = [] - def mock_git_helper(self, repo, cmd): + def mock_git_helper(self, repo, cmd, input_data=None, 
raw=False): self.assertIn('for-each-ref', cmd) self.assertIn('refs/*/pull/*', cmd) return '\n'.join(['\x00'.join(ref_data) for ref_data in self.test_refs]) diff --git a/runbot/views/build_views.xml b/runbot/views/build_views.xml index 3a6064cfa..25532541f 100644 --- a/runbot/views/build_views.xml +++ b/runbot/views/build_views.xml @@ -12,6 +12,8 @@ + +