diff --git a/Makefile b/Makefile index 1e565aa9..b3ece8c7 100644 --- a/Makefile +++ b/Makefile @@ -194,19 +194,22 @@ flake8: FORCE src/cwl_utils/parser/cwl_v1_0.py: FORCE schema-salad-tool --codegen python \ --codegen-parser-info "org.w3id.cwl.v1_0" \ - https://github.com/common-workflow-language/common-workflow-language/raw/codegen/v1.0/extensions.yml \ + --codegen-parent "https://w3id.org/cwl/salad=schema_salad.metaschema" \ + https://github.com/common-workflow-language/common-workflow-language/raw/codegen/v1.0/CommonWorkflowLanguage.yml \ > $@ src/cwl_utils/parser/cwl_v1_1.py: FORCE schema-salad-tool --codegen python \ --codegen-parser-info "org.w3id.cwl.v1_1" \ - https://github.com/common-workflow-language/cwl-v1.1/raw/codegen/extensions.yml \ + --codegen-parent "https://w3id.org/cwl/salad=schema_salad.metaschema" \ + https://github.com/common-workflow-language/cwl-v1.1/raw/codegen/CommonWorkflowLanguage.yml \ > $@ src/cwl_utils/parser/cwl_v1_2.py: FORCE schema-salad-tool --codegen python \ --codegen-parser-info "org.w3id.cwl.v1_2" \ - https://github.com/common-workflow-language/cwl-v1.2/raw/codegen/extensions.yml \ + --codegen-parent "https://w3id.org/cwl/salad=schema_salad.metaschema" \ + https://github.com/common-workflow-language/cwl-v1.2/raw/codegen/CommonWorkflowLanguage.yml \ > $@ regen_parsers: src/cwl_utils/parser/cwl_v1_*.py diff --git a/README.rst b/README.rst index b8fba953..4d0bf215 100644 --- a/README.rst +++ b/README.rst @@ -160,13 +160,13 @@ Regenerate parsers To regenerate install the ``schema_salad`` package and run: ``cwl_utils/parser/cwl_v1_0.py`` was created via -``schema-salad-tool --codegen python https://github.com/common-workflow-language/common-workflow-language/raw/codegen/v1.0/extensions.yml --codegen-parser-info "org.w3id.cwl.v1_0" > cwl_utils/parser/cwl_v1_0.py`` +``schema-salad-tool --codegen python https://github.com/common-workflow-language/common-workflow-language/raw/codegen/v1.0/CommonWorkflowLanguage.yml --codegen-parent 
"https://w3id.org/cwl/salad=schema_salad.metaschema" --codegen-parser-info "org.w3id.cwl.v1_0" > cwl_utils/parser/cwl_v1_0.py`` ``cwl_utils/parser/cwl_v1_1.py`` was created via -``schema-salad-tool --codegen python https://github.com/common-workflow-language/cwl-v1.1/raw/codegen/extensions.yml --codegen-parser-info "org.w3id.cwl.v1_1" > cwl_utils/parser/cwl_v1_1.py`` +``schema-salad-tool --codegen python https://github.com/common-workflow-language/cwl-v1.1/raw/codegen/CommonWorkflowLanguage.yml --codegen-parent "https://w3id.org/cwl/salad=schema_salad.metaschema" --codegen-parser-info "org.w3id.cwl.v1_1" > cwl_utils/parser/cwl_v1_1.py`` ``cwl_utils/parser/cwl_v1_2.py`` was created via -``schema-salad-tool --codegen python https://github.com/common-workflow-language/cwl-v1.2/raw/codegen/extensions.yml --codegen-parser-info "org.w3id.cwl.v1_2" > cwl_utils/parser/cwl_v1_2.py`` +``schema-salad-tool --codegen python https://github.com/common-workflow-language/cwl-v1.2/raw/codegen/CommonWorkflowLanguage.yml --codegen-parent "https://w3id.org/cwl/salad=schema_salad.metaschema" --codegen-parser-info "org.w3id.cwl.v1_2" > cwl_utils/parser/cwl_v1_2.py`` Release ~~~~~~~ diff --git a/pyproject.toml b/pyproject.toml index 0cb7d0be..7e622b05 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,7 @@ dependencies = [ "packaging", "rdflib", "requests", - "schema-salad >= 8.8.20250205075315,<9", + "schema-salad @ git+https://github.com/common-workflow-language/schema_salad.git@refs/pull/1028/head", "ruamel.yaml >= 0.17.6, < 0.20", "typing_extensions >= 4.10.0", ] @@ -73,6 +73,9 @@ cwl-inputs-schema-gen = "cwl_utils.inputs_schema_gen:main" [tool.pytest.ini_options] addopts = "-rsx -n auto --pyargs cwl_utils.tests" +[tool.hatch.metadata] +allow-direct-references = true + [tool.hatch.version] path = "src/cwl_utils/__meta__.py" diff --git a/src/cwl_utils/cwl_v1_0_expression_refactor.py b/src/cwl_utils/cwl_v1_0_expression_refactor.py index 1446319a..c097bedb 100755 --- 
a/src/cwl_utils/cwl_v1_0_expression_refactor.py +++ b/src/cwl_utils/cwl_v1_0_expression_refactor.py @@ -11,6 +11,8 @@ from typing import Any, cast from ruamel import yaml +from schema_salad.metaschema import ArraySchema +from schema_salad.runtime import LoadingOptions, save from schema_salad.sourceline import SourceLine from schema_salad.utils import json_dumps @@ -41,7 +43,7 @@ def expand_stream_shortcuts(process: cwl.CommandLineTool) -> cwl.CommandLineTool stdout_path = process.stdout if not stdout_path: stdout_path = hashlib.sha1( # nosec - json_dumps(cwl.save(process)).encode("utf-8") + json_dumps(save(process)).encode("utf-8") ).hexdigest() result.stdout = stdout_path result.outputs[index].type_ = "File" @@ -57,11 +59,11 @@ def escape_expression_field(contents: str) -> str: def clean_type_ids( - cwltype: cwl.ArraySchema | cwl.InputRecordSchema, -) -> cwl.ArraySchema | cwl.InputRecordSchema: + cwltype: ArraySchema | cwl.InputRecordSchema, +) -> ArraySchema | cwl.InputRecordSchema: """Simplify type identifiers.""" result = copy.deepcopy(cwltype) - if isinstance(result, cwl.ArraySchema): + if isinstance(result, ArraySchema): if isinstance(result.items, MutableSequence): for item in result.items: if hasattr(item, "id"): @@ -340,8 +342,8 @@ def generate_etool_from_expr( self_type = target if isinstance(self_type, list): new_type: ( - list[cwl.ArraySchema | cwl.InputRecordSchema] - | cwl.ArraySchema + list[ArraySchema | cwl.InputRecordSchema] + | ArraySchema | cwl.InputRecordSchema ) = [clean_type_ids(t.type_) for t in self_type if t.type_] elif self_type.type_: @@ -1218,7 +1220,7 @@ def process_level_reqs( def add_input_to_process( - process: cwl.Process, name: str, inptype: Any, loadingOptions: cwl.LoadingOptions + process: cwl.Process, name: str, inptype: Any, loadingOptions: LoadingOptions ) -> None: """Add a new InputParameter to the given CommandLineTool.""" if isinstance(process, cwl.CommandLineTool): @@ -1347,7 +1349,7 @@ def traverse_CommandLineTool( 
modified = True inp_id = "_{}_glob".format(outp.id.split("#")[-1]) etool_id = f"_expression_{step_id}{inp_id}" - glob_target_type = ["string", cwl.ArraySchema("string", "array")] + glob_target_type = ["string", ArraySchema("string", "array")] target = cwl.InputParameter(id=None, type_=glob_target_type) replace_step_clt_expr_with_etool( expression, etool_id, parent, target, step, replace_etool @@ -1843,7 +1845,7 @@ def traverse_step( source_types.append(temp_type) source_type = cwl.InputParameter( id=None, - type_=cwl.ArraySchema(source_types, "array"), + type_=ArraySchema(source_types, "array"), ) else: input_source_id = inp.source.split("#")[-1] diff --git a/src/cwl_utils/cwl_v1_1_expression_refactor.py b/src/cwl_utils/cwl_v1_1_expression_refactor.py index 1bedf4c2..12b02690 100755 --- a/src/cwl_utils/cwl_v1_1_expression_refactor.py +++ b/src/cwl_utils/cwl_v1_1_expression_refactor.py @@ -11,6 +11,8 @@ from typing import Any, cast from ruamel import yaml +from schema_salad.metaschema import ArraySchema +from schema_salad.runtime import LoadingOptions, save from schema_salad.sourceline import SourceLine from schema_salad.utils import json_dumps @@ -41,7 +43,7 @@ def expand_stream_shortcuts(process: cwl.CommandLineTool) -> cwl.CommandLineTool stdout_path = process.stdout if not stdout_path: stdout_path = hashlib.sha1( # nosec - json_dumps(cwl.save(process)).encode("utf-8") + json_dumps(save(process)).encode("utf-8") ).hexdigest() result.stdout = stdout_path result.outputs[index].type_ = "File" @@ -57,11 +59,11 @@ def escape_expression_field(contents: str) -> str: def clean_type_ids( - cwltype: cwl.ArraySchema | cwl.InputRecordSchema, -) -> cwl.ArraySchema | cwl.InputRecordSchema: + cwltype: ArraySchema | cwl.InputRecordSchema, +) -> ArraySchema | cwl.InputRecordSchema: """Simplify type identifiers.""" result = copy.deepcopy(cwltype) - if isinstance(result, cwl.ArraySchema): + if isinstance(result, ArraySchema): if isinstance(result.items, MutableSequence): for item 
in result.items: if hasattr(item, "id"): @@ -340,8 +342,8 @@ def generate_etool_from_expr( self_type = target if isinstance(self_type, list): new_type: ( - list[cwl.ArraySchema | cwl.InputRecordSchema] - | cwl.ArraySchema + list[ArraySchema | cwl.InputRecordSchema] + | ArraySchema | cwl.InputRecordSchema ) = [clean_type_ids(t.type_) for t in self_type] else: @@ -1220,7 +1222,7 @@ def process_level_reqs( def add_input_to_process( - process: cwl.Process, name: str, inptype: Any, loadingOptions: cwl.LoadingOptions + process: cwl.Process, name: str, inptype: Any, loadingOptions: LoadingOptions ) -> None: """Add a new InputParameter to the given CommandLineTool.""" if isinstance(process, cwl.CommandLineTool): @@ -1349,7 +1351,7 @@ def traverse_CommandLineTool( modified = True inp_id = "_{}_glob".format(outp.id.split("#")[-1]) etool_id = f"_expression_{step_id}{inp_id}" - glob_target_type = ["string", cwl.ArraySchema("string", "array")] + glob_target_type = ["string", ArraySchema("string", "array")] target = cwl.WorkflowInputParameter(id=None, type_=glob_target_type) replace_step_clt_expr_with_etool( expression, etool_id, parent, target, step, replace_etool @@ -1851,7 +1853,7 @@ def traverse_step( source_types.append(temp_type) source_type = cwl.WorkflowInputParameter( id=None, - type_=cwl.ArraySchema(source_types, "array"), + type_=ArraySchema(source_types, "array"), ) else: input_source_id = inp.source.split("#")[-1] diff --git a/src/cwl_utils/cwl_v1_2_expression_refactor.py b/src/cwl_utils/cwl_v1_2_expression_refactor.py index 1fd73d74..8a11c9b6 100755 --- a/src/cwl_utils/cwl_v1_2_expression_refactor.py +++ b/src/cwl_utils/cwl_v1_2_expression_refactor.py @@ -11,6 +11,8 @@ from typing import Any, cast from ruamel import yaml +from schema_salad.metaschema import ArraySchema +from schema_salad.runtime import LoadingOptions, save from schema_salad.sourceline import SourceLine from schema_salad.utils import json_dumps @@ -40,7 +42,7 @@ def expand_stream_shortcuts(process: 
cwl.CommandLineTool) -> cwl.CommandLineTool stdout_path = process.stdout if not stdout_path: stdout_path = hashlib.sha1( # nosec - json_dumps(cwl.save(process)).encode("utf-8") + json_dumps(save(process)).encode("utf-8") ).hexdigest() result.stdout = stdout_path result.outputs[index].type_ = "File" @@ -56,11 +58,11 @@ def escape_expression_field(contents: str) -> str: def clean_type_ids( - cwltype: cwl.ArraySchema | cwl.InputRecordSchema, -) -> cwl.ArraySchema | cwl.InputRecordSchema: + cwltype: ArraySchema | cwl.InputRecordSchema, +) -> ArraySchema | cwl.InputRecordSchema: """Simplify type identifiers.""" result = copy.deepcopy(cwltype) - if isinstance(result, cwl.ArraySchema): + if isinstance(result, ArraySchema): if isinstance(result.items, MutableSequence): for item in result.items: if hasattr(item, "id"): @@ -339,8 +341,8 @@ def generate_etool_from_expr( self_type = target if isinstance(self_type, list): new_type: ( - list[cwl.ArraySchema | cwl.InputRecordSchema] - | cwl.ArraySchema + list[ArraySchema | cwl.InputRecordSchema] + | ArraySchema | cwl.InputRecordSchema ) = [clean_type_ids(t.type_) for t in self_type] else: @@ -715,7 +717,7 @@ def process_workflow_inputs_and_outputs( else: sources = [s.split("#")[-1] for s in param2.outputSource] source_type_items = utils.type_for_source(workflow, sources) - if isinstance(source_type_items, cwl.ArraySchema): + if isinstance(source_type_items, ArraySchema): if isinstance(source_type_items.items, list): if "null" not in source_type_items.items: source_type_items.items.append("null") @@ -1323,7 +1325,7 @@ def process_level_reqs( def add_input_to_process( - process: cwl.Process, name: str, inptype: Any, loadingOptions: cwl.LoadingOptions + process: cwl.Process, name: str, inptype: Any, loadingOptions: LoadingOptions ) -> None: """Add a new InputParameter to the given CommandLineTool.""" if isinstance(process, cwl.CommandLineTool): @@ -1452,7 +1454,7 @@ def traverse_CommandLineTool( modified = True inp_id = 
"_{}_glob".format(outp.id.split("#")[-1]) etool_id = f"_expression_{step_id}{inp_id}" - glob_target_type = ["string", cwl.ArraySchema("string", "array")] + glob_target_type = ["string", ArraySchema("string", "array")] target = cwl.WorkflowInputParameter(id=None, type_=glob_target_type) replace_step_clt_expr_with_etool( expression, etool_id, parent, target, step, replace_etool @@ -1954,7 +1956,7 @@ def traverse_step( source_types.append(temp_type) source_type = cwl.WorkflowInputParameter( id=None, - type_=cwl.ArraySchema(source_types, "array"), + type_=ArraySchema(source_types, "array"), ) else: input_source_id = inp.source.split("#")[-1] diff --git a/src/cwl_utils/expression_refactor.py b/src/cwl_utils/expression_refactor.py index 094f5810..a3da703c 100755 --- a/src/cwl_utils/expression_refactor.py +++ b/src/cwl_utils/expression_refactor.py @@ -13,6 +13,7 @@ from ruamel.yaml.main import YAML from ruamel.yaml.scalarstring import walk_tree +from schema_salad.runtime import save from cwl_utils import ( cwl_v1_0_expression_refactor, @@ -111,15 +112,12 @@ def refactor(args: argparse.Namespace) -> int: traverse: Callable[[Any, bool, bool, bool, bool], tuple[Any, bool]] = ( cwl_v1_0_expression_refactor.traverse ) - save: saveCWL = cwl_v1_0.save case "v1.1": top = cwl_v1_1.load_document_by_yaml(result, uri) traverse = cwl_v1_1_expression_refactor.traverse - save = cwl_v1_1.save case "v1.2": top = cwl_v1_2.load_document_by_yaml(result, uri) traverse = cwl_v1_2_expression_refactor.traverse - save = cwl_v1_2.save case _: _logger.error( "Sorry, %s is not a supported CWL version by this tool.", diff --git a/src/cwl_utils/normalizer.py b/src/cwl_utils/normalizer.py index 9e5107b3..0cfed248 100644 --- a/src/cwl_utils/normalizer.py +++ b/src/cwl_utils/normalizer.py @@ -12,12 +12,13 @@ from cwlupgrader import main as cwlupgrader from ruamel import yaml +from schema_salad.runtime import save from schema_salad.sourceline import add_lc_filename from cwl_utils import 
cwl_v1_2_expression_refactor from cwl_utils.loghandler import _logger as _cwlutilslogger from cwl_utils.pack import pack -from cwl_utils.parser.cwl_v1_2 import load_document_by_yaml, save +from cwl_utils.parser.cwl_v1_2 import load_document_by_yaml _logger = logging.getLogger("cwl-normalizer") # pylint: disable=invalid-name defaultStreamHandler = logging.StreamHandler() # pylint: disable=invalid-name diff --git a/src/cwl_utils/parser/__init__.py b/src/cwl_utils/parser/__init__.py index 6e105072..c083e04e 100644 --- a/src/cwl_utils/parser/__init__.py +++ b/src/cwl_utils/parser/__init__.py @@ -4,9 +4,11 @@ from abc import ABC from collections.abc import MutableMapping, MutableSequence from pathlib import Path -from typing import Any, Optional, TypeAlias, cast +from typing import Any, TypeAlias, cast from urllib.parse import unquote_plus, urlparse +import schema_salad.metaschema +import schema_salad.runtime from schema_salad.exceptions import ValidationException from schema_salad.utils import yaml_no_ts @@ -18,11 +20,9 @@ class NoType(ABC): pass -LoadingOptions: TypeAlias = ( - cwl_v1_0.LoadingOptions | cwl_v1_1.LoadingOptions | cwl_v1_2.LoadingOptions -) +LoadingOptions: TypeAlias = schema_salad.runtime.LoadingOptions """Type union for a CWL v1.x LoadingOptions object.""" -Saveable: TypeAlias = cwl_v1_0.Saveable | cwl_v1_1.Saveable | cwl_v1_2.Saveable +Saveable: TypeAlias = schema_salad.runtime.Saveable """Type union for a CWL v1.x Saveable object.""" InputParameter: TypeAlias = ( cwl_v1_0.InputParameter | cwl_v1_1.InputParameter | cwl_v1_2.InputParameter @@ -181,9 +181,7 @@ class NoType(ABC): cwl_v1_2.SoftwareRequirement, ) """Type union for a CWL v1.x SoftwareRequirement object.""" -ArraySchema: TypeAlias = ( - cwl_v1_0.ArraySchema | cwl_v1_1.ArraySchema | cwl_v1_2.ArraySchema -) +ArraySchema: TypeAlias = schema_salad.metaschema.ArraySchema InputArraySchema: TypeAlias = ( cwl_v1_0.InputArraySchema | cwl_v1_1.InputArraySchema | cwl_v1_2.InputArraySchema ) @@ -193,7 
+191,7 @@ class NoType(ABC): cwl_v1_2.InputArraySchema, ) """Type Union for a CWL v1.x ArraySchema object.""" -EnumSchema: TypeAlias = cwl_v1_0.EnumSchema | cwl_v1_1.EnumSchema | cwl_v1_2.EnumSchema +EnumSchema: TypeAlias = schema_salad.metaschema.EnumSchema InputEnumSchema: TypeAlias = ( cwl_v1_0.InputEnumSchema | cwl_v1_1.InputEnumSchema | cwl_v1_2.InputEnumSchema ) @@ -203,9 +201,7 @@ class NoType(ABC): cwl_v1_2.InputEnumSchema, ) """Type Union for a CWL v1.x EnumSchema object.""" -RecordSchema: TypeAlias = ( - cwl_v1_0.RecordSchema | cwl_v1_1.RecordSchema | cwl_v1_2.RecordSchema -) +RecordSchema: TypeAlias = schema_salad.metaschema.RecordSchema InputRecordSchema: TypeAlias = ( cwl_v1_0.InputRecordSchema | cwl_v1_1.InputRecordSchema | cwl_v1_2.InputRecordSchema ) @@ -289,53 +285,17 @@ def load_document_by_uri( base_uri = path.resolve().parent.as_uri() id_ = path.resolve().name.split("#")[1] if "#" in path.resolve().name else None - match loadingOptions: - case cwl_v1_0.LoadingOptions(): - loadingOptions = cwl_v1_0.LoadingOptions( - fileuri=real_uri, baseuri=base_uri, copyfrom=loadingOptions - ) - return load_document_by_string( - loadingOptions.fetcher.fetch_text(real_uri), - real_uri, - loadingOptions, - id_, - load_all, - ) - case cwl_v1_1.LoadingOptions(): - loadingOptions = cwl_v1_1.LoadingOptions( - fileuri=real_uri, baseuri=base_uri, copyfrom=loadingOptions - ) - return load_document_by_string( - loadingOptions.fetcher.fetch_text(real_uri), - real_uri, - loadingOptions, - id_, - load_all, - ) - case cwl_v1_2.LoadingOptions(): - loadingOptions = cwl_v1_2.LoadingOptions( - fileuri=real_uri, baseuri=base_uri, copyfrom=loadingOptions - ) - return load_document_by_string( - loadingOptions.fetcher.fetch_text(real_uri), - real_uri, - loadingOptions, - id_, - load_all, - ) - case None: - loadingOptions = cwl_v1_2.LoadingOptions(fileuri=real_uri, baseuri=base_uri) - return load_document_by_string( - loadingOptions.fetcher.fetch_text(real_uri), - real_uri, - None, - 
id_, - load_all, - ) - case _: - raise ValidationException( - f"Unsupported loadingOptions type: {type(loadingOptions)}" - ) + loadingOptions = LoadingOptions( + fileuri=real_uri, baseuri=base_uri, copyfrom=loadingOptions + ) + + return load_document_by_string( + loadingOptions.fetcher.fetch_text(real_uri), + real_uri, + loadingOptions, + id_, + load_all, + ) def load_document( @@ -347,7 +307,7 @@ def load_document( ) -> Any: """Load a CWL object from a serialized YAML string or a YAML object.""" if baseuri is None: - baseuri = cwl_v1_0.file_uri(str(Path.cwd())) + "/" + baseuri = schema_salad.runtime.file_uri(str(Path.cwd())) + "/" if isinstance(doc, str): return load_document_by_string(doc, baseuri, loadingOptions, id_) return load_document_by_yaml(doc, baseuri, loadingOptions, id_, load_all) @@ -379,17 +339,11 @@ def load_document_by_yaml( yaml["cwlVersion"] = version match version: case "v1.0": - result = cwl_v1_0.load_document_by_yaml( - yaml, uri, cast(Optional[cwl_v1_0.LoadingOptions], loadingOptions) - ) + result = cwl_v1_0.load_document_by_yaml(yaml, uri, loadingOptions) case "v1.1": - result = cwl_v1_1.load_document_by_yaml( - yaml, uri, cast(Optional[cwl_v1_1.LoadingOptions], loadingOptions) - ) + result = cwl_v1_1.load_document_by_yaml(yaml, uri, loadingOptions) case "v1.2": - result = cwl_v1_2.load_document_by_yaml( - yaml, uri, cast(Optional[cwl_v1_2.LoadingOptions], loadingOptions) - ) + result = cwl_v1_2.load_document_by_yaml(yaml, uri, loadingOptions) case None: raise ValidationException("could not get the cwlVersion") case _: @@ -415,7 +369,7 @@ def save( ) -> Any: """Convert a CWL Python object into a JSON/YAML serializable object.""" match val: - case cwl_v1_0.Saveable() | cwl_v1_1.Saveable() | cwl_v1_2.Saveable(): + case Saveable(): return val.save(top=top, base_url=base_url, relative_uris=relative_uris) case MutableSequence(): lst = [ diff --git a/src/cwl_utils/parser/cwl_v1_0.py b/src/cwl_utils/parser/cwl_v1_0.py index 7f3110dd..24131c76 
100644 --- a/src/cwl_utils/parser/cwl_v1_0.py +++ b/src/cwl_utils/parser/cwl_v1_0.py @@ -2,430 +2,51 @@ # This file was autogenerated using schema-salad-tool --codegen=python # The code itself is released under the Apache 2.0 license and the help text is # subject to the license of the original schema. +from __future__ import annotations -import copy -import logging import os -import pathlib -import tempfile -import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 -import xml.sax # nosec -from abc import ABC, abstractmethod -from collections.abc import MutableMapping, MutableSequence, Sequence +import sys +import uuid as _uuid__ +from collections.abc import Collection +from typing import ClassVar + +from schema_salad.runtime import ( + Saveable, + file_uri, + parse_errors, + prefix_url, + save, + save_relative_uri, +) + +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self + +import schema_salad.metaschema + +import copy +from collections.abc import MutableSequence, Sequence, MutableMapping from io import StringIO from itertools import chain -from typing import Any, Final, Optional, Union, cast -from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit -from urllib.request import pathname2url +from typing import Any, Final, cast, Generic +from urllib.parse import urldefrag, urlsplit, urlunsplit -from rdflib import Graph -from rdflib.plugins.parsers.notation3 import BadSyntax from ruamel.yaml.comments import CommentedMap -from schema_salad.exceptions import SchemaSaladException, ValidationException -from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher +from schema_salad.exceptions import ValidationException, SchemaSaladException +from schema_salad.runtime import ( + LoadingOptions, + convert_typing, + extract_type, + SaveableType, +) from schema_salad.sourceline import SourceLine, add_lc_filename -from schema_salad.utils import CacheType, yaml_no_ts # requires 
schema-salad v8.2+ - -_vocab: dict[str, str] = {} -_rvocab: dict[str, str] = {} - -_logger: Final = logging.getLogger("salad") - - -IdxType = MutableMapping[str, tuple[Any, "LoadingOptions"]] - - -class LoadingOptions: - idx: Final[IdxType] - fileuri: Final[Optional[str]] - baseuri: Final[str] - namespaces: Final[MutableMapping[str, str]] - schemas: Final[MutableSequence[str]] - original_doc: Final[Optional[Any]] - addl_metadata: Final[MutableMapping[str, Any]] - fetcher: Final[Fetcher] - vocab: Final[dict[str, str]] - rvocab: Final[dict[str, str]] - cache: Final[CacheType] - imports: Final[list[str]] - includes: Final[list[str]] - no_link_check: Final[Optional[bool]] - container: Final[Optional[str]] - - def __init__( - self, - fetcher: Optional[Fetcher] = None, - namespaces: Optional[dict[str, str]] = None, - schemas: Optional[list[str]] = None, - fileuri: Optional[str] = None, - copyfrom: Optional["LoadingOptions"] = None, - original_doc: Optional[Any] = None, - addl_metadata: Optional[dict[str, str]] = None, - baseuri: Optional[str] = None, - idx: Optional[IdxType] = None, - imports: Optional[list[str]] = None, - includes: Optional[list[str]] = None, - no_link_check: Optional[bool] = None, - container: Optional[str] = None, - ) -> None: - """Create a LoadingOptions object.""" - self.original_doc = original_doc - - if idx is not None: - temp_idx = idx - else: - temp_idx = copyfrom.idx if copyfrom is not None else {} - self.idx = temp_idx - - if fileuri is not None: - temp_fileuri: Optional[str] = fileuri - else: - temp_fileuri = copyfrom.fileuri if copyfrom is not None else None - self.fileuri = temp_fileuri - - if baseuri is not None: - temp_baseuri = baseuri - else: - temp_baseuri = copyfrom.baseuri if copyfrom is not None else "" - self.baseuri = temp_baseuri - - if namespaces is not None: - temp_namespaces: MutableMapping[str, str] = namespaces - else: - temp_namespaces = copyfrom.namespaces if copyfrom is not None else {} - self.namespaces = temp_namespaces 
- - if schemas is not None: - temp_schemas: MutableSequence[str] = schemas - else: - temp_schemas = copyfrom.schemas if copyfrom is not None else [] - self.schemas = temp_schemas - - if addl_metadata is not None: - temp_addl_metadata: MutableMapping[str, Any] = addl_metadata - else: - temp_addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {} - self.addl_metadata = temp_addl_metadata - - if imports is not None: - temp_imports = imports - else: - temp_imports = copyfrom.imports if copyfrom is not None else [] - self.imports = temp_imports - - if includes is not None: - temp_includes = includes - else: - temp_includes = copyfrom.includes if copyfrom is not None else [] - self.includes = temp_includes - - if no_link_check is not None: - temp_no_link_check: Optional[bool] = no_link_check - else: - temp_no_link_check = copyfrom.no_link_check if copyfrom is not None else False - self.no_link_check = temp_no_link_check - - if container is not None: - temp_container: Optional[str] = container - else: - temp_container = copyfrom.container if copyfrom is not None else None - self.container = temp_container - - if fetcher is not None: - temp_fetcher = fetcher - elif copyfrom is not None: - temp_fetcher = copyfrom.fetcher - else: - import requests - from cachecontrol.caches import SeparateBodyFileCache - from cachecontrol.wrapper import CacheControl - - root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) - session = CacheControl( - requests.Session(), - cache=SeparateBodyFileCache(root / ".cache" / "salad"), - ) - temp_fetcher = DefaultFetcher({}, session) - self.fetcher = temp_fetcher - - self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} - - if self.namespaces != {}: - temp_vocab = _vocab.copy() - temp_rvocab = _rvocab.copy() - for k, v in self.namespaces.items(): - temp_vocab[k] = v - temp_rvocab[v] = k - else: - temp_vocab = _vocab - temp_rvocab = _rvocab - self.vocab = temp_vocab - self.rvocab = 
temp_rvocab - - @property - def graph(self) -> Graph: - """Generate a merged rdflib.Graph from all entries in self.schemas.""" - graph = Graph() - if not self.schemas: - return graph - key: Final = str(hash(tuple(self.schemas))) - if key in self.cache: - return cast(Graph, self.cache[key]) - for schema in self.schemas: - fetchurl = ( - self.fetcher.urljoin(self.fileuri, schema) - if self.fileuri is not None - else pathlib.Path(schema).resolve().as_uri() - ) - if fetchurl not in self.cache or self.cache[fetchurl] is True: - _logger.debug("Getting external schema %s", fetchurl) - try: - content = self.fetcher.fetch_text(fetchurl) - except Exception as e: - _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e)) - continue - newGraph = Graph() - err_msg = "unknown error" - for fmt in ["xml", "turtle"]: - try: - newGraph.parse(data=content, format=fmt, publicID=str(fetchurl)) - self.cache[fetchurl] = newGraph - graph += newGraph - break - except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: - err_msg = str(e) - else: - _logger.warning("Could not load extension schema %s: %s", fetchurl, err_msg) - self.cache[key] = graph - return graph - - -class Saveable(ABC): - """Mark classes than have a save() and fromDoc() function.""" - - @classmethod - @abstractmethod - def fromDoc( - cls, - _doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Saveable": - """Construct this object from the result of yaml.load().""" - - @abstractmethod - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - """Convert this object to a JSON/YAML friendly dictionary.""" - - -def load_field( - val: Union[str, dict[str, str]], - fieldtype: "_Loader", - baseuri: str, - loadingOptions: LoadingOptions, - lc: Optional[list[Any]] = None, -) -> Any: - """Load field.""" - if isinstance(val, MutableMapping): - if "$import" in val: - if loadingOptions.fileuri is None: - raise 
SchemaSaladException("Cannot load $import without fileuri") - url1: Final = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) - result, metadata = _document_load_by_url( - fieldtype, - url1, - loadingOptions, - ) - loadingOptions.imports.append(url1) - return result - if "$include" in val: - if loadingOptions.fileuri is None: - raise SchemaSaladException("Cannot load $import without fileuri") - url2: Final = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) - val = loadingOptions.fetcher.fetch_text(url2) - loadingOptions.includes.append(url2) - return fieldtype.load(val, baseuri, loadingOptions, lc=lc) - - -save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] - - -def extract_type(val_type: type[Any]) -> str: - """Take a type of value, and extracts the value as a string.""" - val_str: Final = str(val_type) - return val_str.split("'")[1] - - -def convert_typing(val_type: str) -> str: - """Normalize type names to schema-salad types.""" - if "None" in val_type: - return "null" - if "CommentedSeq" in val_type or "list" in val_type: - return "array" - if "CommentedMap" in val_type or "dict" in val_type: - return "object" - if "False" in val_type or "True" in val_type: - return "boolean" - return val_type - - -def parse_errors(error_message: str) -> tuple[str, str, str]: - """Parse error messages from several loaders into one error message.""" - if not error_message.startswith("Expected"): - return error_message, "", "" - vals: Final = error_message.split("\n") - if len(vals) == 1: - return error_message, "", "" - types1: Final = set() - for val in vals: - individual_vals = val.split(" ") - if val == "": - continue - if individual_vals[1] == "one": - individual_vals = val.split("(")[1].split(",") - for t in individual_vals: - types1.add(t.strip(" ").strip(")\n")) - elif individual_vals[2] == "").replace("'", "")) - elif individual_vals[0] == "Value": - 
types1.add(individual_vals[-1].strip(".")) - else: - types1.add(individual_vals[1].replace(",", "")) - types2: Final = {val for val in types1 if val != "NoneType"} - if "str" in types2: - types3 = {convert_typing(val) for val in types2 if "'" not in val} - else: - types3 = types2 - to_print = "" - for val in types3: - if "'" in val: - to_print = "value" if len(types3) == 1 else "values" - - if to_print == "": - to_print = "type" if len(types3) == 1 else "types" - - verb_tensage: Final = "is" if len(types3) == 1 else "are" - - return str(types3).replace("{", "(").replace("}", ")").replace("'", ""), to_print, verb_tensage - - -def save( - val: Any, - top: bool = True, - base_url: str = "", - relative_uris: bool = True, -) -> save_type: - if isinstance(val, Saveable): - return val.save(top=top, base_url=base_url, relative_uris=relative_uris) - if isinstance(val, MutableSequence): - return [save(v, top=False, base_url=base_url, relative_uris=relative_uris) for v in val] - if isinstance(val, MutableMapping): - newdict: Final = {} - for key in val: - newdict[key] = save(val[key], top=False, base_url=base_url, relative_uris=relative_uris) - return newdict - if val is None or isinstance(val, (int, float, bool, str)): - return val - raise Exception("Not Saveable: %s" % type(val)) - - -def save_with_metadata( - val: Any, - valLoadingOpts: LoadingOptions, - top: bool = True, - base_url: str = "", - relative_uris: bool = True, -) -> save_type: - """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level.""" - saved_val: Final = save(val, top, base_url, relative_uris) - newdict: MutableMapping[str, Any] = {} - if isinstance(saved_val, MutableSequence): - newdict = {"$graph": saved_val} - elif isinstance(saved_val, MutableMapping): - newdict = saved_val - - if valLoadingOpts.namespaces: - newdict["$namespaces"] = valLoadingOpts.namespaces - if valLoadingOpts.schemas: - newdict["$schemas"] = valLoadingOpts.schemas - if valLoadingOpts.baseuri: - 
newdict["$base"] = valLoadingOpts.baseuri - for k, v in valLoadingOpts.addl_metadata.items(): - if k not in newdict: - newdict[k] = v - - return newdict - - -def expand_url( - url: str, - base_url: str, - loadingOptions: LoadingOptions, - scoped_id: bool = False, - vocab_term: bool = False, - scoped_ref: Optional[int] = None, -) -> str: - if url in ("@id", "@type"): - return url - - if vocab_term and url in loadingOptions.vocab: - return url - - if bool(loadingOptions.vocab) and ":" in url: - prefix: Final = url.split(":")[0] - if prefix in loadingOptions.vocab: - url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] - - split1: Final = urlsplit(url) - - if ( - (bool(split1.scheme) and split1.scheme in loadingOptions.fetcher.supported_schemes()) - or url.startswith("$(") - or url.startswith("${") - ): - pass - elif scoped_id and not bool(split1.fragment): - splitbase1: Final = urlsplit(base_url) - frg: str - if bool(splitbase1.fragment): - frg = splitbase1.fragment + "/" + split1.path - else: - frg = split1.path - pt: Final = splitbase1.path if splitbase1.path != "" else "/" - url = urlunsplit((splitbase1.scheme, splitbase1.netloc, pt, splitbase1.query, frg)) - elif scoped_ref is not None and not bool(split1.fragment): - splitbase2: Final = urlsplit(base_url) - sp = splitbase2.fragment.split("/") - n = scoped_ref - while n > 0 and len(sp) > 0: - sp.pop() - n -= 1 - sp.append(url) - url = urlunsplit( - ( - splitbase2.scheme, - splitbase2.netloc, - splitbase2.path, - splitbase2.query, - "/".join(sp), - ) - ) - else: - url = loadingOptions.fetcher.urljoin(base_url, url) - - if vocab_term: - split2: Final = urlsplit(url) - if bool(split2.scheme): - if url in loadingOptions.rvocab: - return loadingOptions.rvocab[url] - else: - raise ValidationException(f"Term {url!r} not in vocabulary") +from schema_salad.utils import yaml_no_ts # requires schema-salad v8.2+ - return url +_vocab: Final[dict[str, str]] = {} +_rvocab: Final[dict[str, str]] = {} class _Loader: @@ 
-434,9 +55,9 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> Any | None: pass @@ -446,8 +67,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if doc is not None: return doc @@ -455,7 +76,7 @@ def load( class _PrimitiveLoader(_Loader): - def __init__(self, tp: Union[type, tuple[type[str], type[str]]]) -> None: + def __init__(self, tp: type | tuple[type[str], type[str]]) -> None: self.tp: Final = tp def load( @@ -463,8 +84,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if not isinstance(doc, self.tp): raise ValidationException(f"Expected a {self.tp} but got {doc.__class__.__name__}") @@ -483,9 +104,9 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> list[Any]: if not isinstance(doc, MutableSequence): raise ValidationException( f"Value is a {convert_typing(extract_type(type(doc)))}, " @@ -496,7 +117,7 @@ def load( fields: Final[list[str]] = [] for i in range(0, len(doc)): try: - lf = load_field( + lf = _load_field( doc[i], _UnionLoader([self, self.items]), baseuri, loadingOptions, lc=lc ) flatten = loadingOptions.container != "@list" @@ -535,9 +156,9 @@ class _MapLoader(_Loader): def __init__( self, values: _Loader, - name: Optional[str] = None, - container: Optional[str] = None, - no_link_check: Optional[bool] = None, + name: str | None = None, + container: str | None = None, + no_link_check: bool | None = None, ) -> None: self.values: Final = 
values self.name: Final = name @@ -549,9 +170,9 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> dict[str, Any]: if not isinstance(doc, MutableMapping): raise ValidationException(f"Expected a map, was {type(doc)}") if self.container is not None or self.no_link_check is not None: @@ -562,7 +183,7 @@ def load( errors: Final[list[SchemaSaladException]] = [] for k, v in doc.items(): try: - lf = load_field(v, self.values, baseuri, loadingOptions, lc) + lf = _load_field(v, self.values, baseuri, loadingOptions, lc) r[k] = lf except ValidationException as e: errors.append(e.with_sourceline(SourceLine(doc, k, str))) @@ -584,11 +205,11 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> str: if doc in self.symbols: - return doc + return cast(str, doc) raise ValidationException(f"Expected one of {self.symbols}") def __repr__(self) -> str: @@ -604,75 +225,76 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: r: Final[list[dict[str, Any]]] = [] - if isinstance(doc, MutableSequence): - for d in doc: - if isinstance(d, str): - if d.endswith("?"): - r.append({"pattern": d[:-1], "required": False}) - else: - r.append({"pattern": d}) - elif isinstance(d, dict): - new_dict1: dict[str, Any] = {} - dict_copy = copy.deepcopy(d) - if "pattern" in dict_copy: - new_dict1["pattern"] = dict_copy.pop("pattern") - else: - raise ValidationException( - f"Missing pattern in secondaryFiles specification entry: {d}" + match doc: + case MutableSequence() as dlist: + for d in dlist: + if isinstance(d, str): + if d.endswith("?"): + 
r.append({"pattern": d[:-1], "required": False}) + else: + r.append({"pattern": d}) + elif isinstance(d, dict): + new_dict1: dict[str, Any] = {} + dict_copy = copy.deepcopy(d) + if "pattern" in dict_copy: + new_dict1["pattern"] = dict_copy.pop("pattern") + else: + raise ValidationException( + f"Missing pattern in secondaryFiles specification entry: {d}" + ) + new_dict1["required"] = ( + dict_copy.pop("required") if "required" in dict_copy else None ) - new_dict1["required"] = ( - dict_copy.pop("required") if "required" in dict_copy else None - ) - if len(dict_copy): - raise ValidationException( - "Unallowed values in secondaryFiles specification entry: {}".format( - dict_copy + if len(dict_copy): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + dict_copy + ) ) - ) - r.append(new_dict1) + r.append(new_dict1) + else: + raise ValidationException( + "Expected a string or sequence of (strings or mappings)." + ) + case MutableMapping() as decl: + new_dict2 = {} + doc_copy = copy.deepcopy(decl) + if "pattern" in doc_copy: + new_dict2["pattern"] = doc_copy.pop("pattern") else: raise ValidationException( - "Expected a string or sequence of (strings or mappings)." 
+ f"Missing pattern in secondaryFiles specification entry: {decl}" ) - elif isinstance(doc, MutableMapping): - new_dict2: Final = {} - doc_copy: Final = copy.deepcopy(doc) - if "pattern" in doc_copy: - new_dict2["pattern"] = doc_copy.pop("pattern") - else: - raise ValidationException( - f"Missing pattern in secondaryFiles specification entry: {doc}" - ) - new_dict2["required"] = doc_copy.pop("required") if "required" in doc_copy else None + new_dict2["required"] = doc_copy.pop("required") if "required" in doc_copy else None - if len(doc_copy): - raise ValidationException( - f"Unallowed values in secondaryFiles specification entry: {doc_copy}" - ) - r.append(new_dict2) + if len(doc_copy): + raise ValidationException( + f"Unallowed values in secondaryFiles specification entry: {doc_copy}" + ) + r.append(new_dict2) - elif isinstance(doc, str): - if doc.endswith("?"): - r.append({"pattern": doc[:-1], "required": False}) - else: - r.append({"pattern": doc}) - else: - raise ValidationException("Expected str or sequence of str") + case str(decl): + if decl.endswith("?"): + r.append({"pattern": decl[:-1], "required": False}) + else: + r.append({"pattern": decl}) + case _: + raise ValidationException("Expected str or sequence of str") return self.inner.load(r, baseuri, loadingOptions, docRoot, lc=lc) -class _RecordLoader(_Loader): +class _RecordLoader(_Loader, Generic[SaveableType]): def __init__( self, - classtype: type[Saveable], - container: Optional[str] = None, - no_link_check: Optional[bool] = None, + classtype: type[SaveableType], + container: str | None = None, + no_link_check: bool | None = None, ) -> None: self.classtype: Final = classtype self.container: Final = container @@ -683,9 +305,9 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> SaveableType: if not isinstance(doc, MutableMapping): raise 
ValidationException( f"Value is a {convert_typing(extract_type(type(doc)))}, " @@ -710,19 +332,20 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> str: if not isinstance(doc, str): raise ValidationException( f"Value is a {convert_typing(extract_type(type(doc)))}, " f"but valid type for this field is a str." ) - return doc + else: + return doc class _UnionLoader(_Loader): - def __init__(self, alternates: Sequence[_Loader], name: Optional[str] = None) -> None: + def __init__(self, alternates: Sequence[_Loader], name: str | None = None) -> None: self.alternates = alternates self.name: Final = name @@ -734,8 +357,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: errors: Final = [] @@ -817,8 +440,8 @@ def __init__( inner: _Loader, scoped_id: bool, vocab_term: bool, - scoped_ref: Optional[int], - no_link_check: Optional[bool], + scoped_ref: int | None, + no_link_check: bool | None, ) -> None: self.inner: Final = inner self.scoped_id: Final = scoped_id @@ -831,39 +454,40 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if self.no_link_check is not None: loadingOptions = LoadingOptions( copyfrom=loadingOptions, no_link_check=self.no_link_check ) - if isinstance(doc, MutableSequence): - newdoc: Final = [] - for i in doc: - if isinstance(i, str): - newdoc.append( - expand_url( - i, - baseuri, - loadingOptions, - self.scoped_id, - self.vocab_term, - self.scoped_ref, - ) - ) - else: - newdoc.append(i) - doc = newdoc - elif isinstance(doc, str): - doc = expand_url( - doc, - baseuri, - loadingOptions, - self.scoped_id, - 
self.vocab_term, - self.scoped_ref, - ) + match doc: + case MutableSequence() as decl: + newdoc: Final = [] + for i in decl: + if isinstance(i, str): + newdoc.append( + _expand_url( + i, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + ) + else: + newdoc.append(i) + doc = newdoc + case str(decl): + doc = _expand_url( + decl, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) if isinstance(doc, str): if not loadingOptions.no_link_check: errors: Final = [] @@ -880,7 +504,12 @@ def load( class _TypeDSLLoader(_Loader): - def __init__(self, inner: _Loader, refScope: Optional[int], salad_version: str) -> None: + def __init__( + self, + inner: _Loader, + refScope: int | None, + salad_version: str, + ) -> None: self.inner: Final = inner self.refScope: Final = refScope self.salad_version: Final = salad_version @@ -890,7 +519,7 @@ def resolve( doc: str, baseuri: str, loadingOptions: LoadingOptions, - ) -> Union[list[Union[dict[str, Any], str]], dict[str, Any], str]: + ) -> list[dict[str, Any] | str] | dict[str, Any] | str: doc_ = doc optional = False if doc_.endswith("?"): @@ -899,21 +528,42 @@ def resolve( if doc_.endswith("[]"): salad_versions: Final = [int(v) for v in self.salad_version[1:].split(".")] - items: Union[list[Union[dict[str, Any], str]], dict[str, Any], str] = "" + items: list[dict[str, Any] | str] | dict[str, Any] | str = "" rest: Final = doc_[0:-2] if salad_versions < [1, 3]: if rest.endswith("[]"): # To show the error message with the original type return doc else: - items = expand_url(rest, baseuri, loadingOptions, False, True, self.refScope) + items = _expand_url( + rest, + baseuri, + loadingOptions, + False, + True, + self.refScope, + ) else: items = self.resolve(rest, baseuri, loadingOptions) if isinstance(items, str): - items = expand_url(items, baseuri, loadingOptions, False, True, self.refScope) - expanded: Union[dict[str, Any], str] = {"type": "array", "items": items} + 
items = _expand_url( + items, + baseuri, + loadingOptions, + False, + True, + self.refScope, + ) + expanded: dict[str, Any] | str = {"type": "array", "items": items} else: - expanded = expand_url(doc_, baseuri, loadingOptions, False, True, self.refScope) + expanded = _expand_url( + doc_, + baseuri, + loadingOptions, + False, + True, + self.refScope, + ) if optional: return ["null", expanded] @@ -925,8 +575,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if isinstance(doc, MutableSequence): r: Final[list[Any]] = [] @@ -950,7 +600,7 @@ def load( class _IdMapLoader(_Loader): - def __init__(self, inner: _Loader, mapSubject: str, mapPredicate: Optional[str]) -> None: + def __init__(self, inner: _Loader, mapSubject: str, mapPredicate: str | None) -> None: self.inner: Final = inner self.mapSubject: Final = mapSubject self.mapPredicate: Final = mapPredicate @@ -960,8 +610,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if isinstance(doc, MutableMapping): r: Final[list[Any]] = [] @@ -990,10 +640,10 @@ def load( def _document_load( loader: _Loader, - doc: Union[str, MutableMapping[str, Any], MutableSequence[Any]], + doc: str | MutableMapping[str, Any] | MutableSequence[Any], baseuri: str, loadingOptions: LoadingOptions, - addl_metadata_fields: Optional[MutableSequence[str]] = None, + addl_metadata_fields: MutableSequence[str] | None = None, ) -> tuple[Any, LoadingOptions]: if isinstance(doc, str): return _document_load_by_url( @@ -1062,7 +712,7 @@ def _document_load_by_url( loader: _Loader, url: str, loadingOptions: LoadingOptions, - addl_metadata_fields: Optional[MutableSequence[str]] = None, + addl_metadata_fields: MutableSequence[str] | None = None, ) -> tuple[Any, 
LoadingOptions]: if url in loadingOptions.idx: return loadingOptions.idx[url] @@ -1089,103 +739,316 @@ def _document_load_by_url( return loadingOptions.idx[url] -def file_uri(path: str, split_frag: bool = False) -> str: - """Transform a file path into a URL with file scheme.""" - if path.startswith("file://"): - return path - if split_frag: - pathsp: Final = path.split("#", 2) - frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else "" - urlpath = pathname2url(str(pathsp[0])) - else: - urlpath = pathname2url(path) - frag = "" - if urlpath.startswith("//"): - return f"file:{urlpath}{frag}" - return f"file://{urlpath}{frag}" - - -def prefix_url(url: str, namespaces: dict[str, str]) -> str: - """Expand short forms into full URLs using the given namespace dictionary.""" - for k, v in namespaces.items(): - if url.startswith(v): - return k + ":" + url[len(v) :] - return url - - -def save_relative_uri( - uri: Any, +def _expand_url( + url: str, base_url: str, - scoped_id: bool, - ref_scope: Optional[int], - relative_uris: bool, -) -> Any: - """Convert any URI to a relative one, obeying the scoping rules.""" - if isinstance(uri, MutableSequence): - return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri] - elif isinstance(uri, str): - if not relative_uris or uri == base_url: - return uri - urisplit: Final = urlsplit(uri) - basesplit: Final = urlsplit(base_url) - if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: - if urisplit.path != basesplit.path: - p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) - if urisplit.fragment: - p = p + "#" + urisplit.fragment - return p - - basefrag = basesplit.fragment + "/" - if ref_scope: - sp = basefrag.split("/") - i = 0 - while i < ref_scope: - sp.pop() - i += 1 - basefrag = "/".join(sp) - - if urisplit.fragment.startswith(basefrag): - return urisplit.fragment[len(basefrag) :] - return urisplit.fragment - return uri - else: - return save(uri, top=False, 
base_url=base_url, relative_uris=relative_uris) + loadingOptions: LoadingOptions, + scoped_id: bool = False, + vocab_term: bool = False, + scoped_ref: int | None = None, +) -> str: + if url in ("@id", "@type"): + return url + vocab = _vocab | loadingOptions.vocab + if vocab_term and url in vocab: + return url -def shortname(inputid: str) -> str: - """ - Compute the shortname of a fully qualified identifier. + if bool(vocab) and ":" in url: + prefix: Final = url.split(":")[0] + if prefix in vocab: + url = vocab[prefix] + url[len(prefix) + 1 :] - See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names. - """ - parsed_id: Final = urlparse(inputid) - if parsed_id.fragment: - return parsed_id.fragment.split("/")[-1] - return parsed_id.path.split("/")[-1] + split1: Final = urlsplit(url) + if ( + (bool(split1.scheme) and split1.scheme in loadingOptions.fetcher.supported_schemes()) + or url.startswith("$(") + or url.startswith("${") + ): + pass + elif scoped_id and not bool(split1.fragment): + splitbase1: Final = urlsplit(base_url) + frg: str + if bool(splitbase1.fragment): + frg = splitbase1.fragment + "/" + split1.path + else: + frg = split1.path + pt: Final = splitbase1.path if splitbase1.path != "" else "/" + url = urlunsplit((splitbase1.scheme, splitbase1.netloc, pt, splitbase1.query, frg)) + elif scoped_ref is not None and not bool(split1.fragment): + splitbase2: Final = urlsplit(base_url) + sp = splitbase2.fragment.split("/") + n = scoped_ref + while n > 0 and len(sp) > 0: + sp.pop() + n -= 1 + sp.append(url) + url = urlunsplit( + ( + splitbase2.scheme, + splitbase2.netloc, + splitbase2.path, + splitbase2.query, + "/".join(sp), + ) + ) + else: + url = loadingOptions.fetcher.urljoin(base_url, url) -def parser_info() -> str: - return "org.w3id.cwl.v1_0" + if vocab_term: + split2: Final = urlsplit(url) + if bool(split2.scheme): + if url in (rvocab := _rvocab | loadingOptions.rvocab): + return rvocab[url] + else: + raise ValidationException(f"Term {url!r} not in 
vocabulary") + return url -class Documented(Saveable): - pass + +def _load_field( + val: Any | None, + fieldtype: "_Loader", + baseuri: str, + loadingOptions: LoadingOptions, + lc: Any | None = None, +) -> Any: + """Load field.""" + if isinstance(val, MutableMapping): + if "$import" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url1: Final = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) + result, metadata = _document_load_by_url( + fieldtype, + url1, + loadingOptions, + ) + loadingOptions.imports.append(url1) + return result + if "$include" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url2: Final = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) + val = loadingOptions.fetcher.fetch_text(url2) + loadingOptions.includes.append(url2) + return fieldtype.load(val, baseuri, loadingOptions, lc=lc) -class RecordField(Documented): - """ - A field of a record. 
- """ +def parser_info() -> str: + return "org.w3id.cwl.v1_0" + + +class CWLArraySchema(schema_salad.metaschema.ArraySchema): + def __init__( + self, + items: Any, + type_: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CWLArraySchema): + return bool(self.items == other.items and self.type_ == other.type_) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = _load_field( + _doc.get("items"), + uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, 
"items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False 
+ ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + items=items, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.items is not None: + u = save_relative_uri(self.items, base_url, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["items", "type"]) + + +class CWLRecordField(schema_salad.metaschema.RecordField): name: str def __init__( self, name: Any, type_: Any, - doc: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + doc: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -1200,7 +1063,7 @@ def __init__( self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, RecordField): + if isinstance(other, CWLRecordField): return bool( self.doc == other.doc and self.name == other.name @@ 
-1217,8 +1080,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "RecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -1228,7 +1091,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, @@ -1284,7 +1147,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -1332,9 +1195,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2, + typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -1376,7 +1239,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -1384,7 +1247,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -1441,16 +1304,16 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type"]) + attrs: ClassVar[Collection[str]] = 
frozenset(["doc", "name", "type"]) -class RecordSchema(Saveable): +class CWLRecordSchema(schema_salad.metaschema.RecordSchema): def __init__( self, type_: Any, - fields: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + fields: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -1464,7 +1327,7 @@ def __init__( self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, RecordSchema): + if isinstance(other, CWLRecordSchema): return bool(self.fields == other.fields and self.type_ == other.type_) return False @@ -1477,8 +1340,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "RecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -1488,9 +1351,9 @@ def fromDoc( fields = None if "fields" in _doc: try: - fields = load_field( + fields = _load_field( _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, + idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader, baseuri, loadingOptions, lc=_doc.get("fields") @@ -1536,7 +1399,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Record_nameLoader_2, baseuri, @@ -1580,7 +1443,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -1588,7 +1451,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ 
-1640,24 +1503,52 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type"]) + attrs: ClassVar[Collection[str]] = frozenset(["fields", "type"]) -class EnumSchema(Saveable): +class File(Saveable): """ - Define an enumerated type. + Represents a file (or group of files when ``secondaryFiles`` is provided) that will be accessible by tools using standard POSIX file system call API such as open(2) and read(2). - """ + Files are represented as objects with ``class`` of ``File``. File objects have a number of properties that provide metadata about the file. - name: str + The ``location`` property of a File is a URI that uniquely identifies the file. Implementations must support the file:// URI scheme and may support other schemes such as http://. The value of ``location`` may also be a relative reference, in which case it must be resolved relative to the URI of the document it appears in. Alternately to ``location``, implementations must also accept the ``path`` property on File, which must be a filesystem path available on the same host as the CWL runner (for inputs) or the runtime environment of a command line tool execution (for command line tool outputs). + + If no ``location`` or ``path`` is specified, a file object must specify ``contents`` with the UTF-8 text content of the file. This is a "file literal". File literals do not correspond to external resources, but are created on disk with ``contents`` with when needed for a executing a tool. Where appropriate, expressions can return file literals to define new files on a runtime. The maximum size of ``contents`` is 64 kilobytes. + + The ``basename`` property defines the filename on disk where the file is staged. This may differ from the resource name. If not provided, ``basename`` must be computed from the last path part of ``location`` and made available to expressions. 
+ + The ``secondaryFiles`` property is a list of File or Directory objects that must be staged in the same directory as the primary file. It is an error for file names to be duplicated in ``secondaryFiles``. + + The ``size`` property is the size in bytes of the File. It must be computed from the resource and made available to expressions. The ``checksum`` field contains a cryptographic hash of the file content for use it verifying file contents. Implementations may, at user option, enable or disable computation of the ``checksum`` field for performance or other reasons. However, the ability to compute output checksums is required to pass the CWL conformance test suite. + + When executing a CommandLineTool, the files and secondary files may be staged to an arbitrary directory, but must use the value of ``basename`` for the filename. The ``path`` property must be file path in the context of the tool execution runtime (local to the compute node, or within the executing container). All computed properties should be available to expressions. File literals also must be staged and ``path`` must be set. + + When collecting CommandLineTool outputs, ``glob`` matching returns file paths (with the ``path`` property) and the derived properties. This can all be modified by ``outputEval``. Alternately, if the file ``cwl.output.json`` is present in the output, ``outputBinding`` is ignored. + + File objects in the output must provide either a ``location`` URI or a ``path`` property in the context of the tool execution runtime (local to the compute node, or within the executing container). + + When evaluating an ExpressionTool, file objects must be referenced via ``location`` (the expression tool does not have access to files on disk so ``path`` is meaningless) or as file literals. It is legal to return a file object with an existing ``location`` but a different ``basename``. 
The ``loadContents`` field of ExpressionTool inputs behaves the same as on CommandLineTool inputs, however it is not meaningful on the outputs. + + An ExpressionTool may forward file references from input to output by using the same value for ``location``. + + """ def __init__( self, - symbols: Any, - type_: Any, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + location: Any | None = None, + path: Any | None = None, + basename: Any | None = None, + dirname: Any | None = None, + nameroot: Any | None = None, + nameext: Any | None = None, + checksum: Any | None = None, + size: Any | None = None, + secondaryFiles: Any | None = None, + format: Any | None = None, + contents: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -1667,21 +1558,54 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.symbols = symbols - self.type_ = type_ + self.class_: Final[str] = "File" + self.location = location + self.path = path + self.basename = basename + self.dirname = dirname + self.nameroot = nameroot + self.nameext = nameext + self.checksum = checksum + self.size = size + self.secondaryFiles = secondaryFiles + self.format = format + self.contents = contents def __eq__(self, other: Any) -> bool: - if isinstance(other, EnumSchema): + if isinstance(other, File): return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type_ == other.type_ + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.dirname == other.dirname + and self.nameroot == other.nameroot + and self.nameext == other.nameext + and self.checksum 
== other.checksum + and self.size == other.size + and self.secondaryFiles == other.secondaryFiles + and self.format == other.format + and self.contents == other.contents ) return False def __hash__(self) -> int: - return hash((self.name, self.symbols, self.type_)) + return hash( + ( + self.class_, + self.location, + self.path, + self.basename, + self.dirname, + self.nameroot, + self.nameext, + self.checksum, + self.size, + self.secondaryFiles, + self.format, + self.contents, + ) + ) @classmethod def fromDoc( @@ -1689,29 +1613,46 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "EnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_File_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + location = None + if "location" in _doc: try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, + location = _load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("location") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `location`": _errors__.append( ValidationException( str(e), @@ -1719,13 +1660,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("location") if 
error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -1737,537 +1678,499 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `location` field with value `{val}` " "is not valid because:", ) ) + path = None + if "path" in _doc: + try: + path = _load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("path") + ) - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - try: - if _doc.get("symbols") is None: - raise ValidationException("missing required field `symbols`", None, []) - - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("symbols") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `symbols`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("symbols") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `path`": _errors__.append( ValidationException( 
- "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - detailed_message=f"the `symbols` field with value `{val}` " - "is not valid because:", + val = _doc.get("path") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [e], + detailed_message=f"the `path` field with value `{val}` " + "is not valid because:", + ) + ) + basename = None + if "basename" in _doc: + try: + basename = _load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("basename") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": 
- _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `basename`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("basename") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + detailed_message=f"the `basename` field with value `{val}` " + "is not valid because:", + ) + ) + dirname = None + if "dirname" in _doc: + try: + dirname = _load_field( + _doc.get("dirname"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("dirname") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `dirname`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, 
Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`".format( - k - ), - SourceLine(_doc, k, str), + val = _doc.get("dirname") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `dirname` field is not valid because:", + SourceLine(_doc, "dirname", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - name=name, - symbols=symbols, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) - r["symbols"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - - # top refers to 
the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "symbols", "type"]) - - -class ArraySchema(Saveable): - def __init__( - self, - items: Any, - type_: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ArraySchema): - return bool(self.items == other.items and self.type_ == other.type_) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ArraySchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) - - items = load_field( - _doc.get("items"), - uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("items") - ) + else: + _errors__.append( + ValidationException( + "the `dirname` field is not valid because:", + SourceLine(_doc, "dirname", str), + [e], + detailed_message=f"the `dirname` field with value `{val}` " + "is 
not valid because:", + ) + ) + nameroot = None + if "nameroot" in _doc: + try: + nameroot = _load_field( + _doc.get("nameroot"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("nameroot") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("items") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `nameroot`": _errors__.append( ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [e], - detailed_message=f"the `items` field with value `{val}` " - "is not valid because:", + val = _doc.get("nameroot") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `nameroot` field is not valid because:", + SourceLine(_doc, "nameroot", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - 
typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + _errors__.append( + ValidationException( + "the `nameroot` field is not valid because:", + SourceLine(_doc, "nameroot", str), + [e], + detailed_message=f"the `nameroot` field with value `{val}` " + "is not valid because:", + ) + ) + nameext = None + if "nameext" in _doc: + try: + nameext = _load_field( + _doc.get("nameext"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("nameext") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `nameext`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("nameext") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `nameext` field is not valid because:", + SourceLine(_doc, "nameext", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + 
detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( - k - ), - SourceLine(_doc, k, str), + else: + _errors__.append( + ValidationException( + "the `nameext` field is not valid because:", + SourceLine(_doc, "nameext", str), + [e], + detailed_message=f"the `nameext` field with value `{val}` " + "is not valid because:", + ) ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - items=items, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.items is not None: - u = save_relative_uri(self.items, base_url, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type"]) - - -class MapSchema(Saveable): - def __init__( - self, - type_: Any, 
- values: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.type_ = type_ - self.values = values - - def __eq__(self, other: Any) -> bool: - if isinstance(other, MapSchema): - return bool(self.type_ == other.type_ and self.values == other.values) - return False - - def __hash__(self) -> int: - return hash((self.type_, self.values)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "MapSchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Map_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + checksum = None + if "checksum" in _doc: + try: + checksum = _load_field( + _doc.get("checksum"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("checksum") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `checksum`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - 
f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("checksum") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `checksum` field is not valid because:", + SourceLine(_doc, "checksum", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - try: - if _doc.get("values") is None: - raise ValidationException("missing required field `values`", None, []) - - values = load_field( - _doc.get("values"), - uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("values") - ) + else: + _errors__.append( + ValidationException( + "the `checksum` field is not valid because:", + SourceLine(_doc, "checksum", str), + [e], + detailed_message=f"the `checksum` field with value `{val}` " + "is not valid because:", + ) + ) + size = None + if "size" in _doc: + try: + size = _load_field( + _doc.get("size"), + union_of_None_type_or_inttype, + baseuri, + loadingOptions, + lc=_doc.get("size") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = 
parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `values`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("values") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `size`": _errors__.append( ValidationException( - "the `values` field is not valid because:", - SourceLine(_doc, "values", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("size") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `size` field is not valid because:", + SourceLine(_doc, "size", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `size` field is not valid because:", + SourceLine(_doc, "size", str), + [e], + detailed_message=f"the `size` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": _errors__.append( 
ValidationException( - "the `values` field is not valid because:", - SourceLine(_doc, "values", str), - [e], - detailed_message=f"the `values` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + contents = None + if "contents" in _doc: + try: + contents = _load_field( + _doc.get("contents"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("contents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `contents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("contents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `contents` field is not valid because:", + SourceLine(_doc, "contents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `contents` field is not valid because:", + SourceLine(_doc, "contents", str), + [e], + detailed_message=f"the `contents` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( 
ValidationException( - "invalid field `{}`, expected one of: `type`, `values`".format( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( k ), SourceLine(_doc, k, str), @@ -2277,8 +2180,17 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - type_=type_, - values=values, + location=location, + path=path, + basename=basename, + dirname=dirname, + nameroot=nameroot, + nameext=nameext, + checksum=checksum, + size=size, + secondaryFiles=secondaryFiles, + format=format, + contents=contents, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2295,13 +2207,60 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.location is not None: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + if self.path is not None: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + if self.basename is not None: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.dirname is not None: + r["dirname"] = save( + self.dirname, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.nameroot is not None: + r["nameroot"] = save( + self.nameroot, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.nameext is not None: + 
r["nameext"] = save( + self.nameext, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.checksum is not None: + r["checksum"] = save( + self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.size is not None: + r["size"] = save( + self.size, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, base_url, True, None, relative_uris) + r["format"] = u + if self.contents is not None: + r["contents"] = save( + self.contents, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.values is not None: - u = save_relative_uri(self.values, base_url, False, 2, relative_uris) - r["values"] = u # top refers to the directory level if top: @@ -2311,16 +2270,56 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["type", "values"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "class", + "location", + "path", + "basename", + "dirname", + "nameroot", + "nameext", + "checksum", + "size", + "secondaryFiles", + "format", + "contents", + ] + ) + + +class Directory(Saveable): + """ + Represents a directory to present to a command line tool. + + Directories are represented as objects with ``class`` of ``Directory``. Directory objects have a number of properties that provide metadata about the directory. + + The ``location`` property of a Directory is a URI that uniquely identifies the directory. Implementations must support the file:// URI scheme and may support other schemes such as http://. 
Alternately to ``location``, implementations must also accept the ``path`` property on Directory, which must be a filesystem path available on the same host as the CWL runner (for inputs) or the runtime environment of a command line tool execution (for command line tool outputs). + + A Directory object may have a ``listing`` field. This is a list of File and Directory objects that are contained in the Directory. For each entry in ``listing``, the ``basename`` property defines the name of the File or Subdirectory when staged to disk. If ``listing`` is not provided, the implementation must have some way of fetching the Directory listing at runtime based on the ``location`` field. + + If a Directory does not have ``location``, it is a Directory literal. A Directory literal must provide ``listing``. Directory literals must be created on disk at runtime as needed. + + The resources in a Directory literal do not need to have any implied relationship in their ``location``. For example, a Directory listing may contain two files located on different hosts. It is the responsibility of the runtime to ensure that those files are staged to disk appropriately. Secondary files associated with files in ``listing`` must also be staged to the same Directory. + When executing a CommandLineTool, Directories must be recursively staged first and have local values of ``path`` assigned. + + Directory objects in CommandLineTool output must provide either a ``location`` URI or a ``path`` property in the context of the tool execution runtime (local to the compute node, or within the executing container). + + An ExpressionTool may forward file references from input to output by using the same value for ``location``. + + Name conflicts (the same ``basename`` appearing multiple times in ``listing`` or in any entry in ``secondaryFiles`` in the listing) is a fatal error. 
+ + """ -class UnionSchema(Saveable): def __init__( self, - names: Any, - type_: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + location: Any | None = None, + path: Any | None = None, + basename: Any | None = None, + listing: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -2330,16 +2329,27 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.names = names - self.type_ = type_ + self.class_: Final[str] = "Directory" + self.location = location + self.path = path + self.basename = basename + self.listing = listing def __eq__(self, other: Any) -> bool: - if isinstance(other, UnionSchema): - return bool(self.names == other.names and self.type_ == other.type_) + if isinstance(other, Directory): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.listing == other.listing + ) return False def __hash__(self) -> int: - return hash((self.names, self.type_)) + return hash( + (self.class_, self.location, self.path, self.basename, self.listing) + ) @classmethod def fromDoc( @@ -2347,8 +2357,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "UnionSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -2356,102 +2366,211 @@ def fromDoc( _doc.lc.filename = doc.lc.filename _errors__ = [] try: - if _doc.get("names") is None: - raise ValidationException("missing required field `names`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - names = load_field( - _doc.get("names"), - 
uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, + class_ = _load_field( + _doc.get("class"), + uri_Directory_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("names") + lc=_doc.get("class") ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + raise e + location = None + if "location" in _doc: + try: + location = _load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("location") + ) - if str(e) == "missing required field `names`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("names") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `location`": _errors__.append( ValidationException( - "the `names` field is not valid because:", - SourceLine(_doc, "names", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("location") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, 
"location", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), + [e], + detailed_message=f"the `location` field with value `{val}` " + "is not valid because:", + ) + ) + path = None + if "path" in _doc: + try: + path = _load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("path") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `path`": _errors__.append( ValidationException( - "the `names` field is not valid because:", - SourceLine(_doc, "names", str), - [e], - detailed_message=f"the `names` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Union_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + val = _doc.get("path") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [e], + 
detailed_message=f"the `path` field with value `{val}` " + "is not valid because:", + ) + ) + basename = None + if "basename" in _doc: + try: + basename = _load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("basename") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `basename`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("basename") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + detailed_message=f"the `basename` field with value `{val}` " + "is not valid because:", + ) + ) + listing = None + if "listing" in _doc: + try: + listing = _load_field( + 
_doc.get("listing"), + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + lc=_doc.get("listing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `listing`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, Any] = {} + else: + val = _doc.get("listing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + detailed_message=f"the `listing` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -2459,14 +2578,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `names`, `type`".format( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( k ), SourceLine(_doc, k, str), @@ -2476,8 +2595,10 @@ def fromDoc( if _errors__: raise ValidationException("", None, 
_errors__, "*") _constructed = cls( - names=names, - type_=type_, + location=location, + path=path, + basename=basename, + listing=listing, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2494,12 +2615,29 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.names is not None: - u = save_relative_uri(self.names, base_url, False, 2, relative_uris) - r["names"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.location is not None: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + if self.path is not None: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + if self.basename is not None: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.listing is not None: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -2510,218 +2648,52 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["names", "type"]) - + attrs: ClassVar[Collection[str]] = frozenset( + ["class", "location", "path", "basename", "listing"] + ) -class CWLArraySchema(ArraySchema): - def __init__( - self, - items: Any, - type_: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if 
loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ - def __eq__(self, other: Any) -> bool: - if isinstance(other, CWLArraySchema): - return bool(self.items == other.items and self.type_ == other.type_) - return False +class SchemaBase(Saveable): + pass - def __hash__(self) -> int: - return hash((self.items, self.type_)) - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CWLArraySchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) - - items = load_field( - _doc.get("items"), - uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("items") - ) +class Parameter(SchemaBase): + """ + Define an input or output parameter to a process. 
- except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + """ - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("items") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [e], - detailed_message=f"the `items` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + pass - type_ = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) +class InputBinding(Saveable): + pass - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - 
ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - items=items, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed +class OutputBinding(Saveable): + pass - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.items is not None: - u = save_relative_uri(self.items, base_url, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris - ) +class InputSchema(SchemaBase): + pass - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - attrs = frozenset(["items", "type"]) +class OutputSchema(SchemaBase): + pass -class CWLRecordField(RecordField): +class InputRecordField(CWLRecordField): name: str def __init__( self, name: Any, type_: Any, - doc: 
Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + doc: Any | None = None, + inputBinding: Any | None = None, + label: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -2734,18 +2706,22 @@ def __init__( self.doc = doc self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) self.type_ = type_ + self.inputBinding = inputBinding + self.label = label def __eq__(self, other: Any) -> bool: - if isinstance(other, CWLRecordField): + if isinstance(other, InputRecordField): return bool( self.doc == other.doc and self.name == other.name and self.type_ == other.type_ + and self.inputBinding == other.inputBinding + and self.label == other.label ) return False def __hash__(self) -> int: - return hash((self.doc, self.name, self.type_)) + return hash((self.doc, self.name, self.type_, self.inputBinding, self.label)) @classmethod def fromDoc( @@ -2753,8 +2729,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CWLRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -2764,7 +2740,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, @@ -2820,7 +2796,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -2868,9 +2844,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - 
typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2, + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -2912,35 +2888,131 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + inputBinding = None + if "inputBinding" in _doc: + try: + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ValidationException( + str(e), + None + ) ) - extension_fields[ex] = _doc[k] else: + val = _doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, 
"inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( - k - ), - SourceLine(_doc, k, str), + str(e), + None ) ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - doc=doc, - name=name, - type_=type_, - extension_fields=extension_fields, + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `inputBinding`, 
`label`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + doc=doc, + name=name, + type_=type_, + inputBinding=inputBinding, + label=label, + extension_fields=extension_fields, loadingOptions=loadingOptions, ) loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) @@ -2968,6 +3040,17 @@ def save( r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) # top refers to the directory level if top: @@ -2977,16 +3060,22 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["doc", "name", "type", "inputBinding", "label"] + ) + +class InputRecordSchema(CWLRecordSchema, InputSchema): + name: str -class CWLRecordSchema(RecordSchema): def __init__( self, type_: Any, - fields: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + fields: Any | None = None, + label: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -2998,14 +3087,21 @@ def __init__( self.loadingOptions = LoadingOptions() self.fields = fields self.type_ = type_ + self.label = label + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) def __eq__(self, other: Any) -> bool: - if isinstance(other, CWLRecordSchema): - return bool(self.fields == other.fields and self.type_ == other.type_) + if isinstance(other, 
InputRecordSchema): + return bool( + self.fields == other.fields + and self.type_ == other.type_ + and self.label == other.label + and self.name == other.name + ) return False def __hash__(self) -> int: - return hash((self.fields, self.type_)) + return hash((self.fields, self.type_, self.label, self.name)) @classmethod def fromDoc( @@ -3013,20 +3109,76 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CWLRecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] + name = None + if "name" in _doc: + try: + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = 
cast(str, name) fields = None if "fields" in _doc: try: - fields = load_field( + fields = _load_field( _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader, + idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, baseuri, loadingOptions, lc=_doc.get("fields") @@ -3072,7 +3224,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Record_nameLoader_2, baseuri, @@ -3116,7 +3268,54 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -3124,14 +3323,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: 
- ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `name`".format( k ), SourceLine(_doc, k, str), @@ -3143,9 +3342,12 @@ def fromDoc( _constructed = cls( fields=fields, type_=type_, + label=label, + name=name, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -3159,13 +3361,20 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u if self.fields is not None: r["fields"] = save( - self.fields, top=False, base_url=base_url, relative_uris=relative_uris + self.fields, top=False, base_url=self.name, relative_uris=relative_uris ) if self.type_ is not None: r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris ) # top refers to the directory level @@ -3176,95 +3385,21 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type"]) + attrs: ClassVar[Collection[str]] = frozenset(["fields", "type", "label", "name"]) -class File(Saveable): - """ - Represents a file (or group of files when `secondaryFiles` is provided) that - will be accessible by tools using standard POSIX file system call API such as - open(2) and read(2). - - Files are represented as objects with `class` of `File`. File objects have - a number of properties that provide metadata about the file. 
- - The `location` property of a File is a URI that uniquely identifies the - file. Implementations must support the file:// URI scheme and may support - other schemes such as http://. The value of `location` may also be a - relative reference, in which case it must be resolved relative to the URI - of the document it appears in. Alternately to `location`, implementations - must also accept the `path` property on File, which must be a filesystem - path available on the same host as the CWL runner (for inputs) or the - runtime environment of a command line tool execution (for command line tool - outputs). - - If no `location` or `path` is specified, a file object must specify - `contents` with the UTF-8 text content of the file. This is a "file - literal". File literals do not correspond to external resources, but are - created on disk with `contents` with when needed for a executing a tool. - Where appropriate, expressions can return file literals to define new files - on a runtime. The maximum size of `contents` is 64 kilobytes. - - The `basename` property defines the filename on disk where the file is - staged. This may differ from the resource name. If not provided, - `basename` must be computed from the last path part of `location` and made - available to expressions. - - The `secondaryFiles` property is a list of File or Directory objects that - must be staged in the same directory as the primary file. It is an error - for file names to be duplicated in `secondaryFiles`. - - The `size` property is the size in bytes of the File. It must be computed - from the resource and made available to expressions. The `checksum` field - contains a cryptographic hash of the file content for use it verifying file - contents. Implementations may, at user option, enable or disable - computation of the `checksum` field for performance or other reasons. - However, the ability to compute output checksums is required to pass the - CWL conformance test suite. 
- - When executing a CommandLineTool, the files and secondary files may be - staged to an arbitrary directory, but must use the value of `basename` for - the filename. The `path` property must be file path in the context of the - tool execution runtime (local to the compute node, or within the executing - container). All computed properties should be available to expressions. - File literals also must be staged and `path` must be set. - - When collecting CommandLineTool outputs, `glob` matching returns file paths - (with the `path` property) and the derived properties. This can all be - modified by `outputEval`. Alternately, if the file `cwl.output.json` is - present in the output, `outputBinding` is ignored. - - File objects in the output must provide either a `location` URI or a `path` - property in the context of the tool execution runtime (local to the compute - node, or within the executing container). - - When evaluating an ExpressionTool, file objects must be referenced via - `location` (the expression tool does not have access to files on disk so - `path` is meaningless) or as file literals. It is legal to return a file - object with an existing `location` but a different `basename`. The - `loadContents` field of ExpressionTool inputs behaves the same as on - CommandLineTool inputs, however it is not meaningful on the outputs. - - An ExpressionTool may forward file references from input to output by using - the same value for `location`. 
- - """ +class InputEnumSchema(schema_salad.metaschema.EnumSchema, InputSchema): + name: str def __init__( self, - location: Optional[Any] = None, - path: Optional[Any] = None, - basename: Optional[Any] = None, - dirname: Optional[Any] = None, - nameroot: Optional[Any] = None, - nameext: Optional[Any] = None, - checksum: Optional[Any] = None, - size: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - format: Optional[Any] = None, - contents: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + symbols: Any, + type_: Any, + name: Any | None = None, + label: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -3274,53 +3409,26 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "File" - self.location = location - self.path = path - self.basename = basename - self.dirname = dirname - self.nameroot = nameroot - self.nameext = nameext - self.checksum = checksum - self.size = size - self.secondaryFiles = secondaryFiles - self.format = format - self.contents = contents + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols + self.type_ = type_ + self.label = label + self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, File): + if isinstance(other, InputEnumSchema): return bool( - self.class_ == other.class_ - and self.location == other.location - and self.path == other.path - and self.basename == other.basename - and self.dirname == other.dirname - and self.nameroot == other.nameroot - and self.nameext == other.nameext - and self.checksum == other.checksum - and self.size == other.size - and self.secondaryFiles == other.secondaryFiles - and self.format == 
other.format - and self.contents == other.contents + self.name == other.name + and self.symbols == other.symbols + and self.type_ == other.type_ + and self.label == other.label + and self.inputBinding == other.inputBinding ) return False def __hash__(self) -> int: return hash( - ( - self.class_, - self.location, - self.path, - self.basename, - self.dirname, - self.nameroot, - self.nameext, - self.checksum, - self.size, - self.secondaryFiles, - self.format, - self.contents, - ) + (self.name, self.symbols, self.type_, self.label, self.inputBinding) ) @classmethod @@ -3329,45 +3437,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "File": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_File_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - location = None - if "location" in _doc: + name = None + if "name" in _doc: try: - location = load_field( - _doc.get("location"), - uri_union_of_None_type_or_strtype_False_False_None_None, + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("location") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `location`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -3375,13 +3467,13 @@ def fromDoc( ) ) else: - val = _doc.get("location") + val 
= _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `location` field is not valid because:", - SourceLine(_doc, "location", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3393,122 +3485,133 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `location` field is not valid because:", - SourceLine(_doc, "location", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `location` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - path = None - if "path" in _doc: - try: - path = load_field( - _doc.get("path"), - uri_union_of_None_type_or_strtype_False_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("path") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) - if str(e) == "missing required field `path`": + symbols = _load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("symbols") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `symbols`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("symbols") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) 
_errors__.append( ValidationException( - str(e), - None + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("path") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `path` field is not valid because:", - SourceLine(_doc, "path", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `path` field is not valid because:", - SourceLine(_doc, "path", str), - [e], - detailed_message=f"the `path` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + detailed_message=f"the `symbols` field with value `{val}` " + "is not valid because:", ) - basename = None - if "basename" in _doc: - try: - basename = load_field( - _doc.get("basename"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("basename") - ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + type_ = _load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) - if str(e) == "missing required field `basename`": + except ValidationException as e: + error_message, 
to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("basename") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `basename` field is not valid because:", - SourceLine(_doc, "basename", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `basename` field is not valid because:", - SourceLine(_doc, "basename", str), - [e], - detailed_message=f"the `basename` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", ) - dirname = None - if "dirname" in _doc: + ) + label = None + if "label" in _doc: try: - dirname = load_field( - _doc.get("dirname"), + label = _load_field( + _doc.get("label"), union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("dirname") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, 
verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `dirname`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -3516,13 +3619,13 @@ def fromDoc( ) ) else: - val = _doc.get("dirname") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `dirname` field is not valid because:", - SourceLine(_doc, "dirname", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3534,28 +3637,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `dirname` field is not valid because:", - SourceLine(_doc, "dirname", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `dirname` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - nameroot = None - if "nameroot" in _doc: + inputBinding = None + if "inputBinding" in _doc: try: - nameroot = load_field( - _doc.get("nameroot"), - union_of_None_type_or_strtype, + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("nameroot") + lc=_doc.get("inputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `nameroot`": + if str(e) == "missing required field `inputBinding`": _errors__.append( ValidationException( str(e), @@ -3563,13 +3666,13 @@ def fromDoc( ) ) else: - val = _doc.get("nameroot") + val = _doc.get("inputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `nameroot` field is not valid because:", - 
SourceLine(_doc, "nameroot", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3581,28 +3684,256 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `nameroot` field is not valid because:", - SourceLine(_doc, "nameroot", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [e], - detailed_message=f"the `nameroot` field with value `{val}` " + detailed_message=f"the `inputBinding` field with value `{val}` " "is not valid because:", ) ) - nameext = None - if "nameext" in _doc: - try: - nameext = load_field( - _doc.get("nameext"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("nameext") + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + name=name, + symbols=symbols, + type_=type_, + label=label, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + 
else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset( + ["name", "symbols", "type", "label", "inputBinding"] + ) + + +class InputArraySchema(CWLArraySchema, InputSchema): + def __init__( + self, + items: Any, + type_: Any, + label: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type_ = type_ + self.label = label + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputArraySchema): + return bool( + self.items == other.items + and self.type_ == other.type_ + and self.label == other.label + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash((self.items, 
self.type_, self.label, self.inputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = _load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + 
except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `nameext`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -3610,13 +3941,13 @@ def fromDoc( ) ) else: - val = _doc.get("nameext") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `nameext` field is not valid because:", - SourceLine(_doc, "nameext", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3628,28 +3959,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `nameext` field is not valid because:", 
- SourceLine(_doc, "nameext", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `nameext` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - checksum = None - if "checksum" in _doc: + inputBinding = None + if "inputBinding" in _doc: try: - checksum = load_field( - _doc.get("checksum"), - union_of_None_type_or_strtype, + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("checksum") + lc=_doc.get("inputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `checksum`": + if str(e) == "missing required field `inputBinding`": _errors__.append( ValidationException( str(e), @@ -3657,13 +3988,13 @@ def fromDoc( ) ) else: - val = _doc.get("checksum") + val = _doc.get("inputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `checksum` field is not valid because:", - SourceLine(_doc, "checksum", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3675,28 +4006,157 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `checksum` field is not valid because:", - SourceLine(_doc, "checksum", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [e], - detailed_message=f"the `checksum` field with value `{val}` " + detailed_message=f"the `inputBinding` field with value `{val}` " "is not valid because:", ) ) - size = None - if "size" in _doc: + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + 
_errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + items=items, + type_=type_, + label=label, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.items is not None: + u = save_relative_uri(self.items, base_url, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset( + ["items", "type", "label", "inputBinding"] + ) + + +class OutputRecordField(CWLRecordField): + name: str + + def __init__( + self, + name: Any, + type_: Any, + doc: Any | None = None, + 
outputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.type_ = type_ + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type_ == other.type_ + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash((self.doc, self.name, self.type_, self.outputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: try: - size = load_field( - _doc.get("size"), - union_of_None_type_or_inttype, + name = _load_field( + _doc.get("name"), + uri_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("size") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `size`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -3704,13 +4164,13 @@ def fromDoc( ) ) else: - val = _doc.get("size") + val = _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `size` field is not valid because:", - SourceLine(_doc, "size", str), + "the `name` field is not valid because:", + 
SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3722,28 +4182,37 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `size` field is not valid because:", - SourceLine(_doc, "size", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `size` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - secondaryFiles = None - if "secondaryFiles" in _doc: + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + _errors__.append(ValidationException("missing name")) + if not __original_name_is_none: + baseuri = cast(str, name) + doc = None + if "doc" in _doc: try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("secondaryFiles") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `secondaryFiles`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -3751,13 +4220,13 @@ def fromDoc( ) ) else: - val = _doc.get("secondaryFiles") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3769,28 +4238,76 @@ def fromDoc( else: _errors__.append( 
ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - format = None - if "format" in _doc: + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + outputBinding = None + if "outputBinding" in _doc: try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_True_False_None_True, + outputBinding = 
_load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions, - lc=_doc.get("format") + lc=_doc.get("outputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `format`": + if str(e) == "missing required field `outputBinding`": _errors__.append( ValidationException( str(e), @@ -3798,13 +4315,13 @@ def fromDoc( ) ) else: - val = _doc.get("format") + val = _doc.get("outputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3816,61 +4333,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [e], - detailed_message=f"the `format` field with value `{val}` " + detailed_message=f"the `outputBinding` field with value `{val}` " "is not valid because:", ) ) - contents = None - if "contents" in _doc: - try: - contents = load_field( - _doc.get("contents"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("contents") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `contents`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("contents") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `contents` field is not valid because:", - 
SourceLine(_doc, "contents", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `contents` field is not valid because:", - SourceLine(_doc, "contents", str), - [e], - detailed_message=f"the `contents` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -3878,14 +4348,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `outputBinding`".format( k ), SourceLine(_doc, k, str), @@ -3895,20 +4365,14 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - location=location, - path=path, - basename=basename, - dirname=dirname, - nameroot=nameroot, - nameext=nameext, - checksum=checksum, - size=size, - secondaryFiles=secondaryFiles, - format=format, - contents=contents, + doc=doc, + name=name, + type_=type_, + outputBinding=outputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -3922,58 +4386,24 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - 
uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.location is not None: - u = save_relative_uri(self.location, base_url, False, None, relative_uris) - r["location"] = u - if self.path is not None: - u = save_relative_uri(self.path, base_url, False, None, relative_uris) - r["path"] = u - if self.basename is not None: - r["basename"] = save( - self.basename, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.dirname is not None: - r["dirname"] = save( - self.dirname, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.nameroot is not None: - r["nameroot"] = save( - self.nameroot, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.nameext is not None: - r["nameext"] = save( - self.nameext, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.checksum is not None: - r["checksum"] = save( - self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.size is not None: - r["size"] = save( - self.size, top=False, base_url=base_url, relative_uris=relative_uris + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, top=False, - base_url=base_url, + base_url=self.name, relative_uris=relative_uris, ) - if self.format is not None: - u = save_relative_uri(self.format, base_url, True, None, 
relative_uris) - r["format"] = u - if self.contents is not None: - r["contents"] = save( - self.contents, top=False, base_url=base_url, relative_uris=relative_uris - ) # top refers to the directory level if top: @@ -3983,80 +4413,19 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "class", - "location", - "path", - "basename", - "dirname", - "nameroot", - "nameext", - "checksum", - "size", - "secondaryFiles", - "format", - "contents", - ] + attrs: ClassVar[Collection[str]] = frozenset( + ["doc", "name", "type", "outputBinding"] ) -class Directory(Saveable): - """ - Represents a directory to present to a command line tool. - - Directories are represented as objects with `class` of `Directory`. Directory objects have - a number of properties that provide metadata about the directory. - - The `location` property of a Directory is a URI that uniquely identifies - the directory. Implementations must support the file:// URI scheme and may - support other schemes such as http://. Alternately to `location`, - implementations must also accept the `path` property on Directory, which - must be a filesystem path available on the same host as the CWL runner (for - inputs) or the runtime environment of a command line tool execution (for - command line tool outputs). - - A Directory object may have a `listing` field. This is a list of File and - Directory objects that are contained in the Directory. For each entry in - `listing`, the `basename` property defines the name of the File or - Subdirectory when staged to disk. If `listing` is not provided, the - implementation must have some way of fetching the Directory listing at - runtime based on the `location` field. - - If a Directory does not have `location`, it is a Directory literal. A - Directory literal must provide `listing`. Directory literals must be - created on disk at runtime as needed. 
- - The resources in a Directory literal do not need to have any implied - relationship in their `location`. For example, a Directory listing may - contain two files located on different hosts. It is the responsibility of - the runtime to ensure that those files are staged to disk appropriately. - Secondary files associated with files in `listing` must also be staged to - the same Directory. - - When executing a CommandLineTool, Directories must be recursively staged - first and have local values of `path` assigend. - - Directory objects in CommandLineTool output must provide either a - `location` URI or a `path` property in the context of the tool execution - runtime (local to the compute node, or within the executing container). - - An ExpressionTool may forward file references from input to output by using - the same value for `location`. - - Name conflicts (the same `basename` appearing multiple times in `listing` - or in any entry in `secondaryFiles` in the listing) is a fatal error. 
- - """ - +class OutputRecordSchema(CWLRecordSchema, OutputSchema): def __init__( self, - location: Optional[Any] = None, - path: Optional[Any] = None, - basename: Optional[Any] = None, - listing: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + type_: Any, + fields: Any | None = None, + label: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -4066,27 +4435,21 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "Directory" - self.location = location - self.path = path - self.basename = basename - self.listing = listing + self.fields = fields + self.type_ = type_ + self.label = label def __eq__(self, other: Any) -> bool: - if isinstance(other, Directory): + if isinstance(other, OutputRecordSchema): return bool( - self.class_ == other.class_ - and self.location == other.location - and self.path == other.path - and self.basename == other.basename - and self.listing == other.listing + self.fields == other.fields + and self.type_ == other.type_ + and self.label == other.label ) return False def __hash__(self) -> int: - return hash( - (self.class_, self.location, self.path, self.basename, self.listing) - ) + return hash((self.fields, self.type_, self.label)) @classmethod def fromDoc( @@ -4094,45 +4457,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Directory": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - 
uri_Directory_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - location = None - if "location" in _doc: + fields = None + if "fields" in _doc: try: - location = load_field( - _doc.get("location"), - uri_union_of_None_type_or_strtype_False_False_None_None, + fields = _load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, baseuri, loadingOptions, - lc=_doc.get("location") + lc=_doc.get("fields") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `location`": + if str(e) == "missing required field `fields`": _errors__.append( ValidationException( str(e), @@ -4140,13 +4487,13 @@ def fromDoc( ) ) else: - val = _doc.get("location") + val = _doc.get("fields") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `location` field is not valid because:", - SourceLine(_doc, "location", str), + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4158,122 +4505,76 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `location` field is not valid because:", - SourceLine(_doc, "location", str), + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), [e], - detailed_message=f"the `location` field with value `{val}` " + detailed_message=f"the `fields` field with value `{val}` " "is not valid because:", ) ) - path = None - if "path" in _doc: - try: - path = load_field( - _doc.get("path"), - uri_union_of_None_type_or_strtype_False_False_None_None, - baseuri, - 
loadingOptions, - lc=_doc.get("path") - ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + type_ = _load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) - if str(e) == "missing required field `path`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("path") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `path` field is not valid because:", - SourceLine(_doc, "path", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `path` field is not valid because:", - SourceLine(_doc, "path", str), - [e], - detailed_message=f"the `path` field with value `{val}` " - "is not valid because:", - ) - ) - basename = None - if "basename" in _doc: - try: - basename = load_field( - _doc.get("basename"), - 
union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("basename") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `basename`": _errors__.append( ValidationException( - str(e), - None + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", ) ) - else: - val = _doc.get("basename") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `basename` field is not valid because:", - SourceLine(_doc, "basename", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `basename` field is not valid because:", - SourceLine(_doc, "basename", str), - [e], - detailed_message=f"the `basename` field with value `{val}` " - "is not valid because:", - ) - ) - listing = None - if "listing" in _doc: + label = None + if "label" in _doc: try: - listing = load_field( - _doc.get("listing"), - union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("listing") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `listing`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -4281,13 +4582,13 @@ def fromDoc( ) ) else: - val = _doc.get("listing") + val = _doc.get("label") if error_message != str(e): val_type = 
convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `listing` field is not valid because:", - SourceLine(_doc, "listing", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4299,14 +4600,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `listing` field is not valid because:", - SourceLine(_doc, "listing", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `listing` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -4314,14 +4615,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( + "invalid field `{}`, expected one of: `fields`, `type`, `label`".format( k ), SourceLine(_doc, k, str), @@ -4331,10 +4632,9 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - location=location, - path=path, - basename=basename, - listing=listing, + fields=fields, + type_=type_, + label=label, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -4351,27 +4651,17 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - 
uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.location is not None: - u = save_relative_uri(self.location, base_url, False, None, relative_uris) - r["location"] = u - if self.path is not None: - u = save_relative_uri(self.path, base_url, False, None, relative_uris) - r["path"] = u - if self.basename is not None: - r["basename"] = save( - self.basename, top=False, base_url=base_url, relative_uris=relative_uris + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.listing is not None: - r["listing"] = save( - self.listing, top=False, base_url=base_url, relative_uris=relative_uris + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -4382,50 +4672,21 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "location", "path", "basename", "listing"]) - - -class SchemaBase(Saveable): - pass - - -class Parameter(SchemaBase): - """ - Define an input or output parameter to a process. 
- - """ - - pass - - -class InputBinding(Saveable): - pass - - -class OutputBinding(Saveable): - pass - - -class InputSchema(SchemaBase): - pass - - -class OutputSchema(SchemaBase): - pass + attrs: ClassVar[Collection[str]] = frozenset(["fields", "type", "label"]) -class InputRecordField(CWLRecordField): +class OutputEnumSchema(schema_salad.metaschema.EnumSchema, OutputSchema): name: str def __init__( self, - name: Any, + symbols: Any, type_: Any, - doc: Optional[Any] = None, - inputBinding: Optional[Any] = None, - label: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + name: Any | None = None, + label: Any | None = None, + outputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -4435,25 +4696,27 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.doc = doc self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols self.type_ = type_ - self.inputBinding = inputBinding self.label = label + self.outputBinding = outputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, InputRecordField): + if isinstance(other, OutputEnumSchema): return bool( - self.doc == other.doc - and self.name == other.name + self.name == other.name + and self.symbols == other.symbols and self.type_ == other.type_ - and self.inputBinding == other.inputBinding and self.label == other.label + and self.outputBinding == other.outputBinding ) return False def __hash__(self) -> int: - return hash((self.doc, self.name, self.type_, self.inputBinding, self.label)) + return hash( + (self.name, self.symbols, self.type_, self.label, self.outputBinding) + ) @classmethod def fromDoc( @@ -4461,8 +4724,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: 
LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -4472,9 +4735,9 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), - uri_strtype_True_False_None_None, + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") @@ -4522,72 +4785,25 @@ def fromDoc( if docRoot is not None: name = docRoot else: - _errors__.append(ValidationException("missing name")) + name = "_:" + str(_uuid__.uuid4()) if not __original_name_is_none: baseuri = cast(str, name) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("symbols") is None: + raise ValidationException("missing required field 
`symbols`", None, []) - type_ = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + symbols = _load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("symbols") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `symbols`": _errors__.append( ValidationException( str(e), @@ -4595,13 +4811,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("symbols") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4613,28 +4829,76 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `symbols` field with value `{val}` " "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + 
lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("inputBinding") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `inputBinding`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -4642,13 +4906,13 @@ def fromDoc( ) ) else: - val = _doc.get("inputBinding") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4660,28 +4924,28 @@ def fromDoc( else: 
_errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `inputBinding` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - label = None - if "label" in _doc: + outputBinding = None + if "outputBinding" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + outputBinding = _load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("outputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `outputBinding`": _errors__.append( ValidationException( str(e), @@ -4689,13 +4953,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("outputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4707,14 +4971,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `outputBinding` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: 
MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -4722,14 +4986,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `inputBinding`, `label`".format( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `outputBinding`".format( k ), SourceLine(_doc, k, str), @@ -4739,11 +5003,11 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - doc=doc, name=name, + symbols=symbols, type_=type_, - inputBinding=inputBinding, label=label, + outputBinding=outputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -4764,25 +5028,24 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris - ) + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) if self.label is not None: r["label"] = save( self.label, top=False, base_url=self.name, relative_uris=relative_uris ) + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) # top refers to the directory level if top: @@ -4792,20 +5055,20 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = 
frozenset(["doc", "name", "type", "inputBinding", "label"]) - + attrs: ClassVar[Collection[str]] = frozenset( + ["name", "symbols", "type", "label", "outputBinding"] + ) -class InputRecordSchema(CWLRecordSchema, InputSchema): - name: str +class OutputArraySchema(CWLArraySchema, OutputSchema): def __init__( self, + items: Any, type_: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + outputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -4815,23 +5078,23 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.fields = fields + self.items = items self.type_ = type_ self.label = label - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.outputBinding = outputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, InputRecordSchema): + if isinstance(other, OutputArraySchema): return bool( - self.fields == other.fields + self.items == other.items and self.type_ == other.type_ and self.label == other.label - and self.name == other.name + and self.outputBinding == other.outputBinding ) return False def __hash__(self) -> int: - return hash((self.fields, self.type_, self.label, self.name)) + return hash((self.items, self.type_, self.label, self.outputBinding)) @classmethod def fromDoc( @@ -4839,29 +5102,125 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputRecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + try: + if 
_doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = _load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -4869,13 +5228,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4887,132 +5246,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `name` field with value `{val}` " + 
detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - fields = None - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, - baseuri, - loadingOptions, - lc=_doc.get("fields") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `fields`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("fields") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - detailed_message=f"the `fields` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Record_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != 
str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - label = None - if "label" in _doc: + outputBinding = None + if "outputBinding" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + outputBinding = _load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("outputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `outputBinding`": _errors__.append( ValidationException( str(e), @@ -5020,13 +5275,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("outputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -5038,14 +5293,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `outputBinding` 
field is not valid because:", + SourceLine(_doc, "outputBinding", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `outputBinding` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -5053,14 +5308,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `name`".format( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `outputBinding`".format( k ), SourceLine(_doc, k, str), @@ -5070,14 +5325,13 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - fields=fields, + items=items, type_=type_, label=label, - name=name, + outputBinding=outputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -5091,20 +5345,23 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.fields is not None: - r["fields"] = save( - self.fields, top=False, base_url=self.name, relative_uris=relative_uris - ) + if self.items is not None: + u = save_relative_uri(self.items, base_url, False, 2, relative_uris) + r["items"] = u if self.type_ is not None: r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris + self.type_, top=False, base_url=base_url, relative_uris=relative_uris ) if self.label is not None: r["label"] = save( - self.label, top=False, 
base_url=self.name, relative_uris=relative_uris + self.label, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -5115,21 +5372,27 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type", "label", "name"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["items", "type", "label", "outputBinding"] + ) -class InputEnumSchema(EnumSchema, InputSchema): - name: str +class InputParameter(Parameter): + id: str def __init__( self, - symbols: Any, - type_: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + inputBinding: Any | None = None, + default: Any | None = None, + type_: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -5139,26 +5402,44 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.symbols = symbols - self.type_ = type_ self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format self.inputBinding = inputBinding + self.default = default + self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, InputEnumSchema): + if isinstance(other, InputParameter): return bool( - 
self.name == other.name - and self.symbols == other.symbols - and self.type_ == other.type_ - and self.label == other.label + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format and self.inputBinding == other.inputBinding + and self.default == other.default + and self.type_ == other.type_ ) return False def __hash__(self) -> int: return hash( - (self.name, self.symbols, self.type_, self.label, self.inputBinding) + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.inputBinding, + self.default, + self.type_, + ) ) @classmethod @@ -5167,29 +5448,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputEnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + id = None + if "id" in _doc: try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("id") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `id`": _errors__.append( ValidationException( str(e), @@ -5197,13 +5478,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("id") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [ValidationException(f"Value is a 
{val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -5215,133 +5496,225 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `id` field with value `{val}` " "is not valid because:", ) ) - __original_name_is_none = name is None - if name is None: + __original_id_is_none = id is None + if id is None: if docRoot is not None: - name = docRoot + id = docRoot else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - try: - if _doc.get("symbols") is None: - raise ValidationException("missing required field `symbols`", None, []) - - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("symbols") - ) + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `symbols`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("symbols") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `label`": _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field 
" - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - detailed_message=f"the `symbols` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + 
_errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, 
"streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - label = None - if "label" in _doc: + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + 
format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("format") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `format`": _errors__.append( ValidationException( str(e), @@ -5349,13 +5722,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("format") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -5367,17 +5740,17 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `format` field with value `{val}` " "is not valid because:", ) ) inputBinding = None if "inputBinding" in _doc: try: - inputBinding = load_field( + inputBinding = _load_field( _doc.get("inputBinding"), union_of_None_type_or_CommandLineBindingLoader, baseuri, @@ -5421,22 +5794,116 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + default = None + if "default" in _doc: + try: + default = _load_field( + _doc.get("default"), + union_of_None_type_or_CWLObjectTypeLoader, + baseuri, + loadingOptions, + lc=_doc.get("default") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = 
parse_errors(str(e)) + + if str(e) == "missing required field `default`": _errors__.append( - ValidationException("mapping with implicit null key") + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("default") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + detailed_message=f"the `default` field with value `{val}` " + "is not valid because:", + ) + ) + type_ = None + if "type" in _doc: + try: + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but 
valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `inputBinding`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `inputBinding`, `default`, `type`".format( k ), SourceLine(_doc, k, str), @@ -5446,15 +5913,19 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - name=name, - symbols=symbols, - type_=type_, label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, inputBinding=inputBinding, + default=default, + type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -5468,27 +5939,49 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) - r["symbols"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, 
relative_uris=relative_uris - ) + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u if self.label is not None: r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u if self.inputBinding is not None: r["inputBinding"] = save( self.inputBinding, top=False, - base_url=self.name, + base_url=self.id, relative_uris=relative_uris, ) + if self.default is not None: + r["default"] = save( + self.default, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) # top refers to the directory level if top: @@ -5498,18 +5991,35 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type", "label", "inputBinding"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "inputBinding", + "default", + "type", + ] + ) + +class OutputParameter(Parameter): + id: str -class InputArraySchema(CWLArraySchema, InputSchema): def __init__( self, - items: Any, - type_: Any, - label: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = 
None, + id: Any, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + outputBinding: Any | None = None, + format: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -5519,23 +6029,39 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ self.label = label - self.inputBinding = inputBinding + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.outputBinding = outputBinding + self.format = format def __eq__(self, other: Any) -> bool: - if isinstance(other, InputArraySchema): + if isinstance(other, OutputParameter): return bool( - self.items == other.items - and self.type_ == other.type_ - and self.label == other.label - and self.inputBinding == other.inputBinding + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.outputBinding == other.outputBinding + and self.format == other.format ) return False def __hash__(self) -> int: - return hash((self.items, self.type_, self.label, self.inputBinding)) + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.outputBinding, + self.format, + ) + ) @classmethod def fromDoc( @@ -5543,114 +6069,74 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputArraySchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("items") is None: - raise 
ValidationException("missing required field `items`", None, []) - - items = load_field( - _doc.get("items"), - uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("items") - ) + id = None + if "id" in _doc: + try: + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("items") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `id`": _errors__.append( ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [e], - detailed_message=f"the `items` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - - except ValidationException as e: - error_message, 
to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) ) - ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -5694,21 +6180,21 @@ def 
fromDoc( "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: + secondaryFiles = None + if "secondaryFiles" in _doc: try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("inputBinding") + lc=_doc.get("secondaryFiles") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `inputBinding`": + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( str(e), @@ -5716,13 +6202,13 @@ def fromDoc( ) ) else: - val = _doc.get("inputBinding") + val = _doc.get("secondaryFiles") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -5734,155 +6220,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [e], - detailed_message=f"the `inputBinding` field with value `{val}` " + detailed_message=f"the `secondaryFiles` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, 
scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - items=items, - type_=type_, - label=label, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.items is not None: - u = save_relative_uri(self.items, base_url, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "inputBinding"]) - - -class OutputRecordField(CWLRecordField): - name: str - - def __init__( - self, - name: Any, - type_: Any, - doc: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if 
extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.type_ = type_ - self.outputBinding = outputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputRecordField): - return bool( - self.doc == other.doc - and self.name == other.name - and self.type_ == other.type_ - and self.outputBinding == other.outputBinding - ) - return False - - def __hash__(self) -> int: - return hash((self.doc, self.name, self.type_, self.outputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputRecordField": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - name = None - if "name" in _doc: + streamable = None + if "streamable" in _doc: try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None_None, + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("streamable") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( str(e), @@ -5890,13 +6249,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("streamable") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), 
[ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -5908,26 +6267,17 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `streamable` field with value `{val}` " "is not valid because:", ) ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - _errors__.append(ValidationException("missing name")) - if not __original_name_is_none: - baseuri = cast(str, name) doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -5971,69 +6321,68 @@ def fromDoc( "is not valid because:", ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + outputBinding = None + if "outputBinding" in _doc: + try: + outputBinding = _load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputBinding") + ) - type_ = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - 
val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `outputBinding`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("outputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - outputBinding = None - if "outputBinding" in _doc: + else: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + detailed_message=f"the `outputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, - lc=_doc.get("outputBinding") + lc=_doc.get("format") ) except 
ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputBinding`": + if str(e) == "missing required field `format`": _errors__.append( ValidationException( str(e), @@ -6041,13 +6390,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputBinding") + val = _doc.get("format") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -6059,14 +6408,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [e], - detailed_message=f"the `outputBinding` field with value `{val}` " + detailed_message=f"the `format` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -6074,14 +6423,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `outputBinding`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`".format( k ), SourceLine(_doc, k, str), @@ -6091,14 +6440,17 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( + label=label, 
+ secondaryFiles=secondaryFiles, + streamable=streamable, doc=doc, - name=name, - type_=type_, + id=id, outputBinding=outputBinding, + format=format, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -6112,24 +6464,41 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) if self.doc is not None: r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris + self.doc, top=False, base_url=self.id, relative_uris=relative_uris ) if self.outputBinding is not None: r["outputBinding"] = save( self.outputBinding, top=False, - base_url=self.name, + base_url=self.id, relative_uris=relative_uris, ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u # top refers to the directory level if top: @@ -6139,17 +6508,50 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type", "outputBinding"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + 
"label", + "secondaryFiles", + "streamable", + "doc", + "id", + "outputBinding", + "format", + ] + ) -class OutputRecordSchema(CWLRecordSchema, OutputSchema): +class ProcessRequirement(Saveable): + """ + A process requirement declares a prerequisite that may or must be fulfilled before executing a process. See ```Process.hints`` <#process>`__ and ```Process.requirements`` <#process>`__. + + Process requirements are the primary mechanism for specifying extensions to the CWL core specification. + + """ + + pass + + +class Process(Saveable): + """ + The base executable type in CWL is the ``Process`` object defined by the document. Note that the ``Process`` object is abstract and cannot be directly executed. + + """ + + pass + + +class InlineJavascriptRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support inline Javascript expressions. If this requirement is not present, the workflow platform must not perform expression interpolatation. + + """ + def __init__( self, - type_: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + expressionLib: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -6159,21 +6561,19 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.fields = fields - self.type_ = type_ - self.label = label + self.class_: Final[str] = "InlineJavascriptRequirement" + self.expressionLib = expressionLib def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputRecordSchema): + if isinstance(other, InlineJavascriptRequirement): return bool( - self.fields == other.fields - and self.type_ == other.type_ - and self.label == other.label + self.class_ == other.class_ + and self.expressionLib == 
other.expressionLib ) return False def __hash__(self) -> int: - return hash((self.fields, self.type_, self.label)) + return hash((self.class_, self.expressionLib)) @classmethod def fromDoc( @@ -6181,29 +6581,46 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputRecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - fields = None - if "fields" in _doc: + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_InlineJavascriptRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + expressionLib = None + if "expressionLib" in _doc: try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, + expressionLib = _load_field( + _doc.get("expressionLib"), + union_of_None_type_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("fields") + lc=_doc.get("expressionLib") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `fields`": + if str(e) == "missing required field `expressionLib`": _errors__.append( ValidationException( str(e), @@ -6211,13 +6628,13 @@ def fromDoc( ) ) else: - val = _doc.get("fields") + val = _doc.get("expressionLib") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), + "the `expressionLib` field is not valid 
because:", + SourceLine(_doc, "expressionLib", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -6229,124 +6646,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), + "the `expressionLib` field is not valid because:", + SourceLine(_doc, "expressionLib", str), [e], - detailed_message=f"the `fields` field with value `{val}` " + detailed_message=f"the `expressionLib` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Record_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, 
"type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`".format( + "invalid field `{}`, expected one of: `class`, `expressionLib`".format( k ), SourceLine(_doc, k, str), @@ -6356,9 +6678,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - fields=fields, - type_=type_, - label=label, + expressionLib=expressionLib, 
extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -6375,17 +6695,22 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.fields is not None: - r["fields"] = save( - self.fields, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=base_url, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.expressionLib is not None: + r["expressionLib"] = save( + self.expressionLib, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -6396,21 +6721,20 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type", "label"]) + attrs: ClassVar[Collection[str]] = frozenset(["class", "expressionLib"]) -class OutputEnumSchema(EnumSchema, OutputSchema): - name: str +class SchemaDefRequirement(ProcessRequirement): + """ + This field consists of an array of type definitions which must be used when interpreting the ``inputs`` and ``outputs`` fields. When a ``type`` field contain a IRI, the implementation must check if the type is defined in ``schemaDefs`` and use that definition. If the type is not found in ``schemaDefs``, it is an error. The entries in ``schemaDefs`` must be processed in the order listed such that later schema definitions may refer to earlier schema definitions. 
+ + """ def __init__( self, - symbols: Any, - type_: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + types: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -6420,27 +6744,16 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.symbols = symbols - self.type_ = type_ - self.label = label - self.outputBinding = outputBinding + self.class_: Final[str] = "SchemaDefRequirement" + self.types = types def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputEnumSchema): - return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type_ == other.type_ - and self.label == other.label - and self.outputBinding == other.outputBinding - ) + if isinstance(other, SchemaDefRequirement): + return bool(self.class_ == other.class_ and self.types == other.types) return False def __hash__(self) -> int: - return hash( - (self.name, self.symbols, self.type_, self.label, self.outputBinding) - ) + return hash((self.class_, self.types)) @classmethod def fromDoc( @@ -6448,134 +6761,47 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputEnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("name") - ) - - except ValidationException as e: - error_message, to_print, 
verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `name`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("name") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [e], - detailed_message=f"the `name` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) try: - if _doc.get("symbols") is None: - raise ValidationException("missing required field `symbols`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None_None, + class_ = _load_field( + _doc.get("class"), + uri_SchemaDefRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("symbols") + lc=_doc.get("class") ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `symbols`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = 
_doc.get("symbols") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - detailed_message=f"the `symbols` field with value `{val}` " - "is not valid because:", - ) - ) + raise e try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("types") is None: + raise ValidationException("missing required field `types`", None, []) - type_ = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, + types = _load_field( + _doc.get("types"), + array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("types") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `types`": _errors__.append( ValidationException( str(e), @@ -6583,13 +6809,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("types") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `types` field is not valid because:", + SourceLine(_doc, "types", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -6601,108 +6827,14 
@@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `types` field is not valid because:", + SourceLine(_doc, "types", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `types` field with value `{val}` " "is not valid because:", ) ) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - outputBinding = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputBinding") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = 
_doc.get("outputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - detailed_message=f"the `outputBinding` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -6710,14 +6842,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `outputBinding`".format( + "invalid field `{}`, expected one of: `class`, `types`".format( k ), SourceLine(_doc, k, str), @@ -6727,15 +6859,10 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - name=name, - symbols=symbols, - type_=type_, - label=label, - outputBinding=outputBinding, + types=types, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -6749,26 +6876,19 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, 
relative_uris) - r["name"] = u - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) - r["symbols"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.outputBinding is not None: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=self.name, - relative_uris=relative_uris, + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.types is not None: + r["types"] = save( + self.types, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -6779,18 +6899,21 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type", "label", "outputBinding"]) + attrs: ClassVar[Collection[str]] = frozenset(["class", "types"]) -class OutputArraySchema(CWLArraySchema, OutputSchema): +class EnvironmentDef(Saveable): + """ + Define an environment variable that will be set in the runtime environment by the workflow platform when executing the command line tool. May be the result of executing an expression, such as getting a parameter from input. 
+ + """ + def __init__( self, - items: Any, - type_: Any, - label: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + envName: Any, + envValue: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -6800,23 +6923,18 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ - self.label = label - self.outputBinding = outputBinding + self.envName = envName + self.envValue = envValue def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputArraySchema): + if isinstance(other, EnvironmentDef): return bool( - self.items == other.items - and self.type_ == other.type_ - and self.label == other.label - and self.outputBinding == other.outputBinding + self.envName == other.envName and self.envValue == other.envValue ) return False def __hash__(self) -> int: - return hash((self.items, self.type_, self.label, self.outputBinding)) + return hash((self.envName, self.envValue)) @classmethod def fromDoc( @@ -6824,8 +6942,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputArraySchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -6833,21 +6951,21 @@ def fromDoc( _doc.lc.filename = doc.lc.filename _errors__ = [] try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) + if _doc.get("envName") is None: + raise ValidationException("missing required field `envName`", None, []) - items = load_field( - _doc.get("items"), - 
uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None, + envName = _load_field( + _doc.get("envName"), + strtype, baseuri, loadingOptions, - lc=_doc.get("items") + lc=_doc.get("envName") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `items`": + if str(e) == "missing required field `envName`": _errors__.append( ValidationException( str(e), @@ -6855,13 +6973,13 @@ def fromDoc( ) ) else: - val = _doc.get("items") + val = _doc.get("envName") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), + "the `envName` field is not valid because:", + SourceLine(_doc, "envName", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -6873,29 +6991,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), + "the `envName` field is not valid because:", + SourceLine(_doc, "envName", str), [e], - detailed_message=f"the `items` field with value `{val}` " + detailed_message=f"the `envName` field with value `{val}` " "is not valid because:", ) ) try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("envValue") is None: + raise ValidationException("missing required field `envValue`", None, []) - type_ = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, + envValue = _load_field( + _doc.get("envValue"), + union_of_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("envValue") ) 
except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `envValue`": _errors__.append( ValidationException( str(e), @@ -6903,13 +7021,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("envValue") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `envValue` field is not valid because:", + SourceLine(_doc, "envValue", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -6921,136 +7039,40 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `envValue` field is not valid because:", + SourceLine(_doc, "envValue", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `envValue` field with value `{val}` " "is not valid because:", ) ) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - str(e), - None - ) + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) + extension_fields[ex] = _doc[k] else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - 
"the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - outputBinding = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputBinding") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - detailed_message=f"the `outputBinding` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit 
null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `envName`, `envValue`".format( + k + ), + SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - items=items, - type_=type_, - label=label, - outputBinding=outputBinding, + envName=envName, + envValue=envValue, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -7067,23 +7089,13 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.items is not None: - u = save_relative_uri(self.items, base_url, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=base_url, relative_uris=relative_uris + if self.envName is not None: + r["envName"] = save( + self.envName, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.outputBinding is not None: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.envValue is not None: + r["envValue"] = save( + self.envValue, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -7094,25 +7106,44 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["items", "type", "label", "outputBinding"]) + attrs: ClassVar[Collection[str]] = frozenset(["envName", "envValue"]) -class InputParameter(Parameter): - id: str +class CommandLineBinding(InputBinding): + """ + When 
listed under ``inputBinding`` in the input schema, the term "value" refers to the the corresponding value in the input object. For binding objects listed in ``CommandLineTool.arguments``, the term "value" refers to the effective value after evaluating ``valueFrom``. + + The binding behavior when building the command line depends on the data type of the value. If there is a mismatch between the type described by the input schema and the effective value, such as resulting from an expression evaluation, an implementation must use the data type of the effective value. + + - **string**: Add ``prefix`` and the string to the command line. + + - **number**: Add ``prefix`` and decimal representation to command line. + + - **boolean**: If true, add ``prefix`` to the command line. If false, add nothing. + + - **File**: Add ``prefix`` and the value of ```File.path`` <#File>`__ to the command line. + + - **Directory**: Add ``prefix`` and the value of ```Directory.path`` <#Directory>`__ to the command line. + + - **array**: If ``itemSeparator`` is specified, add ``prefix`` and the join the array into a single string with ``itemSeparator`` separating the items. Otherwise first add ``prefix``, then recursively process individual elements. If the array is empty, it does not add anything to command line. + + - **object**: Add ``prefix`` only, and recursively add object fields for which ``inputBinding`` is specified. + + - **null**: Add nothing. 
+ + """ def __init__( self, - id: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - inputBinding: Optional[Any] = None, - default: Optional[Any] = None, - type_: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + loadContents: Any | None = None, + position: Any | None = None, + prefix: Any | None = None, + separate: Any | None = None, + itemSeparator: Any | None = None, + valueFrom: Any | None = None, + shellQuote: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -7122,43 +7153,37 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.format = format - self.inputBinding = inputBinding - self.default = default - self.type_ = type_ + self.loadContents = loadContents + self.position = position + self.prefix = prefix + self.separate = separate + self.itemSeparator = itemSeparator + self.valueFrom = valueFrom + self.shellQuote = shellQuote def __eq__(self, other: Any) -> bool: - if isinstance(other, InputParameter): + if isinstance(other, CommandLineBinding): return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.inputBinding == other.inputBinding - and self.default == other.default - and self.type_ == other.type_ + self.loadContents == other.loadContents + and self.position == other.position + and self.prefix == other.prefix 
+ and self.separate == other.separate + and self.itemSeparator == other.itemSeparator + and self.valueFrom == other.valueFrom + and self.shellQuote == other.shellQuote ) return False def __hash__(self) -> int: return hash( ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.inputBinding, - self.default, - self.type_, + self.loadContents, + self.position, + self.prefix, + self.separate, + self.itemSeparator, + self.valueFrom, + self.shellQuote, ) ) @@ -7168,29 +7193,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputParameter": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - id = None - if "id" in _doc: + loadContents = None + if "loadContents" in _doc: try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("id") + lc=_doc.get("loadContents") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `id`": + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( str(e), @@ -7198,13 +7223,13 @@ def fromDoc( ) ) else: - val = _doc.get("id") + val = _doc.get("loadContents") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7216,37 +7241,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `id` field 
is not valid because:", - SourceLine(_doc, "id", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [e], - detailed_message=f"the `id` field with value `{val}` " + detailed_message=f"the `loadContents` field with value `{val}` " "is not valid because:", ) ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - label = None - if "label" in _doc: + position = None + if "position" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + position = _load_field( + _doc.get("position"), + union_of_None_type_or_inttype, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("position") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `position`": _errors__.append( ValidationException( str(e), @@ -7254,13 +7270,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("position") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `position` field is not valid because:", + SourceLine(_doc, "position", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7272,28 +7288,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `position` field is not valid because:", + SourceLine(_doc, "position", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `position` field with value `{val}` " "is not valid because:", ) ) - 
secondaryFiles = None - if "secondaryFiles" in _doc: + prefix = None + if "prefix" in _doc: try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, + prefix = _load_field( + _doc.get("prefix"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("secondaryFiles") + lc=_doc.get("prefix") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `secondaryFiles`": + if str(e) == "missing required field `prefix`": _errors__.append( ValidationException( str(e), @@ -7301,13 +7317,13 @@ def fromDoc( ) ) else: - val = _doc.get("secondaryFiles") + val = _doc.get("prefix") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `prefix` field is not valid because:", + SourceLine(_doc, "prefix", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7319,28 +7335,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `prefix` field is not valid because:", + SourceLine(_doc, "prefix", str), [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " + detailed_message=f"the `prefix` field with value `{val}` " "is not valid because:", ) ) - streamable = None - if "streamable" in _doc: + separate = None + if "separate" in _doc: try: - streamable = load_field( - _doc.get("streamable"), + separate = _load_field( + _doc.get("separate"), union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("streamable") + lc=_doc.get("separate") ) except ValidationException as e: error_message, to_print, 
verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `streamable`": + if str(e) == "missing required field `separate`": _errors__.append( ValidationException( str(e), @@ -7348,13 +7364,13 @@ def fromDoc( ) ) else: - val = _doc.get("streamable") + val = _doc.get("separate") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `separate` field is not valid because:", + SourceLine(_doc, "separate", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7366,122 +7382,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - detailed_message=f"the `streamable` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - 
SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - format = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, - baseuri, - loadingOptions, - lc=_doc.get("format") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `format`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("format") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `separate` field is not valid because:", + SourceLine(_doc, "separate", str), [e], - detailed_message=f"the `format` field with value `{val}` " + detailed_message=f"the `separate` field with value `{val}` " "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: + itemSeparator = None + if "itemSeparator" in _doc: try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, + itemSeparator = _load_field( + _doc.get("itemSeparator"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("inputBinding") + lc=_doc.get("itemSeparator") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing 
required field `inputBinding`": + if str(e) == "missing required field `itemSeparator`": _errors__.append( ValidationException( str(e), @@ -7489,13 +7411,13 @@ def fromDoc( ) ) else: - val = _doc.get("inputBinding") + val = _doc.get("itemSeparator") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `itemSeparator` field is not valid because:", + SourceLine(_doc, "itemSeparator", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7507,28 +7429,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `itemSeparator` field is not valid because:", + SourceLine(_doc, "itemSeparator", str), [e], - detailed_message=f"the `inputBinding` field with value `{val}` " + detailed_message=f"the `itemSeparator` field with value `{val}` " "is not valid because:", ) ) - default = None - if "default" in _doc: + valueFrom = None + if "valueFrom" in _doc: try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_CWLObjectTypeLoader, + valueFrom = _load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("default") + lc=_doc.get("valueFrom") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `default`": + if str(e) == "missing required field `valueFrom`": _errors__.append( ValidationException( str(e), @@ -7536,13 +7458,13 @@ def fromDoc( ) ) else: - val = _doc.get("default") + val = _doc.get("valueFrom") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `default` field is not valid because:", 
- SourceLine(_doc, "default", str), + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7554,28 +7476,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), [e], - detailed_message=f"the `default` field with value `{val}` " + detailed_message=f"the `valueFrom` field with value `{val}` " "is not valid because:", ) ) - type_ = None - if "type" in _doc: + shellQuote = None + if "shellQuote" in _doc: try: - type_ = load_field( - _doc.get("type"), - typedsl_union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + shellQuote = _load_field( + _doc.get("shellQuote"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("shellQuote") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `shellQuote`": _errors__.append( ValidationException( str(e), @@ -7583,13 +7505,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("shellQuote") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `shellQuote` field is not valid because:", + SourceLine(_doc, "shellQuote", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7601,14 +7523,14 @@ def 
fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `shellQuote` field is not valid because:", + SourceLine(_doc, "shellQuote", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `shellQuote` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -7616,14 +7538,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `inputBinding`, `default`, `type`".format( + "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( k ), SourceLine(_doc, k, str), @@ -7633,19 +7555,16 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - inputBinding=inputBinding, - default=default, - type_=type_, + loadContents=loadContents, + position=position, + prefix=prefix, + separate=separate, + itemSeparator=itemSeparator, + valueFrom=valueFrom, + shellQuote=shellQuote, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -7659,48 +7578,45 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.label is not 
None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.id, - relative_uris=relative_uris, + if self.position is not None: + r["position"] = save( + self.position, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris + if self.prefix is not None: + r["prefix"] = save( + self.prefix, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, + if self.separate is not None: + r["separate"] = save( + self.separate, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.itemSeparator is not None: + r["itemSeparator"] = save( + self.itemSeparator, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.default is not None: - r["default"] = save( - self.default, top=False, base_url=self.id, relative_uris=relative_uris + if self.valueFrom is not None: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.id, relative_uris=relative_uris + if self.shellQuote is not None: + r["shellQuote"] = save( + self.shellQuote, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -7711,35 +7627,39 @@ def save( 
r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "inputBinding", - "default", - "type", + "loadContents", + "position", + "prefix", + "separate", + "itemSeparator", + "valueFrom", + "shellQuote", ] ) -class OutputParameter(Parameter): - id: str +class CommandOutputBinding(OutputBinding): + """ + Describes how to generate an output parameter based on the files produced by a CommandLineTool. + + The output parameter value is generated by applying these operations in the following order: + + - glob + - loadContents + - outputEval + - secondaryFiles + + """ def __init__( self, - id: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - outputBinding: Optional[Any] = None, - format: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + glob: Any | None = None, + loadContents: Any | None = None, + outputEval: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -7749,39 +7669,21 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.outputBinding = outputBinding - self.format = format + self.glob = glob + self.loadContents = loadContents + self.outputEval = outputEval def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputParameter): + if isinstance(other, CommandOutputBinding): return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and 
self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.outputBinding == other.outputBinding - and self.format == other.format + self.glob == other.glob + and self.loadContents == other.loadContents + and self.outputEval == other.outputEval ) return False def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.outputBinding, - self.format, - ) - ) + return hash((self.glob, self.loadContents, self.outputEval)) @classmethod def fromDoc( @@ -7789,29 +7691,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputParameter": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - id = None - if "id" in _doc: + glob = None + if "glob" in _doc: try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, + glob = _load_field( + _doc.get("glob"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("id") + lc=_doc.get("glob") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `id`": + if str(e) == "missing required field `glob`": _errors__.append( ValidationException( str(e), @@ -7819,13 +7721,13 @@ def fromDoc( ) ) else: - val = _doc.get("id") + val = _doc.get("glob") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `glob` field is not valid because:", + SourceLine(_doc, "glob", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7837,37 +7739,28 @@ def fromDoc( else: _errors__.append( 
ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `glob` field is not valid because:", + SourceLine(_doc, "glob", str), [e], - detailed_message=f"the `id` field with value `{val}` " + detailed_message=f"the `glob` field with value `{val}` " "is not valid because:", ) ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - label = None - if "label" in _doc: + loadContents = None + if "loadContents" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("loadContents") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( str(e), @@ -7875,13 +7768,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("loadContents") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7893,28 +7786,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the 
`loadContents` field with value `{val}` " "is not valid because:", ) ) - secondaryFiles = None - if "secondaryFiles" in _doc: + outputEval = None + if "outputEval" in _doc: try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, + outputEval = _load_field( + _doc.get("outputEval"), + union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("secondaryFiles") + lc=_doc.get("outputEval") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `secondaryFiles`": + if str(e) == "missing required field `outputEval`": _errors__.append( ValidationException( str(e), @@ -7922,13 +7815,13 @@ def fromDoc( ) ) else: - val = _doc.get("secondaryFiles") + val = _doc.get("outputEval") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `outputEval` field is not valid because:", + SourceLine(_doc, "outputEval", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7940,238 +7833,45 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `outputEval` field is not valid because:", + SourceLine(_doc, "outputEval", str), [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " + detailed_message=f"the `outputEval` field with value `{val}` " "is not valid because:", ) ) - streamable = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("streamable") - ) - - except ValidationException 
as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `streamable`": + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - str(e), - None - ) + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) + extension_fields[ex] = _doc[k] else: - val = _doc.get("streamable") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - detailed_message=f"the `streamable` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": _errors__.append( ValidationException( - str(e), - None + "invalid field `{}`, expected one of: `glob`, `loadContents`, `outputEval`".format( + k + ), + SourceLine(_doc, k, str), ) ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, 
"doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - outputBinding = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputBinding") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - detailed_message=f"the `outputBinding` field with value `{val}` " - "is not valid because:", - ) - ) - format = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, - baseuri, - loadingOptions, - lc=_doc.get("format") - ) - - except 
ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `format`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("format") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [e], - detailed_message=f"the `format` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - outputBinding=outputBinding, - format=format, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + 
glob=glob, + loadContents=loadContents, + outputEval=outputEval, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True @@ -8184,41 +7884,24 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.id, - relative_uris=relative_uris, + if self.glob is not None: + r["glob"] = save( + self.glob, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputBinding is not None: - r["outputBinding"] = save( - self.outputBinding, + if self.outputEval is not None: + r["outputEval"] = save( + self.outputEval, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u # top refers to the directory level if top: @@ -8228,58 +7911,21 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "outputBinding", - "format", - ] - ) - - -class ProcessRequirement(Saveable): - """ - A process requirement declares a prerequisite that may or must be fulfilled - before 
executing a process. See [`Process.hints`](#process) and - [`Process.requirements`](#process). - - Process requirements are the primary mechanism for specifying extensions to - the CWL core specification. - - """ - - pass - - -class Process(Saveable): - """ - - The base executable type in CWL is the `Process` object defined by the - document. Note that the `Process` object is abstract and cannot be - directly executed. - - """ - - pass - + attrs: ClassVar[Collection[str]] = frozenset(["glob", "loadContents", "outputEval"]) -class InlineJavascriptRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support inline Javascript expressions. - If this requirement is not present, the workflow platform must not perform expression - interpolatation. - """ +class CommandInputRecordField(InputRecordField): + name: str def __init__( self, - expressionLib: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + name: Any, + type_: Any, + doc: Any | None = None, + inputBinding: Any | None = None, + label: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -8289,19 +7935,25 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "InlineJavascriptRequirement" - self.expressionLib = expressionLib + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.type_ = type_ + self.inputBinding = inputBinding + self.label = label def __eq__(self, other: Any) -> bool: - if isinstance(other, InlineJavascriptRequirement): + if isinstance(other, CommandInputRecordField): return bool( - self.class_ == other.class_ - and self.expressionLib == other.expressionLib + self.doc == other.doc + and self.name == other.name + and self.type_ == other.type_ + 
and self.inputBinding == other.inputBinding + and self.label == other.label ) return False def __hash__(self) -> int: - return hash((self.class_, self.expressionLib)) + return hash((self.doc, self.name, self.type_, self.inputBinding, self.label)) @classmethod def fromDoc( @@ -8309,45 +7961,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InlineJavascriptRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_InlineJavascriptRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - expressionLib = None - if "expressionLib" in _doc: + name = None + if "name" in _doc: try: - expressionLib = load_field( - _doc.get("expressionLib"), - union_of_None_type_or_array_of_strtype, + name = _load_field( + _doc.get("name"), + uri_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("expressionLib") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `expressionLib`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -8355,13 +7991,13 @@ def fromDoc( ) ) else: - val = _doc.get("expressionLib") + val = _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `expressionLib` field is not valid because:", - SourceLine(_doc, "expressionLib", str), + "the `name` field 
is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8373,31 +8009,229 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `expressionLib` field is not valid because:", - SourceLine(_doc, "expressionLib", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `expressionLib` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `expressionLib`".format( - k - ), + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + _errors__.append(ValidationException("missing name")) + if not __original_name_is_none: + baseuri = cast(str, name) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this 
field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + inputBinding = None + if "inputBinding" in _doc: + try: + inputBinding = _load_field( + _doc.get("inputBinding"), + 
union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + 
f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `inputBinding`, `label`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -8405,10 +8239,15 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - expressionLib=expressionLib, + doc=doc, + name=name, + type_=type_, + inputBinding=inputBinding, + label=label, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -8422,21 +8261,28 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.expressionLib is not None: - r["expressionLib"] = save( - self.expressionLib, + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, 
base_url=self.name, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, top=False, - base_url=base_url, + base_url=self.name, relative_uris=relative_uris, ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) # top refers to the directory level if top: @@ -8446,26 +8292,22 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "expressionLib"]) - + attrs: ClassVar[Collection[str]] = frozenset( + ["doc", "name", "type", "inputBinding", "label"] + ) -class SchemaDefRequirement(ProcessRequirement): - """ - This field consists of an array of type definitions which must be used when - interpreting the `inputs` and `outputs` fields. When a `type` field - contain a IRI, the implementation must check if the type is defined in - `schemaDefs` and use that definition. If the type is not found in - `schemaDefs`, it is an error. The entries in `schemaDefs` must be - processed in the order listed such that later schema definitions may refer - to earlier schema definitions. 
- """ +class CommandInputRecordSchema(InputRecordSchema): + name: str def __init__( self, - types: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + type_: Any, + fields: Any | None = None, + label: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -8475,16 +8317,23 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "SchemaDefRequirement" - self.types = types + self.fields = fields + self.type_ = type_ + self.label = label + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) def __eq__(self, other: Any) -> bool: - if isinstance(other, SchemaDefRequirement): - return bool(self.class_ == other.class_ and self.types == other.types) + if isinstance(other, CommandInputRecordSchema): + return bool( + self.fields == other.fields + and self.type_ == other.type_ + and self.label == other.label + and self.name == other.name + ) return False def __hash__(self) -> int: - return hash((self.class_, self.types)) + return hash((self.fields, self.type_, self.label, self.name)) @classmethod def fromDoc( @@ -8492,46 +8341,133 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SchemaDefRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + name = None + if "name" in _doc: + try: + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) - class_ = load_field( - _doc.get("class"), - 
uri_SchemaDefRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + fields = None + if "fields" in _doc: + try: + fields = _load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, + baseuri, + loadingOptions, + lc=_doc.get("fields") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `fields`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("fields") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + detailed_message=f"the `fields` field with value `{val}` " + "is not valid because:", + ) + ) try: - if _doc.get("types") is None: - raise ValidationException("missing required field `types`", None, []) + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - types = load_field( - _doc.get("types"), - array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader, + type_ = _load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, baseuri, loadingOptions, - lc=_doc.get("types") + lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `types`": + if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), @@ -8539,13 +8475,13 @@ def fromDoc( ) ) else: - val = _doc.get("types") + val = _doc.get("type") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `types` field is not valid because:", - SourceLine(_doc, "types", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8557,14 +8493,61 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `types` 
field is not valid because:", - SourceLine(_doc, "types", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [e], - detailed_message=f"the `types` field with value `{val}` " + detailed_message=f"the `type` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -8572,14 +8555,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, 
`name`".format( k ), SourceLine(_doc, k, str), @@ -8589,10 +8572,14 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - types=types, + fields=fields, + type_=type_, + label=label, + name=name, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -8606,17 +8593,20 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.types is not None: - r["types"] = save( - self.types, top=False, base_url=base_url, relative_uris=relative_uris + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris ) # top refers to the directory level @@ -8627,23 +8617,21 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "types"]) - + attrs: ClassVar[Collection[str]] = frozenset(["fields", "type", "label", "name"]) -class EnvironmentDef(Saveable): - """ - Define an environment variable that will be set in the runtime environment - by the workflow platform when executing the command line tool. May be the - result of executing an expression, such as getting a parameter from input. 
- """ +class CommandInputEnumSchema(InputEnumSchema): + name: str def __init__( self, - envName: Any, - envValue: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + symbols: Any, + type_: Any, + name: Any | None = None, + label: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -8653,18 +8641,27 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.envName = envName - self.envValue = envValue + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols + self.type_ = type_ + self.label = label + self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, EnvironmentDef): + if isinstance(other, CommandInputEnumSchema): return bool( - self.envName == other.envName and self.envValue == other.envValue + self.name == other.name + and self.symbols == other.symbols + and self.type_ == other.type_ + and self.label == other.label + and self.inputBinding == other.inputBinding ) return False def __hash__(self) -> int: - return hash((self.envName, self.envValue)) + return hash( + (self.name, self.symbols, self.type_, self.label, self.inputBinding) + ) @classmethod def fromDoc( @@ -8672,30 +8669,86 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "EnvironmentDef": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("envName") is None: - raise ValidationException("missing required field `envName`", None, []) + name = None + if "name" in _doc: + try: + name = _load_field( + _doc.get("name"), + 
uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) - envName = load_field( - _doc.get("envName"), - strtype, + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) + + symbols = _load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("envName") + lc=_doc.get("symbols") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `envName`": + if str(e) == "missing required field `symbols`": _errors__.append( ValidationException( str(e), @@ -8703,13 +8756,13 @@ def fromDoc( ) ) else: - val = _doc.get("envName") + val = _doc.get("symbols") if error_message != str(e): 
val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `envName` field is not valid because:", - SourceLine(_doc, "envName", str), + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8721,29 +8774,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `envName` field is not valid because:", - SourceLine(_doc, "envName", str), + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), [e], - detailed_message=f"the `envName` field with value `{val}` " + detailed_message=f"the `symbols` field with value `{val}` " "is not valid because:", ) ) try: - if _doc.get("envValue") is None: - raise ValidationException("missing required field `envValue`", None, []) + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - envValue = load_field( - _doc.get("envValue"), - union_of_strtype_or_ExpressionLoader, + type_ = _load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, baseuri, loadingOptions, - lc=_doc.get("envValue") + lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `envValue`": + if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), @@ -8751,13 +8804,13 @@ def fromDoc( ) ) else: - val = _doc.get("envValue") + val = _doc.get("type") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `envValue` field is not valid because:", - SourceLine(_doc, "envValue", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8769,14 
+8822,108 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `envValue` field is not valid because:", - SourceLine(_doc, "envValue", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [e], - detailed_message=f"the `envValue` field with value `{val}` " + detailed_message=f"the `type` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + inputBinding = None + if "inputBinding" in _doc: + try: + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": + _errors__.append( + ValidationException( + 
str(e), + None + ) + ) + else: + val = _doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -8784,14 +8931,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `inputBinding`".format( k ), SourceLine(_doc, k, str), @@ -8801,11 +8948,15 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - envName=envName, - envValue=envValue, + name=name, + symbols=symbols, + type_=type_, + label=label, + inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -8819,13 +8970,26 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.envName is not None: - r["envName"] = save( - self.envName, 
top=False, base_url=base_url, relative_uris=relative_uris + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.envValue is not None: - r["envValue"] = save( - self.envValue, top=False, base_url=base_url, relative_uris=relative_uris + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.name, + relative_uris=relative_uris, ) # top refers to the directory level @@ -8836,60 +9000,20 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["envName", "envValue"]) - - -class CommandLineBinding(InputBinding): - """ - - When listed under `inputBinding` in the input schema, the term - "value" refers to the the corresponding value in the input object. For - binding objects listed in `CommandLineTool.arguments`, the term "value" - refers to the effective value after evaluating `valueFrom`. - - The binding behavior when building the command line depends on the data - type of the value. If there is a mismatch between the type described by - the input schema and the effective value, such as resulting from an - expression evaluation, an implementation must use the data type of the - effective value. - - - **string**: Add `prefix` and the string to the command line. - - - **number**: Add `prefix` and decimal representation to command line. - - - **boolean**: If true, add `prefix` to the command line. If false, add - nothing. - - - **File**: Add `prefix` and the value of - [`File.path`](#File) to the command line. 
- - - **Directory**: Add `prefix` and the value of - [`Directory.path`](#Directory) to the command line. - - - **array**: If `itemSeparator` is specified, add `prefix` and the join - the array into a single string with `itemSeparator` separating the - items. Otherwise first add `prefix`, then recursively process - individual elements. - If the array is empty, it does not add anything to command line. - - - **object**: Add `prefix` only, and recursively add object fields for - which `inputBinding` is specified. - - - **null**: Add nothing. + attrs: ClassVar[Collection[str]] = frozenset( + ["name", "symbols", "type", "label", "inputBinding"] + ) - """ +class CommandInputArraySchema(InputArraySchema): def __init__( self, - loadContents: Optional[Any] = None, - position: Optional[Any] = None, - prefix: Optional[Any] = None, - separate: Optional[Any] = None, - itemSeparator: Optional[Any] = None, - valueFrom: Optional[Any] = None, - shellQuote: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + items: Any, + type_: Any, + label: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -8899,39 +9023,23 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.loadContents = loadContents - self.position = position - self.prefix = prefix - self.separate = separate - self.itemSeparator = itemSeparator - self.valueFrom = valueFrom - self.shellQuote = shellQuote + self.items = items + self.type_ = type_ + self.label = label + self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandLineBinding): + if isinstance(other, CommandInputArraySchema): return bool( - self.loadContents == other.loadContents - and self.position == 
other.position - and self.prefix == other.prefix - and self.separate == other.separate - and self.itemSeparator == other.itemSeparator - and self.valueFrom == other.valueFrom - and self.shellQuote == other.shellQuote + self.items == other.items + and self.type_ == other.type_ + and self.label == other.label + and self.inputBinding == other.inputBinding ) return False def __hash__(self) -> int: - return hash( - ( - self.loadContents, - self.position, - self.prefix, - self.separate, - self.itemSeparator, - self.valueFrom, - self.shellQuote, - ) - ) + return hash((self.items, self.type_, self.label, self.inputBinding)) @classmethod def fromDoc( @@ -8939,123 +9047,125 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandLineBinding": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - loadContents = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("loadContents") - ) + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + items = _load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) - if str(e) == "missing required field `loadContents`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": 
+ _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("loadContents") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - detailed_message=f"the `loadContents` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", ) - position = None - if "position" in _doc: - try: - position = load_field( - _doc.get("position"), - union_of_None_type_or_inttype, - baseuri, - loadingOptions, - lc=_doc.get("position") - ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + type_ = 
_load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) - if str(e) == "missing required field `position`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("position") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `position` field is not valid because:", - SourceLine(_doc, "position", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `position` field is not valid because:", - SourceLine(_doc, "position", str), - [e], - detailed_message=f"the `position` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", ) - prefix = None - if "prefix" in _doc: + ) + label = None + if "label" in _doc: try: - prefix = load_field( - 
_doc.get("prefix"), + label = _load_field( + _doc.get("label"), union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("prefix") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `prefix`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -9063,13 +9173,13 @@ def fromDoc( ) ) else: - val = _doc.get("prefix") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `prefix` field is not valid because:", - SourceLine(_doc, "prefix", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9081,28 +9191,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `prefix` field is not valid because:", - SourceLine(_doc, "prefix", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `prefix` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - separate = None - if "separate" in _doc: + inputBinding = None + if "inputBinding" in _doc: try: - separate = load_field( - _doc.get("separate"), - union_of_None_type_or_booltype, + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("separate") + lc=_doc.get("inputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `separate`": + if str(e) == "missing required field `inputBinding`": _errors__.append( ValidationException( str(e), @@ -9110,13 +9220,13 @@ def fromDoc( ) ) else: - val = 
_doc.get("separate") + val = _doc.get("inputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `separate` field is not valid because:", - SourceLine(_doc, "separate", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9128,170 +9238,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `separate` field is not valid because:", - SourceLine(_doc, "separate", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [e], - detailed_message=f"the `separate` field with value `{val}` " + detailed_message=f"the `inputBinding` field with value `{val}` " "is not valid because:", ) ) - itemSeparator = None - if "itemSeparator" in _doc: - try: - itemSeparator = load_field( - _doc.get("itemSeparator"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("itemSeparator") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `itemSeparator`": + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - str(e), - None - ) + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) - else: - val = _doc.get("itemSeparator") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `itemSeparator` field is not valid because:", - SourceLine(_doc, "itemSeparator", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - 
detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `itemSeparator` field is not valid because:", - SourceLine(_doc, "itemSeparator", str), - [e], - detailed_message=f"the `itemSeparator` field with value `{val}` " - "is not valid because:", - ) - ) - valueFrom = None - if "valueFrom" in _doc: - try: - valueFrom = load_field( - _doc.get("valueFrom"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("valueFrom") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `valueFrom`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("valueFrom") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `valueFrom` field is not valid because:", - SourceLine(_doc, "valueFrom", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `valueFrom` field is not valid because:", - SourceLine(_doc, "valueFrom", str), - [e], - detailed_message=f"the `valueFrom` field with value `{val}` " - "is not valid because:", - ) - ) - shellQuote = None - if "shellQuote" in _doc: - try: - shellQuote = load_field( - _doc.get("shellQuote"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("shellQuote") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `shellQuote`": - _errors__.append( - 
ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("shellQuote") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `shellQuote` field is not valid because:", - SourceLine(_doc, "shellQuote", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `shellQuote` field is not valid because:", - SourceLine(_doc, "shellQuote", str), - [e], - detailed_message=f"the `shellQuote` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] + extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `inputBinding`".format( k ), SourceLine(_doc, k, str), @@ -9301,13 +9270,10 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - loadContents=loadContents, - position=position, - prefix=prefix, - separate=separate, - itemSeparator=itemSeparator, - valueFrom=valueFrom, - shellQuote=shellQuote, + items=items, + type_=type_, + label=label, + inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -9324,42 +9290,20 @@ def save( else: for ef in self.extension_fields: r[ef] = 
self.extension_fields[ef] - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.position is not None: - r["position"] = save( - self.position, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.prefix is not None: - r["prefix"] = save( - self.prefix, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.separate is not None: - r["separate"] = save( - self.separate, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.itemSeparator is not None: - r["itemSeparator"] = save( - self.itemSeparator, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.items is not None: + u = save_relative_uri(self.items, base_url, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.valueFrom is not None: - r["valueFrom"] = save( - self.valueFrom, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.shellQuote is not None: - r["shellQuote"] = save( - self.shellQuote, + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, top=False, base_url=base_url, relative_uris=relative_uris, @@ -9373,41 +9317,22 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "loadContents", - "position", - "prefix", - "separate", - "itemSeparator", - "valueFrom", - "shellQuote", - ] + attrs: ClassVar[Collection[str]] = frozenset( + ["items", "type", "label", "inputBinding"] ) -class CommandOutputBinding(OutputBinding): - """ - Describes how to generate an output parameter based on the files produced - by a CommandLineTool. 
- - The output parameter value is generated by applying these operations in the - following order: - - - glob - - loadContents - - outputEval - - secondaryFiles - - """ +class CommandOutputRecordField(OutputRecordField): + name: str def __init__( self, - glob: Optional[Any] = None, - loadContents: Optional[Any] = None, - outputEval: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + name: Any, + type_: Any, + doc: Any | None = None, + outputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -9417,21 +9342,23 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.glob = glob - self.loadContents = loadContents - self.outputEval = outputEval + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.type_ = type_ + self.outputBinding = outputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputBinding): + if isinstance(other, CommandOutputRecordField): return bool( - self.glob == other.glob - and self.loadContents == other.loadContents - and self.outputEval == other.outputEval + self.doc == other.doc + and self.name == other.name + and self.type_ == other.type_ + and self.outputBinding == other.outputBinding ) return False def __hash__(self) -> int: - return hash((self.glob, self.loadContents, self.outputEval)) + return hash((self.doc, self.name, self.type_, self.outputBinding)) @classmethod def fromDoc( @@ -9439,29 +9366,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputBinding": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - 
glob = None - if "glob" in _doc: + name = None + if "name" in _doc: try: - glob = load_field( - _doc.get("glob"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, + name = _load_field( + _doc.get("name"), + uri_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("glob") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `glob`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -9469,13 +9396,13 @@ def fromDoc( ) ) else: - val = _doc.get("glob") + val = _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `glob` field is not valid because:", - SourceLine(_doc, "glob", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9487,28 +9414,37 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `glob` field is not valid because:", - SourceLine(_doc, "glob", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `glob` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - loadContents = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("loadContents") - ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + _errors__.append(ValidationException("missing name")) + if not __original_name_is_none: + baseuri = cast(str, name) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + 
union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadContents`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -9516,13 +9452,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadContents") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9534,28 +9470,76 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `loadContents` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - outputEval = None - if "outputEval" in _doc: + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + 
ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + outputBinding = None + if "outputBinding" in _doc: try: - outputEval = load_field( - _doc.get("outputEval"), - union_of_None_type_or_strtype_or_ExpressionLoader, + outputBinding = _load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions, - lc=_doc.get("outputEval") + lc=_doc.get("outputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputEval`": + if str(e) == "missing required field `outputBinding`": _errors__.append( ValidationException( str(e), @@ -9563,13 +9547,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputEval") + val = _doc.get("outputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `outputEval` field is not valid because:", - SourceLine(_doc, "outputEval", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9581,14 +9565,14 @@ def fromDoc( else: _errors__.append( 
ValidationException( - "the `outputEval` field is not valid because:", - SourceLine(_doc, "outputEval", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [e], - detailed_message=f"the `outputEval` field with value `{val}` " + detailed_message=f"the `outputBinding` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -9596,14 +9580,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `glob`, `loadContents`, `outputEval`".format( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `outputBinding`".format( k ), SourceLine(_doc, k, str), @@ -9613,12 +9597,14 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - glob=glob, - loadContents=loadContents, - outputEval=outputEval, + doc=doc, + name=name, + type_=type_, + outputBinding=outputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -9632,22 +9618,22 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.glob is not None: - r["glob"] = save( - self.glob, top=False, base_url=base_url, relative_uris=relative_uris + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - 
base_url=base_url, - relative_uris=relative_uris, + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.outputEval is not None: - r["outputEval"] = save( - self.outputEval, + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, top=False, - base_url=base_url, + base_url=self.name, relative_uris=relative_uris, ) @@ -9659,21 +9645,22 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["glob", "loadContents", "outputEval"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["doc", "name", "type", "outputBinding"] + ) -class CommandInputRecordField(InputRecordField): +class CommandOutputRecordSchema(OutputRecordSchema): name: str def __init__( self, - name: Any, type_: Any, - doc: Optional[Any] = None, - inputBinding: Optional[Any] = None, - label: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + fields: Any | None = None, + label: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -9683,25 +9670,23 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.fields = fields self.type_ = type_ - self.inputBinding = inputBinding self.label = label + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputRecordField): + if isinstance(other, CommandOutputRecordSchema): return bool( - self.doc == other.doc - and self.name == other.name + self.fields == other.fields and self.type_ == other.type_ - and self.inputBinding == other.inputBinding and self.label == 
other.label + and self.name == other.name ) return False def __hash__(self) -> int: - return hash((self.doc, self.name, self.type_, self.inputBinding, self.label)) + return hash((self.fields, self.type_, self.label, self.name)) @classmethod def fromDoc( @@ -9709,8 +9694,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -9720,9 +9705,9 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), - uri_strtype_True_False_None_None, + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") @@ -9770,24 +9755,24 @@ def fromDoc( if docRoot is not None: name = docRoot else: - _errors__.append(ValidationException("missing name")) + name = "_:" + str(_uuid__.uuid4()) if not __original_name_is_none: baseuri = cast(str, name) - doc = None - if "doc" in _doc: + fields = None + if "fields" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + fields = _load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("fields") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `fields`": _errors__.append( ValidationException( str(e), @@ -9795,13 +9780,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("fields") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), 
[ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9813,10 +9798,10 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `fields` field with value `{val}` " "is not valid because:", ) ) @@ -9824,9 +9809,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + typedsl_Record_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -9868,57 +9853,10 @@ def fromDoc( "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputBinding") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - 
detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - detailed_message=f"the `inputBinding` field with value `{val}` " - "is not valid because:", - ) - ) label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -9962,7 +9900,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -9970,14 +9908,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `inputBinding`, `label`".format( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `name`".format( k ), SourceLine(_doc, k, str), @@ -9987,11 +9925,10 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - doc=doc, - name=name, + fields=fields, type_=type_, - inputBinding=inputBinding, label=label, + name=name, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -10012,21 +9949,14 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=self.name, relative_uris=relative_uris ) if self.type_ is not None: r["type"] = save( self.type_, top=False, 
base_url=self.name, relative_uris=relative_uris ) - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) if self.label is not None: r["label"] = save( self.label, top=False, base_url=self.name, relative_uris=relative_uris @@ -10040,20 +9970,21 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type", "inputBinding", "label"]) + attrs: ClassVar[Collection[str]] = frozenset(["fields", "type", "label", "name"]) -class CommandInputRecordSchema(InputRecordSchema): +class CommandOutputEnumSchema(OutputEnumSchema): name: str def __init__( self, + symbols: Any, type_: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + name: Any | None = None, + label: Any | None = None, + outputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -10063,23 +9994,27 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.fields = fields + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols self.type_ = type_ self.label = label - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.outputBinding = outputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputRecordSchema): + if isinstance(other, CommandOutputEnumSchema): return bool( - self.fields == other.fields + self.name == other.name + and self.symbols == other.symbols and self.type_ == other.type_ and self.label == other.label - and self.name == other.name + and self.outputBinding == other.outputBinding ) return False def __hash__(self) -> 
int: - return hash((self.fields, self.type_, self.label, self.name)) + return hash( + (self.name, self.symbols, self.type_, self.label, self.outputBinding) + ) @classmethod def fromDoc( @@ -10087,8 +10022,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputRecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -10098,7 +10033,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -10151,60 +10086,61 @@ def fromDoc( name = "_:" + str(_uuid__.uuid4()) if not __original_name_is_none: baseuri = cast(str, name) - fields = None - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, - baseuri, - loadingOptions, - lc=_doc.get("fields") - ) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + symbols = _load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("symbols") + ) - if str(e) == "missing required field `fields`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `symbols`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("symbols") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} 
{error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("fields") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - detailed_message=f"the `fields` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + detailed_message=f"the `symbols` field with value `{val}` " + "is not valid because:", ) + ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_Record_nameLoader_2, + typedsl_Enum_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -10249,7 +10185,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -10293,7 +10229,54 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + outputBinding = None + if "outputBinding" in _doc: + try: + outputBinding = _load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = 
parse_errors(str(e)) + + if str(e) == "missing required field `outputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + detailed_message=f"the `outputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -10301,14 +10284,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `name`".format( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `outputBinding`".format( k ), SourceLine(_doc, k, str), @@ -10318,10 +10301,11 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - fields=fields, + name=name, + symbols=symbols, type_=type_, label=label, - name=name, + outputBinding=outputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -10342,10 +10326,9 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u 
- if self.fields is not None: - r["fields"] = save( - self.fields, top=False, base_url=self.name, relative_uris=relative_uris - ) + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris @@ -10354,6 +10337,13 @@ def save( r["label"] = save( self.label, top=False, base_url=self.name, relative_uris=relative_uris ) + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) # top refers to the directory level if top: @@ -10363,21 +10353,20 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type", "label", "name"]) - + attrs: ClassVar[Collection[str]] = frozenset( + ["name", "symbols", "type", "label", "outputBinding"] + ) -class CommandInputEnumSchema(InputEnumSchema): - name: str +class CommandOutputArraySchema(OutputArraySchema): def __init__( self, - symbols: Any, + items: Any, type_: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + outputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -10387,27 +10376,23 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.symbols = symbols + self.items = items self.type_ = type_ self.label = label - self.inputBinding = inputBinding + self.outputBinding = outputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, 
CommandInputEnumSchema): + if isinstance(other, CommandOutputArraySchema): return bool( - self.name == other.name - and self.symbols == other.symbols + self.items == other.items and self.type_ == other.type_ and self.label == other.label - and self.inputBinding == other.inputBinding + and self.outputBinding == other.outputBinding ) return False def __hash__(self) -> int: - return hash( - (self.name, self.symbols, self.type_, self.label, self.inputBinding) - ) + return hash((self.items, self.type_, self.label, self.outputBinding)) @classmethod def fromDoc( @@ -10415,86 +10400,30 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputEnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("name") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `name`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("name") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [e], - detailed_message=f"the `name` field with value `{val}` 
" - "is not valid because:", - ) - ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) try: - if _doc.get("symbols") is None: - raise ValidationException("missing required field `symbols`", None, []) + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None_None, + items = _load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None, baseuri, loadingOptions, - lc=_doc.get("symbols") + lc=_doc.get("items") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `symbols`": + if str(e) == "missing required field `items`": _errors__.append( ValidationException( str(e), @@ -10502,13 +10431,13 @@ def fromDoc( ) ) else: - val = _doc.get("symbols") + val = _doc.get("items") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -10520,10 +10449,10 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), [e], - 
detailed_message=f"the `symbols` field with value `{val}` " + detailed_message=f"the `items` field with value `{val}` " "is not valid because:", ) ) @@ -10531,9 +10460,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_Enum_nameLoader_2, + typedsl_Array_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -10578,7 +10507,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -10622,21 +10551,21 @@ def fromDoc( "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: + outputBinding = None + if "outputBinding" in _doc: try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, + outputBinding = _load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions, - lc=_doc.get("inputBinding") + lc=_doc.get("outputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `inputBinding`": + if str(e) == "missing required field `outputBinding`": _errors__.append( ValidationException( str(e), @@ -10644,13 +10573,13 @@ def fromDoc( ) ) else: - val = _doc.get("inputBinding") + val = _doc.get("outputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -10662,14 +10591,14 @@ def fromDoc( else: _errors__.append( ValidationException( - 
"the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [e], - detailed_message=f"the `inputBinding` field with value `{val}` " + detailed_message=f"the `outputBinding` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -10677,14 +10606,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `inputBinding`".format( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `outputBinding`".format( k ), SourceLine(_doc, k, str), @@ -10694,15 +10623,13 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - name=name, - symbols=symbols, + items=items, type_=type_, label=label, - inputBinding=inputBinding, + outputBinding=outputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -10716,25 +10643,22 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) - r["symbols"] = u + if self.items is not None: + u = save_relative_uri(self.items, base_url, False, 2, relative_uris) + r["items"] = u if self.type_ is not None: r["type"] = save( - self.type_, top=False, base_url=self.name, 
relative_uris=relative_uris + self.type_, top=False, base_url=base_url, relative_uris=relative_uris ) if self.label is not None: r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris + self.label, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, top=False, - base_url=self.name, + base_url=base_url, relative_uris=relative_uris, ) @@ -10746,18 +10670,32 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type", "label", "inputBinding"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["items", "type", "label", "outputBinding"] + ) + + +class CommandInputParameter(InputParameter): + """ + An input parameter for a CommandLineTool. + """ + + id: str -class CommandInputArraySchema(InputArraySchema): def __init__( self, - items: Any, - type_: Any, - label: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + inputBinding: Any | None = None, + default: Any | None = None, + type_: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -10767,23 +10705,45 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format 
self.inputBinding = inputBinding + self.default = default + self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputArraySchema): + if isinstance(other, CommandInputParameter): return bool( - self.items == other.items - and self.type_ == other.type_ - and self.label == other.label + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format and self.inputBinding == other.inputBinding + and self.default == other.default + and self.type_ == other.type_ ) return False def __hash__(self) -> int: - return hash((self.items, self.type_, self.label, self.inputBinding)) + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.inputBinding, + self.default, + self.type_, + ) + ) @classmethod def fromDoc( @@ -10791,114 +10751,74 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputArraySchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) - - items = load_field( - _doc.get("items"), - uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("items") - ) + id = None + if "id" in _doc: + try: + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) - except ValidationException as e: - 
error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("items") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `id`": _errors__.append( ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [e], - detailed_message=f"the `items` field with value `{val}` " - "is not valid because:", + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - 
lc=_doc.get("type") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -10942,21 +10862,21 @@ def fromDoc( "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: + secondaryFiles = None + if "secondaryFiles" in _doc: try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("inputBinding") + lc=_doc.get("secondaryFiles") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field 
`inputBinding`": + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( str(e), @@ -10964,13 +10884,13 @@ def fromDoc( ) ) else: - val = _doc.get("inputBinding") + val = _doc.get("secondaryFiles") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -10982,155 +10902,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [e], - detailed_message=f"the `inputBinding` field with value `{val}` " + detailed_message=f"the `secondaryFiles` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - items=items, - type_=type_, - label=label, - inputBinding=inputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: 
dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.items is not None: - u = save_relative_uri(self.items, base_url, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "inputBinding"]) - - -class CommandOutputRecordField(OutputRecordField): - name: str - - def __init__( - self, - name: Any, - type_: Any, - doc: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.type_ = type_ - self.outputBinding = outputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputRecordField): - return bool( - self.doc == other.doc - and self.name == other.name - and self.type_ == other.type_ - and self.outputBinding == other.outputBinding - ) - return False - - def 
__hash__(self) -> int: - return hash((self.doc, self.name, self.type_, self.outputBinding)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputRecordField": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - name = None - if "name" in _doc: + streamable = None + if "streamable" in _doc: try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None_None, + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("streamable") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( str(e), @@ -11138,13 +10931,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("streamable") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -11156,26 +10949,17 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `streamable` field with value `{val}` " "is not valid because:", ) ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: 
- _errors__.append(ValidationException("missing name")) - if not __original_name_is_none: - baseuri = cast(str, name) doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -11219,69 +11003,68 @@ def fromDoc( "is not valid because:", ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + format = None + if "format" in _doc: + try: + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `format`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - 
ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - outputBinding = None - if "outputBinding" in _doc: + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + inputBinding = None + if "inputBinding" in _doc: try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("outputBinding") + lc=_doc.get("inputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputBinding`": + if str(e) == "missing required field `inputBinding`": _errors__.append( ValidationException( str(e), @@ -11289,13 +11072,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputBinding") + val = _doc.get("inputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `inputBinding` field is not valid because:", + 
SourceLine(_doc, "inputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -11307,14 +11090,108 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [e], - detailed_message=f"the `outputBinding` field with value `{val}` " + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + default = None + if "default" in _doc: + try: + default = _load_field( + _doc.get("default"), + union_of_None_type_or_CWLObjectTypeLoader, + baseuri, + loadingOptions, + lc=_doc.get("default") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `default`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("default") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + detailed_message=f"the `default` field with value `{val}` " + "is not valid because:", + ) + ) + type_ = None + if "type" in _doc: + try: + type_ = _load_field( + _doc.get("type"), + 
typedsl_union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -11322,14 +11199,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `outputBinding`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `inputBinding`, `default`, `type`".format( k ), SourceLine(_doc, k, 
str), @@ -11339,14 +11216,19 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, doc=doc, - name=name, + id=id, + format=format, + inputBinding=inputBinding, + default=default, type_=type_, - outputBinding=outputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -11360,24 +11242,49 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) if self.doc is not None: r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris + self.doc, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.outputBinding is not None: - r["outputBinding"] = save( - self.outputBinding, + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, top=False, - base_url=self.name, + 
base_url=self.id, relative_uris=relative_uris, ) + if self.default is not None: + r["default"] = save( + self.default, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) # top refers to the directory level if top: @@ -11387,20 +11294,41 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type", "outputBinding"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "inputBinding", + "default", + "type", + ] + ) + +class CommandOutputParameter(OutputParameter): + """ + An output parameter for a CommandLineTool. -class CommandOutputRecordSchema(OutputRecordSchema): - name: str + """ + + id: str def __init__( self, - type_: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + outputBinding: Any | None = None, + format: Any | None = None, + type_: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -11410,23 +11338,42 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.fields = fields - self.type_ = type_ self.label = label - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.outputBinding = outputBinding + self.format = format + self.type_ = type_ def 
__eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputRecordSchema): + if isinstance(other, CommandOutputParameter): return bool( - self.fields == other.fields + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.outputBinding == other.outputBinding + and self.format == other.format and self.type_ == other.type_ - and self.label == other.label - and self.name == other.name ) return False def __hash__(self) -> int: - return hash((self.fields, self.type_, self.label, self.name)) + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.outputBinding, + self.format, + self.type_, + ) + ) @classmethod def fromDoc( @@ -11434,29 +11381,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputRecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + id = None + if "id" in _doc: try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("id") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `id`": _errors__.append( ValidationException( str(e), @@ -11464,13 +11411,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("id") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `id` field 
is not valid because:", + SourceLine(_doc, "id", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -11482,37 +11429,37 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `id` field with value `{val}` " "is not valid because:", ) ) - __original_name_is_none = name is None - if name is None: + __original_id_is_none = id is None + if id is None: if docRoot is not None: - name = docRoot + id = docRoot else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - fields = None - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, - baseuri, + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, loadingOptions, - lc=_doc.get("fields") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `fields`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -11520,13 +11467,13 @@ def fromDoc( ) ) else: - val = _doc.get("fields") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but 
valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -11538,76 +11485,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `fields` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Record_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - label = None - if "label" in _doc: + secondaryFiles = None + if "secondaryFiles" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, 
baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("secondaryFiles") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( str(e), @@ -11615,13 +11514,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("secondaryFiles") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -11633,158 +11532,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `secondaryFiles` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - fields=fields, - type_=type_, - label=label, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - 
) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.fields is not None: - r["fields"] = save( - self.fields, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type", "label", "name"]) - - -class CommandOutputEnumSchema(OutputEnumSchema): - name: str - - def __init__( - self, - symbols: Any, - type_: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.symbols = symbols - self.type_ = type_ - self.label = label - self.outputBinding = outputBinding - - def __eq__(self, other: 
Any) -> bool: - if isinstance(other, CommandOutputEnumSchema): - return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type_ == other.type_ - and self.label == other.label - and self.outputBinding == other.outputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - (self.name, self.symbols, self.type_, self.label, self.outputBinding) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputEnumSchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - name = None - if "name" in _doc: + streamable = None + if "streamable" in _doc: try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("streamable") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( str(e), @@ -11792,13 +11561,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("streamable") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -11810,133 +11579,122 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `streamable` field is not 
valid because:", + SourceLine(_doc, "streamable", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `streamable` field with value `{val}` " "is not valid because:", ) ) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - try: - if _doc.get("symbols") is None: - raise ValidationException("missing required field `symbols`", None, []) - - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("symbols") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `symbols`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("symbols") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - detailed_message=f"the `symbols` field with value `{val}` " - "is not valid because:", + 
val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + outputBinding = None + if "outputBinding" in _doc: + try: + outputBinding = _load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputBinding") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `outputBinding`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid 
{to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("outputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - label = None - if "label" in _doc: + else: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + detailed_message=f"the `outputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("format") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `format`": _errors__.append( ValidationException( str(e), @@ -11944,13 +11702,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("format") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the 
`format` field is not valid because:", + SourceLine(_doc, "format", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -11962,28 +11720,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `format` field with value `{val}` " "is not valid because:", ) ) - outputBinding = None - if "outputBinding" in _doc: + type_ = None + if "type" in _doc: try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, - lc=_doc.get("outputBinding") + lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputBinding`": + if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), @@ -11991,13 +11749,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputBinding") + val = _doc.get("type") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " 
f"{verb_tensage} {error_message}", @@ -12009,14 +11767,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [e], - detailed_message=f"the `outputBinding` field with value `{val}` " + detailed_message=f"the `type` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -12024,14 +11782,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `outputBinding`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `type`".format( k ), SourceLine(_doc, k, str), @@ -12041,15 +11799,18 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - name=name, - symbols=symbols, - type_=type_, label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, outputBinding=outputBinding, + format=format, + type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -12063,27 +11824,45 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.symbols is not None: - u = 
save_relative_uri(self.symbols, self.name, True, None, relative_uris) - r["symbols"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u if self.label is not None: r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris ) if self.outputBinding is not None: r["outputBinding"] = save( self.outputBinding, top=False, - base_url=self.name, + base_url=self.id, relative_uris=relative_uris, ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) # top refers to the directory level if top: @@ -12093,18 +11872,48 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type", "label", "outputBinding"]) - - -class CommandOutputArraySchema(OutputArraySchema): - def __init__( - self, - items: Any, - type_: Any, - label: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + attrs: ClassVar[Collection[str]] = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "outputBinding", + "format", + "type", 
+ ] + ) + + +class CommandLineTool(Process): + """ + This defines the schema of the CWL Command Line Tool Description document. + + """ + + id: str + + def __init__( + self, + inputs: Any, + outputs: Any, + id: Any | None = None, + requirements: Any | None = None, + hints: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + cwlVersion: Any | None = None, + baseCommand: Any | None = None, + arguments: Any | None = None, + stdin: Any | None = None, + stderr: Any | None = None, + stdout: Any | None = None, + successCodes: Any | None = None, + temporaryFailCodes: Any | None = None, + permanentFailCodes: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -12114,23 +11923,69 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints self.label = label - self.outputBinding = outputBinding + self.doc = doc + self.cwlVersion = cwlVersion + self.class_: Final[str] = "CommandLineTool" + self.baseCommand = baseCommand + self.arguments = arguments + self.stdin = stdin + self.stderr = stderr + self.stdout = stdout + self.successCodes = successCodes + self.temporaryFailCodes = temporaryFailCodes + self.permanentFailCodes = permanentFailCodes def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputArraySchema): + if isinstance(other, CommandLineTool): return bool( - self.items == other.items - and self.type_ == other.type_ + self.id == other.id + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints and self.label == other.label - and self.outputBinding 
== other.outputBinding + and self.doc == other.doc + and self.cwlVersion == other.cwlVersion + and self.class_ == other.class_ + and self.baseCommand == other.baseCommand + and self.arguments == other.arguments + and self.stdin == other.stdin + and self.stderr == other.stderr + and self.stdout == other.stdout + and self.successCodes == other.successCodes + and self.temporaryFailCodes == other.temporaryFailCodes + and self.permanentFailCodes == other.permanentFailCodes ) return False def __hash__(self) -> int: - return hash((self.items, self.type_, self.label, self.outputBinding)) + return hash( + ( + self.id, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.label, + self.doc, + self.cwlVersion, + self.class_, + self.baseCommand, + self.arguments, + self.stdin, + self.stderr, + self.stdout, + self.successCodes, + self.temporaryFailCodes, + self.permanentFailCodes, + ) + ) @classmethod def fromDoc( @@ -12138,30 +11993,103 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputArraySchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] + id = None + if "id" in _doc: + try: + id = _load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + 
f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - items = load_field( - _doc.get("items"), - uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None, + class_ = _load_field( + _doc.get("class"), + uri_CommandLineTool_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("items") + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + try: + if _doc.get("inputs") is None: + raise ValidationException("missing required field `inputs`", None, []) + + inputs = _load_field( + _doc.get("inputs"), + idmap_inputs_array_of_CommandInputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputs") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `items`": + if str(e) == "missing required field 
`inputs`": _errors__.append( ValidationException( str(e), @@ -12169,13 +12097,13 @@ def fromDoc( ) ) else: - val = _doc.get("items") + val = _doc.get("inputs") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12187,29 +12115,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), [e], - detailed_message=f"the `items` field with value `{val}` " + detailed_message=f"the `inputs` field with value `{val}` " "is not valid because:", ) ) try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("outputs") is None: + raise ValidationException("missing required field `outputs`", None, []) - type_ = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, + outputs = _load_field( + _doc.get("outputs"), + idmap_outputs_array_of_CommandOutputParameterLoader, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("outputs") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `outputs`": _errors__.append( ValidationException( str(e), @@ -12217,13 +12145,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("outputs") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `outputs` field is not valid 
because:", + SourceLine(_doc, "outputs", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12235,28 +12163,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `outputs` field with value `{val}` " "is not valid because:", ) ) - label = None - if "label" in _doc: + requirements = None + if "requirements" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + requirements = _load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("requirements") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `requirements`": _errors__.append( ValidationException( str(e), @@ -12264,13 +12192,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("requirements") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), 
[ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12282,28 +12210,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `requirements` field with value `{val}` " "is not valid because:", ) ) - outputBinding = None - if "outputBinding" in _doc: + hints = None + if "hints" in _doc: try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, + hints = _load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, baseuri, loadingOptions, - lc=_doc.get("outputBinding") + lc=_doc.get("hints") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputBinding`": + if str(e) == "missing required field `hints`": _errors__.append( ValidationException( str(e), @@ -12311,13 +12239,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputBinding") + val = _doc.get("hints") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), 
[ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12329,186 +12257,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [e], - detailed_message=f"the `outputBinding` field with value `{val}` " + detailed_message=f"the `hints` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - items=items, - type_=type_, - label=label, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.items is not None: - u = save_relative_uri(self.items, base_url, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=base_url, 
relative_uris=relative_uris - ) - if self.outputBinding is not None: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "outputBinding"]) - - -class CommandInputParameter(InputParameter): - """ - An input parameter for a CommandLineTool. - """ - - id: str - - def __init__( - self, - id: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - inputBinding: Optional[Any] = None, - default: Optional[Any] = None, - type_: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.format = format - self.inputBinding = inputBinding - self.default = default - self.type_ = type_ - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.inputBinding == other.inputBinding - and self.default == other.default - and self.type_ == other.type_ - ) - return False - - def __hash__(self) 
-> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.inputBinding, - self.default, - self.type_, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputParameter": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: + label = None + if "label" in _doc: try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("id") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `id`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -12516,13 +12286,13 @@ def fromDoc( ) ) else: - val = _doc.get("id") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12534,37 +12304,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `id` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - 
_errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - label = None - if "label" in _doc: + doc = None + if "doc" in _doc: try: - label = load_field( - _doc.get("label"), + doc = _load_field( + _doc.get("doc"), union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -12572,13 +12333,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12590,28 +12351,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - secondaryFiles = None - if "secondaryFiles" in _doc: + cwlVersion = None + if "cwlVersion" in _doc: try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, + cwlVersion = _load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("secondaryFiles") + lc=_doc.get("cwlVersion") ) except ValidationException as 
e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `secondaryFiles`": + if str(e) == "missing required field `cwlVersion`": _errors__.append( ValidationException( str(e), @@ -12619,13 +12380,13 @@ def fromDoc( ) ) else: - val = _doc.get("secondaryFiles") + val = _doc.get("cwlVersion") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12637,28 +12398,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " + detailed_message=f"the `cwlVersion` field with value `{val}` " "is not valid because:", ) ) - streamable = None - if "streamable" in _doc: + baseCommand = None + if "baseCommand" in _doc: try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, + baseCommand = _load_field( + _doc.get("baseCommand"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("streamable") + lc=_doc.get("baseCommand") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `streamable`": + if str(e) == "missing required field `baseCommand`": _errors__.append( ValidationException( str(e), @@ -12666,13 +12427,13 @@ def fromDoc( ) ) else: - val = _doc.get("streamable") + val = _doc.get("baseCommand") if error_message != str(e): val_type = 
convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `baseCommand` field is not valid because:", + SourceLine(_doc, "baseCommand", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12684,28 +12445,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `baseCommand` field is not valid because:", + SourceLine(_doc, "baseCommand", str), [e], - detailed_message=f"the `streamable` field with value `{val}` " + detailed_message=f"the `baseCommand` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: + arguments = None + if "arguments" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + arguments = _load_field( + _doc.get("arguments"), + union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("arguments") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `arguments`": _errors__.append( ValidationException( str(e), @@ -12713,13 +12474,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("arguments") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `arguments` field is not valid because:", + SourceLine(_doc, "arguments", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12731,28 +12492,28 @@ def fromDoc( 
else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `arguments` field is not valid because:", + SourceLine(_doc, "arguments", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `arguments` field with value `{val}` " "is not valid because:", ) ) - format = None - if "format" in _doc: + stdin = None + if "stdin" in _doc: try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + stdin = _load_field( + _doc.get("stdin"), + union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("format") + lc=_doc.get("stdin") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `format`": + if str(e) == "missing required field `stdin`": _errors__.append( ValidationException( str(e), @@ -12760,13 +12521,13 @@ def fromDoc( ) ) else: - val = _doc.get("format") + val = _doc.get("stdin") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `stdin` field is not valid because:", + SourceLine(_doc, "stdin", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12778,28 +12539,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `stdin` field is not valid because:", + SourceLine(_doc, "stdin", str), [e], - detailed_message=f"the `format` field with value `{val}` " + detailed_message=f"the `stdin` field with value `{val}` " "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: + stderr = None + if "stderr" in _doc: try: - 
inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, + stderr = _load_field( + _doc.get("stderr"), + union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("inputBinding") + lc=_doc.get("stderr") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `inputBinding`": + if str(e) == "missing required field `stderr`": _errors__.append( ValidationException( str(e), @@ -12807,13 +12568,13 @@ def fromDoc( ) ) else: - val = _doc.get("inputBinding") + val = _doc.get("stderr") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `stderr` field is not valid because:", + SourceLine(_doc, "stderr", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12825,28 +12586,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `stderr` field is not valid because:", + SourceLine(_doc, "stderr", str), [e], - detailed_message=f"the `inputBinding` field with value `{val}` " + detailed_message=f"the `stderr` field with value `{val}` " "is not valid because:", ) ) - default = None - if "default" in _doc: + stdout = None + if "stdout" in _doc: try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_CWLObjectTypeLoader, + stdout = _load_field( + _doc.get("stdout"), + union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("default") + lc=_doc.get("stdout") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `default`": + if str(e) == "missing 
required field `stdout`": _errors__.append( ValidationException( str(e), @@ -12854,13 +12615,13 @@ def fromDoc( ) ) else: - val = _doc.get("default") + val = _doc.get("stdout") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), + "the `stdout` field is not valid because:", + SourceLine(_doc, "stdout", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12872,28 +12633,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), + "the `stdout` field is not valid because:", + SourceLine(_doc, "stdout", str), [e], - detailed_message=f"the `default` field with value `{val}` " + detailed_message=f"the `stdout` field with value `{val}` " "is not valid because:", ) ) - type_ = None - if "type" in _doc: + successCodes = None + if "successCodes" in _doc: try: - type_ = load_field( - _doc.get("type"), - typedsl_union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + successCodes = _load_field( + _doc.get("successCodes"), + union_of_None_type_or_array_of_inttype, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("successCodes") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `successCodes`": _errors__.append( ValidationException( str(e), @@ -12901,13 +12662,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("successCodes") if error_message != str(e): val_type = 
convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `successCodes` field is not valid because:", + SourceLine(_doc, "successCodes", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12919,14 +12680,108 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `successCodes` field is not valid because:", + SourceLine(_doc, "successCodes", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `successCodes` field with value `{val}` " + "is not valid because:", + ) + ) + temporaryFailCodes = None + if "temporaryFailCodes" in _doc: + try: + temporaryFailCodes = _load_field( + _doc.get("temporaryFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + lc=_doc.get("temporaryFailCodes") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `temporaryFailCodes`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("temporaryFailCodes") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `temporaryFailCodes` field is not valid because:", + SourceLine(_doc, "temporaryFailCodes", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `temporaryFailCodes` field is not valid because:", + SourceLine(_doc, "temporaryFailCodes", str), + [e], + 
detailed_message=f"the `temporaryFailCodes` field with value `{val}` " + "is not valid because:", + ) + ) + permanentFailCodes = None + if "permanentFailCodes" in _doc: + try: + permanentFailCodes = _load_field( + _doc.get("permanentFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + lc=_doc.get("permanentFailCodes") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `permanentFailCodes`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("permanentFailCodes") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `permanentFailCodes` field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `permanentFailCodes` field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), + [e], + detailed_message=f"the `permanentFailCodes` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -12934,14 +12789,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `inputBinding`, `default`, `type`".format( + "invalid field 
`{}`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( k ), SourceLine(_doc, k, str), @@ -12951,15 +12806,22 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( + id=id, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, doc=doc, - id=id, - format=format, - inputBinding=inputBinding, - default=default, - type_=type_, + cwlVersion=cwlVersion, + baseCommand=baseCommand, + arguments=arguments, + stdin=stdin, + stderr=stderr, + stdout=stdout, + successCodes=successCodes, + temporaryFailCodes=temporaryFailCodes, + permanentFailCodes=permanentFailCodes, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -12980,45 +12842,89 @@ def save( if self.id is not None: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, self.id, False, None, relative_uris) + r["class"] = u + if self.inputs is not None: + r["inputs"] = save( + self.inputs, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.id, - relative_uris=relative_uris, + if self.outputs is not None: + r["outputs"] = save( + self.outputs, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.streamable is not None: 
- r["streamable"] = save( - self.streamable, + if self.requirements is not None: + r["requirements"] = save( + self.requirements, top=False, base_url=self.id, relative_uris=relative_uris, ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) if self.doc is not None: r["doc"] = save( self.doc, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, + if self.cwlVersion is not None: + u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) + r["cwlVersion"] = u + if self.baseCommand is not None: + r["baseCommand"] = save( + self.baseCommand, top=False, base_url=self.id, relative_uris=relative_uris, ) - if self.default is not None: - r["default"] = save( - self.default, top=False, base_url=self.id, relative_uris=relative_uris + if self.arguments is not None: + r["arguments"] = save( + self.arguments, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.id, relative_uris=relative_uris + if self.stdin is not None: + r["stdin"] = save( + self.stdin, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.stderr is not None: + r["stderr"] = save( + self.stderr, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.stdout is not None: + r["stdout"] = save( + self.stdout, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.successCodes is not None: + r["successCodes"] = save( + self.successCodes, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.temporaryFailCodes is not None: + 
r["temporaryFailCodes"] = save( + self.temporaryFailCodes, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.permanentFailCodes is not None: + r["permanentFailCodes"] = save( + self.permanentFailCodes, + top=False, + base_url=self.id, + relative_uris=relative_uris, ) # top refers to the directory level @@ -13029,40 +12935,60 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ + "id", + "inputs", + "outputs", + "requirements", + "hints", "label", - "secondaryFiles", - "streamable", "doc", - "id", - "format", - "inputBinding", - "default", - "type", + "cwlVersion", + "class", + "baseCommand", + "arguments", + "stdin", + "stderr", + "stdout", + "successCodes", + "temporaryFailCodes", + "permanentFailCodes", ] ) -class CommandOutputParameter(OutputParameter): - """ - An output parameter for a CommandLineTool. +class DockerRequirement(ProcessRequirement): """ + Indicates that a workflow component should be run in a `Docker `__ container, and specifies how to fetch or build the image. - id: str + If a CommandLineTool lists ``DockerRequirement`` under ``hints`` (or ``requirements``), it may (or must) be run in the specified Docker container. + + The platform must first acquire or install the correct Docker image as specified by ``dockerPull``, ``dockerImport``, ``dockerLoad`` or ``dockerFile``. + + The platform must execute the tool in the container using ``docker run`` with the appropriate Docker image and tool command line. + + The workflow platform may provide input files and the designated output directory through the use of volume bind mounts. The platform should rewrite file paths in the input object to correspond to the Docker bind mounted locations. That is, the platform should rewrite values in the parameter context such as ``runtime.outdir``, ``runtime.tmpdir`` and others to be valid paths within the container. 
+ + When running a tool contained in Docker, the workflow platform must not assume anything about the contents of the Docker container, such as the presence or absence of specific software, except to assume that the generated command line represents a valid command within the runtime environment of the container. + + Interaction with other requirements + ----------------------------------- + + If `EnvVarRequirement <#EnvVarRequirement>`__ is specified alongside a DockerRequirement, the environment variables must be provided to Docker using ``--env`` or ``--env-file`` and interact with the container's preexisting environment as defined by Docker. + + """ def __init__( self, - id: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - outputBinding: Optional[Any] = None, - format: Optional[Any] = None, - type_: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + dockerPull: Any | None = None, + dockerLoad: Any | None = None, + dockerFile: Any | None = None, + dockerImport: Any | None = None, + dockerImageId: Any | None = None, + dockerOutputDirectory: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -13072,40 +12998,37 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.outputBinding = outputBinding - self.format = format - self.type_ = type_ + self.class_: Final[str] = "DockerRequirement" + self.dockerPull = dockerPull + self.dockerLoad = dockerLoad + self.dockerFile = dockerFile + self.dockerImport = dockerImport + 
self.dockerImageId = dockerImageId + self.dockerOutputDirectory = dockerOutputDirectory def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputParameter): + if isinstance(other, DockerRequirement): return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.outputBinding == other.outputBinding - and self.format == other.format - and self.type_ == other.type_ + self.class_ == other.class_ + and self.dockerPull == other.dockerPull + and self.dockerLoad == other.dockerLoad + and self.dockerFile == other.dockerFile + and self.dockerImport == other.dockerImport + and self.dockerImageId == other.dockerImageId + and self.dockerOutputDirectory == other.dockerOutputDirectory ) return False def __hash__(self) -> int: return hash( ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.outputBinding, - self.format, - self.type_, + self.class_, + self.dockerPull, + self.dockerLoad, + self.dockerFile, + self.dockerImport, + self.dockerImageId, + self.dockerOutputDirectory, ) ) @@ -13115,29 +13038,46 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputParameter": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - id = None - if "id" in _doc: + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_DockerRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise 
e + dockerPull = None + if "dockerPull" in _doc: try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, + dockerPull = _load_field( + _doc.get("dockerPull"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("id") + lc=_doc.get("dockerPull") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `id`": + if str(e) == "missing required field `dockerPull`": _errors__.append( ValidationException( str(e), @@ -13145,13 +13085,13 @@ def fromDoc( ) ) else: - val = _doc.get("id") + val = _doc.get("dockerPull") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `dockerPull` field is not valid because:", + SourceLine(_doc, "dockerPull", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13163,37 +13103,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `dockerPull` field is not valid because:", + SourceLine(_doc, "dockerPull", str), [e], - detailed_message=f"the `id` field with value `{val}` " + detailed_message=f"the `dockerPull` field with value `{val}` " "is not valid because:", ) ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - label = None - if "label" in _doc: + dockerLoad = None + if "dockerLoad" in _doc: try: - label = load_field( - _doc.get("label"), + dockerLoad = _load_field( + _doc.get("dockerLoad"), union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("dockerLoad") ) except ValidationException as e: error_message, 
to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `dockerLoad`": _errors__.append( ValidationException( str(e), @@ -13201,13 +13132,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("dockerLoad") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `dockerLoad` field is not valid because:", + SourceLine(_doc, "dockerLoad", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13219,28 +13150,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `dockerLoad` field is not valid because:", + SourceLine(_doc, "dockerLoad", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `dockerLoad` field with value `{val}` " "is not valid because:", ) ) - secondaryFiles = None - if "secondaryFiles" in _doc: + dockerFile = None + if "dockerFile" in _doc: try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, + dockerFile = _load_field( + _doc.get("dockerFile"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("secondaryFiles") + lc=_doc.get("dockerFile") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `secondaryFiles`": + if str(e) == "missing required field `dockerFile`": _errors__.append( ValidationException( str(e), @@ -13248,13 +13179,13 @@ def fromDoc( ) ) else: - val = _doc.get("secondaryFiles") + val = _doc.get("dockerFile") if error_message != str(e): val_type = 
convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `dockerFile` field is not valid because:", + SourceLine(_doc, "dockerFile", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13266,28 +13197,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `dockerFile` field is not valid because:", + SourceLine(_doc, "dockerFile", str), [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " + detailed_message=f"the `dockerFile` field with value `{val}` " "is not valid because:", ) ) - streamable = None - if "streamable" in _doc: + dockerImport = None + if "dockerImport" in _doc: try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, + dockerImport = _load_field( + _doc.get("dockerImport"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("streamable") + lc=_doc.get("dockerImport") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `streamable`": + if str(e) == "missing required field `dockerImport`": _errors__.append( ValidationException( str(e), @@ -13295,13 +13226,13 @@ def fromDoc( ) ) else: - val = _doc.get("streamable") + val = _doc.get("dockerImport") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `dockerImport` field is not valid because:", + SourceLine(_doc, "dockerImport", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13313,122 
+13244,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `dockerImport` field is not valid because:", + SourceLine(_doc, "dockerImport", str), [e], - detailed_message=f"the `streamable` field with value `{val}` " + detailed_message=f"the `dockerImport` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: + dockerImageId = None + if "dockerImageId" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + dockerImageId = _load_field( + _doc.get("dockerImageId"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("dockerImageId") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - outputBinding = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputBinding") - ) - - except ValidationException as e: - 
error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - detailed_message=f"the `outputBinding` field with value `{val}` " - "is not valid because:", - ) - ) - format = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, - baseuri, - loadingOptions, - lc=_doc.get("format") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `format`": + if str(e) == "missing required field `dockerImageId`": _errors__.append( ValidationException( str(e), @@ -13436,13 +13273,13 @@ def fromDoc( ) ) else: - val = _doc.get("format") + val = _doc.get("dockerImageId") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `dockerImageId` field is not valid because:", + SourceLine(_doc, "dockerImageId", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ 
-13454,28 +13291,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `dockerImageId` field is not valid because:", + SourceLine(_doc, "dockerImageId", str), [e], - detailed_message=f"the `format` field with value `{val}` " + detailed_message=f"the `dockerImageId` field with value `{val}` " "is not valid because:", ) ) - type_ = None - if "type" in _doc: + dockerOutputDirectory = None + if "dockerOutputDirectory" in _doc: try: - type_ = load_field( - _doc.get("type"), - typedsl_union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + dockerOutputDirectory = _load_field( + _doc.get("dockerOutputDirectory"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("dockerOutputDirectory") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `dockerOutputDirectory`": _errors__.append( ValidationException( str(e), @@ -13483,13 +13320,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("dockerOutputDirectory") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `dockerOutputDirectory` field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13501,14 +13338,14 @@ def fromDoc( else: _errors__.append( ValidationException( - 
"the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `dockerOutputDirectory` field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `dockerOutputDirectory` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -13516,14 +13353,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `type`".format( + "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( k ), SourceLine(_doc, k, str), @@ -13533,18 +13370,15 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - outputBinding=outputBinding, - format=format, - type_=type_, + dockerPull=dockerPull, + dockerLoad=dockerLoad, + dockerFile=dockerFile, + dockerImport=dockerImport, + dockerImageId=dockerImageId, + dockerOutputDirectory=dockerOutputDirectory, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -13558,44 +13392,57 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.label is not None: - r["label"] 
= save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.dockerPull is not None: + r["dockerPull"] = save( + self.dockerPull, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, + if self.dockerLoad is not None: + r["dockerLoad"] = save( + self.dockerLoad, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, + if self.dockerFile is not None: + r["dockerFile"] = save( + self.dockerFile, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris + if self.dockerImport is not None: + r["dockerImport"] = save( + self.dockerImport, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) - if self.outputBinding is not None: - r["outputBinding"] = save( - self.outputBinding, + if self.dockerImageId is not None: + r["dockerImageId"] = save( + self.dockerImageId, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.id, relative_uris=relative_uris + if self.dockerOutputDirectory is not None: + r["dockerOutputDirectory"] = save( + self.dockerOutputDirectory, + top=False, + base_url=base_url, + 
relative_uris=relative_uris, ) # top refers to the directory level @@ -13606,48 +13453,30 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "outputBinding", - "format", - "type", + "class", + "dockerPull", + "dockerLoad", + "dockerFile", + "dockerImport", + "dockerImageId", + "dockerOutputDirectory", ] ) -class CommandLineTool(Process): +class SoftwareRequirement(ProcessRequirement): """ - This defines the schema of the CWL Command Line Tool Description document. + A list of software packages that should be configured in the environment of the defined process. """ - id: str - def __init__( self, - inputs: Any, - outputs: Any, - id: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - baseCommand: Optional[Any] = None, - arguments: Optional[Any] = None, - stdin: Optional[Any] = None, - stderr: Optional[Any] = None, - stdout: Optional[Any] = None, - successCodes: Optional[Any] = None, - temporaryFailCodes: Optional[Any] = None, - permanentFailCodes: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + packages: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -13657,69 +13486,16 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.label = label - self.doc = doc - self.cwlVersion = cwlVersion - self.class_ = "CommandLineTool" - self.baseCommand = 
baseCommand - self.arguments = arguments - self.stdin = stdin - self.stderr = stderr - self.stdout = stdout - self.successCodes = successCodes - self.temporaryFailCodes = temporaryFailCodes - self.permanentFailCodes = permanentFailCodes + self.class_: Final[str] = "SoftwareRequirement" + self.packages = packages def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandLineTool): - return bool( - self.id == other.id - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.label == other.label - and self.doc == other.doc - and self.cwlVersion == other.cwlVersion - and self.class_ == other.class_ - and self.baseCommand == other.baseCommand - and self.arguments == other.arguments - and self.stdin == other.stdin - and self.stderr == other.stderr - and self.stdout == other.stdout - and self.successCodes == other.successCodes - and self.temporaryFailCodes == other.temporaryFailCodes - and self.permanentFailCodes == other.permanentFailCodes - ) + if isinstance(other, SoftwareRequirement): + return bool(self.class_ == other.class_ and self.packages == other.packages) return False def __hash__(self) -> int: - return hash( - ( - self.id, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.label, - self.doc, - self.cwlVersion, - self.class_, - self.baseCommand, - self.arguments, - self.stdin, - self.stderr, - self.stdout, - self.successCodes, - self.temporaryFailCodes, - self.permanentFailCodes, - ) - ) + return hash((self.class_, self.packages)) @classmethod def fromDoc( @@ -13727,102 +13503,47 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandLineTool": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - 
_doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = cast(str, id) try: if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_CommandLineTool_classLoader_False_True_None_None, + uri_SoftwareRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e + raise e try: - if _doc.get("inputs") is None: - raise 
ValidationException("missing required field `inputs`", None, []) + if _doc.get("packages") is None: + raise ValidationException("missing required field `packages`", None, []) - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_CommandInputParameterLoader, + packages = _load_field( + _doc.get("packages"), + idmap_packages_array_of_SoftwarePackageLoader, baseuri, loadingOptions, - lc=_doc.get("inputs") + lc=_doc.get("packages") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `inputs`": + if str(e) == "missing required field `packages`": _errors__.append( ValidationException( str(e), @@ -13830,13 +13551,13 @@ def fromDoc( ) ) else: - val = _doc.get("inputs") + val = _doc.get("packages") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), + "the `packages` field is not valid because:", + SourceLine(_doc, "packages", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13848,29 +13569,144 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), + "the `packages` field is not valid because:", + SourceLine(_doc, "packages", str), [e], - detailed_message=f"the `inputs` field with value `{val}` " + detailed_message=f"the `packages` field with value `{val}` " "is not valid because:", ) ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid 
field `{}`, expected one of: `class`, `packages`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + packages=packages, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.packages is not None: + r["packages"] = save( + self.packages, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["class", "packages"]) + + +class SoftwarePackage(Saveable): + def __init__( + self, + package: Any, + version: Any | None = None, + specs: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.package = package + self.version = version + self.specs = specs + + def 
__eq__(self, other: Any) -> bool: + if isinstance(other, SoftwarePackage): + return bool( + self.package == other.package + and self.version == other.version + and self.specs == other.specs + ) + return False + + def __hash__(self) -> int: + return hash((self.package, self.version, self.specs)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] try: - if _doc.get("outputs") is None: - raise ValidationException("missing required field `outputs`", None, []) + if _doc.get("package") is None: + raise ValidationException("missing required field `package`", None, []) - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_CommandOutputParameterLoader, + package = _load_field( + _doc.get("package"), + strtype, baseuri, loadingOptions, - lc=_doc.get("outputs") + lc=_doc.get("package") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputs`": + if str(e) == "missing required field `package`": _errors__.append( ValidationException( str(e), @@ -13878,13 +13714,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputs") + val = _doc.get("package") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), + "the `package` field is not valid because:", + SourceLine(_doc, "package", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13896,28 +13732,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), + "the `package` field is not valid 
because:", + SourceLine(_doc, "package", str), [e], - detailed_message=f"the `outputs` field with value `{val}` " + detailed_message=f"the `package` field with value `{val}` " "is not valid because:", ) ) - requirements = None - if "requirements" in _doc: + version = None + if "version" in _doc: try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, + version = _load_field( + _doc.get("version"), + union_of_None_type_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("requirements") + lc=_doc.get("version") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `requirements`": + if str(e) == "missing required field `version`": _errors__.append( ValidationException( str(e), @@ -13925,13 +13761,13 @@ def fromDoc( ) ) else: - val = _doc.get("requirements") + val = _doc.get("version") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), + "the `version` field is not valid because:", + SourceLine(_doc, "version", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13943,28 +13779,28 
@@ def fromDoc( else: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), + "the `version` field is not valid because:", + SourceLine(_doc, "version", str), [e], - detailed_message=f"the `requirements` field with value `{val}` " + detailed_message=f"the `version` field with value `{val}` " "is not valid because:", ) ) - hints = None - if "hints" in _doc: + specs = None + if "specs" in _doc: try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type, + specs = _load_field( + _doc.get("specs"), + uri_union_of_None_type_or_array_of_strtype_False_False_None_True, baseuri, loadingOptions, - lc=_doc.get("hints") + lc=_doc.get("specs") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `hints`": + if str(e) == "missing required field `specs`": _errors__.append( ValidationException( str(e), @@ -13972,13 +13808,13 @@ def fromDoc( ) ) else: - val = _doc.get("hints") + val = _doc.get("specs") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `specs` field is not valid because:", + SourceLine(_doc, "specs", str), 
[ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13990,28 +13826,147 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `specs` field is not valid because:", + SourceLine(_doc, "specs", str), [e], - detailed_message=f"the `hints` field with value `{val}` " + detailed_message=f"the `specs` field with value `{val}` " "is not valid because:", ) ) - label = None - if "label" in _doc: + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + package=package, + version=version, + specs=specs, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.package is not None: + r["package"] = save( + self.package, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.version is not None: + r["version"] = save( + self.version, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.specs is not None: + u = save_relative_uri(self.specs, base_url, False, None, 
relative_uris) + r["specs"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["package", "version", "specs"]) + + +class Dirent(Saveable): + """ + Define a file or subdirectory that must be placed in the designated output directory prior to executing the command line tool. May be the result of executing an expression, such as building a configuration file from a template. + + """ + + def __init__( + self, + entry: Any, + entryname: Any | None = None, + writable: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.entryname = entryname + self.entry = entry + self.writable = writable + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Dirent): + return bool( + self.entryname == other.entryname + and self.entry == other.entry + and self.writable == other.writable + ) + return False + + def __hash__(self) -> int: + return hash((self.entryname, self.entry, self.writable)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + entryname = None + if "entryname" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + entryname = _load_field( + _doc.get("entryname"), + union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - 
lc=_doc.get("label") + lc=_doc.get("entryname") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `entryname`": _errors__.append( ValidationException( str(e), @@ -14019,13 +13974,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("entryname") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `entryname` field is not valid because:", + SourceLine(_doc, "entryname", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14037,28 +13992,76 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `entryname` field is not valid because:", + SourceLine(_doc, "entryname", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `entryname` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: + try: + if _doc.get("entry") is None: + raise ValidationException("missing required field `entry`", None, []) + + entry = _load_field( + _doc.get("entry"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("entry") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `entry`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("entry") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `entry` field is not valid because:", + SourceLine(_doc, "entry", str), + [ValidationException(f"Value is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `entry` field is not valid because:", + SourceLine(_doc, "entry", str), + [e], + detailed_message=f"the `entry` field with value `{val}` " + "is not valid because:", + ) + ) + writable = None + if "writable" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype, + writable = _load_field( + _doc.get("writable"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("writable") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `writable`": _errors__.append( ValidationException( str(e), @@ -14066,13 +14069,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("writable") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `writable` field is not valid because:", + SourceLine(_doc, "writable", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14084,481 +14087,401 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `writable` field is not valid because:", + SourceLine(_doc, "writable", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `writable` field with value `{val}` " "is not valid because:", ) ) - cwlVersion = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - 
uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("cwlVersion") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `cwlVersion`": + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - str(e), - None - ) + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) + extension_fields[ex] = _doc[k] else: - val = _doc.get("cwlVersion") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - detailed_message=f"the `cwlVersion` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( + k + ), + SourceLine(_doc, k, str), ) - baseCommand = None - if "baseCommand" in _doc: - try: - baseCommand = load_field( - _doc.get("baseCommand"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("baseCommand") - ) + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + 
entryname=entryname, + entry=entry, + writable=writable, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed - if str(e) == "missing required field `baseCommand`": + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.entryname is not None: + r["entryname"] = save( + self.entryname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.entry is not None: + r["entry"] = save( + self.entry, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.writable is not None: + r["writable"] = save( + self.writable, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["entryname", "entry", "writable"]) + + +class InitialWorkDirRequirement(ProcessRequirement): + """ + Define a list of files and subdirectories that must be created by the workflow platform in the designated output directory prior to executing the command line tool. 
+ + """ + + def __init__( + self, + listing: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_: Final[str] = "InitialWorkDirRequirement" + self.listing = listing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InitialWorkDirRequirement): + return bool(self.class_ == other.class_ and self.listing == other.listing) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.listing)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_InitialWorkDirRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + try: + if _doc.get("listing") is None: + raise ValidationException("missing required field `listing`", None, []) + + listing = _load_field( + _doc.get("listing"), + union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("listing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field 
`listing`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("listing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("baseCommand") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `baseCommand` field is not valid because:", - SourceLine(_doc, "baseCommand", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `baseCommand` field is not valid because:", - SourceLine(_doc, "baseCommand", str), - [e], - detailed_message=f"the `baseCommand` field with value `{val}` " - "is not valid because:", - ) - ) - arguments = None - if "arguments" in _doc: - try: - arguments = load_field( - _doc.get("arguments"), - union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("arguments") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `arguments`": _errors__.append( ValidationException( - str(e), - None + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + detailed_message=f"the `listing` field with value 
`{val}` " + "is not valid because:", ) ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] else: - val = _doc.get("arguments") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `arguments` field is not valid because:", - SourceLine(_doc, "arguments", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `arguments` field is not valid because:", - SourceLine(_doc, "arguments", str), - [e], - detailed_message=f"the `arguments` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `listing`".format( + k + ), + SourceLine(_doc, k, str), ) - stdin = None - if "stdin" in _doc: - try: - stdin = load_field( - _doc.get("stdin"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("stdin") - ) + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + listing=listing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed - if str(e) == "missing required field `stdin`": + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in 
self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.listing is not None: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["class", "listing"]) + + +class EnvVarRequirement(ProcessRequirement): + """ + Define a list of environment variables which will be set in the execution environment of the tool. See ``EnvironmentDef`` for details. 
+ + """ + + def __init__( + self, + envDef: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_: Final[str] = "EnvVarRequirement" + self.envDef = envDef + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvVarRequirement): + return bool(self.class_ == other.class_ and self.envDef == other.envDef) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.envDef)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_EnvVarRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + try: + if _doc.get("envDef") is None: + raise ValidationException("missing required field `envDef`", None, []) + + envDef = _load_field( + _doc.get("envDef"), + idmap_envDef_array_of_EnvironmentDefLoader, + baseuri, + loadingOptions, + lc=_doc.get("envDef") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `envDef`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("envDef") + if 
error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `envDef` field is not valid because:", + SourceLine(_doc, "envDef", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("stdin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `stdin` field is not valid because:", - SourceLine(_doc, "stdin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `stdin` field is not valid because:", - SourceLine(_doc, "stdin", str), - [e], - detailed_message=f"the `stdin` field with value `{val}` " - "is not valid because:", - ) - ) - stderr = None - if "stderr" in _doc: - try: - stderr = load_field( - _doc.get("stderr"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("stderr") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `stderr`": _errors__.append( ValidationException( - str(e), - None + "the `envDef` field is not valid because:", + SourceLine(_doc, "envDef", str), + [e], + detailed_message=f"the `envDef` field with value `{val}` " + "is not valid because:", ) ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") 
+ ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] else: - val = _doc.get("stderr") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `stderr` field is not valid because:", - SourceLine(_doc, "stderr", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `stderr` field is not valid because:", - SourceLine(_doc, "stderr", str), - [e], - detailed_message=f"the `stderr` field with value `{val}` " - "is not valid because:", - ) - ) - stdout = None - if "stdout" in _doc: - try: - stdout = load_field( - _doc.get("stdout"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("stdout") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `stdout`": _errors__.append( ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("stdout") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `stdout` field is not valid because:", - SourceLine(_doc, "stdout", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `stdout` field is not valid because:", - SourceLine(_doc, "stdout", str), - [e], - detailed_message=f"the `stdout` field with 
value `{val}` " - "is not valid because:", - ) - ) - successCodes = None - if "successCodes" in _doc: - try: - successCodes = load_field( - _doc.get("successCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - lc=_doc.get("successCodes") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `successCodes`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("successCodes") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `successCodes` field is not valid because:", - SourceLine(_doc, "successCodes", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `successCodes` field is not valid because:", - SourceLine(_doc, "successCodes", str), - [e], - detailed_message=f"the `successCodes` field with value `{val}` " - "is not valid because:", - ) - ) - temporaryFailCodes = None - if "temporaryFailCodes" in _doc: - try: - temporaryFailCodes = load_field( - _doc.get("temporaryFailCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - lc=_doc.get("temporaryFailCodes") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `temporaryFailCodes`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("temporaryFailCodes") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `temporaryFailCodes` field is not valid because:", - 
SourceLine(_doc, "temporaryFailCodes", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `temporaryFailCodes` field is not valid because:", - SourceLine(_doc, "temporaryFailCodes", str), - [e], - detailed_message=f"the `temporaryFailCodes` field with value `{val}` " - "is not valid because:", - ) - ) - permanentFailCodes = None - if "permanentFailCodes" in _doc: - try: - permanentFailCodes = load_field( - _doc.get("permanentFailCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - lc=_doc.get("permanentFailCodes") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `permanentFailCodes`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("permanentFailCodes") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `permanentFailCodes` field is not valid because:", - SourceLine(_doc, "permanentFailCodes", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `permanentFailCodes` field is not valid because:", - SourceLine(_doc, "permanentFailCodes", str), - [e], - detailed_message=f"the `permanentFailCodes` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - 
_errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( - k - ), - SourceLine(_doc, k, str), + "invalid field `{}`, expected one of: `class`, `envDef`".format( + k + ), + SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - id=id, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - label=label, - doc=doc, - cwlVersion=cwlVersion, - baseCommand=baseCommand, - arguments=arguments, - stdin=stdin, - stderr=stderr, - stdout=stdout, - successCodes=successCodes, - temporaryFailCodes=temporaryFailCodes, - permanentFailCodes=permanentFailCodes, + envDef=envDef, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -14572,90 +14495,19 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ - u = save_relative_uri(uri, self.id, False, None, relative_uris) + u = save_relative_uri(uri, base_url, False, None, relative_uris) 
r["class"] = u - if self.inputs is not None: - r["inputs"] = save( - self.inputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputs is not None: - r["outputs"] = save( - self.outputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.requirements is not None: - r["requirements"] = save( - self.requirements, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.cwlVersion is not None: - u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) - r["cwlVersion"] = u - if self.baseCommand is not None: - r["baseCommand"] = save( - self.baseCommand, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.arguments is not None: - r["arguments"] = save( - self.arguments, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.stdin is not None: - r["stdin"] = save( - self.stdin, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.stderr is not None: - r["stderr"] = save( - self.stderr, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.stdout is not None: - r["stdout"] = save( - self.stdout, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.successCodes is not None: - r["successCodes"] = save( - self.successCodes, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.temporaryFailCodes is not None: - r["temporaryFailCodes"] = save( - self.temporaryFailCodes, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.permanentFailCodes is not None: - r["permanentFailCodes"] = save( - 
self.permanentFailCodes, - top=False, - base_url=self.id, - relative_uris=relative_uris, + if self.envDef is not None: + r["envDef"] = save( + self.envDef, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -14666,77 +14518,160 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "id", - "inputs", - "outputs", - "requirements", - "hints", - "label", - "doc", - "cwlVersion", - "class", - "baseCommand", - "arguments", - "stdin", - "stderr", - "stdout", - "successCodes", - "temporaryFailCodes", - "permanentFailCodes", - ] - ) + attrs: ClassVar[Collection[str]] = frozenset(["class", "envDef"]) -class DockerRequirement(ProcessRequirement): +class ShellCommandRequirement(ProcessRequirement): """ - Indicates that a workflow component should be run in a - [Docker](http://docker.com) container, and specifies how to fetch or build - the image. + Modify the behavior of CommandLineTool to generate a single string containing a shell command line. Each item in the argument list must be joined into a string separated by single spaces and quoted to prevent interpretation by the shell, unless ``CommandLineBinding`` for that argument contains ``shellQuote: false``. If ``shellQuote: false`` is specified, the argument is joined into the command string without quoting, which allows the use of shell metacharacters such as ``|`` for pipes. 
+ + """ + + def __init__( + self, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_: Final[str] = "ShellCommandRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ShellCommandRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_ShellCommandRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["class"]) + - If a CommandLineTool lists `DockerRequirement` under - `hints` (or `requirements`), it may (or must) be run in the specified Docker - container. +class ResourceRequirement(ProcessRequirement): + """ + Specify basic hardware resource requirements. - The platform must first acquire or install the correct Docker image as - specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. + "min" is the minimum amount of a resource that must be reserved to schedule a job. If "min" cannot be satisfied, the job should not be run. - The platform must execute the tool in the container using `docker run` with - the appropriate Docker image and tool command line. + "max" is the maximum amount of a resource that the job shall be permitted to use. If a node has sufficient resources, multiple jobs may be scheduled on a single node provided each job's "max" resource requirements are met. 
If a job attempts to exceed its "max" resource allocation, an implementation may deny additional resources, which may result in job failure. - The workflow platform may provide input files and the designated output - directory through the use of volume bind mounts. The platform should rewrite - file paths in the input object to correspond to the Docker bind mounted - locations. That is, the platform should rewrite values in the parameter context - such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths - within the container. + If "min" is specified but "max" is not, then "max" == "min" If "max" is specified by "min" is not, then "min" == "max". - When running a tool contained in Docker, the workflow platform must not - assume anything about the contents of the Docker container, such as the - presence or absence of specific software, except to assume that the - generated command line represents a valid command within the runtime - environment of the container. + It is an error if max < min. - ## Interaction with other requirements + It is an error if the value of any of these fields is negative. - If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a - DockerRequirement, the environment variables must be provided to Docker - using `--env` or `--env-file` and interact with the container's preexisting - environment as defined by Docker. + If neither "min" nor "max" is specified for a resource, an implementation may provide a default. 
""" def __init__( self, - dockerPull: Optional[Any] = None, - dockerLoad: Optional[Any] = None, - dockerFile: Optional[Any] = None, - dockerImport: Optional[Any] = None, - dockerImageId: Optional[Any] = None, - dockerOutputDirectory: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + coresMin: Any | None = None, + coresMax: Any | None = None, + ramMin: Any | None = None, + ramMax: Any | None = None, + tmpdirMin: Any | None = None, + tmpdirMax: Any | None = None, + outdirMin: Any | None = None, + outdirMax: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -14746,24 +14681,28 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "DockerRequirement" - self.dockerPull = dockerPull - self.dockerLoad = dockerLoad - self.dockerFile = dockerFile - self.dockerImport = dockerImport - self.dockerImageId = dockerImageId - self.dockerOutputDirectory = dockerOutputDirectory + self.class_: Final[str] = "ResourceRequirement" + self.coresMin = coresMin + self.coresMax = coresMax + self.ramMin = ramMin + self.ramMax = ramMax + self.tmpdirMin = tmpdirMin + self.tmpdirMax = tmpdirMax + self.outdirMin = outdirMin + self.outdirMax = outdirMax def __eq__(self, other: Any) -> bool: - if isinstance(other, DockerRequirement): + if isinstance(other, ResourceRequirement): return bool( self.class_ == other.class_ - and self.dockerPull == other.dockerPull - and self.dockerLoad == other.dockerLoad - and self.dockerFile == other.dockerFile - and self.dockerImport == other.dockerImport - and self.dockerImageId == other.dockerImageId - and self.dockerOutputDirectory == other.dockerOutputDirectory + and self.coresMin == other.coresMin + and self.coresMax == other.coresMax + and self.ramMin == other.ramMin + and 
self.ramMax == other.ramMax + and self.tmpdirMin == other.tmpdirMin + and self.tmpdirMax == other.tmpdirMax + and self.outdirMin == other.outdirMin + and self.outdirMax == other.outdirMax ) return False @@ -14771,12 +14710,14 @@ def __hash__(self) -> int: return hash( ( self.class_, - self.dockerPull, - self.dockerLoad, - self.dockerFile, - self.dockerImport, - self.dockerImageId, - self.dockerOutputDirectory, + self.coresMin, + self.coresMax, + self.ramMin, + self.ramMax, + self.tmpdirMin, + self.tmpdirMax, + self.outdirMin, + self.outdirMax, ) ) @@ -14786,8 +14727,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "DockerRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -14798,33 +14739,34 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_DockerRequirement_classLoader_False_True_None_None, + uri_ResourceRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e - dockerPull = None - if "dockerPull" in _doc: + raise e + coresMin = None + if "coresMin" in _doc: try: - dockerPull = load_field( - _doc.get("dockerPull"), - union_of_None_type_or_strtype, + coresMin = _load_field( + _doc.get("coresMin"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("dockerPull") + lc=_doc.get("coresMin") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == 
"missing required field `dockerPull`": + if str(e) == "missing required field `coresMin`": _errors__.append( ValidationException( str(e), @@ -14832,13 +14774,13 @@ def fromDoc( ) ) else: - val = _doc.get("dockerPull") + val = _doc.get("coresMin") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `dockerPull` field is not valid because:", - SourceLine(_doc, "dockerPull", str), + "the `coresMin` field is not valid because:", + SourceLine(_doc, "coresMin", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14850,28 +14792,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `dockerPull` field is not valid because:", - SourceLine(_doc, "dockerPull", str), + "the `coresMin` field is not valid because:", + SourceLine(_doc, "coresMin", str), [e], - detailed_message=f"the `dockerPull` field with value `{val}` " + detailed_message=f"the `coresMin` field with value `{val}` " "is not valid because:", ) ) - dockerLoad = None - if "dockerLoad" in _doc: + coresMax = None + if "coresMax" in _doc: try: - dockerLoad = load_field( - _doc.get("dockerLoad"), - union_of_None_type_or_strtype, + coresMax = _load_field( + _doc.get("coresMax"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("dockerLoad") + lc=_doc.get("coresMax") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `dockerLoad`": + if str(e) == "missing required field `coresMax`": _errors__.append( ValidationException( str(e), @@ -14879,13 +14821,13 @@ def fromDoc( ) ) else: - val = _doc.get("dockerLoad") + val = _doc.get("coresMax") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `dockerLoad` field is not valid because:", - 
SourceLine(_doc, "dockerLoad", str), + "the `coresMax` field is not valid because:", + SourceLine(_doc, "coresMax", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14897,28 +14839,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `dockerLoad` field is not valid because:", - SourceLine(_doc, "dockerLoad", str), + "the `coresMax` field is not valid because:", + SourceLine(_doc, "coresMax", str), [e], - detailed_message=f"the `dockerLoad` field with value `{val}` " + detailed_message=f"the `coresMax` field with value `{val}` " "is not valid because:", ) ) - dockerFile = None - if "dockerFile" in _doc: + ramMin = None + if "ramMin" in _doc: try: - dockerFile = load_field( - _doc.get("dockerFile"), - union_of_None_type_or_strtype, + ramMin = _load_field( + _doc.get("ramMin"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("dockerFile") + lc=_doc.get("ramMin") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `dockerFile`": + if str(e) == "missing required field `ramMin`": _errors__.append( ValidationException( str(e), @@ -14926,13 +14868,13 @@ def fromDoc( ) ) else: - val = _doc.get("dockerFile") + val = _doc.get("ramMin") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `dockerFile` field is not valid because:", - SourceLine(_doc, "dockerFile", str), + "the `ramMin` field is not valid because:", + SourceLine(_doc, "ramMin", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14944,28 +14886,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `dockerFile` field is not valid because:", - SourceLine(_doc, "dockerFile", str), + "the `ramMin` field is not 
valid because:", + SourceLine(_doc, "ramMin", str), [e], - detailed_message=f"the `dockerFile` field with value `{val}` " + detailed_message=f"the `ramMin` field with value `{val}` " "is not valid because:", ) ) - dockerImport = None - if "dockerImport" in _doc: + ramMax = None + if "ramMax" in _doc: try: - dockerImport = load_field( - _doc.get("dockerImport"), - union_of_None_type_or_strtype, + ramMax = _load_field( + _doc.get("ramMax"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("dockerImport") + lc=_doc.get("ramMax") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `dockerImport`": + if str(e) == "missing required field `ramMax`": _errors__.append( ValidationException( str(e), @@ -14973,13 +14915,13 @@ def fromDoc( ) ) else: - val = _doc.get("dockerImport") + val = _doc.get("ramMax") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `dockerImport` field is not valid because:", - SourceLine(_doc, "dockerImport", str), + "the `ramMax` field is not valid because:", + SourceLine(_doc, "ramMax", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14991,28 +14933,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `dockerImport` field is not valid because:", - SourceLine(_doc, "dockerImport", str), + "the `ramMax` field is not valid because:", + SourceLine(_doc, "ramMax", str), [e], - detailed_message=f"the `dockerImport` field with value `{val}` " + detailed_message=f"the `ramMax` field with value `{val}` " "is not valid because:", ) ) - dockerImageId = None - if "dockerImageId" in _doc: + tmpdirMin = None + if "tmpdirMin" in _doc: try: - dockerImageId = load_field( - _doc.get("dockerImageId"), - union_of_None_type_or_strtype, + tmpdirMin = 
_load_field( + _doc.get("tmpdirMin"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("dockerImageId") + lc=_doc.get("tmpdirMin") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `dockerImageId`": + if str(e) == "missing required field `tmpdirMin`": _errors__.append( ValidationException( str(e), @@ -15020,13 +14962,13 @@ def fromDoc( ) ) else: - val = _doc.get("dockerImageId") + val = _doc.get("tmpdirMin") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `dockerImageId` field is not valid because:", - SourceLine(_doc, "dockerImageId", str), + "the `tmpdirMin` field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15038,28 +14980,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `dockerImageId` field is not valid because:", - SourceLine(_doc, "dockerImageId", str), + "the `tmpdirMin` field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), [e], - detailed_message=f"the `dockerImageId` field with value `{val}` " + detailed_message=f"the `tmpdirMin` field with value `{val}` " "is not valid because:", ) ) - dockerOutputDirectory = None - if "dockerOutputDirectory" in _doc: + tmpdirMax = None + if "tmpdirMax" in _doc: try: - dockerOutputDirectory = load_field( - _doc.get("dockerOutputDirectory"), - union_of_None_type_or_strtype, + tmpdirMax = _load_field( + _doc.get("tmpdirMax"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("dockerOutputDirectory") + lc=_doc.get("tmpdirMax") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field 
`dockerOutputDirectory`": + if str(e) == "missing required field `tmpdirMax`": _errors__.append( ValidationException( str(e), @@ -15067,13 +15009,13 @@ def fromDoc( ) ) else: - val = _doc.get("dockerOutputDirectory") + val = _doc.get("tmpdirMax") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `dockerOutputDirectory` field is not valid because:", - SourceLine(_doc, "dockerOutputDirectory", str), + "the `tmpdirMax` field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15085,14 +15027,108 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `dockerOutputDirectory` field is not valid because:", - SourceLine(_doc, "dockerOutputDirectory", str), + "the `tmpdirMax` field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), [e], - detailed_message=f"the `dockerOutputDirectory` field with value `{val}` " + detailed_message=f"the `tmpdirMax` field with value `{val}` " + "is not valid because:", + ) + ) + outdirMin = None + if "outdirMin" in _doc: + try: + outdirMin = _load_field( + _doc.get("outdirMin"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("outdirMin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outdirMin`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outdirMin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outdirMin` field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + 
detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outdirMin` field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [e], + detailed_message=f"the `outdirMin` field with value `{val}` " + "is not valid because:", + ) + ) + outdirMax = None + if "outdirMax" in _doc: + try: + outdirMax = _load_field( + _doc.get("outdirMax"), + union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("outdirMax") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outdirMax`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outdirMax") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outdirMax` field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outdirMax` field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [e], + detailed_message=f"the `outdirMax` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -15100,14 +15136,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: 
_errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( + "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( k ), SourceLine(_doc, k, str), @@ -15117,12 +15153,14 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - dockerPull=dockerPull, - dockerLoad=dockerLoad, - dockerFile=dockerFile, - dockerImport=dockerImport, - dockerImageId=dockerImageId, - dockerOutputDirectory=dockerOutputDirectory, + coresMin=coresMin, + coresMax=coresMax, + ramMin=ramMin, + ramMax=ramMax, + tmpdirMin=tmpdirMin, + tmpdirMax=tmpdirMax, + outdirMin=outdirMin, + outdirMax=outdirMax, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -15140,51 +15178,55 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.dockerPull is not None: - r["dockerPull"] = save( - self.dockerPull, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.coresMin is not None: + r["coresMin"] = save( + self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.dockerLoad is not None: - r["dockerLoad"] = save( - self.dockerLoad, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.coresMax is not None: + r["coresMax"] = save( + self.coresMax, top=False, base_url=base_url, 
relative_uris=relative_uris ) - if self.dockerFile is not None: - r["dockerFile"] = save( - self.dockerFile, + if self.ramMin is not None: + r["ramMin"] = save( + self.ramMin, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.ramMax is not None: + r["ramMax"] = save( + self.ramMax, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.tmpdirMin is not None: + r["tmpdirMin"] = save( + self.tmpdirMin, top=False, base_url=base_url, relative_uris=relative_uris, ) - if self.dockerImport is not None: - r["dockerImport"] = save( - self.dockerImport, + if self.tmpdirMax is not None: + r["tmpdirMax"] = save( + self.tmpdirMax, top=False, base_url=base_url, relative_uris=relative_uris, ) - if self.dockerImageId is not None: - r["dockerImageId"] = save( - self.dockerImageId, + if self.outdirMin is not None: + r["outdirMin"] = save( + self.outdirMin, top=False, base_url=base_url, relative_uris=relative_uris, ) - if self.dockerOutputDirectory is not None: - r["dockerOutputDirectory"] = save( - self.dockerOutputDirectory, + if self.outdirMax is not None: + r["outdirMax"] = save( + self.outdirMax, top=False, base_url=base_url, relative_uris=relative_uris, @@ -15198,31 +15240,36 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ "class", - "dockerPull", - "dockerLoad", - "dockerFile", - "dockerImport", - "dockerImageId", - "dockerOutputDirectory", - ] - ) - - -class SoftwareRequirement(ProcessRequirement): - """ - A list of software packages that should be configured in the environment of - the defined process. 
+ "coresMin", + "coresMax", + "ramMin", + "ramMax", + "tmpdirMin", + "tmpdirMax", + "outdirMin", + "outdirMax", + ] + ) - """ + +class ExpressionToolOutputParameter(OutputParameter): + id: str def __init__( self, - packages: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + outputBinding: Any | None = None, + format: Any | None = None, + type_: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -15232,16 +15279,42 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "SoftwareRequirement" - self.packages = packages + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.outputBinding = outputBinding + self.format = format + self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, SoftwareRequirement): - return bool(self.class_ == other.class_ and self.packages == other.packages) + if isinstance(other, ExpressionToolOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.outputBinding == other.outputBinding + and self.format == other.format + and self.type_ == other.type_ + ) return False def __hash__(self) -> int: - return hash((self.class_, self.packages)) + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.outputBinding, + self.format, + self.type_, + ) + ) @classmethod def fromDoc( @@ -15249,254 
+15322,226 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SoftwareRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_SoftwareRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("packages") is None: - raise ValidationException("missing required field `packages`", None, []) - - packages = load_field( - _doc.get("packages"), - idmap_packages_array_of_SoftwarePackageLoader, - baseuri, - loadingOptions, - lc=_doc.get("packages") - ) + id = None + if "id" in _doc: + try: + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `packages`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("packages") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `id`": _errors__.append( ValidationException( - "the `packages` field is not valid because:", - SourceLine(_doc, "packages", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - 
f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": _errors__.append( ValidationException( - "the `packages` field is not valid because:", - SourceLine(_doc, "packages", str), - [e], - detailed_message=f"the `packages` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] else: + val = _doc.get("label") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format( - k - ), - SourceLine(_doc, k, str), + str(e), + None ) ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + 
detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - packages=packages, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.packages is not None: - r["packages"] = save( - self.packages, top=False, base_url=base_url, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "packages"]) - - -class SoftwarePackage(Saveable): - def __init__( - self, - package: Any, - version: Optional[Any] = None, - specs: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: 
- self.loadingOptions = LoadingOptions() - self.package = package - self.version = version - self.specs = specs - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SoftwarePackage): - return bool( - self.package == other.package - and self.version == other.version - and self.specs == other.specs - ) - return False - - def __hash__(self) -> int: - return hash((self.package, self.version, self.specs)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SoftwarePackage": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("package") is None: - raise ValidationException("missing required field `package`", None, []) - - package = load_field( - _doc.get("package"), - strtype, - baseuri, - loadingOptions, - lc=_doc.get("package") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `package`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("package") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( - "the `package` field is not valid because:", - SourceLine(_doc, "package", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `package` field is not valid because:", - SourceLine(_doc, "package", str), - [e], - 
detailed_message=f"the `package` field with value `{val}` " - "is not valid because:", + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - version = None - if "version" in _doc: + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: try: - version = load_field( - _doc.get("version"), - union_of_None_type_or_array_of_strtype, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("version") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `version`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -15504,13 +15549,13 @@ def fromDoc( ) ) else: - val = _doc.get("version") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `version` field is not valid because:", - SourceLine(_doc, "version", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15522,28 +15567,28 @@ def fromDoc( else: _errors__.append( 
ValidationException( - "the `version` field is not valid because:", - SourceLine(_doc, "version", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `version` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - specs = None - if "specs" in _doc: + outputBinding = None + if "outputBinding" in _doc: try: - specs = load_field( - _doc.get("specs"), - uri_union_of_None_type_or_array_of_strtype_False_False_None_True, + outputBinding = _load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions, - lc=_doc.get("specs") + lc=_doc.get("outputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `specs`": + if str(e) == "missing required field `outputBinding`": _errors__.append( ValidationException( str(e), @@ -15551,13 +15596,13 @@ def fromDoc( ) ) else: - val = _doc.get("specs") + val = _doc.get("outputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `specs` field is not valid because:", - SourceLine(_doc, "specs", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15569,164 +15614,42 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `specs` field is not valid because:", - SourceLine(_doc, "specs", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [e], - detailed_message=f"the `specs` field with value `{val}` " + detailed_message=f"the `outputBinding` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not 
in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: + format = None + if "format" in _doc: + try: + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - package=package, - version=version, - specs=specs, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.package is not None: - r["package"] = save( - self.package, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.version is not None: - r["version"] = save( - self.version, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.specs is not None: - u = save_relative_uri(self.specs, base_url, False, None, relative_uris) - r["specs"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - 
attrs = frozenset(["package", "version", "specs"]) - - -class Dirent(Saveable): - """ - Define a file or subdirectory that must be placed in the designated output - directory prior to executing the command line tool. May be the result of - executing an expression, such as building a configuration file from a - template. - - """ - - def __init__( - self, - entry: Any, - entryname: Optional[Any] = None, - writable: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.entryname = entryname - self.entry = entry - self.writable = writable - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Dirent): - return bool( - self.entryname == other.entryname - and self.entry == other.entry - and self.writable == other.writable - ) - return False - - def __hash__(self) -> int: - return hash((self.entryname, self.entry, self.writable)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Dirent": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - entryname = None - if "entryname" in _doc: - try: - entryname = load_field( - _doc.get("entryname"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("entryname") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `entryname`": - _errors__.append( - ValidationException( - str(e), - None + str(e), + None ) ) else: - val = _doc.get("entryname") + val = _doc.get("format") if error_message != str(e): val_type = 
convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `entryname` field is not valid because:", - SourceLine(_doc, "entryname", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15738,76 +15661,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `entryname` field is not valid because:", - SourceLine(_doc, "entryname", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [e], - detailed_message=f"the `entryname` field with value `{val}` " + detailed_message=f"the `format` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("entry") is None: - raise ValidationException("missing required field `entry`", None, []) - - entry = load_field( - _doc.get("entry"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("entry") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `entry`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("entry") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `entry` field is not valid because:", - SourceLine(_doc, "entry", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `entry` field is not valid because:", - SourceLine(_doc, "entry", str), - [e], - detailed_message=f"the `entry` field with value `{val}` " - "is not valid because:", - ) - ) - 
writable = None - if "writable" in _doc: + type_ = None + if "type" in _doc: try: - writable = load_field( - _doc.get("writable"), - union_of_None_type_or_booltype, + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, - lc=_doc.get("writable") + lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `writable`": + if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), @@ -15815,13 +15690,13 @@ def fromDoc( ) ) else: - val = _doc.get("writable") + val = _doc.get("type") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `writable` field is not valid because:", - SourceLine(_doc, "writable", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15833,14 +15708,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `writable` field is not valid because:", - SourceLine(_doc, "writable", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [e], - detailed_message=f"the `writable` field with value `{val}` " + detailed_message=f"the `type` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -15848,14 +15723,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, 
"", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `type`".format( k ), SourceLine(_doc, k, str), @@ -15865,12 +15740,18 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - entryname=entryname, - entry=entry, - writable=writable, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + outputBinding=outputBinding, + format=format, + type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -15884,20 +15765,44 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.entryname is not None: - r["entryname"] = save( - self.entryname, + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, top=False, - base_url=base_url, + base_url=self.id, relative_uris=relative_uris, ) - if self.entry is not None: - r["entry"] = save( - self.entry, top=False, base_url=base_url, relative_uris=relative_uris + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, ) - if self.writable is not None: - r["writable"] = save( - self.writable, top=False, base_url=base_url, relative_uris=relative_uris + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) 
+ if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris ) # top refers to the directory level @@ -15908,19 +15813,41 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["entryname", "entry", "writable"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "outputBinding", + "format", + "type", + ] + ) -class InitialWorkDirRequirement(ProcessRequirement): +class ExpressionTool(Process): """ - Define a list of files and subdirectories that must be created by the workflow platform in the designated output directory prior to executing the command line tool. + Execute an expression as a Workflow step. 
+ """ + id: str + def __init__( self, - listing: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + inputs: Any, + outputs: Any, + expression: Any, + id: Any | None = None, + requirements: Any | None = None, + hints: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + cwlVersion: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -15930,16 +15857,48 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "InitialWorkDirRequirement" - self.listing = listing + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.label = label + self.doc = doc + self.cwlVersion = cwlVersion + self.class_: Final[str] = "ExpressionTool" + self.expression = expression def __eq__(self, other: Any) -> bool: - if isinstance(other, InitialWorkDirRequirement): - return bool(self.class_ == other.class_ and self.listing == other.listing) + if isinstance(other, ExpressionTool): + return bool( + self.id == other.id + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.label == other.label + and self.doc == other.doc + and self.cwlVersion == other.cwlVersion + and self.class_ == other.class_ + and self.expression == other.expression + ) return False def __hash__(self) -> int: - return hash((self.class_, self.listing)) + return hash( + ( + self.id, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.label, + self.doc, + self.cwlVersion, + self.class_, + self.expression, + ) + ) @classmethod def fromDoc( @@ -15947,46 +15906,103 @@ def fromDoc( doc: Any, baseuri: str, 
loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InitialWorkDirRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] + id = None + if "id" in _doc: + try: + id = _load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) try: if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_InitialWorkDirRequirement_classLoader_False_True_None_None, + uri_ExpressionTool_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, 
loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e + raise e try: - if _doc.get("listing") is None: - raise ValidationException("missing required field `listing`", None, []) + if _doc.get("inputs") is None: + raise ValidationException("missing required field `inputs`", None, []) - listing = load_field( - _doc.get("listing"), - union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader, + inputs = _load_field( + _doc.get("inputs"), + idmap_inputs_array_of_InputParameterLoader, baseuri, loadingOptions, - lc=_doc.get("listing") + lc=_doc.get("inputs") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `listing`": + if str(e) == "missing required field `inputs`": _errors__.append( ValidationException( str(e), @@ -15994,13 +16010,13 @@ def fromDoc( ) ) else: - val = _doc.get("listing") + val = _doc.get("inputs") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `listing` field is not valid because:", - SourceLine(_doc, "listing", str), + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -16012,157 +16028,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `listing` field is not valid because:", - SourceLine(_doc, "listing", str), + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), [e], - detailed_message=f"the `listing` field with value `{val}` " + detailed_message=f"the `inputs` field with 
value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - listing=listing, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.listing is not None: - r["listing"] = save( - self.listing, top=False, base_url=base_url, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "listing"]) - - -class EnvVarRequirement(ProcessRequirement): - """ - Define a list of environment variables which will be set in the - execution environment of the tool. See `EnvironmentDef` for details. 
- - """ - - def __init__( - self, - envDef: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "EnvVarRequirement" - self.envDef = envDef - - def __eq__(self, other: Any) -> bool: - if isinstance(other, EnvVarRequirement): - return bool(self.class_ == other.class_ and self.envDef == other.envDef) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.envDef)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "EnvVarRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_EnvVarRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e try: - if _doc.get("envDef") is None: - raise ValidationException("missing required field `envDef`", None, []) + if _doc.get("outputs") is None: + raise ValidationException("missing required field `outputs`", None, []) - envDef = load_field( - _doc.get("envDef"), - idmap_envDef_array_of_EnvironmentDefLoader, + outputs = _load_field( + _doc.get("outputs"), + idmap_outputs_array_of_ExpressionToolOutputParameterLoader, baseuri, loadingOptions, - lc=_doc.get("envDef") + lc=_doc.get("outputs") ) except ValidationException as e: error_message, to_print, 
verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `envDef`": + if str(e) == "missing required field `outputs`": _errors__.append( ValidationException( str(e), @@ -16170,13 +16058,13 @@ def fromDoc( ) ) else: - val = _doc.get("envDef") + val = _doc.get("outputs") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `envDef` field is not valid because:", - SourceLine(_doc, "envDef", str), + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -16188,334 +16076,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `envDef` field is not valid because:", - SourceLine(_doc, "envDef", str), + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), [e], - detailed_message=f"the `envDef` field with value `{val}` " + detailed_message=f"the `outputs` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - envDef=envDef, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, 
self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.envDef is not None: - r["envDef"] = save( - self.envDef, top=False, base_url=base_url, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "envDef"]) - - -class ShellCommandRequirement(ProcessRequirement): - """ - Modify the behavior of CommandLineTool to generate a single string - containing a shell command line. Each item in the argument list must be - joined into a string separated by single spaces and quoted to prevent - intepretation by the shell, unless `CommandLineBinding` for that argument - contains `shellQuote: false`. If `shellQuote: false` is specified, the - argument is joined into the command string without quoting, which allows - the use of shell metacharacters such as `|` for pipes. 
- - """ - - def __init__( - self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ShellCommandRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ShellCommandRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ShellCommandRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_ShellCommandRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - extension_fields=extension_fields, - 
loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class ResourceRequirement(ProcessRequirement): - """ - Specify basic hardware resource requirements. - - "min" is the minimum amount of a resource that must be reserved to schedule - a job. If "min" cannot be satisfied, the job should not be run. - - "max" is the maximum amount of a resource that the job shall be permitted - to use. If a node has sufficient resources, multiple jobs may be scheduled - on a single node provided each job's "max" resource requirements are - met. If a job attempts to exceed its "max" resource allocation, an - implementation may deny additional resources, which may result in job - failure. - - If "min" is specified but "max" is not, then "max" == "min" - If "max" is specified by "min" is not, then "min" == "max". - - It is an error if max < min. - - It is an error if the value of any of these fields is negative. - - If neither "min" nor "max" is specified for a resource, an implementation may provide a default. 
- - """ - - def __init__( - self, - coresMin: Optional[Any] = None, - coresMax: Optional[Any] = None, - ramMin: Optional[Any] = None, - ramMax: Optional[Any] = None, - tmpdirMin: Optional[Any] = None, - tmpdirMax: Optional[Any] = None, - outdirMin: Optional[Any] = None, - outdirMax: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ResourceRequirement" - self.coresMin = coresMin - self.coresMax = coresMax - self.ramMin = ramMin - self.ramMax = ramMax - self.tmpdirMin = tmpdirMin - self.tmpdirMax = tmpdirMax - self.outdirMin = outdirMin - self.outdirMax = outdirMax - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ResourceRequirement): - return bool( - self.class_ == other.class_ - and self.coresMin == other.coresMin - and self.coresMax == other.coresMax - and self.ramMin == other.ramMin - and self.ramMax == other.ramMax - and self.tmpdirMin == other.tmpdirMin - and self.tmpdirMax == other.tmpdirMax - and self.outdirMin == other.outdirMin - and self.outdirMax == other.outdirMax - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.class_, - self.coresMin, - self.coresMax, - self.ramMin, - self.ramMax, - self.tmpdirMin, - self.tmpdirMax, - self.outdirMin, - self.outdirMax, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ResourceRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - 
_doc.get("class"), - uri_ResourceRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - coresMin = None - if "coresMin" in _doc: + requirements = None + if "requirements" in _doc: try: - coresMin = load_field( - _doc.get("coresMin"), - union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + requirements = _load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, baseuri, loadingOptions, - lc=_doc.get("coresMin") + lc=_doc.get("requirements") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `coresMin`": + if str(e) == "missing required field `requirements`": _errors__.append( ValidationException( str(e), @@ -16523,13 +16105,13 @@ def fromDoc( ) ) else: - val = _doc.get("coresMin") + val = _doc.get("requirements") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `coresMin` field is not valid because:", - SourceLine(_doc, "coresMin", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -16541,28 +16123,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the 
`coresMin` field is not valid because:", - SourceLine(_doc, "coresMin", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [e], - detailed_message=f"the `coresMin` field with value `{val}` " + detailed_message=f"the `requirements` field with value `{val}` " "is not valid because:", ) ) - coresMax = None - if "coresMax" in _doc: + hints = None + if "hints" in _doc: try: - coresMax = load_field( - _doc.get("coresMax"), - union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + hints = _load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, baseuri, loadingOptions, - lc=_doc.get("coresMax") + lc=_doc.get("hints") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `coresMax`": + if str(e) == "missing required field `hints`": _errors__.append( ValidationException( str(e), @@ -16570,13 +16152,13 @@ def fromDoc( ) ) else: - val = _doc.get("coresMax") + val = _doc.get("hints") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `coresMax` field is not valid because:", - SourceLine(_doc, "coresMax", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -16588,28 +16170,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `coresMax` field is not valid 
because:", - SourceLine(_doc, "coresMax", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [e], - detailed_message=f"the `coresMax` field with value `{val}` " + detailed_message=f"the `hints` field with value `{val}` " "is not valid because:", ) ) - ramMin = None - if "ramMin" in _doc: + label = None + if "label" in _doc: try: - ramMin = load_field( - _doc.get("ramMin"), - union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("ramMin") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `ramMin`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -16617,13 +16199,13 @@ def fromDoc( ) ) else: - val = _doc.get("ramMin") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `ramMin` field is not valid because:", - SourceLine(_doc, "ramMin", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -16635,28 +16217,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `ramMin` field is not valid because:", - SourceLine(_doc, "ramMin", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `ramMin` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - ramMax = None - if "ramMax" in _doc: + doc = None + if "doc" in _doc: try: - ramMax = load_field( - _doc.get("ramMax"), - union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + doc = _load_field( + _doc.get("doc"), + 
union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("ramMax") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `ramMax`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -16664,13 +16246,13 @@ def fromDoc( ) ) else: - val = _doc.get("ramMax") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `ramMax` field is not valid because:", - SourceLine(_doc, "ramMax", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -16682,28 +16264,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `ramMax` field is not valid because:", - SourceLine(_doc, "ramMax", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `ramMax` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - tmpdirMin = None - if "tmpdirMin" in _doc: + cwlVersion = None + if "cwlVersion" in _doc: try: - tmpdirMin = load_field( - _doc.get("tmpdirMin"), - union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, + cwlVersion = _load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("tmpdirMin") + lc=_doc.get("cwlVersion") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `tmpdirMin`": + if str(e) == "missing required field `cwlVersion`": _errors__.append( ValidationException( str(e), @@ -16711,13 +16293,13 @@ def fromDoc( ) ) else: - val = _doc.get("tmpdirMin") + val = 
_doc.get("cwlVersion") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `tmpdirMin` field is not valid because:", - SourceLine(_doc, "tmpdirMin", str), + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -16729,190 +16311,99 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `tmpdirMin` field is not valid because:", - SourceLine(_doc, "tmpdirMin", str), + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), [e], - detailed_message=f"the `tmpdirMin` field with value `{val}` " + detailed_message=f"the `cwlVersion` field with value `{val}` " "is not valid because:", ) ) - tmpdirMax = None - if "tmpdirMax" in _doc: - try: - tmpdirMax = load_field( - _doc.get("tmpdirMax"), - union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("tmpdirMax") - ) + try: + if _doc.get("expression") is None: + raise ValidationException("missing required field `expression`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + expression = _load_field( + _doc.get("expression"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("expression") + ) - if str(e) == "missing required field `tmpdirMax`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `expression`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("expression") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `expression` field is not valid because:", + SourceLine(_doc, 
"expression", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("tmpdirMax") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `tmpdirMax` field is not valid because:", - SourceLine(_doc, "tmpdirMax", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `tmpdirMax` field is not valid because:", - SourceLine(_doc, "tmpdirMax", str), - [e], - detailed_message=f"the `tmpdirMax` field with value `{val}` " - "is not valid because:", - ) - ) - outdirMin = None - if "outdirMin" in _doc: - try: - outdirMin = load_field( - _doc.get("outdirMin"), - union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("outdirMin") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outdirMin`": _errors__.append( ValidationException( - str(e), - None + "the `expression` field is not valid because:", + SourceLine(_doc, "expression", str), + [e], + detailed_message=f"the `expression` field with value `{val}` " + "is not valid because:", ) ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = 
_doc[k] else: - val = _doc.get("outdirMin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outdirMin` field is not valid because:", - SourceLine(_doc, "outdirMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outdirMin` field is not valid because:", - SourceLine(_doc, "outdirMin", str), - [e], - detailed_message=f"the `outdirMin` field with value `{val}` " - "is not valid because:", - ) - ) - outdirMax = None - if "outdirMax" in _doc: - try: - outdirMax = load_field( - _doc.get("outdirMax"), - union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("outdirMax") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outdirMax`": _errors__.append( ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outdirMax") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outdirMax` field is not valid because:", - SourceLine(_doc, "outdirMax", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outdirMax` field is not valid because:", - SourceLine(_doc, "outdirMax", str), - [e], - detailed_message=f"the `outdirMax` field with value `{val}` " - "is not valid because:", - ) - ) - 
extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( - k - ), - SourceLine(_doc, k, str), + "invalid field `{}`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `expression`".format( + k + ), + SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - coresMin=coresMin, - coresMax=coresMax, - ramMin=ramMin, - ramMax=ramMax, - tmpdirMin=tmpdirMin, - tmpdirMax=tmpdirMax, - outdirMin=outdirMin, - outdirMax=outdirMax, + id=id, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + label=label, + doc=doc, + cwlVersion=cwlVersion, + expression=expression, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -16926,56 +16417,54 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) + u = 
save_relative_uri(uri, self.id, False, None, relative_uris) r["class"] = u - if self.coresMin is not None: - r["coresMin"] = save( - self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.coresMax is not None: - r["coresMax"] = save( - self.coresMax, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.ramMin is not None: - r["ramMin"] = save( - self.ramMin, top=False, base_url=base_url, relative_uris=relative_uris + if self.inputs is not None: + r["inputs"] = save( + self.inputs, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.ramMax is not None: - r["ramMax"] = save( - self.ramMax, top=False, base_url=base_url, relative_uris=relative_uris + if self.outputs is not None: + r["outputs"] = save( + self.outputs, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.tmpdirMin is not None: - r["tmpdirMin"] = save( - self.tmpdirMin, + if self.requirements is not None: + r["requirements"] = save( + self.requirements, top=False, - base_url=base_url, + base_url=self.id, relative_uris=relative_uris, ) - if self.tmpdirMax is not None: - r["tmpdirMax"] = save( - self.tmpdirMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.outdirMin is not None: - r["outdirMin"] = save( - self.outdirMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.outdirMax is not None: - r["outdirMax"] = save( - self.outdirMax, + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.cwlVersion is not None: + u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) + r["cwlVersion"] = u + if self.expression is not None: + 
r["expression"] = save( + self.expression, top=False, - base_url=base_url, + base_url=self.id, relative_uris=relative_uris, ) @@ -16987,36 +16476,44 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ + "id", + "inputs", + "outputs", + "requirements", + "hints", + "label", + "doc", + "cwlVersion", "class", - "coresMin", - "coresMax", - "ramMin", - "ramMax", - "tmpdirMin", - "tmpdirMax", - "outdirMin", - "outdirMax", + "expression", ] ) -class ExpressionToolOutputParameter(OutputParameter): +class WorkflowOutputParameter(OutputParameter): + """ + Describe an output parameter of a workflow. The parameter must be connected to one or more parameters defined in the workflow that will provide the value of the output parameter. + + """ + id: str def __init__( self, id: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - outputBinding: Optional[Any] = None, - format: Optional[Any] = None, - type_: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + outputBinding: Any | None = None, + format: Any | None = None, + outputSource: Any | None = None, + linkMerge: Any | None = None, + type_: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -17033,10 +16530,12 @@ def __init__( self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) self.outputBinding = outputBinding self.format = format + self.outputSource = outputSource + self.linkMerge = linkMerge self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, 
ExpressionToolOutputParameter): + if isinstance(other, WorkflowOutputParameter): return bool( self.label == other.label and self.secondaryFiles == other.secondaryFiles @@ -17045,6 +16544,8 @@ def __eq__(self, other: Any) -> bool: and self.id == other.id and self.outputBinding == other.outputBinding and self.format == other.format + and self.outputSource == other.outputSource + and self.linkMerge == other.linkMerge and self.type_ == other.type_ ) return False @@ -17059,6 +16560,8 @@ def __hash__(self) -> int: self.id, self.outputBinding, self.format, + self.outputSource, + self.linkMerge, self.type_, ) ) @@ -17069,8 +16572,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ExpressionToolOutputParameter": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -17080,7 +16583,7 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), uri_strtype_True_False_None_None, baseuri, @@ -17136,7 +16639,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -17183,7 +16686,7 @@ def fromDoc( secondaryFiles = None if "secondaryFiles" in _doc: try: - secondaryFiles = load_field( + secondaryFiles = _load_field( _doc.get("secondaryFiles"), union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, baseuri, @@ -17230,7 +16733,7 @@ def fromDoc( streamable = None if "streamable" in _doc: try: - streamable = load_field( + streamable = _load_field( _doc.get("streamable"), union_of_None_type_or_booltype, baseuri, @@ -17277,7 +16780,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -17324,7 +16827,7 @@ def fromDoc( outputBinding = None if "outputBinding" in _doc: try: - 
outputBinding = load_field( + outputBinding = _load_field( _doc.get("outputBinding"), union_of_None_type_or_CommandOutputBindingLoader, baseuri, @@ -17371,7 +16874,7 @@ def fromDoc( format = None if "format" in _doc: try: - format = load_field( + format = _load_field( _doc.get("format"), uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, baseuri, @@ -17415,21 +16918,21 @@ def fromDoc( "is not valid because:", ) ) - type_ = None - if "type" in _doc: + outputSource = None + if "outputSource" in _doc: try: - type_ = load_field( - _doc.get("type"), - typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + outputSource = _load_field( + _doc.get("outputSource"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("outputSource") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `outputSource`": _errors__.append( ValidationException( str(e), @@ -17437,13 +16940,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("outputSource") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `outputSource` field is not valid because:", + SourceLine(_doc, "outputSource", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -17455,4092 +16958,108 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the 
`outputSource` field is not valid because:", + SourceLine(_doc, "outputSource", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `outputSource` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + linkMerge = None + if "linkMerge" in _doc: + try: + linkMerge = _load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + lc=_doc.get("linkMerge") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `linkMerge`": _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ValidationException( + str(e), + None + ) ) - extension_fields[ex] = _doc[k] else: + val = _doc.get("linkMerge") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + detailed_message=f"the `linkMerge` field with value `{val}` " + "is not valid because:", + ) + ) + type_ = None + if "type" in _doc: + try: + type_ = _load_field( + _doc.get("type"), + 
typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - outputBinding=outputBinding, - format=format, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if 
self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputBinding is not None: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.id, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "outputBinding", - "format", - "type", - ] - ) - - -class ExpressionTool(Process): - """ - Execute an expression as a Workflow step. - - """ - - id: str - - def __init__( - self, - inputs: Any, - outputs: Any, - expression: Any, - id: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.label = label - self.doc = doc - self.cwlVersion = cwlVersion - self.class_ = "ExpressionTool" - self.expression = expression - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ExpressionTool): - return 
bool( - self.id == other.id - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.label == other.label - and self.doc == other.doc - and self.cwlVersion == other.cwlVersion - and self.class_ == other.class_ - and self.expression == other.expression - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.label, - self.doc, - self.cwlVersion, - self.class_, - self.expression, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ExpressionTool": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None + str(e), + None ) ) else: - val = _doc.get("id") + val = _doc.get("type") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " 
- "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = cast(str, id) - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_ExpressionTool_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("inputs") is None: - raise ValidationException("missing required field `inputs`", None, []) - - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_InputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - detailed_message=f"the `inputs` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("outputs") is None: - raise ValidationException("missing 
required field `outputs`", None, []) - - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_ExpressionToolOutputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - detailed_message=f"the `outputs` field with value `{val}` " - "is not valid because:", - ) - ) - requirements = None - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, - baseuri, - loadingOptions, - 
lc=_doc.get("requirements") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `requirements`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("requirements") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - detailed_message=f"the `requirements` field with value `{val}` " - "is not valid because:", - ) - ) - hints = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type, - baseuri, - loadingOptions, - lc=_doc.get("hints") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required 
field `hints`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("hints") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - detailed_message=f"the `hints` field with value `{val}` " - "is not valid because:", - ) - ) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - 
try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - cwlVersion = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("cwlVersion") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `cwlVersion`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cwlVersion") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} 
for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - detailed_message=f"the `cwlVersion` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("expression") is None: - raise ValidationException("missing required field `expression`", None, []) - - expression = load_field( - _doc.get("expression"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("expression") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `expression`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("expression") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `expression` field is not valid because:", - SourceLine(_doc, "expression", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `expression` field is not valid because:", - SourceLine(_doc, "expression", str), - [e], - detailed_message=f"the `expression` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, 
`inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `expression`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - id=id, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - label=label, - doc=doc, - cwlVersion=cwlVersion, - expression=expression, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, self.id, False, None, relative_uris) - r["class"] = u - if self.inputs is not None: - r["inputs"] = save( - self.inputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputs is not None: - r["outputs"] = save( - self.outputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.requirements is not None: - r["requirements"] = save( - self.requirements, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.doc is 
not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.cwlVersion is not None: - u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) - r["cwlVersion"] = u - if self.expression is not None: - r["expression"] = save( - self.expression, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "inputs", - "outputs", - "requirements", - "hints", - "label", - "doc", - "cwlVersion", - "class", - "expression", - ] - ) - - -class WorkflowOutputParameter(OutputParameter): - """ - Describe an output parameter of a workflow. The parameter must be - connected to one or more parameters defined in the workflow that will - provide the value of the output parameter. 
- - """ - - id: str - - def __init__( - self, - id: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - outputBinding: Optional[Any] = None, - format: Optional[Any] = None, - outputSource: Optional[Any] = None, - linkMerge: Optional[Any] = None, - type_: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.outputBinding = outputBinding - self.format = format - self.outputSource = outputSource - self.linkMerge = linkMerge - self.type_ = type_ - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowOutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.outputBinding == other.outputBinding - and self.format == other.format - and self.outputSource == other.outputSource - and self.linkMerge == other.linkMerge - and self.type_ == other.type_ - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.outputBinding, - self.format, - self.outputSource, - self.linkMerge, - self.type_, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowOutputParameter": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = 
doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - 
SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - secondaryFiles = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("secondaryFiles") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `secondaryFiles`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("secondaryFiles") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " - "is not valid because:", - ) - ) - streamable = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - 
baseuri, - loadingOptions, - lc=_doc.get("streamable") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `streamable`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("streamable") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - detailed_message=f"the `streamable` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - 
ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - outputBinding = None - if "outputBinding" in _doc: - try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputBinding") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), - [e], - detailed_message=f"the `outputBinding` field with value `{val}` " - "is not valid because:", - ) - ) - format = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, - baseuri, - loadingOptions, - lc=_doc.get("format") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `format`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("format") - if error_message != str(e): - val_type = 
convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [e], - detailed_message=f"the `format` field with value `{val}` " - "is not valid because:", - ) - ) - outputSource = None - if "outputSource" in _doc: - try: - outputSource = load_field( - _doc.get("outputSource"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None, - baseuri, - loadingOptions, - lc=_doc.get("outputSource") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputSource`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputSource") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputSource` field is not valid because:", - SourceLine(_doc, "outputSource", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputSource` field is not valid because:", - SourceLine(_doc, "outputSource", str), - [e], - detailed_message=f"the `outputSource` field with value `{val}` " - "is not valid because:", - ) - ) - linkMerge = None - if "linkMerge" in _doc: - try: - linkMerge 
= load_field( - _doc.get("linkMerge"), - union_of_None_type_or_LinkMergeMethodLoader, - baseuri, - loadingOptions, - lc=_doc.get("linkMerge") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `linkMerge`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("linkMerge") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `linkMerge` field is not valid because:", - SourceLine(_doc, "linkMerge", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `linkMerge` field is not valid because:", - SourceLine(_doc, "linkMerge", str), - [e], - detailed_message=f"the `linkMerge` field with value `{val}` " - "is not valid because:", - ) - ) - type_ = None - if "type" in _doc: - try: - type_ = load_field( - _doc.get("type"), - typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - 
[ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `outputSource`, `linkMerge`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - outputBinding=outputBinding, - format=format, - outputSource=outputSource, - linkMerge=linkMerge, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - 
if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputBinding is not None: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u - if self.outputSource is not None: - u = save_relative_uri(self.outputSource, self.id, False, 1, relative_uris) - r["outputSource"] = u - if self.linkMerge is not None: - r["linkMerge"] = save( - self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.id, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "outputBinding", - "format", - "outputSource", - "linkMerge", - "type", - ] - ) - - -class Sink(Saveable): - pass - - -class WorkflowStepInput(Sink): - """ - The input of a workflow step connects an upstream parameter (from the - workflow inputs, or the outputs of other workflows steps) with the input - parameters of the underlying step. 
- - ## Input object - - A WorkflowStepInput object must contain an `id` field in the form - `#fieldname` or `#prefix/fieldname`. When the `id` field contains a slash - `/` the field name consists of the characters following the final slash - (the prefix portion may contain one or more slashes to indicate scope). - This defines a field of the workflow step input object with the value of - the `source` parameter(s). - - ## Merging - - To merge multiple inbound data links, - [MultipleInputFeatureRequirement](#MultipleInputFeatureRequirement) must be specified - in the workflow or workflow step requirements. - - If the sink parameter is an array, or named in a [workflow - scatter](#WorkflowStep) operation, there may be multiple inbound data links - listed in the `source` field. The values from the input links are merged - depending on the method specified in the `linkMerge` field. If not - specified, the default method is "merge_nested". - - * **merge_nested** - - The input must be an array consisting of exactly one entry for each - input link. If "merge_nested" is specified with a single link, the value - from the link must be wrapped in a single-item list. - - * **merge_flattened** - - 1. The source and sink parameters must be compatible types, or the source - type must be compatible with single element from the "items" type of - the destination array parameter. - 2. Source parameters which are arrays are concatenated. - Source parameters which are single element types are appended as - single elements. 
- - """ - - id: str - - def __init__( - self, - id: Any, - source: Optional[Any] = None, - linkMerge: Optional[Any] = None, - default: Optional[Any] = None, - valueFrom: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.source = source - self.linkMerge = linkMerge - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.default = default - self.valueFrom = valueFrom - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStepInput): - return bool( - self.source == other.source - and self.linkMerge == other.linkMerge - and self.id == other.id - and self.default == other.default - and self.valueFrom == other.valueFrom - ) - return False - - def __hash__(self) -> int: - return hash( - (self.source, self.linkMerge, self.id, self.default, self.valueFrom) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowStepInput": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - 
SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - source = None - if "source" in _doc: - try: - source = load_field( - _doc.get("source"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None, - baseuri, - loadingOptions, - lc=_doc.get("source") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `source`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("source") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `source` field is not valid because:", - SourceLine(_doc, "source", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `source` field is not valid because:", - SourceLine(_doc, "source", str), - [e], - detailed_message=f"the `source` field with value `{val}` " - "is not valid because:", - ) - ) - linkMerge = None - if "linkMerge" in _doc: - try: - linkMerge = 
load_field( - _doc.get("linkMerge"), - union_of_None_type_or_LinkMergeMethodLoader, - baseuri, - loadingOptions, - lc=_doc.get("linkMerge") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `linkMerge`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("linkMerge") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `linkMerge` field is not valid because:", - SourceLine(_doc, "linkMerge", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `linkMerge` field is not valid because:", - SourceLine(_doc, "linkMerge", str), - [e], - detailed_message=f"the `linkMerge` field with value `{val}` " - "is not valid because:", - ) - ) - default = None - if "default" in _doc: - try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_CWLObjectTypeLoader, - baseuri, - loadingOptions, - lc=_doc.get("default") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `default`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("default") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid 
{to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), - [e], - detailed_message=f"the `default` field with value `{val}` " - "is not valid because:", - ) - ) - valueFrom = None - if "valueFrom" in _doc: - try: - valueFrom = load_field( - _doc.get("valueFrom"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("valueFrom") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `valueFrom`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("valueFrom") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `valueFrom` field is not valid because:", - SourceLine(_doc, "valueFrom", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `valueFrom` field is not valid because:", - SourceLine(_doc, "valueFrom", str), - [e], - detailed_message=f"the `valueFrom` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `source`, `linkMerge`, `id`, `default`, `valueFrom`".format( - k - ), - 
SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - source=source, - linkMerge=linkMerge, - id=id, - default=default, - valueFrom=valueFrom, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.source is not None: - u = save_relative_uri(self.source, self.id, False, 2, relative_uris) - r["source"] = u - if self.linkMerge is not None: - r["linkMerge"] = save( - self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.default is not None: - r["default"] = save( - self.default, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.valueFrom is not None: - r["valueFrom"] = save( - self.valueFrom, top=False, base_url=self.id, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["source", "linkMerge", "id", "default", "valueFrom"]) - - -class WorkflowStepOutput(Saveable): - """ - Associate an output parameter of the underlying process with a workflow - parameter. The workflow parameter (given in the `id` field) be may be used - as a `source` to connect with input parameters of other workflow steps, or - with an output parameter of the process. 
- - """ - - id: str - - def __init__( - self, - id: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStepOutput): - return bool(self.id == other.id) - return False - - def __hash__(self) -> int: - return hash((self.id)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowStepOutput": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` 
field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`".format(k), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - id=id, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["id"]) - - -class WorkflowStep(Saveable): - """ - A workflow step is an executable element of a workflow. 
It specifies the - underlying process implementation (such as `CommandLineTool` or another - `Workflow`) in the `run` field and connects the input and output parameters - of the underlying process to workflow parameters. - - # Scatter/gather - - To use scatter/gather, - [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified - in the workflow or workflow step requirements. - - A "scatter" operation specifies that the associated workflow step or - subworkflow should execute separately over a list of input elements. Each - job making up a scatter operation is independent and may be executed - concurrently. - - The `scatter` field specifies one or more input parameters which will be - scattered. An input parameter may be listed more than once. The declared - type of each input parameter is implicitly becomes an array of items of the - input parameter type. If a parameter is listed more than once, it becomes - a nested array. As a result, upstream parameters which are connected to - scattered parameters must be arrays. - - All output parameter types are also implicitly wrapped in arrays. Each job - in the scatter results in an entry in the output array. - - If any scattered parameter runtime value is an empty array, all outputs are - set to empty arrays and no work is done for the step, according to - applicable scattering rules. - - If `scatter` declares more than one input parameter, `scatterMethod` - describes how to decompose the input into a discrete set of jobs. - - * **dotproduct** specifies that each of the input arrays are aligned and one - element taken from each array to construct each job. It is an error - if all input arrays are not the same length. - - * **nested_crossproduct** specifies the Cartesian product of the inputs, - producing a job for every combination of the scattered inputs. The - output must be nested arrays for each level of scattering, in the - order that the input arrays are listed in the `scatter` field. 
- - * **flat_crossproduct** specifies the Cartesian product of the inputs, - producing a job for every combination of the scattered inputs. The - output arrays must be flattened to a single level, but otherwise listed in the - order that the input arrays are listed in the `scatter` field. - - # Subworkflows - - To specify a nested workflow as part of a workflow step, - [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be - specified in the workflow or workflow step requirements. - - It is a fatal error if a workflow directly or indirectly invokes itself as - a subworkflow (recursive workflows are not allowed). - - """ - - id: str - - def __init__( - self, - id: Any, - in_: Any, - out: Any, - run: Any, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - scatter: Optional[Any] = None, - scatterMethod: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.in_ = in_ - self.out = out - self.requirements = requirements - self.hints = hints - self.label = label - self.doc = doc - self.run = run - self.scatter = scatter - self.scatterMethod = scatterMethod - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStep): - return bool( - self.id == other.id - and self.in_ == other.in_ - and self.out == other.out - and self.requirements == other.requirements - and self.hints == other.hints - and self.label == other.label - and self.doc == other.doc - and self.run == other.run - and self.scatter == other.scatter - and self.scatterMethod == other.scatterMethod - ) - return False - - def 
__hash__(self) -> int: - return hash( - ( - self.id, - self.in_, - self.out, - self.requirements, - self.hints, - self.label, - self.doc, - self.run, - self.scatter, - self.scatterMethod, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowStep": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - try: - if _doc.get("in") is None: - raise ValidationException("missing required field `in`", None, []) - - in_ = load_field( - _doc.get("in"), - idmap_in__array_of_WorkflowStepInputLoader, 
- baseuri, - loadingOptions, - lc=_doc.get("in") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `in`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("in") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `in` field is not valid because:", - SourceLine(_doc, "in", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `in` field is not valid because:", - SourceLine(_doc, "in", str), - [e], - detailed_message=f"the `in` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("out") is None: - raise ValidationException("missing required field `out`", None, []) - - out = load_field( - _doc.get("out"), - uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("out") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `out`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("out") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `out` field is not valid because:", - SourceLine(_doc, "out", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} 
{error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `out` field is not valid because:", - SourceLine(_doc, "out", str), - [e], - detailed_message=f"the `out` field with value `{val}` " - "is not valid because:", - ) - ) - requirements = None - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, - baseuri, - loadingOptions, - lc=_doc.get("requirements") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `requirements`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("requirements") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", 
str), - [e], - detailed_message=f"the `requirements` field with value `{val}` " - "is not valid because:", - ) - ) - hints = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_Any_type, - baseuri, - loadingOptions, - lc=_doc.get("hints") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `hints`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("hints") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - detailed_message=f"the `hints` field with value `{val}` " - "is not valid because:", - ) - ) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field 
" - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("run") is None: - raise ValidationException("missing required field `run`", None, []) - - run = load_field( - _doc.get("run"), - uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_False_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("run") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing 
required field `run`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("run") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `run` field is not valid because:", - SourceLine(_doc, "run", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `run` field is not valid because:", - SourceLine(_doc, "run", str), - [e], - detailed_message=f"the `run` field with value `{val}` " - "is not valid because:", - ) - ) - scatter = None - if "scatter" in _doc: - try: - scatter = load_field( - _doc.get("scatter"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None, - baseuri, - loadingOptions, - lc=_doc.get("scatter") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `scatter`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("scatter") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `scatter` field is not valid because:", - SourceLine(_doc, "scatter", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `scatter` field is not valid because:", - SourceLine(_doc, "scatter", str), - [e], - detailed_message=f"the `scatter` field with value `{val}` " - "is 
not valid because:", - ) - ) - scatterMethod = None - if "scatterMethod" in _doc: - try: - scatterMethod = load_field( - _doc.get("scatterMethod"), - uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("scatterMethod") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `scatterMethod`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("scatterMethod") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `scatterMethod` field is not valid because:", - SourceLine(_doc, "scatterMethod", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `scatterMethod` field is not valid because:", - SourceLine(_doc, "scatterMethod", str), - [e], - detailed_message=f"the `scatterMethod` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `in`, `out`, `requirements`, `hints`, `label`, `doc`, `run`, `scatter`, `scatterMethod`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - id=id, - in_=in_, - out=out, - 
requirements=requirements, - hints=hints, - label=label, - doc=doc, - run=run, - scatter=scatter, - scatterMethod=scatterMethod, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.in_ is not None: - r["in"] = save( - self.in_, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.out is not None: - u = save_relative_uri(self.out, self.id, True, None, relative_uris) - r["out"] = u - if self.requirements is not None: - r["requirements"] = save( - self.requirements, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.run is not None: - u = save_relative_uri(self.run, self.id, False, None, relative_uris) - r["run"] = u - if self.scatter is not None: - u = save_relative_uri(self.scatter, self.id, False, 0, relative_uris) - r["scatter"] = u - if self.scatterMethod is not None: - u = save_relative_uri( - self.scatterMethod, self.id, False, None, relative_uris - ) - r["scatterMethod"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = 
self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "in", - "out", - "requirements", - "hints", - "label", - "doc", - "run", - "scatter", - "scatterMethod", - ] - ) - - -class Workflow(Process): - """ - A workflow describes a set of **steps** and the **dependencies** between - those steps. When a step produces output that will be consumed by a - second step, the first step is a dependency of the second step. - - When there is a dependency, the workflow engine must execute the preceding - step and wait for it to successfully produce output before executing the - dependent step. If two steps are defined in the workflow graph that - are not directly or indirectly dependent, these steps are **independent**, - and may execute in any order or execute concurrently. A workflow is - complete when all steps have been executed. - - Dependencies between parameters are expressed using the `source` field on - [workflow step input parameters](#WorkflowStepInput) and [workflow output - parameters](#WorkflowOutputParameter). - - The `source` field expresses the dependency of one parameter on another - such that when a value is associated with the parameter specified by - `source`, that value is propagated to the destination parameter. When all - data links inbound to a given step are fufilled, the step is ready to - execute. - - ## Workflow success and failure - - A completed step must result in one of `success`, `temporaryFailure` or - `permanentFailure` states. An implementation may choose to retry a step - execution which resulted in `temporaryFailure`. An implementation may - choose to either continue running other steps of a workflow, or terminate - immediately upon `permanentFailure`. - - * If any step of a workflow execution results in `permanentFailure`, then - the workflow status is `permanentFailure`. 
- - * If one or more steps result in `temporaryFailure` and all other steps - complete `success` or are not executed, then the workflow status is - `temporaryFailure`. - - * If all workflow steps are executed and complete with `success`, then the - workflow status is `success`. - - # Extensions - - [ScatterFeatureRequirement](#ScatterFeatureRequirement) and - [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are - available as standard [extensions](#Extensions_and_Metadata) to core - workflow semantics. - - """ - - id: str - - def __init__( - self, - inputs: Any, - outputs: Any, - steps: Any, - id: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.label = label - self.doc = doc - self.cwlVersion = cwlVersion - self.class_ = "Workflow" - self.steps = steps - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Workflow): - return bool( - self.id == other.id - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.label == other.label - and self.doc == other.doc - and self.cwlVersion == other.cwlVersion - and self.class_ == other.class_ - and self.steps == other.steps - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.inputs, - self.outputs, - self.requirements, - 
self.hints, - self.label, - self.doc, - self.cwlVersion, - self.class_, - self.steps, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Workflow": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = cast(str, id) - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_Workflow_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in 
(cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("inputs") is None: - raise ValidationException("missing required field `inputs`", None, []) - - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_InputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - detailed_message=f"the `inputs` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("outputs") is None: - raise ValidationException("missing required field `outputs`", None, []) - - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_WorkflowOutputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputs") - if error_message != str(e): - val_type = 
convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - detailed_message=f"the `outputs` field with value `{val}` " - "is not valid because:", - ) - ) - requirements = None - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, - baseuri, - loadingOptions, - lc=_doc.get("requirements") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `requirements`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("requirements") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, 
"requirements", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - detailed_message=f"the `requirements` field with value `{val}` " - "is not valid because:", - ) - ) - hints = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type, - baseuri, - loadingOptions, - lc=_doc.get("hints") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `hints`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("hints") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a 
{val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - detailed_message=f"the `hints` field with value `{val}` " - "is not valid because:", - ) - ) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - 
ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - cwlVersion = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("cwlVersion") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `cwlVersion`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cwlVersion") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - detailed_message=f"the `cwlVersion` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("steps") is None: - raise ValidationException("missing required field `steps`", None, []) - - steps = load_field( - _doc.get("steps"), - 
idmap_steps_union_of_array_of_WorkflowStepLoader, - baseuri, - loadingOptions, - lc=_doc.get("steps") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `steps`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("steps") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `steps` field is not valid because:", - SourceLine(_doc, "steps", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `steps` field is not valid because:", - SourceLine(_doc, "steps", str), - [e], - detailed_message=f"the `steps` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `steps`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - id=id, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - label=label, - doc=doc, - cwlVersion=cwlVersion, - steps=steps, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = 
(_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, self.id, False, None, relative_uris) - r["class"] = u - if self.inputs is not None: - r["inputs"] = save( - self.inputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputs is not None: - r["outputs"] = save( - self.outputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.requirements is not None: - r["requirements"] = save( - self.requirements, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.cwlVersion is not None: - u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) - r["cwlVersion"] = u - if self.steps is not None: - r["steps"] = save( - self.steps, top=False, base_url=self.id, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if 
self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "inputs", - "outputs", - "requirements", - "hints", - "label", - "doc", - "cwlVersion", - "class", - "steps", - ] - ) - - -class SubworkflowFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support nested workflows in - the `run` field of [WorkflowStep](#WorkflowStep). - - """ - - def __init__( - self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "SubworkflowFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SubworkflowFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SubworkflowFeatureRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with 
implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class ScatterFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support the `scatter` and - `scatterMethod` fields of [WorkflowStep](#WorkflowStep). 
- - """ - - def __init__( - self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ScatterFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ScatterFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ScatterFeatureRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_ScatterFeatureRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - extension_fields=extension_fields, - 
loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class MultipleInputFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support multiple inbound data links - listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput). 
- - """ - - def __init__( - self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "MultipleInputFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, MultipleInputFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "MultipleInputFeatureRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - 
extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class StepInputExpressionRequirement(ProcessRequirement): - """ - Indicate that the workflow platform must support the `valueFrom` field - of [WorkflowStepInput](#WorkflowStepInput). 
- - """ - - def __init__( - self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "StepInputExpressionRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, StepInputExpressionRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "StepInputExpressionRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_StepInputExpressionRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - 
extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class LoadListingRequirement(ProcessRequirement): - def __init__( - self, - loadListing: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "LoadListingRequirement" - self.loadListing = loadListing - - def __eq__(self, other: Any) -> bool: - if isinstance(other, LoadListingRequirement): - return bool( - self.class_ == other.class_ and self.loadListing == other.loadListing - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.loadListing)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "LoadListingRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - 
_doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_strtype_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("loadListing") is None: - raise ValidationException("missing required field `loadListing`", None, []) - - loadListing = load_field( - _doc.get("loadListing"), - union_of_LoadListingEnumLoader, - baseuri, - loadingOptions, - lc=_doc.get("loadListing") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `loadListing`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("loadListing") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - else: - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - 
SourceLine(_doc, "loadListing", str), - [e], - detailed_message=f"the `loadListing` field with value `{val}` " - "is not valid because:", + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -21548,14 +17067,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `loadListing`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `outputBinding`, `format`, `outputSource`, `linkMerge`, `type`".format( k ), SourceLine(_doc, k, str), @@ -21565,10 +17084,20 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - loadListing=loadListing, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + outputBinding=outputBinding, + format=format, + outputSource=outputSource, + linkMerge=linkMerge, + type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -21582,21 +17111,52 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if 
self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, top=False, - base_url=base_url, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=self.id, relative_uris=relative_uris, ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.outputSource is not None: + u = save_relative_uri(self.outputSource, self.id, False, 1, relative_uris) + r["outputSource"] = u + if self.linkMerge is not None: + r["linkMerge"] = save( + self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) # top refers to the directory level if top: @@ -21606,15 +17166,64 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "loadListing"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "outputBinding", + "format", + "outputSource", + "linkMerge", + "type", + ] + ) + + +class Sink(Saveable): + pass + + +class WorkflowStepInput(Sink): + """ + The input of a workflow step connects an upstream parameter (from the workflow inputs, or the outputs of other 
workflows steps) with the input parameters of the underlying step. + + Input object + ------------ + + A WorkflowStepInput object must contain an ``id`` field in the form ``#fieldname`` or ``#prefix/fieldname``. When the ``id`` field contains a slash ``/`` the field name consists of the characters following the final slash (the prefix portion may contain one or more slashes to indicate scope). This defines a field of the workflow step input object with the value of the ``source`` parameter(s). + + Merging + ------- + + To merge multiple inbound data links, `MultipleInputFeatureRequirement <#MultipleInputFeatureRequirement>`__ must be specified in the workflow or workflow step requirements. + + If the sink parameter is an array, or named in a `workflow scatter <#WorkflowStep>`__ operation, there may be multiple inbound data links listed in the ``source`` field. The values from the input links are merged depending on the method specified in the ``linkMerge`` field. If not specified, the default method is "merge_nested". + * **merge_nested** + + The input must be an array consisting of exactly one entry for each input link. If "merge_nested" is specified with a single link, the value from the link must be wrapped in a single-item list. + + * **merge_flattened** + + 1. The source and sink parameters must be compatible types, or the source type must be compatible with single element from the "items" type of the destination array parameter. + 2. Source parameters which are arrays are concatenated. Source parameters which are single element types are appended as single elements. 
+ + """ + + id: str -class InplaceUpdateRequirement(ProcessRequirement): def __init__( self, - inplaceUpdate: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + source: Any | None = None, + linkMerge: Any | None = None, + default: Any | None = None, + valueFrom: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -21624,19 +17233,27 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "InplaceUpdateRequirement" - self.inplaceUpdate = inplaceUpdate + self.source = source + self.linkMerge = linkMerge + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.default = default + self.valueFrom = valueFrom def __eq__(self, other: Any) -> bool: - if isinstance(other, InplaceUpdateRequirement): + if isinstance(other, WorkflowStepInput): return bool( - self.class_ == other.class_ - and self.inplaceUpdate == other.inplaceUpdate + self.source == other.source + and self.linkMerge == other.linkMerge + and self.id == other.id + and self.default == other.default + and self.valueFrom == other.valueFrom ) return False def __hash__(self) -> int: - return hash((self.class_, self.inplaceUpdate)) + return hash( + (self.source, self.linkMerge, self.id, self.default, self.valueFrom) + ) @classmethod def fromDoc( @@ -21644,252 +17261,259 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InplaceUpdateRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + id = None + if "id" in _doc: + try: + id = 
_load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) - class_ = load_field( - _doc.get("class"), - uri_strtype_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("inplaceUpdate") is None: - raise ValidationException("missing required field `inplaceUpdate`", None, []) + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) - inplaceUpdate = load_field( - _doc.get("inplaceUpdate"), - booltype, - baseuri, - loadingOptions, - lc=_doc.get("inplaceUpdate") - ) + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) + source = None + if "source" in _doc: + try: + source = _load_field( + _doc.get("source"), + 
uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None, + baseuri, + loadingOptions, + lc=_doc.get("source") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `source`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("source") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `source` field is not valid because:", + SourceLine(_doc, "source", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `source` field is not valid because:", + SourceLine(_doc, "source", str), + [e], + detailed_message=f"the `source` field with value `{val}` " + "is not valid because:", + ) + ) + linkMerge = None + if "linkMerge" in _doc: + try: + linkMerge = _load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + lc=_doc.get("linkMerge") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `inplaceUpdate`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inplaceUpdate") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `linkMerge`": _errors__.append( ValidationException( - "the `inplaceUpdate` field is not valid because:", - SourceLine(_doc, "inplaceUpdate", str), - [ValidationException(f"Value is a {val_type}, " - 
f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("linkMerge") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + detailed_message=f"the `linkMerge` field with value `{val}` " + "is not valid because:", + ) + ) + default = None + if "default" in _doc: + try: + default = _load_field( + _doc.get("default"), + union_of_None_type_or_CWLObjectTypeLoader, + baseuri, + loadingOptions, + lc=_doc.get("default") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `default`": _errors__.append( ValidationException( - "the `inplaceUpdate` field is not valid because:", - SourceLine(_doc, "inplaceUpdate", str), - [e], - detailed_message=f"the `inplaceUpdate` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] else: - _errors__.append( - ValidationException( - "invalid field `{}`, 
expected one of: `class`, `inplaceUpdate`".format( - k - ), - SourceLine(_doc, k, str), + val = _doc.get("default") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - inplaceUpdate=inplaceUpdate, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.inplaceUpdate is not None: - r["inplaceUpdate"] = save( - self.inplaceUpdate, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "inplaceUpdate"]) - - -class Secrets(ProcessRequirement): - def __init__( - self, - secrets: Any, - extension_fields: 
Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "Secrets" - self.secrets = secrets - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Secrets): - return bool(self.class_ == other.class_ and self.secrets == other.secrets) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.secrets)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Secrets": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_strtype_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("secrets") is None: - raise ValidationException("missing required field `secrets`", None, []) - - secrets = load_field( - _doc.get("secrets"), - uri_array_of_strtype_False_False_0_None, - baseuri, - loadingOptions, - lc=_doc.get("secrets") - ) + else: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + detailed_message=f"the `default` field with value `{val}` " + "is not valid because:", + ) + ) + valueFrom = None + if "valueFrom" in _doc: + try: + valueFrom = _load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, 
+ loadingOptions, + lc=_doc.get("valueFrom") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `secrets`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("secrets") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `valueFrom`": _errors__.append( ValidationException( - "the `secrets` field is not valid because:", - SourceLine(_doc, "secrets", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `secrets` field is not valid because:", - SourceLine(_doc, "secrets", str), - [e], - detailed_message=f"the `secrets` field with value `{val}` " - "is not valid because:", + val = _doc.get("valueFrom") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - extension_fields: dict[str, Any] = {} + else: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + detailed_message=f"the `valueFrom` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: 
MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -21897,14 +17521,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `secrets`".format( + "invalid field `{}`, expected one of: `source`, `linkMerge`, `id`, `default`, `valueFrom`".format( k ), SourceLine(_doc, k, str), @@ -21914,10 +17538,15 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - secrets=secrets, + source=source, + linkMerge=linkMerge, + id=id, + default=default, + valueFrom=valueFrom, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -21931,17 +17560,24 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.secrets is not None: - u = save_relative_uri(self.secrets, base_url, False, 0, relative_uris) - r["secrets"] = u + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.source is not None: + u = save_relative_uri(self.source, self.id, False, 2, relative_uris) + r["source"] = u + if self.linkMerge is not None: + r["linkMerge"] = save( + self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.default is not None: + r["default"] = save( + self.default, top=False, base_url=self.id, relative_uris=relative_uris + ) + if 
self.valueFrom is not None: + r["valueFrom"] = save( + self.valueFrom, top=False, base_url=self.id, relative_uris=relative_uris + ) # top refers to the directory level if top: @@ -21951,23 +17587,24 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "secrets"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["source", "linkMerge", "id", "default", "valueFrom"] + ) -class TimeLimit(ProcessRequirement): +class WorkflowStepOutput(Saveable): """ - Set an upper limit on the execution time of a CommandLineTool or - ExpressionTool. A tool execution which exceeds the time limit may - be preemptively terminated and considered failed. May also be - used by batch systems to make scheduling decisions. + Associate an output parameter of the underlying process with a workflow parameter. The workflow parameter (given in the ``id`` field) be may be used as a ``source`` to connect with input parameters of other workflow steps, or with an output parameter of the process. 
""" + id: str + def __init__( self, - timelimit: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -21977,18 +17614,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "TimeLimit" - self.timelimit = timelimit + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) def __eq__(self, other: Any) -> bool: - if isinstance(other, TimeLimit): - return bool( - self.class_ == other.class_ and self.timelimit == other.timelimit - ) + if isinstance(other, WorkflowStepOutput): + return bool(self.id == other.id) return False def __hash__(self) -> int: - return hash((self.class_, self.timelimit)) + return hash((self.id)) @classmethod def fromDoc( @@ -21996,79 +17630,71 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "TimeLimit": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_strtype_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("timelimit") is None: - raise ValidationException("missing required field `timelimit`", None, []) - - timelimit = load_field( - _doc.get("timelimit"), - union_of_inttype_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("timelimit") - ) + id = None + if "id" in 
_doc: + try: + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `timelimit`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("timelimit") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `id`": _errors__.append( ValidationException( - "the `timelimit` field is not valid because:", - SourceLine(_doc, "timelimit", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `timelimit` field is not valid because:", - SourceLine(_doc, "timelimit", str), - [e], - detailed_message=f"the `timelimit` field with value `{val}` " - "is not valid because:", + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - extension_fields: dict[str, Any] = {} + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is 
not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -22076,16 +17702,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `timelimit`".format( - k - ), + "invalid field `{}`, expected one of: `id`".format(k), SourceLine(_doc, k, str), ) ) @@ -22093,10 +17717,11 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - timelimit=timelimit, + id=id, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -22110,21 +17735,9 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.timelimit is not None: - r["timelimit"] = save( - self.timelimit, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u # top refers to the directory level if top: @@ -22134,28 +17747,59 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "timelimit"]) + attrs: ClassVar[Collection[str]] = 
frozenset(["id"]) -class WorkReuse(ProcessRequirement): +class WorkflowStep(Saveable): """ - For implementations that support reusing output from past work (on - the assumption that same code and same input produce same - results), control whether to enable or disable the reuse behavior - for a particular tool or step (to accommodate situations where that - assumption is incorrect). A reused step is not executed but - instead returns the same output as the original execution. + A workflow step is an executable element of a workflow. It specifies the underlying process implementation (such as ``CommandLineTool`` or another ``Workflow``) in the ``run`` field and connects the input and output parameters of the underlying process to workflow parameters. + + Scatter/gather + ============== + + To use scatter/gather, `ScatterFeatureRequirement <#ScatterFeatureRequirement>`__ must be specified in the workflow or workflow step requirements. - If `enableReuse` is not specified, correct tools should assume it - is enabled by default. + A "scatter" operation specifies that the associated workflow step or subworkflow should execute separately over a list of input elements. Each job making up a scatter operation is independent and may be executed concurrently. + + The ``scatter`` field specifies one or more input parameters which will be scattered. An input parameter may be listed more than once. The declared type of each input parameter is implicitly becomes an array of items of the input parameter type. If a parameter is listed more than once, it becomes a nested array. As a result, upstream parameters which are connected to scattered parameters must be arrays. + + All output parameter types are also implicitly wrapped in arrays. Each job in the scatter results in an entry in the output array. + + If any scattered parameter runtime value is an empty array, all outputs are set to empty arrays and no work is done for the step, according to applicable scattering rules. 
+ + If ``scatter`` declares more than one input parameter, ``scatterMethod`` describes how to decompose the input into a discrete set of jobs. + + * **dotproduct** specifies that each of the input arrays are aligned and one element taken from each array to construct each job. It is an error if all input arrays are not the same length. + + * **nested_crossproduct** specifies the Cartesian product of the inputs, producing a job for every combination of the scattered inputs. The output must be nested arrays for each level of scattering, in the order that the input arrays are listed in the ``scatter`` field. + + * **flat_crossproduct** specifies the Cartesian product of the inputs, producing a job for every combination of the scattered inputs. The output arrays must be flattened to a single level, but otherwise listed in the order that the input arrays are listed in the ``scatter`` field. + + Subworkflows + ============ + + To specify a nested workflow as part of a workflow step, `SubworkflowFeatureRequirement <#SubworkflowFeatureRequirement>`__ must be specified in the workflow or workflow step requirements. + + It is a fatal error if a workflow directly or indirectly invokes itself as a subworkflow (recursive workflows are not allowed). 
""" + id: str + def __init__( self, - enableReuse: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + in_: Any, + out: Any, + run: Any, + requirements: Any | None = None, + hints: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + scatter: Any | None = None, + scatterMethod: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -22165,18 +17809,48 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "WorkReuse" - self.enableReuse = enableReuse + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.in_ = in_ + self.out = out + self.requirements = requirements + self.hints = hints + self.label = label + self.doc = doc + self.run = run + self.scatter = scatter + self.scatterMethod = scatterMethod def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkReuse): + if isinstance(other, WorkflowStep): return bool( - self.class_ == other.class_ and self.enableReuse == other.enableReuse + self.id == other.id + and self.in_ == other.in_ + and self.out == other.out + and self.requirements == other.requirements + and self.hints == other.hints + and self.label == other.label + and self.doc == other.doc + and self.run == other.run + and self.scatter == other.scatter + and self.scatterMethod == other.scatterMethod ) return False def __hash__(self) -> int: - return hash((self.class_, self.enableReuse)) + return hash( + ( + self.id, + self.in_, + self.out, + self.requirements, + self.hints, + self.label, + self.doc, + self.run, + self.scatter, + self.scatterMethod, + ) + ) @classmethod def fromDoc( @@ -22184,46 +17858,134 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkReuse": + docRoot: 
str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] + id = None + if "id" in _doc: + try: + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + if _doc.get("in") is None: + raise ValidationException("missing required field `in`", None, []) - class_ = load_field( - _doc.get("class"), - uri_strtype_False_True_None_None, + in_ = _load_field( + _doc.get("in"), + idmap_in__array_of_WorkflowStepInputLoader, baseuri, loadingOptions, - lc=_doc.get("class") + lc=_doc.get("in") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - 
raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `in`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("in") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [e], + detailed_message=f"the `in` field with value `{val}` " + "is not valid because:", + ) + ) try: - if _doc.get("enableReuse") is None: - raise ValidationException("missing required field `enableReuse`", None, []) + if _doc.get("out") is None: + raise ValidationException("missing required field `out`", None, []) - enableReuse = load_field( - _doc.get("enableReuse"), - union_of_booltype_or_strtype, + out = _load_field( + _doc.get("out"), + uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("enableReuse") + lc=_doc.get("out") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `enableReuse`": + if str(e) == "missing required field `out`": _errors__.append( ValidationException( str(e), @@ -22231,13 +17993,13 @@ def fromDoc( ) ) else: - val = _doc.get("enableReuse") + val = _doc.get("out") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the 
`enableReuse` field is not valid because:", - SourceLine(_doc, "enableReuse", str), + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -22249,176 +18011,217 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `enableReuse` field is not valid because:", - SourceLine(_doc, "enableReuse", str), + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), [e], - detailed_message=f"the `enableReuse` field with value `{val}` " + detailed_message=f"the `out` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + requirements = None + if "requirements" in _doc: + try: + requirements = _load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + lc=_doc.get("requirements") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `requirements`": _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ValidationException( + str(e), + None + ) ) - extension_fields[ex] = _doc[k] else: + val = _doc.get("requirements") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + 
ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + detailed_message=f"the `requirements` field with value `{val}` " + "is not valid because:", + ) + ) + hints = None + if "hints" in _doc: + try: + hints = _load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_Any_type, + baseuri, + loadingOptions, + lc=_doc.get("hints") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `hints`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `enableReuse`".format( - k - ), - SourceLine(_doc, k, str), + str(e), + None ) ) + else: + val = _doc.get("hints") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + detailed_message=f"the `hints` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + 
union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - enableReuse=enableReuse, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.enableReuse is not None: - r["enableReuse"] = save( - self.enableReuse, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "enableReuse"]) - - -class NetworkAccess(ProcessRequirement): - """ - Indicate whether a process requires outgoing IPv4/IPv6 network - access. Choice of IPv4 or IPv6 is implementation and site - specific, correct tools must support both. - - If `networkAccess` is false or not specified, tools must not - assume network access, except for localhost (the loopback device). - - If `networkAccess` is true, the tool must be able to make outgoing - connections to network resources. Resources may be on a private - subnet or the public Internet. 
However, implementations and sites - may apply their own security policies to restrict what is - accessible by the tool. - - Enabling network access does not imply a publicly routable IP - address or the ability to accept inbound connections. - - """ - - def __init__( - self, - networkAccess: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "NetworkAccess" - self.networkAccess = networkAccess - - def __eq__(self, other: Any) -> bool: - if isinstance(other, NetworkAccess): - return bool( - self.class_ == other.class_ - and self.networkAccess == other.networkAccess - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.networkAccess)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "NetworkAccess": - _doc = copy.copy(doc) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` 
is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) - class_ = load_field( - _doc.get("class"), - uri_strtype_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) try: - if _doc.get("networkAccess") is None: - raise ValidationException("missing required field `networkAccess`", None, []) + if _doc.get("run") is None: + raise ValidationException("missing required field `run`", None, []) - 
networkAccess = load_field( - _doc.get("networkAccess"), - union_of_booltype_or_strtype, + run = _load_field( + _doc.get("run"), + uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None_None, baseuri, loadingOptions, - lc=_doc.get("networkAccess") + lc=_doc.get("run") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `networkAccess`": + if str(e) == "missing required field `run`": _errors__.append( ValidationException( str(e), @@ -22426,13 +18229,13 @@ def fromDoc( ) ) else: - val = _doc.get("networkAccess") + val = _doc.get("run") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `networkAccess` field is not valid because:", - SourceLine(_doc, "networkAccess", str), + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -22444,14 +18247,108 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `networkAccess` field is not valid because:", - SourceLine(_doc, "networkAccess", str), + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), [e], - detailed_message=f"the `networkAccess` field with value `{val}` " + detailed_message=f"the `run` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + scatter = None + if "scatter" in _doc: + try: + scatter = _load_field( + _doc.get("scatter"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None, + baseuri, + loadingOptions, + lc=_doc.get("scatter") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `scatter`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + 
val = _doc.get("scatter") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `scatter` field is not valid because:", + SourceLine(_doc, "scatter", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `scatter` field is not valid because:", + SourceLine(_doc, "scatter", str), + [e], + detailed_message=f"the `scatter` field with value `{val}` " + "is not valid because:", + ) + ) + scatterMethod = None + if "scatterMethod" in _doc: + try: + scatterMethod = _load_field( + _doc.get("scatterMethod"), + uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("scatterMethod") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `scatterMethod`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("scatterMethod") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `scatterMethod` field is not valid because:", + SourceLine(_doc, "scatterMethod", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `scatterMethod` field is not valid because:", + SourceLine(_doc, "scatterMethod", str), + [e], + detailed_message=f"the `scatterMethod` field with value `{val}` " + "is not valid 
because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -22459,14 +18356,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `networkAccess`".format( + "invalid field `{}`, expected one of: `id`, `in`, `out`, `requirements`, `hints`, `label`, `doc`, `run`, `scatter`, `scatterMethod`".format( k ), SourceLine(_doc, k, str), @@ -22476,10 +18373,20 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - networkAccess=networkAccess, + id=id, + in_=in_, + out=out, + requirements=requirements, + hints=hints, + label=label, + doc=doc, + run=run, + scatter=scatter, + scatterMethod=scatterMethod, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -22493,21 +18400,46 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.networkAccess is not None: - r["networkAccess"] = save( - self.networkAccess, + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.in_ is not None: + r["in"] = save( + self.in_, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.out is not None: + u = save_relative_uri(self.out, self.id, True, None, relative_uris) + r["out"] = u + if self.requirements is not None: 
+ r["requirements"] = save( + self.requirements, top=False, - base_url=base_url, + base_url=self.id, relative_uris=relative_uris, ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.run is not None: + u = save_relative_uri(self.run, self.id, False, None, relative_uris) + r["run"] = u + if self.scatter is not None: + u = save_relative_uri(self.scatter, self.id, False, 0, relative_uris) + r["scatter"] = u + if self.scatterMethod is not None: + u = save_relative_uri( + self.scatterMethod, self.id, False, None, relative_uris + ) + r["scatterMethod"] = u # top refers to the directory level if top: @@ -22517,25 +18449,65 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "networkAccess"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "id", + "in", + "out", + "requirements", + "hints", + "label", + "doc", + "run", + "scatter", + "scatterMethod", + ] + ) + + +class Workflow(Process): + """ + A workflow describes a set of **steps** and the **dependencies** between those steps. When a step produces output that will be consumed by a second step, the first step is a dependency of the second step. + + When there is a dependency, the workflow engine must execute the preceding step and wait for it to successfully produce output before executing the dependent step. If two steps are defined in the workflow graph that are not directly or indirectly dependent, these steps are **independent**, and may execute in any order or execute concurrently. A workflow is complete when all steps have been executed. 
+ + Dependencies between parameters are expressed using the ``source`` field on `workflow step input parameters <#WorkflowStepInput>`__ and `workflow output parameters <#WorkflowOutputParameter>`__. + + The ``source`` field expresses the dependency of one parameter on another such that when a value is associated with the parameter specified by ``source``, that value is propagated to the destination parameter. When all data links inbound to a given step are fulfilled, the step is ready to execute. + + Workflow success and failure + ---------------------------- + + A completed step must result in one of ``success``, ``temporaryFailure`` or ``permanentFailure`` states. An implementation may choose to retry a step execution which resulted in ``temporaryFailure``. An implementation may choose to either continue running other steps of a workflow, or terminate immediately upon ``permanentFailure``. + + * If any step of a workflow execution results in ``permanentFailure``, then the workflow status is ``permanentFailure``. + + * If one or more steps result in ``temporaryFailure`` and all other steps complete ``success`` or are not executed, then the workflow status is ``temporaryFailure``. + + * If all workflow steps are executed and complete with ``success``, then the workflow status is ``success``. + Extensions + ========== + + `ScatterFeatureRequirement <#ScatterFeatureRequirement>`__ and `SubworkflowFeatureRequirement <#SubworkflowFeatureRequirement>`__ are available as standard `extensions <#Extensions_and_Metadata>`__ to core workflow semantics. 
+ + """ -class ProcessGenerator(Process): id: str def __init__( self, inputs: Any, outputs: Any, - run: Any, - id: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + steps: Any, + id: Any | None = None, + requirements: Any | None = None, + hints: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + cwlVersion: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -22553,11 +18525,11 @@ def __init__( self.label = label self.doc = doc self.cwlVersion = cwlVersion - self.class_ = "ProcessGenerator" - self.run = run + self.class_: Final[str] = "Workflow" + self.steps = steps def __eq__(self, other: Any) -> bool: - if isinstance(other, ProcessGenerator): + if isinstance(other, Workflow): return bool( self.id == other.id and self.inputs == other.inputs @@ -22568,7 +18540,7 @@ def __eq__(self, other: Any) -> bool: and self.doc == other.doc and self.cwlVersion == other.cwlVersion and self.class_ == other.class_ - and self.run == other.run + and self.steps == other.steps ) return False @@ -22584,7 +18556,7 @@ def __hash__(self) -> int: self.doc, self.cwlVersion, self.class_, - self.run, + self.steps, ) ) @@ -22594,8 +18566,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ProcessGenerator": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -22605,7 +18577,7 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -22662,23 +18634,24 @@ def fromDoc( if 
_doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_strtype_False_True_None_None, + uri_Workflow_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e + raise e try: if _doc.get("inputs") is None: raise ValidationException("missing required field `inputs`", None, []) - inputs = load_field( + inputs = _load_field( _doc.get("inputs"), idmap_inputs_array_of_InputParameterLoader, baseuri, @@ -22726,9 +18699,9 @@ def fromDoc( if _doc.get("outputs") is None: raise ValidationException("missing required field `outputs`", None, []) - outputs = load_field( + outputs = _load_field( _doc.get("outputs"), - idmap_outputs_array_of_OutputParameterLoader, + idmap_outputs_array_of_WorkflowOutputParameterLoader, baseuri, loadingOptions, lc=_doc.get("outputs") @@ -22773,9 +18746,9 @@ def fromDoc( requirements = None if "requirements" in _doc: try: - requirements = load_field( + requirements = _load_field( _doc.get("requirements"), - 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, baseuri, loadingOptions, lc=_doc.get("requirements") @@ -22820,9 +18793,9 @@ def fromDoc( hints = None if "hints" in _doc: try: - hints = load_field( + hints = _load_field( _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type, + 
idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, baseuri, loadingOptions, lc=_doc.get("hints") @@ -22867,7 +18840,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -22914,7 +18887,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype, baseuri, @@ -22961,7 +18934,7 @@ def fromDoc( cwlVersion = None if "cwlVersion" in _doc: try: - cwlVersion = load_field( + cwlVersion = _load_field( _doc.get("cwlVersion"), uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, baseuri, @@ -23006,21 +18979,21 @@ def fromDoc( ) ) try: - if _doc.get("run") is None: - raise ValidationException("missing required field `run`", None, []) + if _doc.get("steps") is None: + raise ValidationException("missing required field `steps`", None, []) - run = load_field( - _doc.get("run"), - uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_False_False_None_None, + steps = _load_field( + _doc.get("steps"), + idmap_steps_union_of_array_of_WorkflowStepLoader, baseuri, loadingOptions, - lc=_doc.get("run") + lc=_doc.get("steps") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `run`": + if str(e) == "missing required field `steps`": _errors__.append( ValidationException( str(e), @@ -23028,13 +19001,13 @@ def fromDoc( ) ) else: - val = 
_doc.get("run") + val = _doc.get("steps") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `run` field is not valid because:", - SourceLine(_doc, "run", str), + "the `steps` field is not valid because:", + SourceLine(_doc, "steps", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -23046,14 +19019,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `run` field is not valid because:", - SourceLine(_doc, "run", str), + "the `steps` field is not valid because:", + SourceLine(_doc, "steps", str), [e], - detailed_message=f"the `run` field with value `{val}` " + detailed_message=f"the `steps` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -23061,14 +19034,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `run`".format( + "invalid field `{}`, expected one of: `id`, `inputs`, `outputs`, `requirements`, `hints`, `label`, `doc`, `cwlVersion`, `class`, `steps`".format( k ), SourceLine(_doc, k, str), @@ -23086,7 +19059,7 @@ def fromDoc( label=label, doc=doc, cwlVersion=cwlVersion, - run=run, + steps=steps, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -23108,8 +19081,10 @@ def save( u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: 
-len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ @@ -23145,9 +19120,10 @@ def save( if self.cwlVersion is not None: u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) r["cwlVersion"] = u - if self.run is not None: - u = save_relative_uri(self.run, self.id, False, None, relative_uris) - r["run"] = u + if self.steps is not None: + r["steps"] = save( + self.steps, top=False, base_url=self.id, relative_uris=relative_uris + ) # top refers to the directory level if top: @@ -23157,7 +19133,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ "id", "inputs", @@ -23168,22 +19144,21 @@ def save( "doc", "cwlVersion", "class", - "run", + "steps", ] ) -class MPIRequirement(ProcessRequirement): +class SubworkflowFeatureRequirement(ProcessRequirement): """ - Indicates that a process requires an MPI runtime. + Indicates that the workflow platform must support nested workflows in the ``run`` field of `WorkflowStep <#WorkflowStep>`__. 
""" def __init__( self, - processes: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -23193,18 +19168,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "MPIRequirement" - self.processes = processes + self.class_: Final[str] = "SubworkflowFeatureRequirement" def __eq__(self, other: Any) -> bool: - if isinstance(other, MPIRequirement): - return bool( - self.class_ == other.class_ and self.processes == other.processes - ) + if isinstance(other, SubworkflowFeatureRequirement): + return bool(self.class_ == other.class_) return False def __hash__(self) -> int: - return hash((self.class_, self.processes)) + return hash((self.class_)) @classmethod def fromDoc( @@ -23212,8 +19184,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "MPIRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -23224,67 +19196,20 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_strtype_False_True_None_None, + uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e - try: - if _doc.get("processes") is None: - raise ValidationException("missing required 
field `processes`", None, []) - - processes = load_field( - _doc.get("processes"), - union_of_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("processes") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `processes`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("processes") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `processes` field is not valid because:", - SourceLine(_doc, "processes", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `processes` field is not valid because:", - SourceLine(_doc, "processes", str), - [e], - detailed_message=f"the `processes` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + raise e + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -23292,16 +19217,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `processes`".format( - k - ), + "invalid field `{}`, expected one of: `class`".format(k), SourceLine(_doc, k, str), ) ) @@ -23309,7 +19232,6 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - processes=processes, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ 
-23327,20 +19249,15 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.processes is not None: - r["processes"] = save( - self.processes, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) # top refers to the directory level if top: @@ -23350,23 +19267,19 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "processes"]) + attrs: ClassVar[Collection[str]] = frozenset(["class"]) -class CUDARequirement(ProcessRequirement): +class ScatterFeatureRequirement(ProcessRequirement): """ - Require support for NVIDA CUDA (GPU hardware acceleration). + Indicates that the workflow platform must support the ``scatter`` and ``scatterMethod`` fields of `WorkflowStep <#WorkflowStep>`__. 
""" def __init__( self, - cudaComputeCapability: Any, - cudaVersionMin: Any, - cudaDeviceCountMax: Optional[Any] = None, - cudaDeviceCountMin: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -23376,33 +19289,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "CUDARequirement" - self.cudaComputeCapability = cudaComputeCapability - self.cudaDeviceCountMax = cudaDeviceCountMax - self.cudaDeviceCountMin = cudaDeviceCountMin - self.cudaVersionMin = cudaVersionMin + self.class_: Final[str] = "ScatterFeatureRequirement" def __eq__(self, other: Any) -> bool: - if isinstance(other, CUDARequirement): - return bool( - self.class_ == other.class_ - and self.cudaComputeCapability == other.cudaComputeCapability - and self.cudaDeviceCountMax == other.cudaDeviceCountMax - and self.cudaDeviceCountMin == other.cudaDeviceCountMin - and self.cudaVersionMin == other.cudaVersionMin - ) + if isinstance(other, ScatterFeatureRequirement): + return bool(self.class_ == other.class_) return False def __hash__(self) -> int: - return hash( - ( - self.class_, - self.cudaComputeCapability, - self.cudaDeviceCountMax, - self.cudaDeviceCountMin, - self.cudaVersionMin, - ) - ) + return hash((self.class_)) @classmethod def fromDoc( @@ -23410,8 +19305,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CUDARequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -23422,209 +19317,141 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - 
uri_strtype_False_True_None_None, + uri_ScatterFeatureRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("cudaComputeCapability") is None: - raise ValidationException("missing required field `cudaComputeCapability`", None, []) - - cudaComputeCapability = load_field( - _doc.get("cudaComputeCapability"), - union_of_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("cudaComputeCapability") - ) - + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `cudaComputeCapability`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cudaComputeCapability") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + raise e + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - "the `cudaComputeCapability` field is not valid because:", - SourceLine(_doc, "cudaComputeCapability", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) + extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "the `cudaComputeCapability` field 
is not valid because:", - SourceLine(_doc, "cudaComputeCapability", str), - [e], - detailed_message=f"the `cudaComputeCapability` field with value `{val}` " - "is not valid because:", + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), ) ) - cudaDeviceCountMax = None - if "cudaDeviceCountMax" in _doc: - try: - cudaDeviceCountMax = load_field( - _doc.get("cudaDeviceCountMax"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("cudaDeviceCountMax") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed - if str(e) == "missing required field `cudaDeviceCountMax`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cudaDeviceCountMax") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `cudaDeviceCountMax` field is not valid because:", - SourceLine(_doc, "cudaDeviceCountMax", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `cudaDeviceCountMax` field is not valid because:", - SourceLine(_doc, "cudaDeviceCountMax", str), - [e], - detailed_message=f"the `cudaDeviceCountMax` field with value `{val}` " - "is not valid because:", - ) - ) - cudaDeviceCountMin = None - if "cudaDeviceCountMin" in _doc: - try: - cudaDeviceCountMin = load_field( - _doc.get("cudaDeviceCountMin"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - 
lc=_doc.get("cudaDeviceCountMin") - ) + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u - if str(e) == "missing required field `cudaDeviceCountMin`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cudaDeviceCountMin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `cudaDeviceCountMin` field is not valid because:", - SourceLine(_doc, "cudaDeviceCountMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `cudaDeviceCountMin` field is not valid because:", - SourceLine(_doc, "cudaDeviceCountMin", str), - [e], - detailed_message=f"the `cudaDeviceCountMin` field with value `{val}` " - "is not valid because:", - ) - ) + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + 
+ attrs: ClassVar[Collection[str]] = frozenset(["class"]) + + +class MultipleInputFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support multiple inbound data links listed in the ``source`` field of `WorkflowStepInput <#WorkflowStepInput>`__. + + """ + + def __init__( + self, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_: Final[str] = "MultipleInputFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, MultipleInputFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] try: - if _doc.get("cudaVersionMin") is None: - raise ValidationException("missing required field `cudaVersionMin`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - cudaVersionMin = load_field( - _doc.get("cudaVersionMin"), - strtype, + class_ = _load_field( + _doc.get("class"), + uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("cudaVersionMin") + lc=_doc.get("class") ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == 
"missing required field `cudaVersionMin`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cudaVersionMin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `cudaVersionMin` field is not valid because:", - SourceLine(_doc, "cudaVersionMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `cudaVersionMin` field is not valid because:", - SourceLine(_doc, "cudaVersionMin", str), - [e], - detailed_message=f"the `cudaVersionMin` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + raise e + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -23632,16 +19459,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `cudaComputeCapability`, `cudaDeviceCountMax`, `cudaDeviceCountMin`, `cudaVersionMin`".format( - k - ), + "invalid field `{}`, expected one of: `class`".format(k), SourceLine(_doc, k, str), ) ) @@ -23649,10 +19474,6 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - cudaComputeCapability=cudaComputeCapability, - cudaDeviceCountMax=cudaDeviceCountMax, - cudaDeviceCountMin=cudaDeviceCountMin, - cudaVersionMin=cudaVersionMin, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -23670,41 +19491,15 @@ def save( for ef in 
self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.cudaComputeCapability is not None: - r["cudaComputeCapability"] = save( - self.cudaComputeCapability, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.cudaDeviceCountMax is not None: - r["cudaDeviceCountMax"] = save( - self.cudaDeviceCountMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.cudaDeviceCountMin is not None: - r["cudaDeviceCountMin"] = save( - self.cudaDeviceCountMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.cudaVersionMin is not None: - r["cudaVersionMin"] = save( - self.cudaVersionMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) # top refers to the directory level if top: @@ -23714,23 +19509,19 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "class", - "cudaComputeCapability", - "cudaDeviceCountMax", - "cudaDeviceCountMin", - "cudaVersionMin", - ] - ) + attrs: ClassVar[Collection[str]] = frozenset(["class"]) + + +class StepInputExpressionRequirement(ProcessRequirement): + """ + Indicate that the workflow platform must support the ``valueFrom`` field of `WorkflowStepInput <#WorkflowStepInput>`__. 
+ """ -class ShmSize(ProcessRequirement): def __init__( self, - shmSize: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -23740,16 +19531,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "ShmSize" - self.shmSize = shmSize + self.class_: Final[str] = "StepInputExpressionRequirement" def __eq__(self, other: Any) -> bool: - if isinstance(other, ShmSize): - return bool(self.class_ == other.class_ and self.shmSize == other.shmSize) + if isinstance(other, StepInputExpressionRequirement): + return bool(self.class_ == other.class_) return False def __hash__(self) -> int: - return hash((self.class_, self.shmSize)) + return hash((self.class_)) @classmethod def fromDoc( @@ -23757,8 +19547,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ShmSize": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -23769,67 +19559,20 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_strtype_False_True_None_None, + uri_StepInputExpressionRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("shmSize") is None: - raise ValidationException("missing required field `shmSize`", None, []) - - shmSize = load_field( - _doc.get("shmSize"), - strtype, - baseuri, - loadingOptions, - lc=_doc.get("shmSize") - ) - + vocab = 
_vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `shmSize`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("shmSize") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `shmSize` field is not valid because:", - SourceLine(_doc, "shmSize", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `shmSize` field is not valid because:", - SourceLine(_doc, "shmSize", str), - [e], - detailed_message=f"the `shmSize` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + raise e + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -23837,16 +19580,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `shmSize`".format( - k - ), + "invalid field `{}`, expected one of: `class`".format(k), SourceLine(_doc, k, str), ) ) @@ -23854,7 +19595,6 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - shmSize=shmSize, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -23872,17 +19612,15 @@ def save( for ef in 
self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.shmSize is not None: - r["shmSize"] = save( - self.shmSize, top=False, base_url=base_url, relative_uris=relative_uris - ) # top refers to the directory level if top: @@ -23892,13 +19630,12 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "shmSize"]) + attrs: ClassVar[Collection[str]] = frozenset(["class"]) -_vocab = { +_vocab.update({ "Any": "https://w3id.org/cwl/salad#Any", "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", - "CUDARequirement": "http://commonwl.org/cwltool#CUDARequirement", "CWLArraySchema": "https://w3id.org/cwl/cwl#CWLArraySchema", "CWLInputFile": "https://w3id.org/cwl/cwl#CWLInputFile", "CWLObjectType": "https://w3id.org/cwl/cwl#CWLObjectType", @@ -23933,7 +19670,6 @@ def save( "File": "https://w3id.org/cwl/cwl#File", "InitialWorkDirRequirement": "https://w3id.org/cwl/cwl#InitialWorkDirRequirement", "InlineJavascriptRequirement": "https://w3id.org/cwl/cwl#InlineJavascriptRequirement", - "InplaceUpdateRequirement": "http://commonwl.org/cwltool#InplaceUpdateRequirement", "InputArraySchema": "https://w3id.org/cwl/cwl#InputArraySchema", "InputBinding": "https://w3id.org/cwl/cwl#InputBinding", "InputEnumSchema": "https://w3id.org/cwl/cwl#InputEnumSchema", @@ -23942,11 +19678,8 @@ def save( "InputRecordSchema": "https://w3id.org/cwl/cwl#InputRecordSchema", "InputSchema": "https://w3id.org/cwl/cwl#InputSchema", "LinkMergeMethod": "https://w3id.org/cwl/cwl#LinkMergeMethod", - "LoadListingRequirement": 
"http://commonwl.org/cwltool#LoadListingRequirement", - "MPIRequirement": "http://commonwl.org/cwltool#MPIRequirement", "MapSchema": "https://w3id.org/cwl/salad#MapSchema", "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement", - "NetworkAccess": "http://commonwl.org/cwltool#NetworkAccess", "OutputArraySchema": "https://w3id.org/cwl/cwl#OutputArraySchema", "OutputBinding": "https://w3id.org/cwl/cwl#OutputBinding", "OutputEnumSchema": "https://w3id.org/cwl/cwl#OutputEnumSchema", @@ -23957,7 +19690,6 @@ def save( "Parameter": "https://w3id.org/cwl/cwl#Parameter", "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", "Process": "https://w3id.org/cwl/cwl#Process", - "ProcessGenerator": "http://commonwl.org/cwltool#ProcessGenerator", "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement", "RecordField": "https://w3id.org/cwl/salad#RecordField", "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", @@ -23966,17 +19698,13 @@ def save( "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod", "SchemaBase": "https://w3id.org/cwl/cwl#SchemaBase", "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement", - "Secrets": "http://commonwl.org/cwltool#Secrets", "ShellCommandRequirement": "https://w3id.org/cwl/cwl#ShellCommandRequirement", - "ShmSize": "http://commonwl.org/cwltool#ShmSize", "Sink": "https://w3id.org/cwl/cwl#Sink", "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage", "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement", "StepInputExpressionRequirement": "https://w3id.org/cwl/cwl#StepInputExpressionRequirement", "SubworkflowFeatureRequirement": "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement", - "TimeLimit": "http://commonwl.org/cwltool#TimeLimit", "UnionSchema": "https://w3id.org/cwl/salad#UnionSchema", - "WorkReuse": "http://commonwl.org/cwltool#WorkReuse", "Workflow": "https://w3id.org/cwl/cwl#Workflow", "WorkflowOutputParameter": 
"https://w3id.org/cwl/cwl#WorkflowOutputParameter", "WorkflowStep": "https://w3id.org/cwl/cwl#WorkflowStep", @@ -23984,19 +19712,8 @@ def save( "WorkflowStepOutput": "https://w3id.org/cwl/cwl#WorkflowStepOutput", "array": "https://w3id.org/cwl/salad#array", "boolean": "http://www.w3.org/2001/XMLSchema#boolean", - "deep_listing": "http://commonwl.org/cwltool#LoadListingRequirement/loadListing/LoadListingEnum/deep_listing", "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct", "double": "http://www.w3.org/2001/XMLSchema#double", - "draft-2": "https://w3id.org/cwl/cwl#draft-2", - "draft-3": "https://w3id.org/cwl/cwl#draft-3", - "draft-3.dev1": "https://w3id.org/cwl/cwl#draft-3.dev1", - "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2", - "draft-3.dev3": "https://w3id.org/cwl/cwl#draft-3.dev3", - "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4", - "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5", - "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1", - "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2", - "draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3", "enum": "https://w3id.org/cwl/salad#enum", "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct", "float": "http://www.w3.org/2001/XMLSchema#float", @@ -24006,21 +19723,17 @@ def save( "merge_flattened": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened", "merge_nested": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested", "nested_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct", - "no_listing": "http://commonwl.org/cwltool#LoadListingRequirement/loadListing/LoadListingEnum/no_listing", "null": "https://w3id.org/cwl/salad#null", "record": "https://w3id.org/cwl/salad#record", - "shallow_listing": "http://commonwl.org/cwltool#LoadListingRequirement/loadListing/LoadListingEnum/shallow_listing", "stderr": "https://w3id.org/cwl/cwl#stderr", "stdout": "https://w3id.org/cwl/cwl#stdout", "string": 
"http://www.w3.org/2001/XMLSchema#string", "union": "https://w3id.org/cwl/salad#union", "v1.0": "https://w3id.org/cwl/cwl#v1.0", - "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4", -} -_rvocab = { +}) +_rvocab.update({ "https://w3id.org/cwl/salad#Any": "Any", "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", - "http://commonwl.org/cwltool#CUDARequirement": "CUDARequirement", "https://w3id.org/cwl/cwl#CWLArraySchema": "CWLArraySchema", "https://w3id.org/cwl/cwl#CWLInputFile": "CWLInputFile", "https://w3id.org/cwl/cwl#CWLObjectType": "CWLObjectType", @@ -24055,7 +19768,6 @@ def save( "https://w3id.org/cwl/cwl#File": "File", "https://w3id.org/cwl/cwl#InitialWorkDirRequirement": "InitialWorkDirRequirement", "https://w3id.org/cwl/cwl#InlineJavascriptRequirement": "InlineJavascriptRequirement", - "http://commonwl.org/cwltool#InplaceUpdateRequirement": "InplaceUpdateRequirement", "https://w3id.org/cwl/cwl#InputArraySchema": "InputArraySchema", "https://w3id.org/cwl/cwl#InputBinding": "InputBinding", "https://w3id.org/cwl/cwl#InputEnumSchema": "InputEnumSchema", @@ -24064,11 +19776,8 @@ def save( "https://w3id.org/cwl/cwl#InputRecordSchema": "InputRecordSchema", "https://w3id.org/cwl/cwl#InputSchema": "InputSchema", "https://w3id.org/cwl/cwl#LinkMergeMethod": "LinkMergeMethod", - "http://commonwl.org/cwltool#LoadListingRequirement": "LoadListingRequirement", - "http://commonwl.org/cwltool#MPIRequirement": "MPIRequirement", "https://w3id.org/cwl/salad#MapSchema": "MapSchema", "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", - "http://commonwl.org/cwltool#NetworkAccess": "NetworkAccess", "https://w3id.org/cwl/cwl#OutputArraySchema": "OutputArraySchema", "https://w3id.org/cwl/cwl#OutputBinding": "OutputBinding", "https://w3id.org/cwl/cwl#OutputEnumSchema": "OutputEnumSchema", @@ -24079,7 +19788,6 @@ def save( "https://w3id.org/cwl/cwl#Parameter": "Parameter", "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", 
"https://w3id.org/cwl/cwl#Process": "Process", - "http://commonwl.org/cwltool#ProcessGenerator": "ProcessGenerator", "https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", "https://w3id.org/cwl/salad#RecordField": "RecordField", "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", @@ -24088,17 +19796,13 @@ def save( "https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", "https://w3id.org/cwl/cwl#SchemaBase": "SchemaBase", "https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", - "http://commonwl.org/cwltool#Secrets": "Secrets", "https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", - "http://commonwl.org/cwltool#ShmSize": "ShmSize", "https://w3id.org/cwl/cwl#Sink": "Sink", "https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", "https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", "https://w3id.org/cwl/cwl#StepInputExpressionRequirement": "StepInputExpressionRequirement", "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement": "SubworkflowFeatureRequirement", - "http://commonwl.org/cwltool#TimeLimit": "TimeLimit", "https://w3id.org/cwl/salad#UnionSchema": "UnionSchema", - "http://commonwl.org/cwltool#WorkReuse": "WorkReuse", "https://w3id.org/cwl/cwl#Workflow": "Workflow", "https://w3id.org/cwl/cwl#WorkflowOutputParameter": "WorkflowOutputParameter", "https://w3id.org/cwl/cwl#WorkflowStep": "WorkflowStep", @@ -24106,19 +19810,8 @@ def save( "https://w3id.org/cwl/cwl#WorkflowStepOutput": "WorkflowStepOutput", "https://w3id.org/cwl/salad#array": "array", "http://www.w3.org/2001/XMLSchema#boolean": "boolean", - "http://commonwl.org/cwltool#LoadListingRequirement/loadListing/LoadListingEnum/deep_listing": "deep_listing", "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", "http://www.w3.org/2001/XMLSchema#double": "double", - "https://w3id.org/cwl/cwl#draft-2": "draft-2", - "https://w3id.org/cwl/cwl#draft-3": "draft-3", - "https://w3id.org/cwl/cwl#draft-3.dev1": 
"draft-3.dev1", - "https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", - "https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", - "https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", - "https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", - "https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", - "https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", - "https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", "https://w3id.org/cwl/salad#enum": "enum", "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", "http://www.w3.org/2001/XMLSchema#float": "float", @@ -24128,25 +19821,22 @@ def save( "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened": "merge_flattened", "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested": "merge_nested", "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct": "nested_crossproduct", - "http://commonwl.org/cwltool#LoadListingRequirement/loadListing/LoadListingEnum/no_listing": "no_listing", "https://w3id.org/cwl/salad#null": "null", "https://w3id.org/cwl/salad#record": "record", - "http://commonwl.org/cwltool#LoadListingRequirement/loadListing/LoadListingEnum/shallow_listing": "shallow_listing", "https://w3id.org/cwl/cwl#stderr": "stderr", "https://w3id.org/cwl/cwl#stdout": "stdout", "http://www.w3.org/2001/XMLSchema#string": "string", "https://w3id.org/cwl/salad#union": "union", "https://w3id.org/cwl/cwl#v1.0": "v1.0", - "https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", -} - -strtype = _PrimitiveLoader(str) -inttype = _PrimitiveLoader(int) -floattype = _PrimitiveLoader(float) -booltype = _PrimitiveLoader(bool) -None_type = _PrimitiveLoader(type(None)) -Any_type = _AnyLoader() -PrimitiveTypeLoader = _EnumLoader( +}) + +strtype: Final = _PrimitiveLoader(str) +inttype: Final = _PrimitiveLoader(int) +floattype: Final = _PrimitiveLoader(float) +booltype: Final = _PrimitiveLoader(bool) +None_type: Final = _PrimitiveLoader(type(None)) +Any_type: Final = _AnyLoader() +PrimitiveTypeLoader: 
Final = _EnumLoader( ( "null", "boolean", @@ -24161,28 +19851,41 @@ def save( """ Names of salad data types (based on Avro schema declarations). -Refer to the [Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for -detailed information. +Refer to the `Avro schema declaration documentation `__ for detailed information. null: no value + boolean: a binary value + int: 32-bit signed integer + long: 64-bit signed integer + float: single precision (32-bit) IEEE 754 floating-point number + double: double precision (64-bit) IEEE 754 floating-point number + string: Unicode character sequence """ -AnyLoader = _EnumLoader(("Any",), "Any") +AnyLoader: Final = _EnumLoader(("Any",), "Any") """ The **Any** type validates for any non-null value. """ -RecordFieldLoader = _RecordLoader(RecordField, None, None) -RecordSchemaLoader = _RecordLoader(RecordSchema, None, None) -EnumSchemaLoader = _RecordLoader(EnumSchema, None, None) -ArraySchemaLoader = _RecordLoader(ArraySchema, None, None) -MapSchemaLoader = _RecordLoader(MapSchema, None, None) -UnionSchemaLoader = _RecordLoader(UnionSchema, None, None) -CWLTypeLoader = _EnumLoader( +RecordFieldLoader: Final = _RecordLoader( + schema_salad.metaschema.RecordField, None, None +) +RecordSchemaLoader: Final = _RecordLoader( + schema_salad.metaschema.RecordSchema, None, None +) +EnumSchemaLoader: Final = _RecordLoader(schema_salad.metaschema.EnumSchema, None, None) +ArraySchemaLoader: Final = _RecordLoader( + schema_salad.metaschema.ArraySchema, None, None +) +MapSchemaLoader: Final = _RecordLoader(schema_salad.metaschema.MapSchema, None, None) +UnionSchemaLoader: Final = _RecordLoader( + schema_salad.metaschema.UnionSchema, None, None +) +CWLTypeLoader: Final = _EnumLoader( ( "null", "boolean", @@ -24198,178 +19901,196 @@ def save( ) """ Extends primitive types with the concept of a file and directory as a builtin type. 
+ File: A File object + Directory: A Directory object """ -CWLArraySchemaLoader = _RecordLoader(CWLArraySchema, None, None) -CWLRecordFieldLoader = _RecordLoader(CWLRecordField, None, None) -CWLRecordSchemaLoader = _RecordLoader(CWLRecordSchema, None, None) -FileLoader = _RecordLoader(File, None, None) -DirectoryLoader = _RecordLoader(Directory, None, None) -CWLObjectTypeLoader = _UnionLoader((), "CWLObjectTypeLoader") -union_of_None_type_or_CWLObjectTypeLoader = _UnionLoader( +CWLArraySchemaLoader: Final = _RecordLoader(CWLArraySchema, None, None) +CWLRecordFieldLoader: Final = _RecordLoader(CWLRecordField, None, None) +CWLRecordSchemaLoader: Final = _RecordLoader(CWLRecordSchema, None, None) +FileLoader: Final = _RecordLoader(File, None, None) +DirectoryLoader: Final = _RecordLoader(Directory, None, None) +CWLObjectTypeLoader: Final = _UnionLoader((), "CWLObjectTypeLoader") +union_of_None_type_or_CWLObjectTypeLoader: Final = _UnionLoader( ( None_type, CWLObjectTypeLoader, ) ) -array_of_union_of_None_type_or_CWLObjectTypeLoader = _ArrayLoader( +array_of_union_of_None_type_or_CWLObjectTypeLoader: Final = _ArrayLoader( union_of_None_type_or_CWLObjectTypeLoader ) -map_of_union_of_None_type_or_CWLObjectTypeLoader = _MapLoader( +map_of_union_of_None_type_or_CWLObjectTypeLoader: Final = _MapLoader( union_of_None_type_or_CWLObjectTypeLoader, "CWLInputFile", "@list", True ) -CWLInputFileLoader = map_of_union_of_None_type_or_CWLObjectTypeLoader -CWLVersionLoader = _EnumLoader( - ( - "draft-2", - "draft-3.dev1", - "draft-3.dev2", - "draft-3.dev3", - "draft-3.dev4", - "draft-3.dev5", - "draft-3", - "draft-4.dev1", - "draft-4.dev2", - "draft-4.dev3", - "v1.0.dev4", - "v1.0", - ), - "CWLVersion", -) +CWLInputFileLoader: Final = map_of_union_of_None_type_or_CWLObjectTypeLoader +CWLVersionLoader: Final = _EnumLoader(("v1.0",), "CWLVersion") """ -Version symbols for published CWL document versions. +Current version symbol for CWL documents. 
""" -ExpressionLoader = _ExpressionLoader(str) -InputRecordFieldLoader = _RecordLoader(InputRecordField, None, None) -InputRecordSchemaLoader = _RecordLoader(InputRecordSchema, None, None) -InputEnumSchemaLoader = _RecordLoader(InputEnumSchema, None, None) -InputArraySchemaLoader = _RecordLoader(InputArraySchema, None, None) -OutputRecordFieldLoader = _RecordLoader(OutputRecordField, None, None) -OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema, None, None) -OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema, None, None) -OutputArraySchemaLoader = _RecordLoader(OutputArraySchema, None, None) -InputParameterLoader = _RecordLoader(InputParameter, None, None) -OutputParameterLoader = _RecordLoader(OutputParameter, None, None) -InlineJavascriptRequirementLoader = _RecordLoader( +ExpressionLoader: Final = _ExpressionLoader(str) +InputRecordFieldLoader: Final = _RecordLoader(InputRecordField, None, None) +InputRecordSchemaLoader: Final = _RecordLoader(InputRecordSchema, None, None) +InputEnumSchemaLoader: Final = _RecordLoader(InputEnumSchema, None, None) +InputArraySchemaLoader: Final = _RecordLoader(InputArraySchema, None, None) +OutputRecordFieldLoader: Final = _RecordLoader(OutputRecordField, None, None) +OutputRecordSchemaLoader: Final = _RecordLoader(OutputRecordSchema, None, None) +OutputEnumSchemaLoader: Final = _RecordLoader(OutputEnumSchema, None, None) +OutputArraySchemaLoader: Final = _RecordLoader(OutputArraySchema, None, None) +InputParameterLoader: Final = _RecordLoader(InputParameter, None, None) +OutputParameterLoader: Final = _RecordLoader(OutputParameter, None, None) +InlineJavascriptRequirementLoader: Final = _RecordLoader( InlineJavascriptRequirement, None, None ) -SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement, None, None) -EnvironmentDefLoader = _RecordLoader(EnvironmentDef, None, None) -CommandLineBindingLoader = _RecordLoader(CommandLineBinding, None, None) -CommandOutputBindingLoader = 
_RecordLoader(CommandOutputBinding, None, None) -CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField, None, None) -CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema, None, None) -CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema, None, None) -CommandInputArraySchemaLoader = _RecordLoader(CommandInputArraySchema, None, None) -CommandOutputRecordFieldLoader = _RecordLoader(CommandOutputRecordField, None, None) -CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema, None, None) -CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema, None, None) -CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema, None, None) -CommandInputParameterLoader = _RecordLoader(CommandInputParameter, None, None) -CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter, None, None) -stdoutLoader = _EnumLoader(("stdout",), "stdout") +SchemaDefRequirementLoader: Final = _RecordLoader(SchemaDefRequirement, None, None) +EnvironmentDefLoader: Final = _RecordLoader(EnvironmentDef, None, None) +CommandLineBindingLoader: Final = _RecordLoader(CommandLineBinding, None, None) +CommandOutputBindingLoader: Final = _RecordLoader(CommandOutputBinding, None, None) +CommandInputRecordFieldLoader: Final = _RecordLoader( + CommandInputRecordField, None, None +) +CommandInputRecordSchemaLoader: Final = _RecordLoader( + CommandInputRecordSchema, None, None +) +CommandInputEnumSchemaLoader: Final = _RecordLoader(CommandInputEnumSchema, None, None) +CommandInputArraySchemaLoader: Final = _RecordLoader( + CommandInputArraySchema, None, None +) +CommandOutputRecordFieldLoader: Final = _RecordLoader( + CommandOutputRecordField, None, None +) +CommandOutputRecordSchemaLoader: Final = _RecordLoader( + CommandOutputRecordSchema, None, None +) +CommandOutputEnumSchemaLoader: Final = _RecordLoader( + CommandOutputEnumSchema, None, None +) +CommandOutputArraySchemaLoader: Final = _RecordLoader( + 
CommandOutputArraySchema, None, None +) +CommandInputParameterLoader: Final = _RecordLoader(CommandInputParameter, None, None) +CommandOutputParameterLoader: Final = _RecordLoader(CommandOutputParameter, None, None) +stdoutLoader: Final = _EnumLoader(("stdout",), "stdout") """ -Only valid as a `type` for a `CommandLineTool` output with no -`outputBinding` set. +Only valid as a ``type`` for a ``CommandLineTool`` output with no ``outputBinding`` set. The following -``` -outputs: - an_output_name: - type: stdout -stdout: a_stdout_file -``` +:: + + outputs: + an_output_name: + type: stdout + + stdout: a_stdout_file + + is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: a_stdout_file - -stdout: a_stdout_file -``` - -If there is no `stdout` name provided, a random filename will be created. -For example, the following -``` -outputs: - an_output_name: - type: stdout -``` + +:: + + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stdout_file + + stdout: a_stdout_file + + +If there is no ``stdout`` name provided, a random filename will be created. For example, the following + +:: + + outputs: + an_output_name: + type: stdout + + is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: random_stdout_filenameABCDEFG - -stdout: random_stdout_filenameABCDEFG -``` + +:: + + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stdout_filenameABCDEFG + + stdout: random_stdout_filenameABCDEFG """ -stderrLoader = _EnumLoader(("stderr",), "stderr") +stderrLoader: Final = _EnumLoader(("stderr",), "stderr") """ -Only valid as a `type` for a `CommandLineTool` output with no -`outputBinding` set. +Only valid as a ``type`` for a ``CommandLineTool`` output with no ``outputBinding`` set. 
The following -``` -outputs: - an_output_name: - type: stderr -stderr: a_stderr_file -``` +:: + + outputs: + an_output_name: + type: stderr + + stderr: a_stderr_file + + is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: a_stderr_file - -stderr: a_stderr_file -``` - -If there is no `stderr` name provided, a random filename will be created. -For example, the following -``` -outputs: - an_output_name: - type: stderr -``` + +:: + + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stderr_file + + stderr: a_stderr_file + + +If there is no ``stderr`` name provided, a random filename will be created. For example, the following + +:: + + outputs: + an_output_name: + type: stderr + + is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: random_stderr_filenameABCDEFG - -stderr: random_stderr_filenameABCDEFG -``` + +:: + + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stderr_filenameABCDEFG + + stderr: random_stderr_filenameABCDEFG """ -CommandLineToolLoader = _RecordLoader(CommandLineTool, None, None) -DockerRequirementLoader = _RecordLoader(DockerRequirement, None, None) -SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement, None, None) -SoftwarePackageLoader = _RecordLoader(SoftwarePackage, None, None) -DirentLoader = _RecordLoader(Dirent, None, None) -InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement, None, None) -EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement, None, None) -ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement, None, None) -ResourceRequirementLoader = _RecordLoader(ResourceRequirement, None, None) -ExpressionToolOutputParameterLoader = _RecordLoader( +CommandLineToolLoader: Final = _RecordLoader(CommandLineTool, None, None) +DockerRequirementLoader: Final = _RecordLoader(DockerRequirement, None, None) 
+SoftwareRequirementLoader: Final = _RecordLoader(SoftwareRequirement, None, None) +SoftwarePackageLoader: Final = _RecordLoader(SoftwarePackage, None, None) +DirentLoader: Final = _RecordLoader(Dirent, None, None) +InitialWorkDirRequirementLoader: Final = _RecordLoader( + InitialWorkDirRequirement, None, None +) +EnvVarRequirementLoader: Final = _RecordLoader(EnvVarRequirement, None, None) +ShellCommandRequirementLoader: Final = _RecordLoader( + ShellCommandRequirement, None, None +) +ResourceRequirementLoader: Final = _RecordLoader(ResourceRequirement, None, None) +ExpressionToolOutputParameterLoader: Final = _RecordLoader( ExpressionToolOutputParameter, None, None ) -ExpressionToolLoader = _RecordLoader(ExpressionTool, None, None) -LinkMergeMethodLoader = _EnumLoader( +ExpressionToolLoader: Final = _RecordLoader(ExpressionTool, None, None) +LinkMergeMethodLoader: Final = _EnumLoader( ( "merge_nested", "merge_flattened", @@ -24377,12 +20098,14 @@ def save( "LinkMergeMethod", ) """ -The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). +The input link merge method, described in `WorkflowStepInput <#WorkflowStepInput>`__. """ -WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter, None, None) -WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput, None, None) -WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput, None, None) -ScatterMethodLoader = _EnumLoader( +WorkflowOutputParameterLoader: Final = _RecordLoader( + WorkflowOutputParameter, None, None +) +WorkflowStepInputLoader: Final = _RecordLoader(WorkflowStepInput, None, None) +WorkflowStepOutputLoader: Final = _RecordLoader(WorkflowStepOutput, None, None) +ScatterMethodLoader: Final = _EnumLoader( ( "dotproduct", "nested_crossproduct", @@ -24391,40 +20114,34 @@ def save( "ScatterMethod", ) """ -The scatter method, as described in [workflow step scatter](#WorkflowStep). +The scatter method, as described in `workflow step scatter <#WorkflowStep>`__. 
""" -WorkflowStepLoader = _RecordLoader(WorkflowStep, None, None) -WorkflowLoader = _RecordLoader(Workflow, None, None) -SubworkflowFeatureRequirementLoader = _RecordLoader( +WorkflowStepLoader: Final = _RecordLoader(WorkflowStep, None, None) +WorkflowLoader: Final = _RecordLoader(Workflow, None, None) +SubworkflowFeatureRequirementLoader: Final = _RecordLoader( SubworkflowFeatureRequirement, None, None ) -ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement, None, None) -MultipleInputFeatureRequirementLoader = _RecordLoader( +ScatterFeatureRequirementLoader: Final = _RecordLoader( + ScatterFeatureRequirement, None, None +) +MultipleInputFeatureRequirementLoader: Final = _RecordLoader( MultipleInputFeatureRequirement, None, None ) -StepInputExpressionRequirementLoader = _RecordLoader( +StepInputExpressionRequirementLoader: Final = _RecordLoader( StepInputExpressionRequirement, None, None ) -LoadListingRequirementLoader = _RecordLoader(LoadListingRequirement, None, None) -InplaceUpdateRequirementLoader = _RecordLoader(InplaceUpdateRequirement, None, None) -SecretsLoader = _RecordLoader(Secrets, None, None) -TimeLimitLoader = _RecordLoader(TimeLimit, None, None) -WorkReuseLoader = _RecordLoader(WorkReuse, None, None) -NetworkAccessLoader = _RecordLoader(NetworkAccess, None, None) -ProcessGeneratorLoader = _RecordLoader(ProcessGenerator, None, None) -MPIRequirementLoader = _RecordLoader(MPIRequirement, None, None) -CUDARequirementLoader = _RecordLoader(CUDARequirement, None, None) -ShmSizeLoader = _RecordLoader(ShmSize, None, None) -array_of_strtype = _ArrayLoader(strtype) -union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( +array_of_strtype: Final = _ArrayLoader(strtype) +union_of_None_type_or_strtype_or_array_of_strtype: Final = _UnionLoader( ( None_type, strtype, array_of_strtype, ) ) -uri_strtype_True_False_None_None = _URILoader(strtype, True, False, None, None) 
-union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _UnionLoader( +uri_strtype_True_False_None_None: Final = _URILoader(strtype, True, False, None, None) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( PrimitiveTypeLoader, RecordSchemaLoader, @@ -24435,10 +20152,14 @@ def save( strtype, ) ) -array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype ) -union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _UnionLoader( +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( PrimitiveTypeLoader, RecordSchemaLoader, @@ -24450,51 +20171,57 @@ def save( array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, ) ) 
-typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, 2, "v1.1", ) -array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) -union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader( +array_of_RecordFieldLoader: Final = _ArrayLoader(RecordFieldLoader) +union_of_None_type_or_array_of_RecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_RecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader( +idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader: Final = _IdMapLoader( union_of_None_type_or_array_of_RecordFieldLoader, "name", "type" ) -Record_nameLoader = _EnumLoader(("record",), "Record_name") -typedsl_Record_nameLoader_2 = _TypeDSLLoader(Record_nameLoader, 2, "v1.1") -union_of_None_type_or_strtype = _UnionLoader( +Record_nameLoader: Final = _EnumLoader(("record",), "Record_name") +typedsl_Record_nameLoader_2: Final = _TypeDSLLoader(Record_nameLoader, 2, "v1.1") +union_of_None_type_or_strtype: Final = _UnionLoader( ( None_type, strtype, ) ) -uri_union_of_None_type_or_strtype_True_False_None_None = 
_URILoader( +uri_union_of_None_type_or_strtype_True_False_None_None: Final = _URILoader( union_of_None_type_or_strtype, True, False, None, None ) -uri_array_of_strtype_True_False_None_None = _URILoader( +uri_array_of_strtype_True_False_None_None: Final = _URILoader( array_of_strtype, True, False, None, None ) -Enum_nameLoader = _EnumLoader(("enum",), "Enum_name") -typedsl_Enum_nameLoader_2 = _TypeDSLLoader(Enum_nameLoader, 2, "v1.1") -uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None = _URILoader( +Enum_nameLoader: Final = _EnumLoader(("enum",), "Enum_name") +typedsl_Enum_nameLoader_2: Final = _TypeDSLLoader(Enum_nameLoader, 2, "v1.1") +uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, False, True, 2, None, ) -Array_nameLoader = _EnumLoader(("array",), "Array_name") -typedsl_Array_nameLoader_2 = _TypeDSLLoader(Array_nameLoader, 2, "v1.1") -Map_nameLoader = _EnumLoader(("map",), "Map_name") -typedsl_Map_nameLoader_2 = _TypeDSLLoader(Map_nameLoader, 2, "v1.1") -Union_nameLoader = _EnumLoader(("union",), "Union_name") -typedsl_Union_nameLoader_2 = _TypeDSLLoader(Union_nameLoader, 2, "v1.1") 
-union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype = _UnionLoader( +Array_nameLoader: Final = _EnumLoader(("array",), "Array_name") +typedsl_Array_nameLoader_2: Final = _TypeDSLLoader(Array_nameLoader, 2, "v1.1") +Map_nameLoader: Final = _EnumLoader(("map",), "Map_name") +typedsl_Map_nameLoader_2: Final = _TypeDSLLoader(Map_nameLoader, 2, "v1.1") +Union_nameLoader: Final = _EnumLoader(("union",), "Union_name") +typedsl_Union_nameLoader_2: Final = _TypeDSLLoader(Union_nameLoader, 2, "v1.1") +union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( PrimitiveTypeLoader, CWLRecordSchemaLoader, @@ -24503,10 +20230,14 @@ def save( strtype, ) ) -array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype ) -union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype = _UnionLoader( +union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( PrimitiveTypeLoader, CWLRecordSchemaLoader, @@ -24516,73 +20247,81 @@ def save( array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype, ) ) 
-uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( +uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype, False, True, 2, None, ) -typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype, 2, "v1.1", ) -array_of_CWLRecordFieldLoader = _ArrayLoader(CWLRecordFieldLoader) -union_of_None_type_or_array_of_CWLRecordFieldLoader = _UnionLoader( +array_of_CWLRecordFieldLoader: Final = _ArrayLoader(CWLRecordFieldLoader) +union_of_None_type_or_array_of_CWLRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_CWLRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader = _IdMapLoader( 
+idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader: Final = _IdMapLoader( union_of_None_type_or_array_of_CWLRecordFieldLoader, "name", "type" ) -File_classLoader = _EnumLoader(("File",), "File_class") -uri_File_classLoader_False_True_None_None = _URILoader( +File_classLoader: Final = _EnumLoader(("File",), "File_class") +uri_File_classLoader_False_True_None_None: Final = _URILoader( File_classLoader, False, True, None, None ) -uri_union_of_None_type_or_strtype_False_False_None_None = _URILoader( +uri_union_of_None_type_or_strtype_False_False_None_None: Final = _URILoader( union_of_None_type_or_strtype, False, False, None, None ) -union_of_None_type_or_inttype = _UnionLoader( +union_of_None_type_or_inttype: Final = _UnionLoader( ( None_type, inttype, ) ) -union_of_FileLoader_or_DirectoryLoader = _UnionLoader( +union_of_FileLoader_or_DirectoryLoader: Final = _UnionLoader( ( FileLoader, DirectoryLoader, ) ) -array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( +array_of_union_of_FileLoader_or_DirectoryLoader: Final = _ArrayLoader( union_of_FileLoader_or_DirectoryLoader ) -union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( - ( - None_type, - array_of_union_of_FileLoader_or_DirectoryLoader, +union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader: Final = ( + _UnionLoader( + ( + None_type, + array_of_union_of_FileLoader_or_DirectoryLoader, + ) ) ) -uri_union_of_None_type_or_strtype_True_False_None_True = _URILoader( +uri_union_of_None_type_or_strtype_True_False_None_True: Final = _URILoader( union_of_None_type_or_strtype, True, False, None, True ) -Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None_None = _URILoader( +Directory_classLoader: Final = _EnumLoader(("Directory",), "Directory_class") +uri_Directory_classLoader_False_True_None_None: Final = _URILoader( Directory_classLoader, False, True, None, None ) 
-union_of_strtype_or_ExpressionLoader = _UnionLoader( +union_of_strtype_or_ExpressionLoader: Final = _UnionLoader( ( strtype, ExpressionLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader = _ArrayLoader( +array_of_union_of_strtype_or_ExpressionLoader: Final = _ArrayLoader( union_of_strtype_or_ExpressionLoader ) -union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_union_of_strtype_or_ExpressionLoader: ( + Final +) = _UnionLoader( ( None_type, strtype, @@ -24590,13 +20329,15 @@ def save( array_of_union_of_strtype_or_ExpressionLoader, ) ) -union_of_None_type_or_booltype = _UnionLoader( +union_of_None_type_or_booltype: Final = _UnionLoader( ( None_type, booltype, ) ) -union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, InputRecordSchemaLoader, @@ -24605,10 +20346,14 @@ def save( strtype, ) ) -array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype ) -union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( 
+union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, InputRecordSchemaLoader, @@ -24618,35 +20363,41 @@ def save( array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, ) ) -typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, 2, "v1.1", ) -union_of_None_type_or_CommandLineBindingLoader = _UnionLoader( +union_of_None_type_or_CommandLineBindingLoader: Final = _UnionLoader( ( None_type, CommandLineBindingLoader, ) ) -array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader) -union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader( +array_of_InputRecordFieldLoader: Final = _ArrayLoader(InputRecordFieldLoader) +union_of_None_type_or_array_of_InputRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_InputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type" 
+idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader: Final = ( + _IdMapLoader(union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type") ) -uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( +uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, False, True, 2, None, ) -union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, OutputRecordSchemaLoader, @@ -24655,10 +20406,14 @@ def save( strtype, ) ) -array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype ) 
-union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, OutputRecordSchemaLoader, @@ -24668,50 +20423,60 @@ def save( array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, ) ) -typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, 2, "v1.1", ) -union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader( +union_of_None_type_or_CommandOutputBindingLoader: Final = _UnionLoader( ( None_type, CommandOutputBindingLoader, ) ) -array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader) -union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader( +array_of_OutputRecordFieldLoader: Final = _ArrayLoader(OutputRecordFieldLoader) 
+union_of_None_type_or_array_of_OutputRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_OutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader: Final = ( + _IdMapLoader(union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type") ) -uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( +uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, False, True, 2, None, ) -union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - strtype, - array_of_strtype, - ExpressionLoader, +union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader: Final = ( + _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ExpressionLoader, + ) ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True = _URILoader( +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True: ( + Final +) = _URILoader( union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, True, False, None, True, ) 
-union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( None_type, CWLTypeLoader, @@ -24722,30 +20487,36 @@ def save( array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, ) ) -typedsl_union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_None_type_or_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, 2, "v1.1", ) -union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_strtype_or_ExpressionLoader: Final = _UnionLoader( ( None_type, strtype, ExpressionLoader, ) ) -uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True = _URILoader( - union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None, True +uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True: 
Final = ( + _URILoader( + union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None, True + ) ) -array_of_InputParameterLoader = _ArrayLoader(InputParameterLoader) -idmap_inputs_array_of_InputParameterLoader = _IdMapLoader( +array_of_InputParameterLoader: Final = _ArrayLoader(InputParameterLoader) +idmap_inputs_array_of_InputParameterLoader: Final = _IdMapLoader( array_of_InputParameterLoader, "id", "type" ) -array_of_OutputParameterLoader = _ArrayLoader(OutputParameterLoader) -idmap_outputs_array_of_OutputParameterLoader = _IdMapLoader( +array_of_OutputParameterLoader: Final = _ArrayLoader(OutputParameterLoader) +idmap_outputs_array_of_OutputParameterLoader: Final = _IdMapLoader( array_of_OutputParameterLoader, "id", "type" ) -union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader = _UnionLoader( +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader: ( + Final +) = _UnionLoader( ( InlineJavascriptRequirementLoader, SchemaDefRequirementLoader, @@ -24759,32 +20530,31 @@ def save( ScatterFeatureRequirementLoader, MultipleInputFeatureRequirementLoader, 
StepInputExpressionRequirementLoader, - LoadListingRequirementLoader, - InplaceUpdateRequirementLoader, - SecretsLoader, - TimeLimitLoader, - WorkReuseLoader, - NetworkAccessLoader, - MPIRequirementLoader, - CUDARequirementLoader, - ShmSizeLoader, ) ) -array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader = _ArrayLoader( - union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader +array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader: ( + Final +) = _ArrayLoader( + 
union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader ) -union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader = _UnionLoader( +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader: ( + Final +) = _UnionLoader( ( None_type, - 
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, ) ) -idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader = _IdMapLoader( - 
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, +idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader: ( + Final +) = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, "class", "None", ) 
-union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type = _UnionLoader( +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type: ( + Final +) = _UnionLoader( ( InlineJavascriptRequirementLoader, SchemaDefRequirementLoader, @@ -24798,80 +20568,83 @@ def save( ScatterFeatureRequirementLoader, MultipleInputFeatureRequirementLoader, StepInputExpressionRequirementLoader, - LoadListingRequirementLoader, - InplaceUpdateRequirementLoader, - SecretsLoader, - TimeLimitLoader, - WorkReuseLoader, - NetworkAccessLoader, - MPIRequirementLoader, - CUDARequirementLoader, - ShmSizeLoader, Any_type, ) ) 
-array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type = _ArrayLoader( - union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type +array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type: ( + Final +) = _ArrayLoader( + 
union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type ) -union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type = _UnionLoader( +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type: ( + Final +) = _UnionLoader( ( None_type, - 
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, ) ) -idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type = _IdMapLoader( - 
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_LoadListingRequirementLoader_or_InplaceUpdateRequirementLoader_or_SecretsLoader_or_TimeLimitLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type, +idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type: ( + Final +) = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, "class", "None", ) -union_of_None_type_or_CWLVersionLoader = _UnionLoader( +union_of_None_type_or_CWLVersionLoader: Final = _UnionLoader( ( None_type, CWLVersionLoader, ) ) -uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None = _URILoader( +uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None: Final = _URILoader( union_of_None_type_or_CWLVersionLoader, False, 
True, None, None ) -InlineJavascriptRequirement_classLoader = _EnumLoader( +InlineJavascriptRequirement_classLoader: Final = _EnumLoader( ("InlineJavascriptRequirement",), "InlineJavascriptRequirement_class" ) -uri_InlineJavascriptRequirement_classLoader_False_True_None_None = _URILoader( +uri_InlineJavascriptRequirement_classLoader_False_True_None_None: Final = _URILoader( InlineJavascriptRequirement_classLoader, False, True, None, None ) -union_of_None_type_or_array_of_strtype = _UnionLoader( +union_of_None_type_or_array_of_strtype: Final = _UnionLoader( ( None_type, array_of_strtype, ) ) -SchemaDefRequirement_classLoader = _EnumLoader( +SchemaDefRequirement_classLoader: Final = _EnumLoader( ("SchemaDefRequirement",), "SchemaDefRequirement_class" ) -uri_SchemaDefRequirement_classLoader_False_True_None_None = _URILoader( +uri_SchemaDefRequirement_classLoader_False_True_None_None: Final = _URILoader( SchemaDefRequirement_classLoader, False, True, None, None ) -union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = ( - _UnionLoader( - ( - InputRecordSchemaLoader, - InputEnumSchemaLoader, - InputArraySchemaLoader, - ) +union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader: ( + Final +) = _UnionLoader( + ( + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, ) ) -array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader = _ArrayLoader( +array_of_union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader: ( + Final +) = _ArrayLoader( union_of_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader ) -union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( - ( - None_type, - strtype, - ExpressionLoader, - array_of_strtype, +union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype: Final = ( + _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + 
array_of_strtype, + ) ) ) -union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, CommandInputRecordSchemaLoader, @@ -24880,10 +20653,14 @@ def save( strtype, ) ) -array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype ) -union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, CommandInputRecordSchemaLoader, @@ -24893,31 +20670,39 @@ def save( array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, ) ) 
-typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, 2, "v1.1", ) -array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader) -union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader( +array_of_CommandInputRecordFieldLoader: Final = _ArrayLoader( + CommandInputRecordFieldLoader +) +union_of_None_type_or_array_of_CommandInputRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_CommandInputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader: Final = ( _IdMapLoader( union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" ) ) -uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( 
+uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, False, True, 2, None, ) -union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, CommandOutputRecordSchemaLoader, @@ -24926,10 +20711,14 @@ def save( strtype, ) ) -array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype ) -union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( 
+union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, CommandOutputRecordSchemaLoader, @@ -24939,31 +20728,39 @@ def save( array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, ) ) -typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, 2, "v1.1", ) -array_of_CommandOutputRecordFieldLoader = _ArrayLoader(CommandOutputRecordFieldLoader) -union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader( +array_of_CommandOutputRecordFieldLoader: Final = _ArrayLoader( + CommandOutputRecordFieldLoader +) +union_of_None_type_or_array_of_CommandOutputRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_CommandOutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( 
+idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader: Final = ( _IdMapLoader( union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" ) ) -uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( +uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, False, True, 2, None, ) -union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( None_type, CWLTypeLoader, @@ -24974,12 +20771,16 @@ def save( array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, ) ) 
-typedsl_union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_None_type_or_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, 2, "v1.1", ) -union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( None_type, CWLTypeLoader, @@ -24992,72 +20793,82 @@ def save( array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, ) ) 
-typedsl_union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_None_type_or_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, 2, "v1.1", ) -CommandLineTool_classLoader = _EnumLoader(("CommandLineTool",), "CommandLineTool_class") -uri_CommandLineTool_classLoader_False_True_None_None = _URILoader( +CommandLineTool_classLoader: Final = _EnumLoader( + ("CommandLineTool",), "CommandLineTool_class" +) +uri_CommandLineTool_classLoader_False_True_None_None: Final = _URILoader( CommandLineTool_classLoader, False, True, None, None ) -array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader) -idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader( +array_of_CommandInputParameterLoader: Final = _ArrayLoader(CommandInputParameterLoader) +idmap_inputs_array_of_CommandInputParameterLoader: Final = _IdMapLoader( array_of_CommandInputParameterLoader, "id", "type" ) -array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader) -idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader( 
+array_of_CommandOutputParameterLoader: Final = _ArrayLoader( + CommandOutputParameterLoader +) +idmap_outputs_array_of_CommandOutputParameterLoader: Final = _IdMapLoader( array_of_CommandOutputParameterLoader, "id", "type" ) -union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( +union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader: Final = _UnionLoader( ( strtype, ExpressionLoader, CommandLineBindingLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader: Final = ( _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) ) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader: ( + Final +) = _UnionLoader( ( None_type, array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, ) ) -array_of_inttype = _ArrayLoader(inttype) -union_of_None_type_or_array_of_inttype = _UnionLoader( +array_of_inttype: Final = _ArrayLoader(inttype) +union_of_None_type_or_array_of_inttype: Final = _UnionLoader( ( None_type, array_of_inttype, ) ) -DockerRequirement_classLoader = _EnumLoader( +DockerRequirement_classLoader: Final = _EnumLoader( ("DockerRequirement",), "DockerRequirement_class" ) -uri_DockerRequirement_classLoader_False_True_None_None = _URILoader( +uri_DockerRequirement_classLoader_False_True_None_None: Final = _URILoader( DockerRequirement_classLoader, False, True, None, None ) -SoftwareRequirement_classLoader = _EnumLoader( +SoftwareRequirement_classLoader: Final = _EnumLoader( ("SoftwareRequirement",), "SoftwareRequirement_class" ) -uri_SoftwareRequirement_classLoader_False_True_None_None = _URILoader( +uri_SoftwareRequirement_classLoader_False_True_None_None: Final = _URILoader( SoftwareRequirement_classLoader, False, True, None, 
None ) -array_of_SoftwarePackageLoader = _ArrayLoader(SoftwarePackageLoader) -idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader( +array_of_SoftwarePackageLoader: Final = _ArrayLoader(SoftwarePackageLoader) +idmap_packages_array_of_SoftwarePackageLoader: Final = _IdMapLoader( array_of_SoftwarePackageLoader, "package", "specs" ) -uri_union_of_None_type_or_array_of_strtype_False_False_None_True = _URILoader( +uri_union_of_None_type_or_array_of_strtype_False_False_None_True: Final = _URILoader( union_of_None_type_or_array_of_strtype, False, False, None, True ) -InitialWorkDirRequirement_classLoader = _EnumLoader( +InitialWorkDirRequirement_classLoader: Final = _EnumLoader( ("InitialWorkDirRequirement",), "InitialWorkDirRequirement_class" ) -uri_InitialWorkDirRequirement_classLoader_False_True_None_None = _URILoader( +uri_InitialWorkDirRequirement_classLoader_False_True_None_None: Final = _URILoader( InitialWorkDirRequirement_classLoader, False, True, None, None ) -union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _UnionLoader( +union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader: ( + Final +) = _UnionLoader( ( FileLoader, DirectoryLoader, @@ -25066,39 +20877,43 @@ def save( ExpressionLoader, ) ) -array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader = _ArrayLoader( +array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader: ( + Final +) = _ArrayLoader( union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader ) -union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader = _UnionLoader( +union_of_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader_or_strtype_or_ExpressionLoader: ( + Final +) = _UnionLoader( ( 
array_of_union_of_FileLoader_or_DirectoryLoader_or_DirentLoader_or_strtype_or_ExpressionLoader, strtype, ExpressionLoader, ) ) -EnvVarRequirement_classLoader = _EnumLoader( +EnvVarRequirement_classLoader: Final = _EnumLoader( ("EnvVarRequirement",), "EnvVarRequirement_class" ) -uri_EnvVarRequirement_classLoader_False_True_None_None = _URILoader( +uri_EnvVarRequirement_classLoader_False_True_None_None: Final = _URILoader( EnvVarRequirement_classLoader, False, True, None, None ) -array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader) -idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader( +array_of_EnvironmentDefLoader: Final = _ArrayLoader(EnvironmentDefLoader) +idmap_envDef_array_of_EnvironmentDefLoader: Final = _IdMapLoader( array_of_EnvironmentDefLoader, "envName", "envValue" ) -ShellCommandRequirement_classLoader = _EnumLoader( +ShellCommandRequirement_classLoader: Final = _EnumLoader( ("ShellCommandRequirement",), "ShellCommandRequirement_class" ) -uri_ShellCommandRequirement_classLoader_False_True_None_None = _URILoader( +uri_ShellCommandRequirement_classLoader_False_True_None_None: Final = _URILoader( ShellCommandRequirement_classLoader, False, True, None, None ) -ResourceRequirement_classLoader = _EnumLoader( +ResourceRequirement_classLoader: Final = _EnumLoader( ("ResourceRequirement",), "ResourceRequirement_class" ) -uri_ResourceRequirement_classLoader_False_True_None_None = _URILoader( +uri_ResourceRequirement_classLoader_False_True_None_None: Final = _URILoader( ResourceRequirement_classLoader, False, True, None, None ) -union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_inttype_or_strtype_or_ExpressionLoader: Final = _UnionLoader( ( None_type, inttype, @@ -25106,7 +20921,9 @@ def save( ExpressionLoader, ) ) 
-union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( None_type, CWLTypeLoader, @@ -25117,193 +20934,166 @@ def save( array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, ) ) -typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_None_type_or_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, 2, "v1.1", ) -ExpressionTool_classLoader = _EnumLoader(("ExpressionTool",), "ExpressionTool_class") -uri_ExpressionTool_classLoader_False_True_None_None = _URILoader( +ExpressionTool_classLoader: Final = _EnumLoader( + ("ExpressionTool",), "ExpressionTool_class" +) +uri_ExpressionTool_classLoader_False_True_None_None: Final = _URILoader( ExpressionTool_classLoader, False, True, None, None ) 
-array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( +array_of_ExpressionToolOutputParameterLoader: Final = _ArrayLoader( ExpressionToolOutputParameterLoader ) -idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( +idmap_outputs_array_of_ExpressionToolOutputParameterLoader: Final = _IdMapLoader( array_of_ExpressionToolOutputParameterLoader, "id", "type" ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1, None +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None: Final = ( + _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1, None) ) -union_of_None_type_or_LinkMergeMethodLoader = _UnionLoader( +union_of_None_type_or_LinkMergeMethodLoader: Final = _UnionLoader( ( None_type, LinkMergeMethodLoader, ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2, None +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None: Final = ( + _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2, None) ) -array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader) -idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader( +array_of_WorkflowStepInputLoader: Final = _ArrayLoader(WorkflowStepInputLoader) +idmap_in__array_of_WorkflowStepInputLoader: Final = _IdMapLoader( array_of_WorkflowStepInputLoader, "id", "source" ) -union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( +union_of_strtype_or_WorkflowStepOutputLoader: Final = _UnionLoader( ( strtype, WorkflowStepOutputLoader, ) ) -array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader( +array_of_union_of_strtype_or_WorkflowStepOutputLoader: Final = _ArrayLoader( union_of_strtype_or_WorkflowStepOutputLoader ) -union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = 
_UnionLoader( +union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader: Final = _UnionLoader( (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) ) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None = _URILoader( +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None: ( + Final +) = _URILoader( union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, True, False, None, None, ) -array_of_Any_type = _ArrayLoader(Any_type) -union_of_None_type_or_array_of_Any_type = _UnionLoader( +array_of_Any_type: Final = _ArrayLoader(Any_type) +union_of_None_type_or_array_of_Any_type: Final = _UnionLoader( ( None_type, array_of_Any_type, ) ) -idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( +idmap_hints_union_of_None_type_or_array_of_Any_type: Final = _IdMapLoader( union_of_None_type_or_array_of_Any_type, "class", "None" ) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader = _UnionLoader( +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader: ( + Final +) = _UnionLoader( ( strtype, CommandLineToolLoader, ExpressionToolLoader, WorkflowLoader, - ProcessGeneratorLoader, ) ) -uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_False_False_None_None = _URILoader( - union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader, +uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None_None: ( + Final +) = _URILoader( + union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, False, False, None, None, ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0, None 
+uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None: Final = ( + _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0, None) ) -union_of_None_type_or_ScatterMethodLoader = _UnionLoader( +union_of_None_type_or_ScatterMethodLoader: Final = _UnionLoader( ( None_type, ScatterMethodLoader, ) ) -uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None = _URILoader( +uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None: Final = _URILoader( union_of_None_type_or_ScatterMethodLoader, False, True, None, None ) -Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") -uri_Workflow_classLoader_False_True_None_None = _URILoader( +Workflow_classLoader: Final = _EnumLoader(("Workflow",), "Workflow_class") +uri_Workflow_classLoader_False_True_None_None: Final = _URILoader( Workflow_classLoader, False, True, None, None ) -array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) -idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( +array_of_WorkflowOutputParameterLoader: Final = _ArrayLoader( + WorkflowOutputParameterLoader +) +idmap_outputs_array_of_WorkflowOutputParameterLoader: Final = _IdMapLoader( array_of_WorkflowOutputParameterLoader, "id", "type" ) -array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader) -union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,)) -idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader( +array_of_WorkflowStepLoader: Final = _ArrayLoader(WorkflowStepLoader) +union_of_array_of_WorkflowStepLoader: Final = _UnionLoader( + (array_of_WorkflowStepLoader,) +) +idmap_steps_union_of_array_of_WorkflowStepLoader: Final = _IdMapLoader( union_of_array_of_WorkflowStepLoader, "id", "None" ) -SubworkflowFeatureRequirement_classLoader = _EnumLoader( +SubworkflowFeatureRequirement_classLoader: Final = _EnumLoader( ("SubworkflowFeatureRequirement",), "SubworkflowFeatureRequirement_class" ) 
-uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None = _URILoader( +uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None: Final = _URILoader( SubworkflowFeatureRequirement_classLoader, False, True, None, None ) -ScatterFeatureRequirement_classLoader = _EnumLoader( +ScatterFeatureRequirement_classLoader: Final = _EnumLoader( ("ScatterFeatureRequirement",), "ScatterFeatureRequirement_class" ) -uri_ScatterFeatureRequirement_classLoader_False_True_None_None = _URILoader( +uri_ScatterFeatureRequirement_classLoader_False_True_None_None: Final = _URILoader( ScatterFeatureRequirement_classLoader, False, True, None, None ) -MultipleInputFeatureRequirement_classLoader = _EnumLoader( +MultipleInputFeatureRequirement_classLoader: Final = _EnumLoader( ("MultipleInputFeatureRequirement",), "MultipleInputFeatureRequirement_class" ) -uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None = _URILoader( - MultipleInputFeatureRequirement_classLoader, False, True, None, None +uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None: Final = ( + _URILoader(MultipleInputFeatureRequirement_classLoader, False, True, None, None) ) -StepInputExpressionRequirement_classLoader = _EnumLoader( +StepInputExpressionRequirement_classLoader: Final = _EnumLoader( ("StepInputExpressionRequirement",), "StepInputExpressionRequirement_class" ) -uri_StepInputExpressionRequirement_classLoader_False_True_None_None = _URILoader( +uri_StepInputExpressionRequirement_classLoader_False_True_None_None: Final = _URILoader( StepInputExpressionRequirement_classLoader, False, True, None, None ) -uri_strtype_False_True_None_None = _URILoader(strtype, False, True, None, None) -LoadListingEnumLoader = _EnumLoader( - ( - "no_listing", - "shallow_listing", - "deep_listing", - ), - "LoadListingEnum", -) -union_of_LoadListingEnumLoader = _UnionLoader((LoadListingEnumLoader,)) -uri_array_of_strtype_False_False_0_None = _URILoader( - array_of_strtype, False, False, 0, 
None -) -union_of_inttype_or_strtype = _UnionLoader( - ( - inttype, - strtype, - ) -) -union_of_booltype_or_strtype = _UnionLoader( - ( - booltype, - strtype, - ) -) -union_of_inttype_or_ExpressionLoader = _UnionLoader( - ( - inttype, - ExpressionLoader, - ) -) -union_of_strtype_or_array_of_strtype = _UnionLoader( - ( - strtype, - array_of_strtype, - ) -) -union_of_None_type_or_inttype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - inttype, - ExpressionLoader, - ) -) -union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader = _UnionLoader( - ( - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - ProcessGeneratorLoader, +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader: Final = ( + _UnionLoader( + ( + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + ) ) ) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader = _ArrayLoader( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader: ( + Final +) = _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader ) -union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader = _UnionLoader( +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader: ( + Final +) = _UnionLoader( ( CommandLineToolLoader, ExpressionToolLoader, WorkflowLoader, - ProcessGeneratorLoader, - array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader, + array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, ) ) @@ 
-25323,15 +21113,15 @@ def save( def load_document( doc: Any, - baseuri: Optional[str] = None, - loadingOptions: Optional[LoadingOptions] = None, + baseuri: str | None = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: if baseuri is None: baseuri = file_uri(os.getcwd()) + "/" if loadingOptions is None: loadingOptions = LoadingOptions() result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader, + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, doc, baseuri, loadingOptions, @@ -25341,16 +21131,16 @@ def load_document( def load_document_with_metadata( doc: Any, - baseuri: Optional[str] = None, - loadingOptions: Optional[LoadingOptions] = None, - addl_metadata_fields: Optional[MutableSequence[str]] = None, + baseuri: str | None = None, + loadingOptions: LoadingOptions | None = None, + addl_metadata_fields: MutableSequence[str] | None = None, ) -> Any: if baseuri is None: baseuri = file_uri(os.getcwd()) + "/" if loadingOptions is None: loadingOptions = LoadingOptions(fileuri=baseuri) return _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader, + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, doc, baseuri, loadingOptions, @@ -25361,7 +21151,7 @@ def load_document_with_metadata( def load_document_by_string( string: Any, uri: str, - loadingOptions: Optional[LoadingOptions] = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: yaml = yaml_no_ts() result = 
yaml.load(string) @@ -25371,7 +21161,7 @@ def load_document_by_string( loadingOptions = LoadingOptions(fileuri=uri) result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader, + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, result, uri, loadingOptions, @@ -25382,7 +21172,7 @@ def load_document_by_string( def load_document_by_yaml( yaml: Any, uri: str, - loadingOptions: Optional[LoadingOptions] = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: """ Shortcut to load via a YAML object. @@ -25394,7 +21184,7 @@ def load_document_by_yaml( loadingOptions = LoadingOptions(fileuri=uri) result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader, + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, yaml, uri, loadingOptions, diff --git a/src/cwl_utils/parser/cwl_v1_0_utils.py b/src/cwl_utils/parser/cwl_v1_0_utils.py index 4f4dfd5f..7927d2bd 100644 --- a/src/cwl_utils/parser/cwl_v1_0_utils.py +++ b/src/cwl_utils/parser/cwl_v1_0_utils.py @@ -9,6 +9,8 @@ from urllib.parse import urldefrag from schema_salad.exceptions import ValidationException +from schema_salad.metaschema import RecordSchema, ArraySchema +from schema_salad.runtime import shortname, LoadingOptions, file_uri, save from schema_salad.sourceline import SourceLine, add_lc_filename from schema_salad.utils import aslist, json_dumps, yaml_no_ts @@ -26,7 +28,7 @@ def _compare_records( - src: cwl.RecordSchema, sink: 
cwl.RecordSchema, strict: bool = False + src: RecordSchema, sink: RecordSchema, strict: bool = False ) -> bool: """ Compare two records, ensuring they have compatible fields. @@ -34,10 +36,8 @@ def _compare_records( This handles normalizing record names, which will be relative to workflow step, so that they can be compared. """ - srcfields = {cwl.shortname(field.name): field.type_ for field in (src.fields or {})} - sinkfields = { - cwl.shortname(field.name): field.type_ for field in (sink.fields or {}) - } + srcfields = {shortname(field.name): field.type_ for field in (src.fields or {})} + sinkfields = {shortname(field.name): field.type_ for field in (sink.fields or {})} for key in sinkfields.keys(): if ( not can_assign_src_to_sink( @@ -60,14 +60,14 @@ def _compare_records( def _compare_type(type1: Any, type2: Any) -> bool: match (type1, type1): - case cwl.ArraySchema() as t1, cwl.ArraySchema() as t2: + case ArraySchema() as t1, ArraySchema() as t2: return _compare_type(t1.items, t2.items) - case cwl.RecordSchema(), cwl.RecordSchema(): + case RecordSchema(), RecordSchema(): fields1 = { - cwl.shortname(field.name): field.type_ for field in (type1.fields or {}) + shortname(field.name): field.type_ for field in (type1.fields or {}) } fields2 = { - cwl.shortname(field.name): field.type_ for field in (type2.fields or {}) + shortname(field.name): field.type_ for field in (type2.fields or {}) } if fields1.keys() != fields2.keys(): return False @@ -85,9 +85,9 @@ def _compare_type(type1: Any, type2: Any) -> bool: def _inputfile_load( doc: str | MutableMapping[str, Any] | MutableSequence[Any], baseuri: str, - loadingOptions: cwl.LoadingOptions, + loadingOptions: LoadingOptions, addl_metadata_fields: MutableSequence[str] | None = None, -) -> tuple[Any, cwl.LoadingOptions]: +) -> tuple[Any, LoadingOptions]: loader = cwl.CWLInputFileLoader match doc: case str(): @@ -101,9 +101,7 @@ def _inputfile_load( yaml = yaml_no_ts() result = yaml.load(textIO) add_lc_filename(result, 
doc_url) - loadingOptions = cwl.LoadingOptions( - copyfrom=loadingOptions, fileuri=doc_url - ) + loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url) _inputfile_load( result, doc_url, @@ -117,7 +115,7 @@ def _inputfile_load( if mf in doc: addl_metadata[mf] = doc[mf] - loadingOptions = cwl.LoadingOptions( + loadingOptions = LoadingOptions( copyfrom=loadingOptions, baseuri=baseuri, addl_metadata=addl_metadata, @@ -157,9 +155,9 @@ def can_assign_src_to_sink(src: Any, sink: Any, strict: bool = False) -> bool: """ if "Any" in (src, sink): return True - if isinstance(src, cwl.ArraySchema) and isinstance(sink, cwl.ArraySchema): + if isinstance(src, ArraySchema) and isinstance(sink, ArraySchema): return can_assign_src_to_sink(src.items, sink.items, strict) - if isinstance(src, cwl.RecordSchema) and isinstance(sink, cwl.RecordSchema): + if isinstance(src, RecordSchema) and isinstance(sink, RecordSchema): return _compare_records(src, sink, strict) if isinstance(src, MutableSequence): if strict: @@ -245,7 +243,7 @@ def check_types( return "exception" if linkMerge == "merge_nested": return check_types( - cwl.ArraySchema(items=srctype, type_="array"), sinktype, None, None + ArraySchema(items=srctype, type_="array"), sinktype, None, None ) if linkMerge == "merge_flattened": return check_types(merge_flatten_type(srctype), sinktype, None, None) @@ -300,13 +298,13 @@ def convert_stdstreams_to_files(clt: cwl.CommandLineTool) -> None: def load_inputfile( doc: Any, baseuri: str | None = None, - loadingOptions: cwl.LoadingOptions | None = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: """Load a CWL v1.0 input file from a serialized YAML string or a YAML object.""" if baseuri is None: - baseuri = cwl.file_uri(str(Path.cwd())) + "/" + baseuri = file_uri(str(Path.cwd())) + "/" if loadingOptions is None: - loadingOptions = cwl.LoadingOptions() + loadingOptions = LoadingOptions() result, metadata = _inputfile_load( doc, @@ -319,14 +317,14 @@ def 
load_inputfile( def load_inputfile_by_string( string: Any, uri: str, - loadingOptions: cwl.LoadingOptions | None = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: """Load a CWL v1.0 input file from a serialized YAML string.""" result = yaml_no_ts().load(string) add_lc_filename(result, uri) if loadingOptions is None: - loadingOptions = cwl.LoadingOptions(fileuri=uri) + loadingOptions = LoadingOptions(fileuri=uri) result, metadata = _inputfile_load( result, @@ -339,13 +337,13 @@ def load_inputfile_by_string( def load_inputfile_by_yaml( yaml: Any, uri: str, - loadingOptions: cwl.LoadingOptions | None = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: """Load a CWL v1.0 input file from a YAML object.""" add_lc_filename(yaml, uri) if loadingOptions is None: - loadingOptions = cwl.LoadingOptions(fileuri=uri) + loadingOptions = LoadingOptions(fileuri=uri) result, metadata = _inputfile_load( yaml, @@ -359,9 +357,9 @@ def merge_flatten_type(src: Any) -> Any: """Return the merge flattened type of the source type.""" if isinstance(src, MutableSequence): return [merge_flatten_type(t) for t in src] - if isinstance(src, cwl.ArraySchema): + if isinstance(src, ArraySchema): return src - return cwl.ArraySchema(type_="array", items=src) + return ArraySchema(type_="array", items=src) def type_for_step_input( @@ -378,7 +376,7 @@ def type_for_step_input( if cast(str, step_input.id).split("#")[-1] == in_.id.split("#")[-1]: input_type = step_input.type_ if step.scatter is not None and in_.id in aslist(step.scatter): - input_type = cwl.ArraySchema(items=input_type, type_="array") + input_type = ArraySchema(items=input_type, type_="array") return input_type return "Any" @@ -400,16 +398,14 @@ def type_for_step_output( if step.scatter is not None: if step.scatterMethod == "nested_crossproduct": for _ in range(len(aslist(step.scatter))): - output_type = cwl.ArraySchema( - items=output_type, type_="array" - ) + output_type = ArraySchema(items=output_type, 
type_="array") else: - output_type = cwl.ArraySchema(items=output_type, type_="array") + output_type = ArraySchema(items=output_type, type_="array") return output_type raise ValidationException( "param {} not found in {}.".format( sourcename, - yaml_dumps(cwl.save(step)), + yaml_dumps(save(step)), ) ) @@ -428,11 +424,11 @@ def type_for_source( if scatter_context[0] is not None: if scatter_context[0][1] == "nested_crossproduct": for _ in range(scatter_context[0][0]): - new_type = cwl.ArraySchema(items=new_type, type_="array") + new_type = ArraySchema(items=new_type, type_="array") else: - new_type = cwl.ArraySchema(items=new_type, type_="array") + new_type = ArraySchema(items=new_type, type_="array") if linkMerge == "merge_nested": - new_type = cwl.ArraySchema(items=new_type, type_="array") + new_type = ArraySchema(items=new_type, type_="array") elif linkMerge == "merge_flattened": new_type = merge_flatten_type(new_type) return new_type @@ -450,18 +446,18 @@ def type_for_source( if sc is not None: if sc[1] == "nested_crossproduct": for _ in range(sc[0]): - cur_type = cwl.ArraySchema(items=cur_type, type_="array") + cur_type = ArraySchema(items=cur_type, type_="array") else: - cur_type = cwl.ArraySchema(items=cur_type, type_="array") + cur_type = ArraySchema(items=cur_type, type_="array") new_type.append(cur_type) if len(new_type) == 1: new_type = new_type[0] if linkMerge == "merge_nested": - return cwl.ArraySchema(items=new_type, type_="array") + return ArraySchema(items=new_type, type_="array") elif linkMerge == "merge_flattened": return merge_flatten_type(new_type) elif isinstance(sourcenames, list) and len(sourcenames) > 1: - return cwl.ArraySchema(items=new_type, type_="array") + return ArraySchema(items=new_type, type_="array") return new_type @@ -545,7 +541,7 @@ def param_for_source_id( raise WorkflowException( "param {} not found in {}\n{}.".format( sourcename, - yaml_dumps(cwl.save(process)), - (f" or\n {yaml_dumps(cwl.save(parent))}" if parent is not None 
else ""), + yaml_dumps(save(process)), + (f" or\n {yaml_dumps(save(parent))}" if parent is not None else ""), ) ) diff --git a/src/cwl_utils/parser/cwl_v1_1.py b/src/cwl_utils/parser/cwl_v1_1.py index 744cba7f..43e38a19 100644 --- a/src/cwl_utils/parser/cwl_v1_1.py +++ b/src/cwl_utils/parser/cwl_v1_1.py @@ -2,430 +2,51 @@ # This file was autogenerated using schema-salad-tool --codegen=python # The code itself is released under the Apache 2.0 license and the help text is # subject to the license of the original schema. +from __future__ import annotations -import copy -import logging import os -import pathlib -import tempfile -import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 -import xml.sax # nosec -from abc import ABC, abstractmethod -from collections.abc import MutableMapping, MutableSequence, Sequence +import sys +import uuid as _uuid__ +from collections.abc import Collection +from typing import ClassVar + +from schema_salad.runtime import ( + Saveable, + file_uri, + parse_errors, + prefix_url, + save, + save_relative_uri, +) + +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self + +import schema_salad.metaschema + +import copy +from collections.abc import MutableSequence, Sequence, MutableMapping from io import StringIO from itertools import chain -from typing import Any, Final, Optional, Union, cast -from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit -from urllib.request import pathname2url +from typing import Any, Final, cast, Generic +from urllib.parse import urldefrag, urlsplit, urlunsplit -from rdflib import Graph -from rdflib.plugins.parsers.notation3 import BadSyntax from ruamel.yaml.comments import CommentedMap -from schema_salad.exceptions import SchemaSaladException, ValidationException -from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher +from schema_salad.exceptions import ValidationException, SchemaSaladException +from 
schema_salad.runtime import ( + LoadingOptions, + convert_typing, + extract_type, + SaveableType, +) from schema_salad.sourceline import SourceLine, add_lc_filename -from schema_salad.utils import CacheType, yaml_no_ts # requires schema-salad v8.2+ - -_vocab: dict[str, str] = {} -_rvocab: dict[str, str] = {} - -_logger: Final = logging.getLogger("salad") - - -IdxType = MutableMapping[str, tuple[Any, "LoadingOptions"]] - - -class LoadingOptions: - idx: Final[IdxType] - fileuri: Final[Optional[str]] - baseuri: Final[str] - namespaces: Final[MutableMapping[str, str]] - schemas: Final[MutableSequence[str]] - original_doc: Final[Optional[Any]] - addl_metadata: Final[MutableMapping[str, Any]] - fetcher: Final[Fetcher] - vocab: Final[dict[str, str]] - rvocab: Final[dict[str, str]] - cache: Final[CacheType] - imports: Final[list[str]] - includes: Final[list[str]] - no_link_check: Final[Optional[bool]] - container: Final[Optional[str]] - - def __init__( - self, - fetcher: Optional[Fetcher] = None, - namespaces: Optional[dict[str, str]] = None, - schemas: Optional[list[str]] = None, - fileuri: Optional[str] = None, - copyfrom: Optional["LoadingOptions"] = None, - original_doc: Optional[Any] = None, - addl_metadata: Optional[dict[str, str]] = None, - baseuri: Optional[str] = None, - idx: Optional[IdxType] = None, - imports: Optional[list[str]] = None, - includes: Optional[list[str]] = None, - no_link_check: Optional[bool] = None, - container: Optional[str] = None, - ) -> None: - """Create a LoadingOptions object.""" - self.original_doc = original_doc - - if idx is not None: - temp_idx = idx - else: - temp_idx = copyfrom.idx if copyfrom is not None else {} - self.idx = temp_idx - - if fileuri is not None: - temp_fileuri: Optional[str] = fileuri - else: - temp_fileuri = copyfrom.fileuri if copyfrom is not None else None - self.fileuri = temp_fileuri - - if baseuri is not None: - temp_baseuri = baseuri - else: - temp_baseuri = copyfrom.baseuri if copyfrom is not None else "" - 
self.baseuri = temp_baseuri - - if namespaces is not None: - temp_namespaces: MutableMapping[str, str] = namespaces - else: - temp_namespaces = copyfrom.namespaces if copyfrom is not None else {} - self.namespaces = temp_namespaces - - if schemas is not None: - temp_schemas: MutableSequence[str] = schemas - else: - temp_schemas = copyfrom.schemas if copyfrom is not None else [] - self.schemas = temp_schemas - - if addl_metadata is not None: - temp_addl_metadata: MutableMapping[str, Any] = addl_metadata - else: - temp_addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {} - self.addl_metadata = temp_addl_metadata - - if imports is not None: - temp_imports = imports - else: - temp_imports = copyfrom.imports if copyfrom is not None else [] - self.imports = temp_imports - - if includes is not None: - temp_includes = includes - else: - temp_includes = copyfrom.includes if copyfrom is not None else [] - self.includes = temp_includes - - if no_link_check is not None: - temp_no_link_check: Optional[bool] = no_link_check - else: - temp_no_link_check = copyfrom.no_link_check if copyfrom is not None else False - self.no_link_check = temp_no_link_check - - if container is not None: - temp_container: Optional[str] = container - else: - temp_container = copyfrom.container if copyfrom is not None else None - self.container = temp_container - - if fetcher is not None: - temp_fetcher = fetcher - elif copyfrom is not None: - temp_fetcher = copyfrom.fetcher - else: - import requests - from cachecontrol.caches import SeparateBodyFileCache - from cachecontrol.wrapper import CacheControl - - root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) - session = CacheControl( - requests.Session(), - cache=SeparateBodyFileCache(root / ".cache" / "salad"), - ) - temp_fetcher = DefaultFetcher({}, session) - self.fetcher = temp_fetcher - - self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} - - if self.namespaces != {}: - 
temp_vocab = _vocab.copy() - temp_rvocab = _rvocab.copy() - for k, v in self.namespaces.items(): - temp_vocab[k] = v - temp_rvocab[v] = k - else: - temp_vocab = _vocab - temp_rvocab = _rvocab - self.vocab = temp_vocab - self.rvocab = temp_rvocab - - @property - def graph(self) -> Graph: - """Generate a merged rdflib.Graph from all entries in self.schemas.""" - graph = Graph() - if not self.schemas: - return graph - key: Final = str(hash(tuple(self.schemas))) - if key in self.cache: - return cast(Graph, self.cache[key]) - for schema in self.schemas: - fetchurl = ( - self.fetcher.urljoin(self.fileuri, schema) - if self.fileuri is not None - else pathlib.Path(schema).resolve().as_uri() - ) - if fetchurl not in self.cache or self.cache[fetchurl] is True: - _logger.debug("Getting external schema %s", fetchurl) - try: - content = self.fetcher.fetch_text(fetchurl) - except Exception as e: - _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e)) - continue - newGraph = Graph() - err_msg = "unknown error" - for fmt in ["xml", "turtle"]: - try: - newGraph.parse(data=content, format=fmt, publicID=str(fetchurl)) - self.cache[fetchurl] = newGraph - graph += newGraph - break - except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: - err_msg = str(e) - else: - _logger.warning("Could not load extension schema %s: %s", fetchurl, err_msg) - self.cache[key] = graph - return graph - - -class Saveable(ABC): - """Mark classes than have a save() and fromDoc() function.""" - - @classmethod - @abstractmethod - def fromDoc( - cls, - _doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Saveable": - """Construct this object from the result of yaml.load().""" - - @abstractmethod - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - """Convert this object to a JSON/YAML friendly dictionary.""" - - -def load_field( - val: Union[str, dict[str, str]], - fieldtype: 
"_Loader", - baseuri: str, - loadingOptions: LoadingOptions, - lc: Optional[list[Any]] = None, -) -> Any: - """Load field.""" - if isinstance(val, MutableMapping): - if "$import" in val: - if loadingOptions.fileuri is None: - raise SchemaSaladException("Cannot load $import without fileuri") - url1: Final = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) - result, metadata = _document_load_by_url( - fieldtype, - url1, - loadingOptions, - ) - loadingOptions.imports.append(url1) - return result - if "$include" in val: - if loadingOptions.fileuri is None: - raise SchemaSaladException("Cannot load $import without fileuri") - url2: Final = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) - val = loadingOptions.fetcher.fetch_text(url2) - loadingOptions.includes.append(url2) - return fieldtype.load(val, baseuri, loadingOptions, lc=lc) - - -save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] - - -def extract_type(val_type: type[Any]) -> str: - """Take a type of value, and extracts the value as a string.""" - val_str: Final = str(val_type) - return val_str.split("'")[1] - - -def convert_typing(val_type: str) -> str: - """Normalize type names to schema-salad types.""" - if "None" in val_type: - return "null" - if "CommentedSeq" in val_type or "list" in val_type: - return "array" - if "CommentedMap" in val_type or "dict" in val_type: - return "object" - if "False" in val_type or "True" in val_type: - return "boolean" - return val_type - - -def parse_errors(error_message: str) -> tuple[str, str, str]: - """Parse error messages from several loaders into one error message.""" - if not error_message.startswith("Expected"): - return error_message, "", "" - vals: Final = error_message.split("\n") - if len(vals) == 1: - return error_message, "", "" - types1: Final = set() - for val in vals: - individual_vals = val.split(" ") - if val == "": - continue - if individual_vals[1] == "one": - 
individual_vals = val.split("(")[1].split(",") - for t in individual_vals: - types1.add(t.strip(" ").strip(")\n")) - elif individual_vals[2] == "").replace("'", "")) - elif individual_vals[0] == "Value": - types1.add(individual_vals[-1].strip(".")) - else: - types1.add(individual_vals[1].replace(",", "")) - types2: Final = {val for val in types1 if val != "NoneType"} - if "str" in types2: - types3 = {convert_typing(val) for val in types2 if "'" not in val} - else: - types3 = types2 - to_print = "" - for val in types3: - if "'" in val: - to_print = "value" if len(types3) == 1 else "values" - - if to_print == "": - to_print = "type" if len(types3) == 1 else "types" - - verb_tensage: Final = "is" if len(types3) == 1 else "are" - - return str(types3).replace("{", "(").replace("}", ")").replace("'", ""), to_print, verb_tensage - - -def save( - val: Any, - top: bool = True, - base_url: str = "", - relative_uris: bool = True, -) -> save_type: - if isinstance(val, Saveable): - return val.save(top=top, base_url=base_url, relative_uris=relative_uris) - if isinstance(val, MutableSequence): - return [save(v, top=False, base_url=base_url, relative_uris=relative_uris) for v in val] - if isinstance(val, MutableMapping): - newdict: Final = {} - for key in val: - newdict[key] = save(val[key], top=False, base_url=base_url, relative_uris=relative_uris) - return newdict - if val is None or isinstance(val, (int, float, bool, str)): - return val - raise Exception("Not Saveable: %s" % type(val)) - - -def save_with_metadata( - val: Any, - valLoadingOpts: LoadingOptions, - top: bool = True, - base_url: str = "", - relative_uris: bool = True, -) -> save_type: - """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level.""" - saved_val: Final = save(val, top, base_url, relative_uris) - newdict: MutableMapping[str, Any] = {} - if isinstance(saved_val, MutableSequence): - newdict = {"$graph": saved_val} - elif isinstance(saved_val, MutableMapping): - newdict = 
saved_val - - if valLoadingOpts.namespaces: - newdict["$namespaces"] = valLoadingOpts.namespaces - if valLoadingOpts.schemas: - newdict["$schemas"] = valLoadingOpts.schemas - if valLoadingOpts.baseuri: - newdict["$base"] = valLoadingOpts.baseuri - for k, v in valLoadingOpts.addl_metadata.items(): - if k not in newdict: - newdict[k] = v - - return newdict - - -def expand_url( - url: str, - base_url: str, - loadingOptions: LoadingOptions, - scoped_id: bool = False, - vocab_term: bool = False, - scoped_ref: Optional[int] = None, -) -> str: - if url in ("@id", "@type"): - return url - - if vocab_term and url in loadingOptions.vocab: - return url - - if bool(loadingOptions.vocab) and ":" in url: - prefix: Final = url.split(":")[0] - if prefix in loadingOptions.vocab: - url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] - - split1: Final = urlsplit(url) - - if ( - (bool(split1.scheme) and split1.scheme in loadingOptions.fetcher.supported_schemes()) - or url.startswith("$(") - or url.startswith("${") - ): - pass - elif scoped_id and not bool(split1.fragment): - splitbase1: Final = urlsplit(base_url) - frg: str - if bool(splitbase1.fragment): - frg = splitbase1.fragment + "/" + split1.path - else: - frg = split1.path - pt: Final = splitbase1.path if splitbase1.path != "" else "/" - url = urlunsplit((splitbase1.scheme, splitbase1.netloc, pt, splitbase1.query, frg)) - elif scoped_ref is not None and not bool(split1.fragment): - splitbase2: Final = urlsplit(base_url) - sp = splitbase2.fragment.split("/") - n = scoped_ref - while n > 0 and len(sp) > 0: - sp.pop() - n -= 1 - sp.append(url) - url = urlunsplit( - ( - splitbase2.scheme, - splitbase2.netloc, - splitbase2.path, - splitbase2.query, - "/".join(sp), - ) - ) - else: - url = loadingOptions.fetcher.urljoin(base_url, url) - - if vocab_term: - split2: Final = urlsplit(url) - if bool(split2.scheme): - if url in loadingOptions.rvocab: - return loadingOptions.rvocab[url] - else: - raise ValidationException(f"Term 
{url!r} not in vocabulary") +from schema_salad.utils import yaml_no_ts # requires schema-salad v8.2+ - return url +_vocab: Final[dict[str, str]] = {} +_rvocab: Final[dict[str, str]] = {} class _Loader: @@ -434,9 +55,9 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> Any | None: pass @@ -446,8 +67,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if doc is not None: return doc @@ -455,7 +76,7 @@ def load( class _PrimitiveLoader(_Loader): - def __init__(self, tp: Union[type, tuple[type[str], type[str]]]) -> None: + def __init__(self, tp: type | tuple[type[str], type[str]]) -> None: self.tp: Final = tp def load( @@ -463,8 +84,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if not isinstance(doc, self.tp): raise ValidationException(f"Expected a {self.tp} but got {doc.__class__.__name__}") @@ -483,9 +104,9 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> list[Any]: if not isinstance(doc, MutableSequence): raise ValidationException( f"Value is a {convert_typing(extract_type(type(doc)))}, " @@ -496,7 +117,7 @@ def load( fields: Final[list[str]] = [] for i in range(0, len(doc)): try: - lf = load_field( + lf = _load_field( doc[i], _UnionLoader([self, self.items]), baseuri, loadingOptions, lc=lc ) flatten = loadingOptions.container != "@list" @@ -535,9 +156,9 @@ class _MapLoader(_Loader): def __init__( self, values: _Loader, - name: Optional[str] = None, - 
container: Optional[str] = None, - no_link_check: Optional[bool] = None, + name: str | None = None, + container: str | None = None, + no_link_check: bool | None = None, ) -> None: self.values: Final = values self.name: Final = name @@ -549,9 +170,9 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> dict[str, Any]: if not isinstance(doc, MutableMapping): raise ValidationException(f"Expected a map, was {type(doc)}") if self.container is not None or self.no_link_check is not None: @@ -562,7 +183,7 @@ def load( errors: Final[list[SchemaSaladException]] = [] for k, v in doc.items(): try: - lf = load_field(v, self.values, baseuri, loadingOptions, lc) + lf = _load_field(v, self.values, baseuri, loadingOptions, lc) r[k] = lf except ValidationException as e: errors.append(e.with_sourceline(SourceLine(doc, k, str))) @@ -584,11 +205,11 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> str: if doc in self.symbols: - return doc + return cast(str, doc) raise ValidationException(f"Expected one of {self.symbols}") def __repr__(self) -> str: @@ -604,75 +225,76 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: r: Final[list[dict[str, Any]]] = [] - if isinstance(doc, MutableSequence): - for d in doc: - if isinstance(d, str): - if d.endswith("?"): - r.append({"pattern": d[:-1], "required": False}) - else: - r.append({"pattern": d}) - elif isinstance(d, dict): - new_dict1: dict[str, Any] = {} - dict_copy = copy.deepcopy(d) - if "pattern" in dict_copy: - new_dict1["pattern"] = dict_copy.pop("pattern") - else: - raise 
ValidationException( - f"Missing pattern in secondaryFiles specification entry: {d}" + match doc: + case MutableSequence() as dlist: + for d in dlist: + if isinstance(d, str): + if d.endswith("?"): + r.append({"pattern": d[:-1], "required": False}) + else: + r.append({"pattern": d}) + elif isinstance(d, dict): + new_dict1: dict[str, Any] = {} + dict_copy = copy.deepcopy(d) + if "pattern" in dict_copy: + new_dict1["pattern"] = dict_copy.pop("pattern") + else: + raise ValidationException( + f"Missing pattern in secondaryFiles specification entry: {d}" + ) + new_dict1["required"] = ( + dict_copy.pop("required") if "required" in dict_copy else None ) - new_dict1["required"] = ( - dict_copy.pop("required") if "required" in dict_copy else None - ) - if len(dict_copy): - raise ValidationException( - "Unallowed values in secondaryFiles specification entry: {}".format( - dict_copy + if len(dict_copy): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + dict_copy + ) ) - ) - r.append(new_dict1) + r.append(new_dict1) + else: + raise ValidationException( + "Expected a string or sequence of (strings or mappings)." + ) + case MutableMapping() as decl: + new_dict2 = {} + doc_copy = copy.deepcopy(decl) + if "pattern" in doc_copy: + new_dict2["pattern"] = doc_copy.pop("pattern") else: raise ValidationException( - "Expected a string or sequence of (strings or mappings)." 
+ f"Missing pattern in secondaryFiles specification entry: {decl}" ) - elif isinstance(doc, MutableMapping): - new_dict2: Final = {} - doc_copy: Final = copy.deepcopy(doc) - if "pattern" in doc_copy: - new_dict2["pattern"] = doc_copy.pop("pattern") - else: - raise ValidationException( - f"Missing pattern in secondaryFiles specification entry: {doc}" - ) - new_dict2["required"] = doc_copy.pop("required") if "required" in doc_copy else None + new_dict2["required"] = doc_copy.pop("required") if "required" in doc_copy else None - if len(doc_copy): - raise ValidationException( - f"Unallowed values in secondaryFiles specification entry: {doc_copy}" - ) - r.append(new_dict2) + if len(doc_copy): + raise ValidationException( + f"Unallowed values in secondaryFiles specification entry: {doc_copy}" + ) + r.append(new_dict2) - elif isinstance(doc, str): - if doc.endswith("?"): - r.append({"pattern": doc[:-1], "required": False}) - else: - r.append({"pattern": doc}) - else: - raise ValidationException("Expected str or sequence of str") + case str(decl): + if decl.endswith("?"): + r.append({"pattern": decl[:-1], "required": False}) + else: + r.append({"pattern": decl}) + case _: + raise ValidationException("Expected str or sequence of str") return self.inner.load(r, baseuri, loadingOptions, docRoot, lc=lc) -class _RecordLoader(_Loader): +class _RecordLoader(_Loader, Generic[SaveableType]): def __init__( self, - classtype: type[Saveable], - container: Optional[str] = None, - no_link_check: Optional[bool] = None, + classtype: type[SaveableType], + container: str | None = None, + no_link_check: bool | None = None, ) -> None: self.classtype: Final = classtype self.container: Final = container @@ -683,9 +305,9 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> SaveableType: if not isinstance(doc, MutableMapping): raise 
ValidationException( f"Value is a {convert_typing(extract_type(type(doc)))}, " @@ -710,19 +332,20 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> str: if not isinstance(doc, str): raise ValidationException( f"Value is a {convert_typing(extract_type(type(doc)))}, " f"but valid type for this field is a str." ) - return doc + else: + return doc class _UnionLoader(_Loader): - def __init__(self, alternates: Sequence[_Loader], name: Optional[str] = None) -> None: + def __init__(self, alternates: Sequence[_Loader], name: str | None = None) -> None: self.alternates = alternates self.name: Final = name @@ -734,8 +357,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: errors: Final = [] @@ -817,8 +440,8 @@ def __init__( inner: _Loader, scoped_id: bool, vocab_term: bool, - scoped_ref: Optional[int], - no_link_check: Optional[bool], + scoped_ref: int | None, + no_link_check: bool | None, ) -> None: self.inner: Final = inner self.scoped_id: Final = scoped_id @@ -831,39 +454,40 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if self.no_link_check is not None: loadingOptions = LoadingOptions( copyfrom=loadingOptions, no_link_check=self.no_link_check ) - if isinstance(doc, MutableSequence): - newdoc: Final = [] - for i in doc: - if isinstance(i, str): - newdoc.append( - expand_url( - i, - baseuri, - loadingOptions, - self.scoped_id, - self.vocab_term, - self.scoped_ref, - ) - ) - else: - newdoc.append(i) - doc = newdoc - elif isinstance(doc, str): - doc = expand_url( - doc, - baseuri, - loadingOptions, - self.scoped_id, - 
self.vocab_term, - self.scoped_ref, - ) + match doc: + case MutableSequence() as decl: + newdoc: Final = [] + for i in decl: + if isinstance(i, str): + newdoc.append( + _expand_url( + i, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + ) + else: + newdoc.append(i) + doc = newdoc + case str(decl): + doc = _expand_url( + decl, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) if isinstance(doc, str): if not loadingOptions.no_link_check: errors: Final = [] @@ -880,7 +504,12 @@ def load( class _TypeDSLLoader(_Loader): - def __init__(self, inner: _Loader, refScope: Optional[int], salad_version: str) -> None: + def __init__( + self, + inner: _Loader, + refScope: int | None, + salad_version: str, + ) -> None: self.inner: Final = inner self.refScope: Final = refScope self.salad_version: Final = salad_version @@ -890,7 +519,7 @@ def resolve( doc: str, baseuri: str, loadingOptions: LoadingOptions, - ) -> Union[list[Union[dict[str, Any], str]], dict[str, Any], str]: + ) -> list[dict[str, Any] | str] | dict[str, Any] | str: doc_ = doc optional = False if doc_.endswith("?"): @@ -899,21 +528,42 @@ def resolve( if doc_.endswith("[]"): salad_versions: Final = [int(v) for v in self.salad_version[1:].split(".")] - items: Union[list[Union[dict[str, Any], str]], dict[str, Any], str] = "" + items: list[dict[str, Any] | str] | dict[str, Any] | str = "" rest: Final = doc_[0:-2] if salad_versions < [1, 3]: if rest.endswith("[]"): # To show the error message with the original type return doc else: - items = expand_url(rest, baseuri, loadingOptions, False, True, self.refScope) + items = _expand_url( + rest, + baseuri, + loadingOptions, + False, + True, + self.refScope, + ) else: items = self.resolve(rest, baseuri, loadingOptions) if isinstance(items, str): - items = expand_url(items, baseuri, loadingOptions, False, True, self.refScope) - expanded: Union[dict[str, Any], str] = {"type": "array", "items": items} + 
items = _expand_url( + items, + baseuri, + loadingOptions, + False, + True, + self.refScope, + ) + expanded: dict[str, Any] | str = {"type": "array", "items": items} else: - expanded = expand_url(doc_, baseuri, loadingOptions, False, True, self.refScope) + expanded = _expand_url( + doc_, + baseuri, + loadingOptions, + False, + True, + self.refScope, + ) if optional: return ["null", expanded] @@ -925,8 +575,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if isinstance(doc, MutableSequence): r: Final[list[Any]] = [] @@ -950,7 +600,7 @@ def load( class _IdMapLoader(_Loader): - def __init__(self, inner: _Loader, mapSubject: str, mapPredicate: Optional[str]) -> None: + def __init__(self, inner: _Loader, mapSubject: str, mapPredicate: str | None) -> None: self.inner: Final = inner self.mapSubject: Final = mapSubject self.mapPredicate: Final = mapPredicate @@ -960,8 +610,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if isinstance(doc, MutableMapping): r: Final[list[Any]] = [] @@ -990,10 +640,10 @@ def load( def _document_load( loader: _Loader, - doc: Union[str, MutableMapping[str, Any], MutableSequence[Any]], + doc: str | MutableMapping[str, Any] | MutableSequence[Any], baseuri: str, loadingOptions: LoadingOptions, - addl_metadata_fields: Optional[MutableSequence[str]] = None, + addl_metadata_fields: MutableSequence[str] | None = None, ) -> tuple[Any, LoadingOptions]: if isinstance(doc, str): return _document_load_by_url( @@ -1062,7 +712,7 @@ def _document_load_by_url( loader: _Loader, url: str, loadingOptions: LoadingOptions, - addl_metadata_fields: Optional[MutableSequence[str]] = None, + addl_metadata_fields: MutableSequence[str] | None = None, ) -> tuple[Any, 
LoadingOptions]: if url in loadingOptions.idx: return loadingOptions.idx[url] @@ -1089,103 +739,316 @@ def _document_load_by_url( return loadingOptions.idx[url] -def file_uri(path: str, split_frag: bool = False) -> str: - """Transform a file path into a URL with file scheme.""" - if path.startswith("file://"): - return path - if split_frag: - pathsp: Final = path.split("#", 2) - frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else "" - urlpath = pathname2url(str(pathsp[0])) - else: - urlpath = pathname2url(path) - frag = "" - if urlpath.startswith("//"): - return f"file:{urlpath}{frag}" - return f"file://{urlpath}{frag}" - - -def prefix_url(url: str, namespaces: dict[str, str]) -> str: - """Expand short forms into full URLs using the given namespace dictionary.""" - for k, v in namespaces.items(): - if url.startswith(v): - return k + ":" + url[len(v) :] - return url - - -def save_relative_uri( - uri: Any, +def _expand_url( + url: str, base_url: str, - scoped_id: bool, - ref_scope: Optional[int], - relative_uris: bool, -) -> Any: - """Convert any URI to a relative one, obeying the scoping rules.""" - if isinstance(uri, MutableSequence): - return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri] - elif isinstance(uri, str): - if not relative_uris or uri == base_url: - return uri - urisplit: Final = urlsplit(uri) - basesplit: Final = urlsplit(base_url) - if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: - if urisplit.path != basesplit.path: - p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) - if urisplit.fragment: - p = p + "#" + urisplit.fragment - return p - - basefrag = basesplit.fragment + "/" - if ref_scope: - sp = basefrag.split("/") - i = 0 - while i < ref_scope: - sp.pop() - i += 1 - basefrag = "/".join(sp) - - if urisplit.fragment.startswith(basefrag): - return urisplit.fragment[len(basefrag) :] - return urisplit.fragment - return uri - else: - return save(uri, top=False, 
base_url=base_url, relative_uris=relative_uris) + loadingOptions: LoadingOptions, + scoped_id: bool = False, + vocab_term: bool = False, + scoped_ref: int | None = None, +) -> str: + if url in ("@id", "@type"): + return url + vocab = _vocab | loadingOptions.vocab + if vocab_term and url in vocab: + return url -def shortname(inputid: str) -> str: - """ - Compute the shortname of a fully qualified identifier. + if bool(vocab) and ":" in url: + prefix: Final = url.split(":")[0] + if prefix in vocab: + url = vocab[prefix] + url[len(prefix) + 1 :] - See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names. - """ - parsed_id: Final = urlparse(inputid) - if parsed_id.fragment: - return parsed_id.fragment.split("/")[-1] - return parsed_id.path.split("/")[-1] + split1: Final = urlsplit(url) + if ( + (bool(split1.scheme) and split1.scheme in loadingOptions.fetcher.supported_schemes()) + or url.startswith("$(") + or url.startswith("${") + ): + pass + elif scoped_id and not bool(split1.fragment): + splitbase1: Final = urlsplit(base_url) + frg: str + if bool(splitbase1.fragment): + frg = splitbase1.fragment + "/" + split1.path + else: + frg = split1.path + pt: Final = splitbase1.path if splitbase1.path != "" else "/" + url = urlunsplit((splitbase1.scheme, splitbase1.netloc, pt, splitbase1.query, frg)) + elif scoped_ref is not None and not bool(split1.fragment): + splitbase2: Final = urlsplit(base_url) + sp = splitbase2.fragment.split("/") + n = scoped_ref + while n > 0 and len(sp) > 0: + sp.pop() + n -= 1 + sp.append(url) + url = urlunsplit( + ( + splitbase2.scheme, + splitbase2.netloc, + splitbase2.path, + splitbase2.query, + "/".join(sp), + ) + ) + else: + url = loadingOptions.fetcher.urljoin(base_url, url) -def parser_info() -> str: - return "org.w3id.cwl.v1_1" + if vocab_term: + split2: Final = urlsplit(url) + if bool(split2.scheme): + if url in (rvocab := _rvocab | loadingOptions.rvocab): + return rvocab[url] + else: + raise ValidationException(f"Term {url!r} not in 
vocabulary") + return url -class Documented(Saveable): - pass + +def _load_field( + val: Any | None, + fieldtype: "_Loader", + baseuri: str, + loadingOptions: LoadingOptions, + lc: Any | None = None, +) -> Any: + """Load field.""" + if isinstance(val, MutableMapping): + if "$import" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url1: Final = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) + result, metadata = _document_load_by_url( + fieldtype, + url1, + loadingOptions, + ) + loadingOptions.imports.append(url1) + return result + if "$include" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url2: Final = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) + val = loadingOptions.fetcher.fetch_text(url2) + loadingOptions.includes.append(url2) + return fieldtype.load(val, baseuri, loadingOptions, lc=lc) -class RecordField(Documented): - """ - A field of a record. 
- """ +def parser_info() -> str: + return "org.w3id.cwl.v1_1" + + +class CWLArraySchema(schema_salad.metaschema.ArraySchema): + def __init__( + self, + items: Any, + type_: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CWLArraySchema): + return bool(self.items == other.items and self.type_ == other.type_) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = _load_field( + _doc.get("items"), + uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, 
"items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False 
+ ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + items=items, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.items is not None: + u = save_relative_uri(self.items, base_url, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["items", "type"]) + + +class CWLRecordField(schema_salad.metaschema.RecordField): name: str def __init__( self, name: Any, type_: Any, - doc: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + doc: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -1200,7 +1063,7 @@ def __init__( self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, RecordField): + if isinstance(other, CWLRecordField): return bool( self.doc == other.doc and self.name == other.name @@ 
-1217,8 +1080,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "RecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -1228,7 +1091,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, @@ -1284,7 +1147,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -1332,9 +1195,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2, + typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -1376,7 +1239,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -1384,7 +1247,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -1441,16 +1304,16 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type"]) + attrs: ClassVar[Collection[str]] = 
frozenset(["doc", "name", "type"]) -class RecordSchema(Saveable): +class CWLRecordSchema(schema_salad.metaschema.RecordSchema): def __init__( self, type_: Any, - fields: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + fields: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -1464,7 +1327,7 @@ def __init__( self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, RecordSchema): + if isinstance(other, CWLRecordSchema): return bool(self.fields == other.fields and self.type_ == other.type_) return False @@ -1477,8 +1340,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "RecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -1488,9 +1351,9 @@ def fromDoc( fields = None if "fields" in _doc: try: - fields = load_field( + fields = _load_field( _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, + idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader, baseuri, loadingOptions, lc=_doc.get("fields") @@ -1536,7 +1399,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Record_nameLoader_2, baseuri, @@ -1580,7 +1443,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -1588,7 +1451,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ 
-1640,24 +1503,52 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type"]) + attrs: ClassVar[Collection[str]] = frozenset(["fields", "type"]) -class EnumSchema(Saveable): +class File(Saveable): """ - Define an enumerated type. + Represents a file (or group of files when ``secondaryFiles`` is provided) that will be accessible by tools using standard POSIX file system call API such as open(2) and read(2). - """ + Files are represented as objects with ``class`` of ``File``. File objects have a number of properties that provide metadata about the file. - name: str + The ``location`` property of a File is a URI that uniquely identifies the file. Implementations must support the file:// URI scheme and may support other schemes such as http://. The value of ``location`` may also be a relative reference, in which case it must be resolved relative to the URI of the document it appears in. Alternately to ``location``, implementations must also accept the ``path`` property on File, which must be a filesystem path available on the same host as the CWL runner (for inputs) or the runtime environment of a command line tool execution (for command line tool outputs). + + If no ``location`` or ``path`` is specified, a file object must specify ``contents`` with the UTF-8 text content of the file. This is a "file literal". File literals do not correspond to external resources, but are created on disk with ``contents`` with when needed for a executing a tool. Where appropriate, expressions can return file literals to define new files on a runtime. The maximum size of ``contents`` is 64 kilobytes. + + The ``basename`` property defines the filename on disk where the file is staged. This may differ from the resource name. If not provided, ``basename`` must be computed from the last path part of ``location`` and made available to expressions. 
+ + The ``secondaryFiles`` property is a list of File or Directory objects that must be staged in the same directory as the primary file. It is an error for file names to be duplicated in ``secondaryFiles``. + + The ``size`` property is the size in bytes of the File. It must be computed from the resource and made available to expressions. The ``checksum`` field contains a cryptographic hash of the file content for use it verifying file contents. Implementations may, at user option, enable or disable computation of the ``checksum`` field for performance or other reasons. However, the ability to compute output checksums is required to pass the CWL conformance test suite. + + When executing a CommandLineTool, the files and secondary files may be staged to an arbitrary directory, but must use the value of ``basename`` for the filename. The ``path`` property must be file path in the context of the tool execution runtime (local to the compute node, or within the executing container). All computed properties should be available to expressions. File literals also must be staged and ``path`` must be set. + + When collecting CommandLineTool outputs, ``glob`` matching returns file paths (with the ``path`` property) and the derived properties. This can all be modified by ``outputEval``. Alternately, if the file ``cwl.output.json`` is present in the output, ``outputBinding`` is ignored. + + File objects in the output must provide either a ``location`` URI or a ``path`` property in the context of the tool execution runtime (local to the compute node, or within the executing container). + + When evaluating an ExpressionTool, file objects must be referenced via ``location`` (the expression tool does not have access to files on disk so ``path`` is meaningless) or as file literals. It is legal to return a file object with an existing ``location`` but a different ``basename``. 
The ``loadContents`` field of ExpressionTool inputs behaves the same as on CommandLineTool inputs, however it is not meaningful on the outputs. + + An ExpressionTool may forward file references from input to output by using the same value for ``location``. + + """ def __init__( self, - symbols: Any, - type_: Any, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + location: Any | None = None, + path: Any | None = None, + basename: Any | None = None, + dirname: Any | None = None, + nameroot: Any | None = None, + nameext: Any | None = None, + checksum: Any | None = None, + size: Any | None = None, + secondaryFiles: Any | None = None, + format: Any | None = None, + contents: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -1667,21 +1558,54 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.symbols = symbols - self.type_ = type_ + self.class_: Final[str] = "File" + self.location = location + self.path = path + self.basename = basename + self.dirname = dirname + self.nameroot = nameroot + self.nameext = nameext + self.checksum = checksum + self.size = size + self.secondaryFiles = secondaryFiles + self.format = format + self.contents = contents def __eq__(self, other: Any) -> bool: - if isinstance(other, EnumSchema): + if isinstance(other, File): return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type_ == other.type_ + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.dirname == other.dirname + and self.nameroot == other.nameroot + and self.nameext == other.nameext + and self.checksum 
== other.checksum + and self.size == other.size + and self.secondaryFiles == other.secondaryFiles + and self.format == other.format + and self.contents == other.contents ) return False def __hash__(self) -> int: - return hash((self.name, self.symbols, self.type_)) + return hash( + ( + self.class_, + self.location, + self.path, + self.basename, + self.dirname, + self.nameroot, + self.nameext, + self.checksum, + self.size, + self.secondaryFiles, + self.format, + self.contents, + ) + ) @classmethod def fromDoc( @@ -1689,29 +1613,46 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "EnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_File_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + location = None + if "location" in _doc: try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, + location = _load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("location") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `location`": _errors__.append( ValidationException( str(e), @@ -1719,13 +1660,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("location") if 
error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -1737,537 +1678,499 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `location` field with value `{val}` " "is not valid because:", ) ) + path = None + if "path" in _doc: + try: + path = _load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("path") + ) - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - try: - if _doc.get("symbols") is None: - raise ValidationException("missing required field `symbols`", None, []) - - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("symbols") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `symbols`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("symbols") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `path`": _errors__.append( ValidationException( 
- "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - detailed_message=f"the `symbols` field with value `{val}` " - "is not valid because:", + val = _doc.get("path") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [e], + detailed_message=f"the `path` field with value `{val}` " + "is not valid because:", + ) + ) + basename = None + if "basename" in _doc: + try: + basename = _load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("basename") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": 
- _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `basename`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("basename") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + detailed_message=f"the `basename` field with value `{val}` " + "is not valid because:", + ) + ) + dirname = None + if "dirname" in _doc: + try: + dirname = _load_field( + _doc.get("dirname"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("dirname") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `dirname`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, 
Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`".format( - k - ), - SourceLine(_doc, k, str), + val = _doc.get("dirname") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `dirname` field is not valid because:", + SourceLine(_doc, "dirname", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - name=name, - symbols=symbols, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) - r["symbols"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - - # top refers to 
the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "symbols", "type"]) - - -class ArraySchema(Saveable): - def __init__( - self, - items: Any, - type_: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ArraySchema): - return bool(self.items == other.items and self.type_ == other.type_) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ArraySchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) - - items = load_field( - _doc.get("items"), - uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("items") - ) + else: + _errors__.append( + ValidationException( + "the `dirname` field is not valid because:", + SourceLine(_doc, "dirname", str), + [e], + detailed_message=f"the `dirname` field with value `{val}` " + "is 
not valid because:", + ) + ) + nameroot = None + if "nameroot" in _doc: + try: + nameroot = _load_field( + _doc.get("nameroot"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("nameroot") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("items") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `nameroot`": _errors__.append( ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [e], - detailed_message=f"the `items` field with value `{val}` " - "is not valid because:", + val = _doc.get("nameroot") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `nameroot` field is not valid because:", + SourceLine(_doc, "nameroot", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - 
typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + _errors__.append( + ValidationException( + "the `nameroot` field is not valid because:", + SourceLine(_doc, "nameroot", str), + [e], + detailed_message=f"the `nameroot` field with value `{val}` " + "is not valid because:", + ) + ) + nameext = None + if "nameext" in _doc: + try: + nameext = _load_field( + _doc.get("nameext"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("nameext") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `nameext`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("nameext") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `nameext` field is not valid because:", + SourceLine(_doc, "nameext", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + 
detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( - k - ), - SourceLine(_doc, k, str), + else: + _errors__.append( + ValidationException( + "the `nameext` field is not valid because:", + SourceLine(_doc, "nameext", str), + [e], + detailed_message=f"the `nameext` field with value `{val}` " + "is not valid because:", + ) ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - items=items, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.items is not None: - u = save_relative_uri(self.items, base_url, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type"]) - - -class MapSchema(Saveable): - def __init__( - self, - type_: Any, 
- values: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.type_ = type_ - self.values = values - - def __eq__(self, other: Any) -> bool: - if isinstance(other, MapSchema): - return bool(self.type_ == other.type_ and self.values == other.values) - return False - - def __hash__(self) -> int: - return hash((self.type_, self.values)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "MapSchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Map_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + checksum = None + if "checksum" in _doc: + try: + checksum = _load_field( + _doc.get("checksum"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("checksum") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `checksum`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - 
f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("checksum") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `checksum` field is not valid because:", + SourceLine(_doc, "checksum", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - try: - if _doc.get("values") is None: - raise ValidationException("missing required field `values`", None, []) - - values = load_field( - _doc.get("values"), - uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("values") - ) + else: + _errors__.append( + ValidationException( + "the `checksum` field is not valid because:", + SourceLine(_doc, "checksum", str), + [e], + detailed_message=f"the `checksum` field with value `{val}` " + "is not valid because:", + ) + ) + size = None + if "size" in _doc: + try: + size = _load_field( + _doc.get("size"), + union_of_None_type_or_inttype, + baseuri, + loadingOptions, + lc=_doc.get("size") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = 
parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `values`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("values") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `size`": _errors__.append( ValidationException( - "the `values` field is not valid because:", - SourceLine(_doc, "values", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("size") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `size` field is not valid because:", + SourceLine(_doc, "size", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `size` field is not valid because:", + SourceLine(_doc, "size", str), + [e], + detailed_message=f"the `size` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": 
_errors__.append( ValidationException( - "the `values` field is not valid because:", - SourceLine(_doc, "values", str), - [e], - detailed_message=f"the `values` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + 
[ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + contents = None + if "contents" in _doc: + try: + contents = _load_field( + _doc.get("contents"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("contents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `contents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("contents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `contents` field is not valid because:", + SourceLine(_doc, "contents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `contents` field is not valid because:", + SourceLine(_doc, "contents", str), + [e], + detailed_message=f"the `contents` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] 
else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `type`, `values`".format( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( k ), SourceLine(_doc, k, str), @@ -2277,8 +2180,17 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - type_=type_, - values=values, + location=location, + path=path, + basename=basename, + dirname=dirname, + nameroot=nameroot, + nameext=nameext, + checksum=checksum, + size=size, + secondaryFiles=secondaryFiles, + format=format, + contents=contents, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2295,13 +2207,60 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.location is not None: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + if self.path is not None: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + if self.basename is not None: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.dirname is not None: + r["dirname"] = save( + self.dirname, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.nameroot is not None: + r["nameroot"] = save( + self.nameroot, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.nameext 
is not None: + r["nameext"] = save( + self.nameext, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.checksum is not None: + r["checksum"] = save( + self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.size is not None: + r["size"] = save( + self.size, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, base_url, True, None, relative_uris) + r["format"] = u + if self.contents is not None: + r["contents"] = save( + self.contents, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.values is not None: - u = save_relative_uri(self.values, base_url, False, 2, relative_uris) - r["values"] = u # top refers to the directory level if top: @@ -2311,16 +2270,56 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["type", "values"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "class", + "location", + "path", + "basename", + "dirname", + "nameroot", + "nameext", + "checksum", + "size", + "secondaryFiles", + "format", + "contents", + ] + ) + + +class Directory(Saveable): + """ + Represents a directory to present to a command line tool. + + Directories are represented as objects with ``class`` of ``Directory``. Directory objects have a number of properties that provide metadata about the directory. + + The ``location`` property of a Directory is a URI that uniquely identifies the directory. Implementations must support the file:// URI scheme and may support other schemes such as http://. 
Alternately to ``location``, implementations must also accept the ``path`` property on Directory, which must be a filesystem path available on the same host as the CWL runner (for inputs) or the runtime environment of a command line tool execution (for command line tool outputs). + + A Directory object may have a ``listing`` field. This is a list of File and Directory objects that are contained in the Directory. For each entry in ``listing``, the ``basename`` property defines the name of the File or Subdirectory when staged to disk. If ``listing`` is not provided, the implementation must have some way of fetching the Directory listing at runtime based on the ``location`` field. + + If a Directory does not have ``location``, it is a Directory literal. A Directory literal must provide ``listing``. Directory literals must be created on disk at runtime as needed. + + The resources in a Directory literal do not need to have any implied relationship in their ``location``. For example, a Directory listing may contain two files located on different hosts. It is the responsibility of the runtime to ensure that those files are staged to disk appropriately. Secondary files associated with files in ``listing`` must also be staged to the same Directory. + When executing a CommandLineTool, Directories must be recursively staged first and have local values of ``path`` assigned. + + Directory objects in CommandLineTool output must provide either a ``location`` URI or a ``path`` property in the context of the tool execution runtime (local to the compute node, or within the executing container). + + An ExpressionTool may forward file references from input to output by using the same value for ``location``. + + Name conflicts (the same ``basename`` appearing multiple times in ``listing`` or in any entry in ``secondaryFiles`` in the listing) is a fatal error. 
+ + """ -class UnionSchema(Saveable): def __init__( self, - names: Any, - type_: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + location: Any | None = None, + path: Any | None = None, + basename: Any | None = None, + listing: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -2330,16 +2329,27 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.names = names - self.type_ = type_ + self.class_: Final[str] = "Directory" + self.location = location + self.path = path + self.basename = basename + self.listing = listing def __eq__(self, other: Any) -> bool: - if isinstance(other, UnionSchema): - return bool(self.names == other.names and self.type_ == other.type_) + if isinstance(other, Directory): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.listing == other.listing + ) return False def __hash__(self) -> int: - return hash((self.names, self.type_)) + return hash( + (self.class_, self.location, self.path, self.basename, self.listing) + ) @classmethod def fromDoc( @@ -2347,8 +2357,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "UnionSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -2356,102 +2366,211 @@ def fromDoc( _doc.lc.filename = doc.lc.filename _errors__ = [] try: - if _doc.get("names") is None: - raise ValidationException("missing required field `names`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - names = load_field( - _doc.get("names"), - 
uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, + class_ = _load_field( + _doc.get("class"), + uri_Directory_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("names") + lc=_doc.get("class") ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + raise e + location = None + if "location" in _doc: + try: + location = _load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("location") + ) - if str(e) == "missing required field `names`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("names") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `location`": _errors__.append( ValidationException( - "the `names` field is not valid because:", - SourceLine(_doc, "names", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("location") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, 
"location", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), + [e], + detailed_message=f"the `location` field with value `{val}` " + "is not valid because:", + ) + ) + path = None + if "path" in _doc: + try: + path = _load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("path") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `path`": _errors__.append( ValidationException( - "the `names` field is not valid because:", - SourceLine(_doc, "names", str), - [e], - detailed_message=f"the `names` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Union_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + val = _doc.get("path") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [e], + 
detailed_message=f"the `path` field with value `{val}` " + "is not valid because:", + ) + ) + basename = None + if "basename" in _doc: + try: + basename = _load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("basename") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `basename`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("basename") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + detailed_message=f"the `basename` field with value `{val}` " + "is not valid because:", + ) + ) + listing = None + if "listing" in _doc: + try: + listing = _load_field( + 
_doc.get("listing"), + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + lc=_doc.get("listing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `listing`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, Any] = {} + else: + val = _doc.get("listing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + detailed_message=f"the `listing` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -2459,14 +2578,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `names`, `type`".format( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( k ), SourceLine(_doc, k, str), @@ -2476,8 +2595,10 @@ def fromDoc( if _errors__: raise ValidationException("", None, 
_errors__, "*") _constructed = cls( - names=names, - type_=type_, + location=location, + path=path, + basename=basename, + listing=listing, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2494,12 +2615,29 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.names is not None: - u = save_relative_uri(self.names, base_url, False, 2, relative_uris) - r["names"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.location is not None: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + if self.path is not None: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + if self.basename is not None: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.listing is not None: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -2510,35 +2648,72 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["names", "type"]) - + attrs: ClassVar[Collection[str]] = frozenset( + ["class", "location", "path", "basename", "listing"] + ) -class CWLArraySchema(ArraySchema): - def __init__( - self, - items: Any, - type_: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if 
loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ - def __eq__(self, other: Any) -> bool: - if isinstance(other, CWLArraySchema): - return bool(self.items == other.items and self.type_ == other.type_) - return False +class Labeled(Saveable): + pass - def __hash__(self) -> int: - return hash((self.items, self.type_)) + +class Identified(Saveable): + pass + + +class IdentifierRequired(Identified): + pass + + +class LoadContents(Saveable): + pass + + +class FieldBase(Labeled): + pass + + +class InputFormat(Saveable): + pass + + +class OutputFormat(Saveable): + pass + + +class Parameter(FieldBase, schema_salad.metaschema.Documented, IdentifierRequired): + """ + Define an input or output parameter to a process. + + """ + + pass + + +class InputBinding(Saveable): + def __init__( + self, + loadContents: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputBinding): + return bool(self.loadContents == other.loadContents) + return False + + def __hash__(self) -> int: + return hash((self.loadContents)) @classmethod def fromDoc( @@ -2546,111 +2721,62 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CWLArraySchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) - - items = load_field( - 
_doc.get("items"), - uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("items") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) + loadContents = None + if "loadContents" in _doc: + try: + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") ) - else: - val = _doc.get("items") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [e], - detailed_message=f"the `items` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - 
_errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - extension_fields: dict[str, Any] = {} + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -2658,14 +2784,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) 
extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( + "invalid field `{}`, expected one of: `loadContents`".format( k ), SourceLine(_doc, k, str), @@ -2675,8 +2801,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - items=items, - type_=type_, + loadContents=loadContents, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2693,12 +2818,12 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.items is not None: - u = save_relative_uri(self.items, base_url, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -2709,19 +2834,37 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["items", "type"]) + attrs: ClassVar[Collection[str]] = frozenset(["loadContents"]) + + +class IOSchema(Labeled, schema_salad.metaschema.Documented): + pass + + +class InputSchema(IOSchema): + pass + + +class OutputSchema(IOSchema): + pass -class CWLRecordField(RecordField): +class InputRecordField(CWLRecordField, FieldBase, InputFormat, LoadContents): name: str def __init__( self, name: Any, type_: Any, - doc: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + doc: Any | None = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + format: Any | None = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> 
None: if extension_fields: self.extension_fields = extension_fields @@ -2734,18 +2877,42 @@ def __init__( self.doc = doc self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) self.type_ = type_ + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing def __eq__(self, other: Any) -> bool: - if isinstance(other, CWLRecordField): + if isinstance(other, InputRecordField): return bool( self.doc == other.doc and self.name == other.name and self.type_ == other.type_ + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing ) return False def __hash__(self) -> int: - return hash((self.doc, self.name, self.type_)) + return hash( + ( + self.doc, + self.name, + self.type_, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.loadContents, + self.loadListing, + ) + ) @classmethod def fromDoc( @@ -2753,8 +2920,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CWLRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -2764,7 +2931,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, @@ -2820,7 +2987,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -2868,9 +3035,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - 
typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2, + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -2912,130 +3079,68 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( - k - ), - SourceLine(_doc, k, str), + str(e), + None ) ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - doc=doc, - name=name, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - 
for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["doc", "name", "type"]) - - -class CWLRecordSchema(RecordSchema): - def __init__( - self, - type_: Any, - fields: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type_ = type_ - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CWLRecordSchema): - return bool(self.fields == other.fields and self.type_ == other.type_) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CWLRecordSchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - fields = None - if "fields" in _doc: + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + 
SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader, + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, loadingOptions, - lc=_doc.get("fields") + lc=_doc.get("secondaryFiles") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `fields`": + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( str(e), @@ -3043,13 +3148,13 @@ def fromDoc( ) ) else: - val = _doc.get("fields") + val = _doc.get("secondaryFiles") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3061,327 +3166,89 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), 
[e], - detailed_message=f"the `fields` field with value `{val}` " + detailed_message=f"the `secondaryFiles` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Record_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + streamable = None + if "streamable" in _doc: + try: + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - fields=fields, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.fields is not None: - r["fields"] = save( - self.fields, 
top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type"]) - - -class File(Saveable): - """ - Represents a file (or group of files when `secondaryFiles` is provided) that - will be accessible by tools using standard POSIX file system call API such as - open(2) and read(2). - - Files are represented as objects with `class` of `File`. File objects have - a number of properties that provide metadata about the file. - - The `location` property of a File is a URI that uniquely identifies the - file. Implementations must support the file:// URI scheme and may support - other schemes such as http://. The value of `location` may also be a - relative reference, in which case it must be resolved relative to the URI - of the document it appears in. Alternately to `location`, implementations - must also accept the `path` property on File, which must be a filesystem - path available on the same host as the CWL runner (for inputs) or the - runtime environment of a command line tool execution (for command line tool - outputs). - - If no `location` or `path` is specified, a file object must specify - `contents` with the UTF-8 text content of the file. This is a "file - literal". File literals do not correspond to external resources, but are - created on disk with `contents` with when needed for a executing a tool. - Where appropriate, expressions can return file literals to define new files - on a runtime. The maximum size of `contents` is 64 kilobytes. - - The `basename` property defines the filename on disk where the file is - staged. This may differ from the resource name. 
If not provided, - `basename` must be computed from the last path part of `location` and made - available to expressions. - - The `secondaryFiles` property is a list of File or Directory objects that - must be staged in the same directory as the primary file. It is an error - for file names to be duplicated in `secondaryFiles`. - - The `size` property is the size in bytes of the File. It must be computed - from the resource and made available to expressions. The `checksum` field - contains a cryptographic hash of the file content for use it verifying file - contents. Implementations may, at user option, enable or disable - computation of the `checksum` field for performance or other reasons. - However, the ability to compute output checksums is required to pass the - CWL conformance test suite. - - When executing a CommandLineTool, the files and secondary files may be - staged to an arbitrary directory, but must use the value of `basename` for - the filename. The `path` property must be file path in the context of the - tool execution runtime (local to the compute node, or within the executing - container). All computed properties should be available to expressions. - File literals also must be staged and `path` must be set. - - When collecting CommandLineTool outputs, `glob` matching returns file paths - (with the `path` property) and the derived properties. This can all be - modified by `outputEval`. Alternately, if the file `cwl.output.json` is - present in the output, `outputBinding` is ignored. - - File objects in the output must provide either a `location` URI or a `path` - property in the context of the tool execution runtime (local to the compute - node, or within the executing container). - - When evaluating an ExpressionTool, file objects must be referenced via - `location` (the expression tool does not have access to files on disk so - `path` is meaningless) or as file literals. 
It is legal to return a file - object with an existing `location` but a different `basename`. The - `loadContents` field of ExpressionTool inputs behaves the same as on - CommandLineTool inputs, however it is not meaningful on the outputs. - - An ExpressionTool may forward file references from input to output by using - the same value for `location`. - - """ - - def __init__( - self, - location: Optional[Any] = None, - path: Optional[Any] = None, - basename: Optional[Any] = None, - dirname: Optional[Any] = None, - nameroot: Optional[Any] = None, - nameext: Optional[Any] = None, - checksum: Optional[Any] = None, - size: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - format: Optional[Any] = None, - contents: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "File" - self.location = location - self.path = path - self.basename = basename - self.dirname = dirname - self.nameroot = nameroot - self.nameext = nameext - self.checksum = checksum - self.size = size - self.secondaryFiles = secondaryFiles - self.format = format - self.contents = contents - - def __eq__(self, other: Any) -> bool: - if isinstance(other, File): - return bool( - self.class_ == other.class_ - and self.location == other.location - and self.path == other.path - and self.basename == other.basename - and self.dirname == other.dirname - and self.nameroot == other.nameroot - and self.nameext == other.nameext - and self.checksum == other.checksum - and self.size == other.size - and self.secondaryFiles == other.secondaryFiles - and self.format == other.format - and self.contents == other.contents - ) - return False - - def __hash__(self) -> int: - return hash( - ( - 
self.class_, - self.location, - self.path, - self.basename, - self.dirname, - self.nameroot, - self.nameext, - self.checksum, - self.size, - self.secondaryFiles, - self.format, - self.contents, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "File": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_File_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - location = None - if "location" in _doc: - try: - location = load_field( - _doc.get("location"), - uri_union_of_None_type_or_strtype_False_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("location") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `location`": - _errors__.append( - ValidationException( - str(e), - None + str(e), + None ) ) else: - val = _doc.get("location") + val = _doc.get("format") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `location` field is not valid because:", - SourceLine(_doc, "location", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3393,28 +3260,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `location` field is not valid because:", - SourceLine(_doc, 
"location", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [e], - detailed_message=f"the `location` field with value `{val}` " + detailed_message=f"the `format` field with value `{val}` " "is not valid because:", ) ) - path = None - if "path" in _doc: + loadContents = None + if "loadContents" in _doc: try: - path = load_field( - _doc.get("path"), - uri_union_of_None_type_or_strtype_False_False_None_None, + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("path") + lc=_doc.get("loadContents") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `path`": + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( str(e), @@ -3422,13 +3289,13 @@ def fromDoc( ) ) else: - val = _doc.get("path") + val = _doc.get("loadContents") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `path` field is not valid because:", - SourceLine(_doc, "path", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3440,28 +3307,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `path` field is not valid because:", - SourceLine(_doc, "path", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [e], - detailed_message=f"the `path` field with value `{val}` " + detailed_message=f"the `loadContents` field with value `{val}` " "is not valid because:", ) ) - basename = None - if "basename" in _doc: + loadListing = None + if "loadListing" in _doc: try: - basename = load_field( - _doc.get("basename"), - union_of_None_type_or_strtype, + loadListing = _load_field( 
+ _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, baseuri, loadingOptions, - lc=_doc.get("basename") + lc=_doc.get("loadListing") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `basename`": + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( str(e), @@ -3469,13 +3336,13 @@ def fromDoc( ) ) else: - val = _doc.get("basename") + val = _doc.get("loadListing") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `basename` field is not valid because:", - SourceLine(_doc, "basename", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3487,75 +3354,204 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `basename` field is not valid because:", - SourceLine(_doc, "basename", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [e], - detailed_message=f"the `basename` field with value `{val}` " + detailed_message=f"the `loadListing` field with value `{val}` " "is not valid because:", ) ) - dirname = None - if "dirname" in _doc: - try: - dirname = load_field( - _doc.get("dirname"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("dirname") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `dirname`": + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - str(e), - None - ) + ValidationException("mapping with implicit null key") ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, 
vocab_term=False + ) + extension_fields[ex] = _doc[k] else: - val = _doc.get("dirname") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `dirname` field is not valid because:", - SourceLine(_doc, "dirname", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`".format( + k + ), + SourceLine(_doc, k, str), ) - else: - _errors__.append( - ValidationException( - "the `dirname` field is not valid because:", - SourceLine(_doc, "dirname", str), - [e], - detailed_message=f"the `dirname` field with value `{val}` " - "is not valid because:", - ) - ) - nameroot = None - if "nameroot" in _doc: + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + doc=doc, + name=name, + type_=type_, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + loadContents=loadContents, + loadListing=loadListing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if 
self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, self.name, True, None, relative_uris) + r["format"] = u + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "loadContents", + "loadListing", + ] + ) + + +class InputRecordSchema(CWLRecordSchema, InputSchema): + name: str + + def __init__( + self, + type_: Any, + fields: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if 
loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputRecordSchema): + return bool( + self.fields == other.fields + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type_, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: try: - nameroot = load_field( - _doc.get("nameroot"), - union_of_None_type_or_strtype, + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("nameroot") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `nameroot`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -3563,13 +3559,13 @@ def fromDoc( ) ) else: - val = _doc.get("nameroot") + val = _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `nameroot` field is not valid because:", - SourceLine(_doc, "nameroot", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3581,28 +3577,37 @@ def fromDoc( else: 
_errors__.append( ValidationException( - "the `nameroot` field is not valid because:", - SourceLine(_doc, "nameroot", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `nameroot` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - nameext = None - if "nameext" in _doc: + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + fields = None + if "fields" in _doc: try: - nameext = load_field( - _doc.get("nameext"), - union_of_None_type_or_strtype, + fields = _load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, baseuri, loadingOptions, - lc=_doc.get("nameext") + lc=_doc.get("fields") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `nameext`": + if str(e) == "missing required field `fields`": _errors__.append( ValidationException( str(e), @@ -3610,13 +3615,13 @@ def fromDoc( ) ) else: - val = _doc.get("nameext") + val = _doc.get("fields") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `nameext` field is not valid because:", - SourceLine(_doc, "nameext", str), + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3628,169 +3633,76 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `nameext` field is not valid because:", - SourceLine(_doc, "nameext", str), + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), [e], - detailed_message=f"the `nameext` field with value `{val}` " + 
detailed_message=f"the `fields` field with value `{val}` " "is not valid because:", ) ) - checksum = None - if "checksum" in _doc: - try: - checksum = load_field( - _doc.get("checksum"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("checksum") - ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + type_ = _load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) - if str(e) == "missing required field `checksum`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("checksum") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `checksum` field is not valid because:", - SourceLine(_doc, "checksum", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `checksum` field is not 
valid because:", - SourceLine(_doc, "checksum", str), - [e], - detailed_message=f"the `checksum` field with value `{val}` " - "is not valid because:", - ) - ) - size = None - if "size" in _doc: - try: - size = load_field( - _doc.get("size"), - union_of_None_type_or_inttype, - baseuri, - loadingOptions, - lc=_doc.get("size") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `size`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("size") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `size` field is not valid because:", - SourceLine(_doc, "size", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `size` field is not valid because:", - SourceLine(_doc, "size", str), - [e], - detailed_message=f"the `size` field with value `{val}` " - "is not valid because:", - ) - ) - secondaryFiles = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, - baseuri, - loadingOptions, - lc=_doc.get("secondaryFiles") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( - str(e), - None + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", ) ) - else: - val = _doc.get("secondaryFiles") - 
if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " - "is not valid because:", - ) - ) - format = None - if "format" in _doc: + label = None + if "label" in _doc: try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_True_False_None_True, + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("format") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `format`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -3798,13 +3710,13 @@ def fromDoc( ) ) else: - val = _doc.get("format") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3816,28 +3728,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the 
`label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `format` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - contents = None - if "contents" in _doc: + doc = None + if "doc" in _doc: try: - contents = load_field( - _doc.get("contents"), - union_of_None_type_or_strtype, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("contents") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `contents`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -3845,13 +3757,13 @@ def fromDoc( ) ) else: - val = _doc.get("contents") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `contents` field is not valid because:", - SourceLine(_doc, "contents", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3863,14 +3775,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `contents` field is not valid because:", - SourceLine(_doc, "contents", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `contents` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -3878,14 +3790,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", 
loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( k ), SourceLine(_doc, k, str), @@ -3895,20 +3807,15 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - location=location, - path=path, - basename=basename, - dirname=dirname, - nameroot=nameroot, - nameext=nameext, - checksum=checksum, - size=size, - secondaryFiles=secondaryFiles, - format=format, - contents=contents, + fields=fields, + type_=type_, + label=label, + doc=doc, + name=name, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -3922,57 +3829,24 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.location is not None: - u = save_relative_uri(self.location, base_url, False, None, relative_uris) - r["location"] = u - if self.path is not None: - u = save_relative_uri(self.path, base_url, False, None, relative_uris) - r["path"] = u - if self.basename is not None: - r["basename"] = save( - self.basename, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.dirname is not None: - r["dirname"] = save( - self.dirname, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.nameroot is not None: - r["nameroot"] = save( - self.nameroot, 
top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.nameext is not None: - r["nameext"] = save( - self.nameext, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.checksum is not None: - r["checksum"] = save( - self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.size is not None: - r["size"] = save( - self.size, top=False, base_url=base_url, relative_uris=relative_uris + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.format is not None: - u = save_relative_uri(self.format, base_url, True, None, relative_uris) - r["format"] = u - if self.contents is not None: - r["contents"] = save( - self.contents, top=False, base_url=base_url, relative_uris=relative_uris + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) # top refers to the directory level @@ -3983,80 +3857,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "class", - "location", - "path", - "basename", - "dirname", - "nameroot", - "nameext", - "checksum", - "size", - "secondaryFiles", - "format", - "contents", - ] + attrs: ClassVar[Collection[str]] = frozenset( + ["fields", "type", "label", "doc", "name"] ) -class Directory(Saveable): - """ - Represents a directory to present to a command line tool. 
- - Directories are represented as objects with `class` of `Directory`. Directory objects have - a number of properties that provide metadata about the directory. - - The `location` property of a Directory is a URI that uniquely identifies - the directory. Implementations must support the file:// URI scheme and may - support other schemes such as http://. Alternately to `location`, - implementations must also accept the `path` property on Directory, which - must be a filesystem path available on the same host as the CWL runner (for - inputs) or the runtime environment of a command line tool execution (for - command line tool outputs). - - A Directory object may have a `listing` field. This is a list of File and - Directory objects that are contained in the Directory. For each entry in - `listing`, the `basename` property defines the name of the File or - Subdirectory when staged to disk. If `listing` is not provided, the - implementation must have some way of fetching the Directory listing at - runtime based on the `location` field. - - If a Directory does not have `location`, it is a Directory literal. A - Directory literal must provide `listing`. Directory literals must be - created on disk at runtime as needed. - - The resources in a Directory literal do not need to have any implied - relationship in their `location`. For example, a Directory listing may - contain two files located on different hosts. It is the responsibility of - the runtime to ensure that those files are staged to disk appropriately. - Secondary files associated with files in `listing` must also be staged to - the same Directory. - - When executing a CommandLineTool, Directories must be recursively staged - first and have local values of `path` assigend. - - Directory objects in CommandLineTool output must provide either a - `location` URI or a `path` property in the context of the tool execution - runtime (local to the compute node, or within the executing container). 
- - An ExpressionTool may forward file references from input to output by using - the same value for `location`. - - Name conflicts (the same `basename` appearing multiple times in `listing` - or in any entry in `secondaryFiles` in the listing) is a fatal error. - - """ +class InputEnumSchema(schema_salad.metaschema.EnumSchema, InputSchema): + name: str def __init__( self, - location: Optional[Any] = None, - path: Optional[Any] = None, - basename: Optional[Any] = None, - listing: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + symbols: Any, + type_: Any, + name: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -4066,27 +3883,25 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "Directory" - self.location = location - self.path = path - self.basename = basename - self.listing = listing + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols + self.type_ = type_ + self.label = label + self.doc = doc def __eq__(self, other: Any) -> bool: - if isinstance(other, Directory): + if isinstance(other, InputEnumSchema): return bool( - self.class_ == other.class_ - and self.location == other.location - and self.path == other.path - and self.basename == other.basename - and self.listing == other.listing + self.name == other.name + and self.symbols == other.symbols + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc ) return False def __hash__(self) -> int: - return hash( - (self.class_, self.location, self.path, self.basename, self.listing) - ) + return hash((self.name, self.symbols, self.type_, self.label, self.doc)) @classmethod def fromDoc( @@ 
-4094,45 +3909,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Directory": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_Directory_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - location = None - if "location" in _doc: + name = None + if "name" in _doc: try: - location = load_field( - _doc.get("location"), - uri_union_of_None_type_or_strtype_False_False_None_None, + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("location") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `location`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -4140,13 +3939,13 @@ def fromDoc( ) ) else: - val = _doc.get("location") + val = _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `location` field is not valid because:", - SourceLine(_doc, "location", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4158,75 +3957,133 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `location` field is not valid because:", - 
SourceLine(_doc, "location", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `location` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - path = None - if "path" in _doc: - try: - path = load_field( - _doc.get("path"), - uri_union_of_None_type_or_strtype_False_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("path") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) - if str(e) == "missing required field `path`": + symbols = _load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("symbols") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `symbols`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("symbols") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("path") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the 
`path` field is not valid because:", - SourceLine(_doc, "path", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + detailed_message=f"the `symbols` field with value `{val}` " + "is not valid because:", ) - else: - _errors__.append( - ValidationException( - "the `path` field is not valid because:", - SourceLine(_doc, "path", str), - [e], - detailed_message=f"the `path` field with value `{val}` " - "is not valid because:", - ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) - basename = None - if "basename" in _doc: + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not 
valid because:", + ) + ) + label = None + if "label" in _doc: try: - basename = load_field( - _doc.get("basename"), + label = _load_field( + _doc.get("label"), union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("basename") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `basename`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -4234,13 +4091,13 @@ def fromDoc( ) ) else: - val = _doc.get("basename") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `basename` field is not valid because:", - SourceLine(_doc, "basename", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4252,28 +4109,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `basename` field is not valid because:", - SourceLine(_doc, "basename", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `basename` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - listing = None - if "listing" in _doc: + doc = None + if "doc" in _doc: try: - listing = load_field( - _doc.get("listing"), - union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("listing") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `listing`": + if str(e) == "missing required field `doc`": _errors__.append( 
ValidationException( str(e), @@ -4281,13 +4138,13 @@ def fromDoc( ) ) else: - val = _doc.get("listing") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `listing` field is not valid because:", - SourceLine(_doc, "listing", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4299,14 +4156,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `listing` field is not valid because:", - SourceLine(_doc, "listing", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `listing` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -4314,14 +4171,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( k ), SourceLine(_doc, k, str), @@ -4331,13 +4188,15 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - location=location, - path=path, - basename=basename, - listing=listing, + name=name, + symbols=symbols, + type_=type_, + label=label, + doc=doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed 
def save( @@ -4351,27 +4210,23 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.location is not None: - u = save_relative_uri(self.location, base_url, False, None, relative_uris) - r["location"] = u - if self.path is not None: - u = save_relative_uri(self.path, base_url, False, None, relative_uris) - r["path"] = u - if self.basename is not None: - r["basename"] = save( - self.basename, top=False, base_url=base_url, relative_uris=relative_uris + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.listing is not None: - r["listing"] = save( - self.listing, top=False, base_url=base_url, relative_uris=relative_uris + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) # top refers to the directory level @@ -4382,52 +4237,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "location", "path", "basename", "listing"]) - - -class Labeled(Saveable): - pass - - -class Identified(Saveable): - pass - - -class IdentifierRequired(Identified): - pass - - -class LoadContents(Saveable): - pass - - -class FieldBase(Labeled): - pass - - -class InputFormat(Saveable): - pass - - -class OutputFormat(Saveable): 
- pass - - -class Parameter(FieldBase, Documented, IdentifierRequired): - """ - Define an input or output parameter to a process. - - """ + attrs: ClassVar[Collection[str]] = frozenset( + ["name", "symbols", "type", "label", "doc"] + ) - pass +class InputArraySchema(CWLArraySchema, InputSchema): + name: str -class InputBinding(Saveable): def __init__( self, - loadContents: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + items: Any, + type_: Any, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -4437,15 +4263,25 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.loadContents = loadContents + self.items = items + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) def __eq__(self, other: Any) -> bool: - if isinstance(other, InputBinding): - return bool(self.loadContents == other.loadContents) + if isinstance(other, InputArraySchema): + return bool( + self.items == other.items + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) return False def __hash__(self) -> int: - return hash((self.loadContents)) + return hash((self.items, self.type_, self.label, self.doc, self.name)) @classmethod def fromDoc( @@ -4453,29 +4289,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputBinding": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - loadContents = None - if "loadContents" in _doc: + name = None + if 
"name" in _doc: try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("loadContents") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadContents`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -4483,13 +4319,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadContents") + val = _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4501,107 +4337,310 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `loadContents` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = _load_field( + 
_doc.get("items"), + uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `loadContents`".format( - k - ), - SourceLine(_doc, k, str), + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", ) ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - loadContents=loadContents, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - 
for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, + type_ = _load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") ) - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["loadContents"]) - - -class IOSchema(Labeled, Documented): - pass - - -class InputSchema(IOSchema): - pass - - -class OutputSchema(IOSchema): - pass - - -class InputRecordField(CWLRecordField, FieldBase, InputFormat, LoadContents): - name: str + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - def __init__( - self, - name: Any, - type_: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid 
{to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = 
_doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + items=items, + type_=type_, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, 
relative_uris) + r["name"] = u + if self.items is not None: + u = save_relative_uri(self.items, self.name, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset( + ["items", "type", "label", "doc", "name"] + ) + + +class OutputRecordField(CWLRecordField, FieldBase, OutputFormat): + name: str + + def __init__( + self, + name: Any, + type_: Any, + doc: Any | None = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + format: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: @@ -4613,11 +4652,9 @@ def __init__( self.secondaryFiles = secondaryFiles self.streamable = streamable self.format = format - self.loadContents = loadContents - self.loadListing = loadListing def __eq__(self, other: Any) -> bool: - if isinstance(other, InputRecordField): + if isinstance(other, OutputRecordField): return bool( self.doc == other.doc and self.name == other.name @@ -4626,8 +4663,6 @@ def __eq__(self, other: Any) -> bool: and self.secondaryFiles == other.secondaryFiles and self.streamable == other.streamable and self.format == other.format - 
and self.loadContents == other.loadContents - and self.loadListing == other.loadListing ) return False @@ -4641,8 +4676,6 @@ def __hash__(self) -> int: self.secondaryFiles, self.streamable, self.format, - self.loadContents, - self.loadListing, ) ) @@ -4652,8 +4685,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -4663,7 +4696,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, @@ -4719,7 +4752,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -4767,9 +4800,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -4814,7 +4847,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -4861,7 +4894,7 @@ def fromDoc( secondaryFiles = None if "secondaryFiles" in _doc: try: - secondaryFiles = load_field( + secondaryFiles = _load_field( _doc.get("secondaryFiles"), 
secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, @@ -4908,7 +4941,7 @@ def fromDoc( streamable = None if "streamable" in _doc: try: - streamable = load_field( + streamable = _load_field( _doc.get("streamable"), union_of_None_type_or_booltype, baseuri, @@ -4955,9 +4988,9 @@ def fromDoc( format = None if "format" in _doc: try: - format = load_field( + format = _load_field( _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, lc=_doc.get("format") @@ -4999,119 +5032,25 @@ def fromDoc( "is not valid because:", ) ) - loadContents = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("loadContents") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `loadContents`": + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: _errors__.append( ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("loadContents") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " 
- f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - detailed_message=f"the `loadContents` field with value `{val}` " - "is not valid because:", - ) - ) - loadListing = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - lc=_doc.get("loadListing") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `loadListing`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("loadListing") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - detailed_message=f"the `loadListing` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, 
`format`, `loadContents`, `loadListing`".format( - k - ), - SourceLine(_doc, k, str), + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`".format( + k + ), + SourceLine(_doc, k, str), ) ) @@ -5125,8 +5064,6 @@ def fromDoc( secondaryFiles=secondaryFiles, streamable=streamable, format=format, - loadContents=loadContents, - loadListing=loadListing, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -5176,20 +5113,6 @@ def save( if self.format is not None: u = save_relative_uri(self.format, self.name, True, None, relative_uris) r["format"] = u - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) - if self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) # top refers to the directory level if top: @@ -5199,33 +5122,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "doc", - "name", - "type", - "label", - "secondaryFiles", - "streamable", - "format", - "loadContents", - "loadListing", - ] + attrs: ClassVar[Collection[str]] = frozenset( + ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] ) -class InputRecordSchema(CWLRecordSchema, InputSchema): +class OutputRecordSchema(CWLRecordSchema, OutputSchema): name: str def __init__( self, type_: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + fields: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = 
extension_fields @@ -5242,7 +5155,7 @@ def __init__( self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) def __eq__(self, other: Any) -> bool: - if isinstance(other, InputRecordSchema): + if isinstance(other, OutputRecordSchema): return bool( self.fields == other.fields and self.type_ == other.type_ @@ -5261,8 +5174,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputRecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -5272,7 +5185,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -5328,9 +5241,9 @@ def fromDoc( fields = None if "fields" in _doc: try: - fields = load_field( + fields = _load_field( _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, + idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, baseuri, loadingOptions, lc=_doc.get("fields") @@ -5376,7 +5289,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Record_nameLoader_2, baseuri, @@ -5423,7 +5336,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -5470,7 +5383,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -5514,7 +5427,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -5522,7 +5435,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - 
ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -5589,21 +5502,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type", "label", "doc", "name"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["fields", "type", "label", "doc", "name"] + ) -class InputEnumSchema(EnumSchema, InputSchema): +class OutputEnumSchema(schema_salad.metaschema.EnumSchema, OutputSchema): name: str def __init__( self, symbols: Any, type_: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + name: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -5620,7 +5535,7 @@ def __init__( self.doc = doc def __eq__(self, other: Any) -> bool: - if isinstance(other, InputEnumSchema): + if isinstance(other, OutputEnumSchema): return bool( self.name == other.name and self.symbols == other.symbols @@ -5639,8 +5554,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputEnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -5650,7 +5565,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -5707,7 +5622,7 @@ def fromDoc( if _doc.get("symbols") is None: raise ValidationException("missing required field `symbols`", None, []) - symbols = load_field( + symbols = _load_field( _doc.get("symbols"), uri_array_of_strtype_True_False_None_None, baseuri, @@ -5755,7 +5670,7 @@ def fromDoc( if 
_doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Enum_nameLoader_2, baseuri, @@ -5802,7 +5717,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -5849,7 +5764,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -5893,7 +5808,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -5901,7 +5816,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -5967,21 +5882,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["name", "symbols", "type", "label", "doc"] + ) -class InputArraySchema(CWLArraySchema, InputSchema): +class OutputArraySchema(CWLArraySchema, OutputSchema): name: str def __init__( self, items: Any, type_: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -5998,7 +5915,7 @@ def __init__( self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) def __eq__(self, 
other: Any) -> bool: - if isinstance(other, InputArraySchema): + if isinstance(other, OutputArraySchema): return bool( self.items == other.items and self.type_ == other.type_ @@ -6017,8 +5934,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputArraySchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -6028,7 +5945,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -6085,9 +6002,9 @@ def fromDoc( if _doc.get("items") is None: raise ValidationException("missing required field `items`", None, []) - items = load_field( + items = _load_field( _doc.get("items"), - uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None, + uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None, baseuri, loadingOptions, lc=_doc.get("items") @@ -6133,7 +6050,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Array_nameLoader_2, baseuri, @@ -6180,7 +6097,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -6227,7 +6144,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -6271,7 +6188,7 
@@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -6279,7 +6196,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -6345,23 +6262,50 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["items", "type", "label", "doc", "name"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["items", "type", "label", "doc", "name"] + ) -class OutputRecordField(CWLRecordField, FieldBase, OutputFormat): - name: str +class InputParameter(Parameter, InputFormat, LoadContents): + pass + + +class OutputParameter(Parameter, OutputFormat): + pass + + +class ProcessRequirement(Saveable): + """ + A process requirement declares a prerequisite that may or must be fulfilled before executing a process. See ```Process.hints`` <#process>`__ and ```Process.requirements`` <#process>`__. + + Process requirements are the primary mechanism for specifying extensions to the CWL core specification. + + """ + + pass + + +class Process(Identified, Labeled, schema_salad.metaschema.Documented): + """ + The base executable type in CWL is the ``Process`` object defined by the document. Note that the ``Process`` object is abstract and cannot be directly executed. + + """ + + pass + + +class InlineJavascriptRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support inline Javascript expressions. If this requirement is not present, the workflow platform must not perform expression interpolatation. 
+ + """ def __init__( self, - name: Any, - type_: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + expressionLib: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -6371,39 +6315,19 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.type_ = type_ - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.format = format + self.class_: Final[str] = "InlineJavascriptRequirement" + self.expressionLib = expressionLib def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputRecordField): + if isinstance(other, InlineJavascriptRequirement): return bool( - self.doc == other.doc - and self.name == other.name - and self.type_ == other.type_ - and self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.format == other.format + self.class_ == other.class_ + and self.expressionLib == other.expressionLib ) return False def __hash__(self) -> int: - return hash( - ( - self.doc, - self.name, - self.type_, - self.label, - self.secondaryFiles, - self.streamable, - self.format, - ) - ) + return hash((self.class_, self.expressionLib)) @classmethod def fromDoc( @@ -6411,29 +6335,46 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = 
doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_InlineJavascriptRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + expressionLib = None + if "expressionLib" in _doc: try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None_None, + expressionLib = _load_field( + _doc.get("expressionLib"), + union_of_None_type_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("expressionLib") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `expressionLib`": _errors__.append( ValidationException( str(e), @@ -6441,13 +6382,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("expressionLib") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `expressionLib` field is not valid because:", + SourceLine(_doc, "expressionLib", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -6459,85 +6400,166 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `expressionLib` field is not valid because:", + SourceLine(_doc, "expressionLib", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `expressionLib` 
field with value `{val}` " "is not valid because:", ) ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `expressionLib`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + expressionLib=expressionLib, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" else: - _errors__.append(ValidationException("missing name")) - if not __original_name_is_none: - baseuri = cast(str, name) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.expressionLib is not None: + r["expressionLib"] = save( + self.expressionLib, + top=False, + base_url=base_url, + 
relative_uris=relative_uris, + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) + attrs: ClassVar[Collection[str]] = frozenset(["class", "expressionLib"]) + + +class CommandInputSchema(Saveable): + pass + + +class SchemaDefRequirement(ProcessRequirement): + """ + This field consists of an array of type definitions which must be used when interpreting the ``inputs`` and ``outputs`` fields. When a ``type`` field contain a IRI, the implementation must check if the type is defined in ``schemaDefs`` and use that definition. If the type is not found in ``schemaDefs``, it is an error. The entries in ``schemaDefs`` must be processed in the order listed such that later schema definitions may refer to earlier schema definitions. 
+ + """ + + def __init__( + self, + types: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_: Final[str] = "SchemaDefRequirement" + self.types = types + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SchemaDefRequirement): + return bool(self.class_ == other.class_ and self.types == other.types) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.types)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - type_ = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + class_ = _load_field( + _doc.get("class"), + uri_SchemaDefRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + try: + if _doc.get("types") is None: + raise ValidationException("missing required field `types`", None, []) + + 
types = _load_field( + _doc.get("types"), + array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("types") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `types`": _errors__.append( ValidationException( str(e), @@ -6545,13 +6567,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("types") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `types` field is not valid because:", + SourceLine(_doc, "types", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -6563,169 +6585,187 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `types` field is not valid because:", + SourceLine(_doc, "types", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `types` field with value `{val}` " "is not valid because:", ) ) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - str(e), - None - ) + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) + 
extension_fields[ex] = _doc[k] else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - secondaryFiles = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - lc=_doc.get("secondaryFiles") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( - str(e), - None + "invalid field `{}`, expected one of: `class`, `types`".format( + k + ), + SourceLine(_doc, k, str), ) ) - else: - val = _doc.get("secondaryFiles") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the 
`secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " - "is not valid because:", - ) - ) - streamable = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("streamable") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + types=types, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed - if str(e) == "missing required field `streamable`": + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.types is not None: + r["types"] = save( + self.types, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["class", "types"]) + + +class SecondaryFileSchema(Saveable): + def __init__( + self, + pattern: Any, + required: Any | None = None, + extension_fields: 
MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.pattern = pattern + self.required = required + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SecondaryFileSchema): + return bool( + self.pattern == other.pattern and self.required == other.required + ) + return False + + def __hash__(self) -> int: + return hash((self.pattern, self.required)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("pattern") is None: + raise ValidationException("missing required field `pattern`", None, []) + + pattern = _load_field( + _doc.get("pattern"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("pattern") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `pattern`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("pattern") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `pattern` field is not valid because:", + SourceLine(_doc, "pattern", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("streamable") - if error_message != str(e): - 
val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - detailed_message=f"the `streamable` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `pattern` field is not valid because:", + SourceLine(_doc, "pattern", str), + [e], + detailed_message=f"the `pattern` field with value `{val}` " + "is not valid because:", ) - format = None - if "format" in _doc: + ) + required = None + if "required" in _doc: try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + required = _load_field( + _doc.get("required"), + union_of_None_type_or_booltype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("format") + lc=_doc.get("required") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `format`": + if str(e) == "missing required field `required`": _errors__.append( ValidationException( str(e), @@ -6733,13 +6773,13 @@ def fromDoc( ) ) else: - val = _doc.get("format") + val = _doc.get("required") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `required` field is not valid because:", + SourceLine(_doc, "required", str), [ValidationException(f"Value is a {val_type}, " 
f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -6751,14 +6791,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `required` field is not valid because:", + SourceLine(_doc, "required", str), [e], - detailed_message=f"the `format` field with value `{val}` " + detailed_message=f"the `required` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -6766,14 +6806,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`".format( + "invalid field `{}`, expected one of: `pattern`, `required`".format( k ), SourceLine(_doc, k, str), @@ -6783,17 +6823,11 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - doc=doc, - name=name, - type_=type_, - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, + pattern=pattern, + required=required, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -6807,38 +6841,14 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, 
top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.name, - relative_uris=relative_uris, + if self.pattern is not None: + r["pattern"] = save( + self.pattern, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.name, - relative_uris=relative_uris, + if self.required is not None: + r["required"] = save( + self.required, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.format is not None: - u = save_relative_uri(self.format, self.name, True, None, relative_uris) - r["format"] = u # top refers to the directory level if top: @@ -6848,23 +6858,20 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] - ) + attrs: ClassVar[Collection[str]] = frozenset(["pattern", "required"]) -class OutputRecordSchema(CWLRecordSchema, OutputSchema): - name: str +class LoadListingRequirement(ProcessRequirement): + """ + Specify the desired behavior for loading the ``listing`` field of a Directory object for use by expressions. 
+ + """ def __init__( self, - type_: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + loadListing: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -6874,25 +6881,18 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.fields = fields - self.type_ = type_ - self.label = label - self.doc = doc - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.class_: Final[str] = "LoadListingRequirement" + self.loadListing = loadListing def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputRecordSchema): + if isinstance(other, LoadListingRequirement): return bool( - self.fields == other.fields - and self.type_ == other.type_ - and self.label == other.label - and self.doc == other.doc - and self.name == other.name + self.class_ == other.class_ and self.loadListing == other.loadListing ) return False def __hash__(self) -> int: - return hash((self.fields, self.type_, self.label, self.doc, self.name)) + return hash((self.class_, self.loadListing)) @classmethod def fromDoc( @@ -6900,227 +6900,46 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputRecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("name") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = 
parse_errors(str(e)) - - if str(e) == "missing required field `name`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("name") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [e], - detailed_message=f"the `name` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - fields = None - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, - baseuri, - loadingOptions, - lc=_doc.get("fields") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `fields`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("fields") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - 
f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - detailed_message=f"the `fields` field with value `{val}` " - "is not valid because:", - ) - ) try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - type_ = load_field( - _doc.get("type"), - typedsl_Record_nameLoader_2, + class_ = _load_field( + _doc.get("class"), + uri_LoadListingRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("class") ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - 
loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: + raise e + loadListing = None + if "loadListing" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + loadListing = _load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("loadListing") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( str(e), @@ -7128,13 +6947,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("loadListing") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", 
str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7146,14 +6965,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `loadListing` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -7161,14 +6980,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + "invalid field `{}`, expected one of: `class`, `loadListing`".format( k ), SourceLine(_doc, k, str), @@ -7178,15 +6997,10 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - fields=fields, - type_=type_, - label=label, - doc=doc, - name=name, + loadListing=loadListing, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -7200,24 +7014,22 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.fields is not None: - r["fields"] = save( - self.fields, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, 
relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -7228,21 +7040,21 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type", "label", "doc", "name"]) + attrs: ClassVar[Collection[str]] = frozenset(["class", "loadListing"]) -class OutputEnumSchema(EnumSchema, OutputSchema): - name: str +class EnvironmentDef(Saveable): + """ + Define an environment variable that will be set in the runtime environment by the workflow platform when executing the command line tool. May be the result of executing an expression, such as getting a parameter from input. 
+ + """ def __init__( self, - symbols: Any, - type_: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + envName: Any, + envValue: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -7252,25 +7064,18 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.symbols = symbols - self.type_ = type_ - self.label = label - self.doc = doc + self.envName = envName + self.envValue = envValue def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputEnumSchema): + if isinstance(other, EnvironmentDef): return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type_ == other.type_ - and self.label == other.label - and self.doc == other.doc + self.envName == other.envName and self.envValue == other.envValue ) return False def __hash__(self) -> int: - return hash((self.name, self.symbols, self.type_, self.label, self.doc)) + return hash((self.envName, self.envValue)) @classmethod def fromDoc( @@ -7278,86 +7083,30 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputEnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("name") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `name`": 
- _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("name") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [e], - detailed_message=f"the `name` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) try: - if _doc.get("symbols") is None: - raise ValidationException("missing required field `symbols`", None, []) + if _doc.get("envName") is None: + raise ValidationException("missing required field `envName`", None, []) - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None_None, + envName = _load_field( + _doc.get("envName"), + strtype, baseuri, loadingOptions, - lc=_doc.get("symbols") + lc=_doc.get("envName") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `symbols`": + if str(e) == "missing required field `envName`": _errors__.append( ValidationException( str(e), @@ -7365,13 +7114,13 @@ def fromDoc( ) ) else: - val = _doc.get("symbols") + val = _doc.get("envName") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, 
"symbols", str), + "the `envName` field is not valid because:", + SourceLine(_doc, "envName", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7383,29 +7132,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), + "the `envName` field is not valid because:", + SourceLine(_doc, "envName", str), [e], - detailed_message=f"the `symbols` field with value `{val}` " + detailed_message=f"the `envName` field with value `{val}` " "is not valid because:", ) ) try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("envValue") is None: + raise ValidationException("missing required field `envValue`", None, []) - type_ = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, + envValue = _load_field( + _doc.get("envValue"), + union_of_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("envValue") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `envValue`": _errors__.append( ValidationException( str(e), @@ -7413,13 +7162,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("envValue") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `envValue` field is not valid because:", + SourceLine(_doc, "envValue", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7431,108 +7180,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the 
`envValue` field is not valid because:", + SourceLine(_doc, "envValue", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `envValue` field with value `{val}` " "is not valid because:", ) ) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, 
"doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -7540,14 +7195,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( + "invalid field `{}`, expected one of: `envName`, `envValue`".format( k ), SourceLine(_doc, k, str), @@ -7557,15 +7212,11 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - name=name, - symbols=symbols, - type_=type_, - label=label, - doc=doc, + envName=envName, + envValue=envValue, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -7579,23 +7230,13 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) - r["symbols"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, 
relative_uris=relative_uris + if self.envName is not None: + r["envName"] = save( + self.envName, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + if self.envValue is not None: + r["envValue"] = save( + self.envValue, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -7606,21 +7247,44 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + attrs: ClassVar[Collection[str]] = frozenset(["envName", "envValue"]) -class OutputArraySchema(CWLArraySchema, OutputSchema): - name: str +class CommandLineBinding(InputBinding): + """ + When listed under ``inputBinding`` in the input schema, the term "value" refers to the the corresponding value in the input object. For binding objects listed in ``CommandLineTool.arguments``, the term "value" refers to the effective value after evaluating ``valueFrom``. + + The binding behavior when building the command line depends on the data type of the value. If there is a mismatch between the type described by the input schema and the effective value, such as resulting from an expression evaluation, an implementation must use the data type of the effective value. + + - **string**: Add ``prefix`` and the string to the command line. + + - **number**: Add ``prefix`` and decimal representation to command line. + + - **boolean**: If true, add ``prefix`` to the command line. If false, add nothing. + + - **File**: Add ``prefix`` and the value of ```File.path`` <#File>`__ to the command line. + + - **Directory**: Add ``prefix`` and the value of ```Directory.path`` <#Directory>`__ to the command line. 
+ + - **array**: If ``itemSeparator`` is specified, add ``prefix`` and the join the array into a single string with ``itemSeparator`` separating the items. Otherwise first add ``prefix``, then recursively process individual elements. If the array is empty, it does not add anything to command line. + + - **object**: Add ``prefix`` only, and recursively add object fields for which ``inputBinding`` is specified. + + - **null**: Add nothing. + + """ def __init__( self, - items: Any, - type_: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + loadContents: Any | None = None, + position: Any | None = None, + prefix: Any | None = None, + separate: Any | None = None, + itemSeparator: Any | None = None, + valueFrom: Any | None = None, + shellQuote: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -7630,25 +7294,39 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ - self.label = label - self.doc = doc - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.loadContents = loadContents + self.position = position + self.prefix = prefix + self.separate = separate + self.itemSeparator = itemSeparator + self.valueFrom = valueFrom + self.shellQuote = shellQuote def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputArraySchema): + if isinstance(other, CommandLineBinding): return bool( - self.items == other.items - and self.type_ == other.type_ - and self.label == other.label - and self.doc == other.doc - and self.name == other.name + self.loadContents == other.loadContents + and self.position == other.position + and self.prefix == other.prefix + and 
self.separate == other.separate + and self.itemSeparator == other.itemSeparator + and self.valueFrom == other.valueFrom + and self.shellQuote == other.shellQuote ) return False def __hash__(self) -> int: - return hash((self.items, self.type_, self.label, self.doc, self.name)) + return hash( + ( + self.loadContents, + self.position, + self.prefix, + self.separate, + self.itemSeparator, + self.valueFrom, + self.shellQuote, + ) + ) @classmethod def fromDoc( @@ -7656,29 +7334,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputArraySchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + loadContents = None + if "loadContents" in _doc: try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("loadContents") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( str(e), @@ -7686,13 +7364,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("loadContents") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7704,133 +7382,75 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` 
field is not valid because:", - SourceLine(_doc, "name", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `loadContents` field with value `{val}` " "is not valid because:", ) ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) - - items = load_field( - _doc.get("items"), - uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("items") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) + position = None + if "position" in _doc: + try: + position = _load_field( + _doc.get("position"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("position") ) - else: - val = _doc.get("items") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - 
"the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [e], - detailed_message=f"the `items` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `position`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("position") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `position` field is not valid because:", + SourceLine(_doc, "position", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + 
f"{verb_tensage} {error_message}")], + ) ) - ) - label = None - if "label" in _doc: + else: + _errors__.append( + ValidationException( + "the `position` field is not valid because:", + SourceLine(_doc, "position", str), + [e], + detailed_message=f"the `position` field with value `{val}` " + "is not valid because:", + ) + ) + prefix = None + if "prefix" in _doc: try: - label = load_field( - _doc.get("label"), + prefix = _load_field( + _doc.get("prefix"), union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("prefix") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `prefix`": _errors__.append( ValidationException( str(e), @@ -7838,13 +7458,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("prefix") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `prefix` field is not valid because:", + SourceLine(_doc, "prefix", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7856,28 +7476,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `prefix` field is not valid because:", + SourceLine(_doc, "prefix", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `prefix` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: + separate = None + if "separate" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + separate = _load_field( + _doc.get("separate"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - 
lc=_doc.get("doc") + lc=_doc.get("separate") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `separate`": _errors__.append( ValidationException( str(e), @@ -7885,13 +7505,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("separate") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `separate` field is not valid because:", + SourceLine(_doc, "separate", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7903,14 +7523,155 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " + "the `separate` field is not valid because:", + SourceLine(_doc, "separate", str), + [e], + detailed_message=f"the `separate` field with value `{val}` " + "is not valid because:", + ) + ) + itemSeparator = None + if "itemSeparator" in _doc: + try: + itemSeparator = _load_field( + _doc.get("itemSeparator"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("itemSeparator") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `itemSeparator`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("itemSeparator") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `itemSeparator` field is not valid because:", + SourceLine(_doc, "itemSeparator", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " 
+ f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `itemSeparator` field is not valid because:", + SourceLine(_doc, "itemSeparator", str), + [e], + detailed_message=f"the `itemSeparator` field with value `{val}` " + "is not valid because:", + ) + ) + valueFrom = None + if "valueFrom" in _doc: + try: + valueFrom = _load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("valueFrom") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `valueFrom`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("valueFrom") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + detailed_message=f"the `valueFrom` field with value `{val}` " + "is not valid because:", + ) + ) + shellQuote = None + if "shellQuote" in _doc: + try: + shellQuote = _load_field( + _doc.get("shellQuote"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("shellQuote") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field 
`shellQuote`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("shellQuote") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `shellQuote` field is not valid because:", + SourceLine(_doc, "shellQuote", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `shellQuote` field is not valid because:", + SourceLine(_doc, "shellQuote", str), + [e], + detailed_message=f"the `shellQuote` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -7918,14 +7679,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( k ), SourceLine(_doc, k, str), @@ -7935,15 +7696,16 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - items=items, - type_=type_, - label=label, - doc=doc, - name=name, + loadContents=loadContents, + position=position, + prefix=prefix, + separate=separate, + itemSeparator=itemSeparator, + valueFrom=valueFrom, + shellQuote=shellQuote, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, 
name)] = (_constructed, loadingOptions) return _constructed def save( @@ -7957,23 +7719,45 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.items is not None: - u = save_relative_uri(self.items, self.name, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris + if self.position is not None: + r["position"] = save( + self.position, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + if self.prefix is not None: + r["prefix"] = save( + self.prefix, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.separate is not None: + r["separate"] = save( + self.separate, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.itemSeparator is not None: + r["itemSeparator"] = save( + self.itemSeparator, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.valueFrom is not None: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.shellQuote is not None: + r["shellQuote"] = save( + self.shellQuote, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -7984,56 +7768,40 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["items", "type", "label", "doc", "name"]) - - -class InputParameter(Parameter, 
InputFormat, LoadContents): - pass - - -class OutputParameter(Parameter, OutputFormat): - pass - - -class ProcessRequirement(Saveable): - """ - A process requirement declares a prerequisite that may or must be fulfilled - before executing a process. See [`Process.hints`](#process) and - [`Process.requirements`](#process). - - Process requirements are the primary mechanism for specifying extensions to - the CWL core specification. - - """ - - pass - - -class Process(Identified, Labeled, Documented): - """ + attrs: ClassVar[Collection[str]] = frozenset( + [ + "loadContents", + "position", + "prefix", + "separate", + "itemSeparator", + "valueFrom", + "shellQuote", + ] + ) - The base executable type in CWL is the `Process` object defined by the - document. Note that the `Process` object is abstract and cannot be - directly executed. +class CommandOutputBinding(LoadContents): """ + Describes how to generate an output parameter based on the files produced by a CommandLineTool. - pass - + The output parameter value is generated by applying these operations in the following order: -class InlineJavascriptRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support inline Javascript expressions. - If this requirement is not present, the workflow platform must not perform expression - interpolatation. 
+ - glob + - loadContents + - outputEval + - secondaryFiles """ def __init__( self, - expressionLib: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + glob: Any | None = None, + outputEval: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -8043,19 +7811,23 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "InlineJavascriptRequirement" - self.expressionLib = expressionLib + self.loadContents = loadContents + self.loadListing = loadListing + self.glob = glob + self.outputEval = outputEval def __eq__(self, other: Any) -> bool: - if isinstance(other, InlineJavascriptRequirement): + if isinstance(other, CommandOutputBinding): return bool( - self.class_ == other.class_ - and self.expressionLib == other.expressionLib - ) - return False + self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.glob == other.glob + and self.outputEval == other.outputEval + ) + return False def __hash__(self) -> int: - return hash((self.class_, self.expressionLib)) + return hash((self.loadContents, self.loadListing, self.glob, self.outputEval)) @classmethod def fromDoc( @@ -8063,45 +7835,76 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InlineJavascriptRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + loadContents = None + if "loadContents" in _doc: + try: + loadContents = _load_field( + 
_doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) - class_ = load_field( - _doc.get("class"), - uri_InlineJavascriptRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - expressionLib = None - if "expressionLib" in _doc: + if str(e) == "missing required field `loadContents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: try: - expressionLib = load_field( - _doc.get("expressionLib"), - union_of_None_type_or_array_of_strtype, + loadListing = _load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, baseuri, loadingOptions, - lc=_doc.get("expressionLib") + lc=_doc.get("loadListing") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing 
required field `expressionLib`": + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( str(e), @@ -8109,13 +7912,13 @@ def fromDoc( ) ) else: - val = _doc.get("expressionLib") + val = _doc.get("loadListing") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `expressionLib` field is not valid because:", - SourceLine(_doc, "expressionLib", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8127,14 +7930,108 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `expressionLib` field is not valid because:", - SourceLine(_doc, "expressionLib", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [e], - detailed_message=f"the `expressionLib` field with value `{val}` " + detailed_message=f"the `loadListing` field with value `{val}` " + "is not valid because:", + ) + ) + glob = None + if "glob" in _doc: + try: + glob = _load_field( + _doc.get("glob"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("glob") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `glob`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("glob") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `glob` field is not valid because:", + SourceLine(_doc, "glob", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for 
this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `glob` field is not valid because:", + SourceLine(_doc, "glob", str), + [e], + detailed_message=f"the `glob` field with value `{val}` " + "is not valid because:", + ) + ) + outputEval = None + if "outputEval" in _doc: + try: + outputEval = _load_field( + _doc.get("outputEval"), + union_of_None_type_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputEval") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputEval`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputEval") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputEval` field is not valid because:", + SourceLine(_doc, "outputEval", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputEval` field is not valid because:", + SourceLine(_doc, "outputEval", str), + [e], + detailed_message=f"the `outputEval` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -8142,14 +8039,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `expressionLib`".format( + 
"invalid field `{}`, expected one of: `loadContents`, `loadListing`, `glob`, `outputEval`".format( k ), SourceLine(_doc, k, str), @@ -8159,7 +8056,10 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - expressionLib=expressionLib, + loadContents=loadContents, + loadListing=loadListing, + glob=glob, + outputEval=outputEval, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -8176,17 +8076,27 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.expressionLib is not None: - r["expressionLib"] = save( - self.expressionLib, + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.glob is not None: + r["glob"] = save( + self.glob, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.outputEval is not None: + r["outputEval"] = save( + self.outputEval, top=False, base_url=base_url, relative_uris=relative_uris, @@ -8200,30 +8110,32 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "expressionLib"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["loadContents", "loadListing", "glob", "outputEval"] + ) -class CommandInputSchema(Saveable): +class CommandLineBindable(Saveable): pass -class SchemaDefRequirement(ProcessRequirement): - """ - This field consists of an array of type definitions which must be used when - interpreting the `inputs` and `outputs` fields. 
When a `type` field - contain a IRI, the implementation must check if the type is defined in - `schemaDefs` and use that definition. If the type is not found in - `schemaDefs`, it is an error. The entries in `schemaDefs` must be - processed in the order listed such that later schema definitions may refer - to earlier schema definitions. - - """ +class CommandInputRecordField(InputRecordField, CommandLineBindable): + name: str def __init__( self, - types: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + name: Any, + type_: Any, + doc: Any | None = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + format: Any | None = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -8233,63 +8145,182 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "SchemaDefRequirement" - self.types = types + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.type_ = type_ + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, SchemaDefRequirement): - return bool(self.class_ == other.class_ and self.types == other.types) + if isinstance(other, CommandInputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type_ == other.type_ + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == 
other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.inputBinding == other.inputBinding + ) return False def __hash__(self) -> int: - return hash((self.class_, self.types)) - - @classmethod + return hash( + ( + self.doc, + self.name, + self.type_, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.loadContents, + self.loadListing, + self.inputBinding, + ) + ) + + @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SchemaDefRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + name = None + if "name" in _doc: + try: + name = _load_field( + _doc.get("name"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) - class_ = load_field( - _doc.get("class"), - uri_SchemaDefRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e + if str(e) == "missing required field `name`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value 
`{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + _errors__.append(ValidationException("missing name")) + if not __original_name_is_none: + baseuri = cast(str, name) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) try: - if _doc.get("types") is None: - raise ValidationException("missing required field `types`", None, []) + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - types = load_field( - _doc.get("types"), - 
array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader, + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, - lc=_doc.get("types") + lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `types`": + if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), @@ -8297,13 +8328,13 @@ def fromDoc( ) ) else: - val = _doc.get("types") + val = _doc.get("type") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `types` field is not valid because:", - SourceLine(_doc, "types", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8315,185 +8346,169 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `types` field is not valid because:", - SourceLine(_doc, "types", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [e], - detailed_message=f"the `types` field with value `{val}` " + detailed_message=f"the `type` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, 
verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ValidationException( + str(e), + None + ) ) - extension_fields[ex] = _doc[k] else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format( - k - ), - SourceLine(_doc, k, str), + str(e), + None ) ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is 
a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - types=types, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.types is not None: - r["types"] = save( - self.types, top=False, base_url=base_url, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "types"]) - - -class SecondaryFileSchema(Saveable): - def __init__( - self, 
- pattern: Any, - required: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.pattern = pattern - self.required = required - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SecondaryFileSchema): - return bool( - self.pattern == other.pattern and self.required == other.required - ) - return False - - def __hash__(self) -> int: - return hash((self.pattern, self.required)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SecondaryFileSchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("pattern") is None: - raise ValidationException("missing required field `pattern`", None, []) - - pattern = load_field( - _doc.get("pattern"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("pattern") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `pattern`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("pattern") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( - "the `pattern` field is not valid because:", - SourceLine(_doc, "pattern", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} 
{error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `pattern` field is not valid because:", - SourceLine(_doc, "pattern", str), - [e], - detailed_message=f"the `pattern` field with value `{val}` " - "is not valid because:", + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - required = None - if "required" in _doc: + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: try: - required = load_field( - _doc.get("required"), - union_of_None_type_or_booltype_or_ExpressionLoader, + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, - lc=_doc.get("required") + lc=_doc.get("format") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `required`": + if str(e) == "missing required field `format`": _errors__.append( ValidationException( str(e), @@ -8501,13 +8516,13 @@ def fromDoc( ) ) else: - val = _doc.get("required") + val = _doc.get("format") if error_message != str(e): val_type = 
convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `required` field is not valid because:", - SourceLine(_doc, "required", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8519,155 +8534,122 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `required` field is not valid because:", - SourceLine(_doc, "required", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [e], - detailed_message=f"the `required` field with value `{val}` " + detailed_message=f"the `format` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + loadContents = None + if "loadContents" in _doc: + try: + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadContents`": _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ValidationException( + str(e), + None + ) ) - extension_fields[ex] = _doc[k] else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + 
f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: + try: + loadListing = _load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `pattern`, `required`".format( - k - ), - SourceLine(_doc, k, str), + str(e), + None ) ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - pattern=pattern, - required=required, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.pattern is not None: - r["pattern"] = save( - self.pattern, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.required is not None: - r["required"] = save( - self.required, top=False, base_url=base_url, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["pattern", "required"]) - - -class 
LoadListingRequirement(ProcessRequirement): - """ - Specify the desired behavior for loading the `listing` field of - a Directory object for use by expressions. - - """ - - def __init__( - self, - loadListing: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "LoadListingRequirement" - self.loadListing = loadListing - - def __eq__(self, other: Any) -> bool: - if isinstance(other, LoadListingRequirement): - return bool( - self.class_ == other.class_ and self.loadListing == other.loadListing - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.loadListing)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "LoadListingRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_LoadListingRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - loadListing = None - if "loadListing" in _doc: + else: + val = _doc.get("loadListing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + 
[ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + detailed_message=f"the `loadListing` field with value `{val}` " + "is not valid because:", + ) + ) + inputBinding = None + if "inputBinding" in _doc: try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("loadListing") + lc=_doc.get("inputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadListing`": + if str(e) == "missing required field `inputBinding`": _errors__.append( ValidationException( str(e), @@ -8675,13 +8657,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadListing") + val = _doc.get("inputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8693,14 +8675,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [e], - detailed_message=f"the `loadListing` field with value `{val}` " + 
detailed_message=f"the `inputBinding` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -8708,14 +8690,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `loadListing`".format( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`, `inputBinding`".format( k ), SourceLine(_doc, k, str), @@ -8725,10 +8707,20 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( + doc=doc, + name=name, + type_=type_, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + loadContents=loadContents, loadListing=loadListing, + inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -8742,19 +8734,57 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, 
top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, self.name, True, None, relative_uris) + r["format"] = u + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) if self.loadListing is not None: r["loadListing"] = save( self.loadListing, top=False, - base_url=base_url, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.name, relative_uris=relative_uris, ) @@ -8766,23 +8796,37 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "loadListing"]) - + attrs: ClassVar[Collection[str]] = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "loadContents", + "loadListing", + "inputBinding", + ] + ) -class EnvironmentDef(Saveable): - """ - Define an environment variable that will be set in the runtime environment - by the workflow platform when executing the command line tool. May be the - result of executing an expression, such as getting a parameter from input. 
- """ +class CommandInputRecordSchema( + InputRecordSchema, CommandInputSchema, CommandLineBindable +): + name: str def __init__( self, - envName: Any, - envValue: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + type_: Any, + fields: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -8792,18 +8836,36 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.envName = envName - self.envValue = envValue + self.fields = fields + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, EnvironmentDef): + if isinstance(other, CommandInputRecordSchema): return bool( - self.envName == other.envName and self.envValue == other.envValue + self.fields == other.fields + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding ) return False def __hash__(self) -> int: - return hash((self.envName, self.envValue)) + return hash( + ( + self.fields, + self.type_, + self.label, + self.doc, + self.name, + self.inputBinding, + ) + ) @classmethod def fromDoc( @@ -8811,78 +8873,133 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "EnvironmentDef": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("envName") is None: - raise 
ValidationException("missing required field `envName`", None, []) - - envName = load_field( - _doc.get("envName"), - strtype, - baseuri, - loadingOptions, - lc=_doc.get("envName") - ) + name = None + if "name" in _doc: + try: + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("name") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `envName`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("envName") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `name`": _errors__.append( ValidationException( - "the `envName` field is not valid because:", - SourceLine(_doc, "envName", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("name") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), + [e], + detailed_message=f"the `name` field with value `{val}` " + "is not valid because:", + 
) + ) + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + fields = None + if "fields" in _doc: + try: + fields = _load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, + baseuri, + loadingOptions, + lc=_doc.get("fields") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `fields`": _errors__.append( ValidationException( - "the `envName` field is not valid because:", - SourceLine(_doc, "envName", str), - [e], - detailed_message=f"the `envName` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) + else: + val = _doc.get("fields") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + detailed_message=f"the `fields` field with value `{val}` " + "is not valid because:", + ) + ) try: - if _doc.get("envValue") is None: - raise ValidationException("missing required field `envValue`", None, []) + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - envValue = load_field( - _doc.get("envValue"), - union_of_strtype_or_ExpressionLoader, + type_ = _load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, baseuri, loadingOptions, - 
lc=_doc.get("envValue") + lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `envValue`": + if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), @@ -8890,13 +9007,13 @@ def fromDoc( ) ) else: - val = _doc.get("envValue") + val = _doc.get("type") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `envValue` field is not valid because:", - SourceLine(_doc, "envValue", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8908,44 +9025,190 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `envValue` field is not valid because:", - SourceLine(_doc, "envValue", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [e], - detailed_message=f"the `envValue` field with value `{val}` " + detailed_message=f"the `type` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ValidationException( + str(e), + None + ) ) - extension_fields[ex] = _doc[k] else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + 
_errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format( - k - ), - SourceLine(_doc, k, str), + str(e), + None ) ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + inputBinding = None + if "inputBinding" in _doc: + try: + inputBinding = _load_field( + _doc.get("inputBinding"), + 
union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - envName=envName, - envValue=envValue, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + fields=fields, + type_=type_, 
+ label=label, + doc=doc, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True @@ -8958,13 +9221,31 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.envName is not None: - r["envName"] = save( - self.envName, top=False, base_url=base_url, relative_uris=relative_uris + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.envValue is not None: - r["envValue"] = save( - self.envValue, top=False, base_url=base_url, relative_uris=relative_uris + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.name, + relative_uris=relative_uris, ) # top refers to the directory level @@ -8975,60 +9256,24 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["envName", "envValue"]) - - -class CommandLineBinding(InputBinding): - """ - - When listed under `inputBinding` in the input schema, the term - "value" refers to the the corresponding value in the input object. For - binding objects listed in `CommandLineTool.arguments`, the term "value" - refers to the effective value after evaluating `valueFrom`. 
- - The binding behavior when building the command line depends on the data - type of the value. If there is a mismatch between the type described by - the input schema and the effective value, such as resulting from an - expression evaluation, an implementation must use the data type of the - effective value. - - - **string**: Add `prefix` and the string to the command line. - - - **number**: Add `prefix` and decimal representation to command line. - - - **boolean**: If true, add `prefix` to the command line. If false, add - nothing. - - - **File**: Add `prefix` and the value of - [`File.path`](#File) to the command line. - - - **Directory**: Add `prefix` and the value of - [`Directory.path`](#Directory) to the command line. - - - **array**: If `itemSeparator` is specified, add `prefix` and the join - the array into a single string with `itemSeparator` separating the - items. Otherwise first add `prefix`, then recursively process - individual elements. - If the array is empty, it does not add anything to command line. - - - **object**: Add `prefix` only, and recursively add object fields for - which `inputBinding` is specified. + attrs: ClassVar[Collection[str]] = frozenset( + ["fields", "type", "label", "doc", "name", "inputBinding"] + ) - - **null**: Add nothing. 
- """ +class CommandInputEnumSchema(InputEnumSchema, CommandInputSchema, CommandLineBindable): + name: str def __init__( self, - loadContents: Optional[Any] = None, - position: Optional[Any] = None, - prefix: Optional[Any] = None, - separate: Optional[Any] = None, - itemSeparator: Optional[Any] = None, - valueFrom: Optional[Any] = None, - shellQuote: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + symbols: Any, + type_: Any, + name: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -9038,37 +9283,34 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.loadContents = loadContents - self.position = position - self.prefix = prefix - self.separate = separate - self.itemSeparator = itemSeparator - self.valueFrom = valueFrom - self.shellQuote = shellQuote + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols + self.type_ = type_ + self.label = label + self.doc = doc + self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandLineBinding): + if isinstance(other, CommandInputEnumSchema): return bool( - self.loadContents == other.loadContents - and self.position == other.position - and self.prefix == other.prefix - and self.separate == other.separate - and self.itemSeparator == other.itemSeparator - and self.valueFrom == other.valueFrom - and self.shellQuote == other.shellQuote + self.name == other.name + and self.symbols == other.symbols + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.inputBinding == other.inputBinding ) return False def __hash__(self) -> int: return 
hash( ( - self.loadContents, - self.position, - self.prefix, - self.separate, - self.itemSeparator, - self.valueFrom, - self.shellQuote, + self.name, + self.symbols, + self.type_, + self.label, + self.doc, + self.inputBinding, ) ) @@ -9078,29 +9320,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandLineBinding": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - loadContents = None - if "loadContents" in _doc: + name = None + if "name" in _doc: try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("loadContents") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadContents`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -9108,13 +9350,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadContents") + val = _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9126,169 +9368,133 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `loadContents` field with value `{val}` " + 
detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - position = None - if "position" in _doc: - try: - position = load_field( - _doc.get("position"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("position") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) - if str(e) == "missing required field `position`": + symbols = _load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("symbols") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `symbols`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("symbols") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("position") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `position` field is not valid because:", - SourceLine(_doc, "position", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this 
field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `position` field is not valid because:", - SourceLine(_doc, "position", str), - [e], - detailed_message=f"the `position` field with value `{val}` " - "is not valid because:", - ) - ) - prefix = None - if "prefix" in _doc: - try: - prefix = load_field( - _doc.get("prefix"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("prefix") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `prefix`": _errors__.append( ValidationException( - str(e), - None + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + detailed_message=f"the `symbols` field with value `{val}` " + "is not valid because:", ) ) - else: - val = _doc.get("prefix") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `prefix` field is not valid because:", - SourceLine(_doc, "prefix", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `prefix` field is not valid because:", - SourceLine(_doc, "prefix", str), - [e], - detailed_message=f"the `prefix` field with value `{val}` " - "is not valid because:", - ) - ) - separate = None - if "separate" in _doc: - try: - separate = load_field( - _doc.get("separate"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("separate") - ) + try: + if _doc.get("type") is None: + raise 
ValidationException("missing required field `type`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + type_ = _load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) - if str(e) == "missing required field `separate`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("separate") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `separate` field is not valid because:", - SourceLine(_doc, "separate", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `separate` field is not valid because:", - SourceLine(_doc, "separate", str), - [e], - detailed_message=f"the `separate` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with 
value `{val}` " + "is not valid because:", ) - itemSeparator = None - if "itemSeparator" in _doc: + ) + label = None + if "label" in _doc: try: - itemSeparator = load_field( - _doc.get("itemSeparator"), + label = _load_field( + _doc.get("label"), union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("itemSeparator") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `itemSeparator`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -9296,13 +9502,13 @@ def fromDoc( ) ) else: - val = _doc.get("itemSeparator") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `itemSeparator` field is not valid because:", - SourceLine(_doc, "itemSeparator", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9314,28 +9520,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `itemSeparator` field is not valid because:", - SourceLine(_doc, "itemSeparator", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `itemSeparator` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - valueFrom = None - if "valueFrom" in _doc: + doc = None + if "doc" in _doc: try: - valueFrom = load_field( - _doc.get("valueFrom"), - union_of_None_type_or_strtype_or_ExpressionLoader, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("valueFrom") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if 
str(e) == "missing required field `valueFrom`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -9343,13 +9549,13 @@ def fromDoc( ) ) else: - val = _doc.get("valueFrom") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", - SourceLine(_doc, "valueFrom", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9361,28 +9567,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", - SourceLine(_doc, "valueFrom", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `valueFrom` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - shellQuote = None - if "shellQuote" in _doc: + inputBinding = None + if "inputBinding" in _doc: try: - shellQuote = load_field( - _doc.get("shellQuote"), - union_of_None_type_or_booltype, + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("shellQuote") + lc=_doc.get("inputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `shellQuote`": + if str(e) == "missing required field `inputBinding`": _errors__.append( ValidationException( str(e), @@ -9390,13 +9596,13 @@ def fromDoc( ) ) else: - val = _doc.get("shellQuote") + val = _doc.get("inputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `shellQuote` field is not valid because:", - SourceLine(_doc, "shellQuote", str), + 
"the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9408,14 +9614,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `shellQuote` field is not valid because:", - SourceLine(_doc, "shellQuote", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [e], - detailed_message=f"the `shellQuote` field with value `{val}` " + detailed_message=f"the `inputBinding` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -9423,14 +9629,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`, `inputBinding`".format( k ), SourceLine(_doc, k, str), @@ -9440,16 +9646,16 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - loadContents=loadContents, - position=position, - prefix=prefix, - separate=separate, - itemSeparator=itemSeparator, - valueFrom=valueFrom, - shellQuote=shellQuote, + name=name, + symbols=symbols, + type_=type_, + label=label, + doc=doc, + inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -9463,44 +9669,29 @@ def save( else: for ef in self.extension_fields: r[ef] = 
self.extension_fields[ef] - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.position is not None: - r["position"] = save( - self.position, top=False, base_url=base_url, relative_uris=relative_uris + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.prefix is not None: - r["prefix"] = save( - self.prefix, top=False, base_url=base_url, relative_uris=relative_uris + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.separate is not None: - r["separate"] = save( - self.separate, top=False, base_url=base_url, relative_uris=relative_uris + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.itemSeparator is not None: - r["itemSeparator"] = save( - self.itemSeparator, + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.valueFrom is not None: - r["valueFrom"] = save( - self.valueFrom, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.shellQuote is not None: - r["shellQuote"] = save( - self.shellQuote, - top=False, - base_url=base_url, + base_url=self.name, relative_uris=relative_uris, ) @@ -9512,42 +9703,26 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "loadContents", - "position", - "prefix", - "separate", - "itemSeparator", - "valueFrom", - "shellQuote", - ] + attrs: ClassVar[Collection[str]] = frozenset( 
+ ["name", "symbols", "type", "label", "doc", "inputBinding"] ) -class CommandOutputBinding(LoadContents): - """ - Describes how to generate an output parameter based on the files produced - by a CommandLineTool. - - The output parameter value is generated by applying these operations in the - following order: - - - glob - - loadContents - - outputEval - - secondaryFiles - - """ +class CommandInputArraySchema( + InputArraySchema, CommandInputSchema, CommandLineBindable +): + name: str def __init__( self, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - glob: Optional[Any] = None, - outputEval: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + items: Any, + type_: Any, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -9557,23 +9732,29 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.loadContents = loadContents - self.loadListing = loadListing - self.glob = glob - self.outputEval = outputEval + self.items = items + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputBinding): + if isinstance(other, CommandInputArraySchema): return bool( - self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.glob == other.glob - and self.outputEval == other.outputEval + self.items == other.items + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == 
other.inputBinding ) return False def __hash__(self) -> int: - return hash((self.loadContents, self.loadListing, self.glob, self.outputEval)) + return hash( + (self.items, self.type_, self.label, self.doc, self.name, self.inputBinding) + ) @classmethod def fromDoc( @@ -9581,29 +9762,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputBinding": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - loadContents = None - if "loadContents" in _doc: + name = None + if "name" in _doc: try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("loadContents") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadContents`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -9611,13 +9792,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadContents") + val = _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9629,28 +9810,133 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the 
`loadContents` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - loadListing = None - if "loadListing" in _doc: + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = _load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = 
_load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("loadListing") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadListing`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -9658,13 +9944,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadListing") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `label` field is not valid because:", + SourceLine(_doc, 
"label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9676,28 +9962,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `loadListing` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - glob = None - if "glob" in _doc: + doc = None + if "doc" in _doc: try: - glob = load_field( - _doc.get("glob"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("glob") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `glob`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -9705,13 +9991,13 @@ def fromDoc( ) ) else: - val = _doc.get("glob") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `glob` field is not valid because:", - SourceLine(_doc, "glob", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9723,28 +10009,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `glob` field is not valid because:", - SourceLine(_doc, "glob", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `glob` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid 
because:", ) ) - outputEval = None - if "outputEval" in _doc: + inputBinding = None + if "inputBinding" in _doc: try: - outputEval = load_field( - _doc.get("outputEval"), - union_of_None_type_or_ExpressionLoader, + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("outputEval") + lc=_doc.get("inputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputEval`": + if str(e) == "missing required field `inputBinding`": _errors__.append( ValidationException( str(e), @@ -9752,13 +10038,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputEval") + val = _doc.get("inputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `outputEval` field is not valid because:", - SourceLine(_doc, "outputEval", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9770,14 +10056,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `outputEval` field is not valid because:", - SourceLine(_doc, "outputEval", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [e], - detailed_message=f"the `outputEval` field with value `{val}` " + detailed_message=f"the `inputBinding` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -9785,14 +10071,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = 
_doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `loadContents`, `loadListing`, `glob`, `outputEval`".format( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`, `inputBinding`".format( k ), SourceLine(_doc, k, str), @@ -9802,13 +10088,16 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - loadContents=loadContents, - loadListing=loadListing, - glob=glob, - outputEval=outputEval, + items=items, + type_=type_, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -9822,29 +10111,29 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.items is not None: + u = save_relative_uri(self.items, self.name, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.glob is not None: - r["glob"] = save( - self.glob, top=False, base_url=base_url, relative_uris=relative_uris + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.outputEval is not None: - r["outputEval"] = save( - self.outputEval, + if 
self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, top=False, - base_url=base_url, + base_url=self.name, relative_uris=relative_uris, ) @@ -9856,30 +10145,26 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["loadContents", "loadListing", "glob", "outputEval"]) - - -class CommandLineBindable(Saveable): - pass + attrs: ClassVar[Collection[str]] = frozenset( + ["items", "type", "label", "doc", "name", "inputBinding"] + ) -class CommandInputRecordField(InputRecordField, CommandLineBindable): +class CommandOutputRecordField(OutputRecordField): name: str def __init__( self, name: Any, type_: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + doc: Any | None = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + format: Any | None = None, + outputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -9896,12 +10181,10 @@ def __init__( self.secondaryFiles = secondaryFiles self.streamable = streamable self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - self.inputBinding = inputBinding + self.outputBinding = outputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputRecordField): + if isinstance(other, CommandOutputRecordField): return bool( self.doc == other.doc and self.name == other.name @@ -9910,9 +10193,7 @@ def __eq__(self, other: Any) -> bool: and self.secondaryFiles == other.secondaryFiles and 
self.streamable == other.streamable and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.inputBinding == other.inputBinding + and self.outputBinding == other.outputBinding ) return False @@ -9926,9 +10207,7 @@ def __hash__(self) -> int: self.secondaryFiles, self.streamable, self.format, - self.loadContents, - self.loadListing, - self.inputBinding, + self.outputBinding, ) ) @@ -9938,8 +10217,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -9949,7 +10228,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, @@ -10005,7 +10284,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -10053,9 +10332,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -10100,7 +10379,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + 
label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -10147,7 +10426,7 @@ def fromDoc( secondaryFiles = None if "secondaryFiles" in _doc: try: - secondaryFiles = load_field( + secondaryFiles = _load_field( _doc.get("secondaryFiles"), secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, @@ -10194,7 +10473,7 @@ def fromDoc( streamable = None if "streamable" in _doc: try: - streamable = load_field( + streamable = _load_field( _doc.get("streamable"), union_of_None_type_or_booltype, baseuri, @@ -10241,9 +10520,9 @@ def fromDoc( format = None if "format" in _doc: try: - format = load_field( + format = _load_field( _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, lc=_doc.get("format") @@ -10285,115 +10564,21 @@ def fromDoc( "is not valid because:", ) ) - loadContents = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("loadContents") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `loadContents`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("loadContents") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - 
_errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - detailed_message=f"the `loadContents` field with value `{val}` " - "is not valid because:", - ) - ) - loadListing = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - lc=_doc.get("loadListing") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `loadListing`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("loadListing") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - detailed_message=f"the `loadListing` field with value `{val}` " - "is not valid because:", - ) - ) - inputBinding = None - if "inputBinding" in _doc: + outputBinding = None + if "outputBinding" in _doc: try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, + outputBinding = _load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions, - lc=_doc.get("inputBinding") + lc=_doc.get("outputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == 
"missing required field `inputBinding`": + if str(e) == "missing required field `outputBinding`": _errors__.append( ValidationException( str(e), @@ -10401,13 +10586,13 @@ def fromDoc( ) ) else: - val = _doc.get("inputBinding") + val = _doc.get("outputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -10419,14 +10604,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [e], - detailed_message=f"the `inputBinding` field with value `{val}` " + detailed_message=f"the `outputBinding` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -10434,14 +10619,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`, `inputBinding`".format( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `outputBinding`".format( k ), SourceLine(_doc, k, str), @@ -10458,9 +10643,7 @@ def fromDoc( secondaryFiles=secondaryFiles, streamable=streamable, 
format=format, - loadContents=loadContents, - loadListing=loadListing, - inputBinding=inputBinding, + outputBinding=outputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -10510,23 +10693,9 @@ def save( if self.format is not None: u = save_relative_uri(self.format, self.name, True, None, relative_uris) r["format"] = u - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) - if self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, top=False, base_url=self.name, relative_uris=relative_uris, @@ -10540,7 +10709,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ "doc", "name", @@ -10549,28 +10718,23 @@ def save( "secondaryFiles", "streamable", "format", - "loadContents", - "loadListing", - "inputBinding", + "outputBinding", ] ) -class CommandInputRecordSchema( - InputRecordSchema, CommandInputSchema, CommandLineBindable -): +class CommandOutputRecordSchema(OutputRecordSchema): name: str def __init__( self, type_: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + fields: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -10585,31 +10749,20 @@ def 
__init__( self.label = label self.doc = doc self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputRecordSchema): + if isinstance(other, CommandOutputRecordSchema): return bool( self.fields == other.fields and self.type_ == other.type_ and self.label == other.label and self.doc == other.doc and self.name == other.name - and self.inputBinding == other.inputBinding ) return False def __hash__(self) -> int: - return hash( - ( - self.fields, - self.type_, - self.label, - self.doc, - self.name, - self.inputBinding, - ) - ) + return hash((self.fields, self.type_, self.label, self.doc, self.name)) @classmethod def fromDoc( @@ -10617,8 +10770,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputRecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -10628,7 +10781,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -10684,9 +10837,9 @@ def fromDoc( fields = None if "fields" in _doc: try: - fields = load_field( + fields = _load_field( _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, + idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, baseuri, loadingOptions, lc=_doc.get("fields") @@ -10732,7 +10885,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Record_nameLoader_2, baseuri, @@ -10779,7 +10932,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -10826,7 +10979,7 @@ def fromDoc( doc = 
None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -10870,54 +11023,7 @@ def fromDoc( "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputBinding") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - detailed_message=f"the `inputBinding` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -10925,14 +11031,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + "invalid 
field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( k ), SourceLine(_doc, k, str), @@ -10947,7 +11053,6 @@ def fromDoc( label=label, doc=doc, name=name, - inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -10984,13 +11089,6 @@ def save( r["doc"] = save( self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) # top refers to the directory level if top: @@ -11000,22 +11098,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type", "label", "doc", "name", "inputBinding"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["fields", "type", "label", "doc", "name"] + ) -class CommandInputEnumSchema(InputEnumSchema, CommandInputSchema, CommandLineBindable): +class CommandOutputEnumSchema(OutputEnumSchema): name: str def __init__( self, symbols: Any, type_: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + name: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -11030,31 +11129,20 @@ def __init__( self.type_ = type_ self.label = label self.doc = doc - self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputEnumSchema): + if isinstance(other, CommandOutputEnumSchema): return bool( self.name == other.name and self.symbols == other.symbols and self.type_ == other.type_ and self.label == other.label and self.doc == other.doc - and self.inputBinding 
== other.inputBinding ) return False def __hash__(self) -> int: - return hash( - ( - self.name, - self.symbols, - self.type_, - self.label, - self.doc, - self.inputBinding, - ) - ) + return hash((self.name, self.symbols, self.type_, self.label, self.doc)) @classmethod def fromDoc( @@ -11062,8 +11150,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputEnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -11073,7 +11161,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -11130,7 +11218,7 @@ def fromDoc( if _doc.get("symbols") is None: raise ValidationException("missing required field `symbols`", None, []) - symbols = load_field( + symbols = _load_field( _doc.get("symbols"), uri_array_of_strtype_True_False_None_None, baseuri, @@ -11178,7 +11266,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Enum_nameLoader_2, baseuri, @@ -11225,7 +11313,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -11272,7 +11360,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -11316,54 +11404,7 @@ def fromDoc( "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputBinding") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) 
== "missing required field `inputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - detailed_message=f"the `inputBinding` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -11371,14 +11412,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`, `inputBinding`".format( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( k ), SourceLine(_doc, k, str), @@ -11393,7 +11434,6 @@ def fromDoc( type_=type_, label=label, doc=doc, - inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -11429,13 +11469,6 @@ def save( r["doc"] = save( self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=self.name, - relative_uris=relative_uris, - 
) # top refers to the directory level if top: @@ -11445,24 +11478,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type", "label", "doc", "inputBinding"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["name", "symbols", "type", "label", "doc"] + ) -class CommandInputArraySchema( - InputArraySchema, CommandInputSchema, CommandLineBindable -): +class CommandOutputArraySchema(OutputArraySchema): name: str def __init__( self, items: Any, type_: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -11477,24 +11509,20 @@ def __init__( self.label = label self.doc = doc self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputArraySchema): + if isinstance(other, CommandOutputArraySchema): return bool( self.items == other.items and self.type_ == other.type_ and self.label == other.label and self.doc == other.doc and self.name == other.name - and self.inputBinding == other.inputBinding ) return False def __hash__(self) -> int: - return hash( - (self.items, self.type_, self.label, self.doc, self.name, self.inputBinding) - ) + return hash((self.items, self.type_, self.label, self.doc, self.name)) @classmethod def fromDoc( @@ -11502,8 +11530,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputArraySchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if 
hasattr(doc, "lc"): @@ -11513,7 +11541,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -11570,9 +11598,9 @@ def fromDoc( if _doc.get("items") is None: raise ValidationException("missing required field `items`", None, []) - items = load_field( + items = _load_field( _doc.get("items"), - uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None, + uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None, baseuri, loadingOptions, lc=_doc.get("items") @@ -11618,7 +11646,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Array_nameLoader_2, baseuri, @@ -11665,7 +11693,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -11712,7 +11740,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -11756,54 +11784,7 @@ def fromDoc( "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputBinding") - ) - - except 
ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - detailed_message=f"the `inputBinding` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -11811,14 +11792,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( k ), SourceLine(_doc, k, str), @@ -11833,7 +11814,6 @@ def fromDoc( label=label, doc=doc, name=name, - inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -11869,13 +11849,6 @@ def save( r["doc"] = save( self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.inputBinding is not None: - r["inputBinding"] 
= save( - self.inputBinding, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) # top refers to the directory level if top: @@ -11885,24 +11858,34 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["items", "type", "label", "doc", "name", "inputBinding"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["items", "type", "label", "doc", "name"] + ) -class CommandOutputRecordField(OutputRecordField): - name: str +class CommandInputParameter(InputParameter): + """ + An input parameter for a CommandLineTool. + + """ + + id: str def __init__( self, - name: Any, + id: Any, type_: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + default: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -11912,40 +11895,49 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.type_ = type_ self.label = label self.secondaryFiles = secondaryFiles self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) self.format = format - self.outputBinding = outputBinding + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type_ = type_ + 
self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputRecordField): + if isinstance(other, CommandInputParameter): return bool( - self.doc == other.doc - and self.name == other.name - and self.type_ == other.type_ - and self.label == other.label + self.label == other.label and self.secondaryFiles == other.secondaryFiles and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id and self.format == other.format - and self.outputBinding == other.outputBinding + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type_ == other.type_ + and self.inputBinding == other.inputBinding ) return False def __hash__(self) -> int: return hash( ( - self.doc, - self.name, - self.type_, self.label, self.secondaryFiles, self.streamable, + self.doc, + self.id, self.format, - self.outputBinding, + self.loadContents, + self.loadListing, + self.default, + self.type_, + self.inputBinding, ) ) @@ -11955,29 +11947,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + id = None + if "id" in _doc: try: - name = load_field( - _doc.get("name"), + id = _load_field( + _doc.get("id"), uri_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("id") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `id`": _errors__.append( ValidationException( str(e), @@ -11985,69 +11977,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("id") if error_message != str(e): 
val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [e], - detailed_message=f"the `name` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - _errors__.append(ValidationException("missing name")) - if not __original_name_is_none: - baseuri = cast(str, name) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12059,65 +11995,26 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [e], - detailed_message=f"the `doc` 
field with value `{val}` " + detailed_message=f"the `id` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -12164,7 +12061,7 @@ def fromDoc( secondaryFiles = None if "secondaryFiles" in 
_doc: try: - secondaryFiles = load_field( + secondaryFiles = _load_field( _doc.get("secondaryFiles"), secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, @@ -12211,7 +12108,7 @@ def fromDoc( streamable = None if "streamable" in _doc: try: - streamable = load_field( + streamable = _load_field( _doc.get("streamable"), union_of_None_type_or_booltype, baseuri, @@ -12255,12 +12152,59 @@ def fromDoc( "is not valid because:", ) ) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) format = None if "format" in _doc: try: - format = load_field( + format = _load_field( _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, lc=_doc.get("format") @@ -12302,21 +12246,21 @@ def fromDoc( "is 
not valid because:", ) ) - outputBinding = None - if "outputBinding" in _doc: + loadContents = None + if "loadContents" in _doc: try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("outputBinding") + lc=_doc.get("loadContents") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputBinding`": + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( str(e), @@ -12324,13 +12268,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputBinding") + val = _doc.get("loadContents") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12342,251 +12286,75 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [e], - detailed_message=f"the `outputBinding` field with value `{val}` " + detailed_message=f"the `loadContents` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: + 
loadListing = None + if "loadListing" in _doc: + try: + loadListing = _load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), + str(e), + None ) ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - doc=doc, - name=name, - type_=type_, - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.name, - 
relative_uris=relative_uris, - ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) - if self.format is not None: - u = save_relative_uri(self.format, self.name, True, None, relative_uris) - r["format"] = u - if self.outputBinding is not None: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "doc", - "name", - "type", - "label", - "secondaryFiles", - "streamable", - "format", - "outputBinding", - ] - ) - - -class CommandOutputRecordSchema(OutputRecordSchema): - name: str - - def __init__( - self, - type_: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type_ = type_ - self.label = label - self.doc = doc - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputRecordSchema): - return bool( - self.fields == other.fields - and self.type_ == other.type_ - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type_, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - 
baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputRecordSchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - name = None - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("name") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `name`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("name") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [e], - detailed_message=f"the `name` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - fields = None - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, - baseuri, - loadingOptions, - lc=_doc.get("fields") - ) + else: + val = _doc.get("loadListing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + 
ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + detailed_message=f"the `loadListing` field with value `{val}` " + "is not valid because:", + ) + ) + default = None + if "default" in _doc: + try: + default = _load_field( + _doc.get("default"), + union_of_None_type_or_CWLObjectTypeLoader, + baseuri, + loadingOptions, + lc=_doc.get("default") + ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `fields`": + if str(e) == "missing required field `default`": _errors__.append( ValidationException( str(e), @@ -12594,13 +12362,13 @@ def fromDoc( ) ) else: - val = _doc.get("fields") + val = _doc.get("default") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12612,10 +12380,10 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), [e], - detailed_message=f"the `fields` field with value `{val}` " + detailed_message=f"the `default` field with value `{val}` " "is not valid because:", ) ) @@ 
-12623,9 +12391,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_Record_nameLoader_2, + typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -12667,21 +12435,21 @@ def fromDoc( "is not valid because:", ) ) - label = None - if "label" in _doc: + inputBinding = None + if "inputBinding" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("inputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `inputBinding`": _errors__.append( ValidationException( str(e), @@ -12689,13 +12457,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("inputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12707,76 +12475,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `inputBinding` field is not valid because:", + 
SourceLine(_doc, "inputBinding", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `inputBinding` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, 
`format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( k ), SourceLine(_doc, k, str), @@ -12786,15 +12507,21 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - fields=fields, - type_=type_, label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, doc=doc, - name=name, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type_=type_, + inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -12808,24 +12535,62 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.fields is not None: - r["fields"] = save( - self.fields, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u if self.label is not None: r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, ) if self.doc is not None: r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + self.doc, top=False, base_url=self.id, 
relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.default is not None: + r["default"] = save( + self.default, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.id, + relative_uris=relative_uris, ) # top refers to the directory level @@ -12836,21 +12601,43 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type", "label", "doc", "name"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + "inputBinding", + ] + ) -class CommandOutputEnumSchema(OutputEnumSchema): - name: str +class CommandOutputParameter(OutputParameter): + """ + An output parameter for a CommandLineTool. 
+ + """ + + id: str def __init__( self, - symbols: Any, + id: Any, type_: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + outputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -12860,25 +12647,42 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.symbols = symbols - self.type_ = type_ self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.type_ = type_ + self.outputBinding = outputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputEnumSchema): + if isinstance(other, CommandOutputParameter): return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type_ == other.type_ - and self.label == other.label + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type_ == other.type_ + and self.outputBinding == other.outputBinding ) return False def __hash__(self) -> int: - return hash((self.name, self.symbols, self.type_, self.label, self.doc)) + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type_, + self.outputBinding, + ) + ) @classmethod def fromDoc( @@ 
-12886,29 +12690,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputEnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + id = None + if "id" in _doc: try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("id") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `id`": _errors__.append( ValidationException( str(e), @@ -12916,13 +12720,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("id") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12934,133 +12738,131 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `id` field with value `{val}` " "is not valid because:", ) ) - __original_name_is_none = name is None - if name is None: + __original_id_is_none = id is None + if id is None: if docRoot is not None: - name = docRoot + id = docRoot else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - 
baseuri = cast(str, name) - try: - if _doc.get("symbols") is None: - raise ValidationException("missing required field `symbols`", None, []) - - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("symbols") - ) + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `symbols`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("symbols") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `label`": _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - detailed_message=f"the `symbols` field with value `{val}` " - "is not valid because:", + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid 
{to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - 
detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - label = None - if "label" in _doc: + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("streamable") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( str(e), @@ -13068,13 +12870,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("streamable") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13086,17 +12888,17 @@ def 
fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `streamable` field with value `{val}` " "is not valid because:", ) ) doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -13140,46 +12942,191 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + format = None + if "format" in _doc: + try: + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": _errors__.append( - ValidationException("mapping with implicit null key") + ValidationException( + str(e), + None + ) ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with 
value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( - k - ), - SourceLine(_doc, k, str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", ) ) + outputBinding = None + if "outputBinding" in _doc: + try: + outputBinding = _load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputBinding") + ) - if _errors__: - raise ValidationException("", None, _errors__, "*") 
- _constructed = cls( - name=name, - symbols=symbols, - type_=type_, - label=label, - doc=doc, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + detailed_message=f"the `outputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise 
ValidationException("", None, _errors__, "*") + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type_=type_, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: @@ -13187,23 +13134,44 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) - r["symbols"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u if self.label is not None: r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, ) if self.doc is not None: r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.type_ is not None: + 
r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=self.id, + relative_uris=relative_uris, ) # top refers to the directory level @@ -13214,21 +13182,48 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "type", + "outputBinding", + ] + ) + +class CommandLineTool(Process): + """ + This defines the schema of the CWL Command Line Tool Description document. -class CommandOutputArraySchema(OutputArraySchema): - name: str + """ + + id: str def __init__( self, - items: Any, - type_: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + inputs: Any, + outputs: Any, + id: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + requirements: Any | None = None, + hints: Any | None = None, + cwlVersion: Any | None = None, + baseCommand: Any | None = None, + arguments: Any | None = None, + stdin: Any | None = None, + stderr: Any | None = None, + stdout: Any | None = None, + successCodes: Any | None = None, + temporaryFailCodes: Any | None = None, + permanentFailCodes: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -13238,25 +13233,69 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) self.label = label self.doc = doc - self.name = name if name is not 
None else "_:" + str(_uuid__.uuid4()) + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.class_: Final[str] = "CommandLineTool" + self.baseCommand = baseCommand + self.arguments = arguments + self.stdin = stdin + self.stderr = stderr + self.stdout = stdout + self.successCodes = successCodes + self.temporaryFailCodes = temporaryFailCodes + self.permanentFailCodes = permanentFailCodes def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputArraySchema): + if isinstance(other, CommandLineTool): return bool( - self.items == other.items - and self.type_ == other.type_ + self.id == other.id and self.label == other.label and self.doc == other.doc - and self.name == other.name + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.class_ == other.class_ + and self.baseCommand == other.baseCommand + and self.arguments == other.arguments + and self.stdin == other.stdin + and self.stderr == other.stderr + and self.stdout == other.stdout + and self.successCodes == other.successCodes + and self.temporaryFailCodes == other.temporaryFailCodes + and self.permanentFailCodes == other.permanentFailCodes ) return False def __hash__(self) -> int: - return hash((self.items, self.type_, self.label, self.doc, self.name)) + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.class_, + self.baseCommand, + self.arguments, + self.stdin, + self.stderr, + self.stdout, + self.successCodes, + self.temporaryFailCodes, + self.permanentFailCodes, + ) + ) @classmethod def fromDoc( @@ -13264,29 +13303,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputArraySchema": + docRoot: str | 
None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + id = None + if "id" in _doc: try: - name = load_field( - _doc.get("name"), + id = _load_field( + _doc.get("id"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("id") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `id`": _errors__.append( ValidationException( str(e), @@ -13294,13 +13333,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("id") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13312,52 +13351,163 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `id` field with value `{val}` " "is not valid because:", ) ) - __original_name_is_none = name is None - if name is None: + __original_id_is_none = id is None + if id is None: if docRoot is not None: - name = docRoot + id = docRoot else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", 
None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - items = load_field( - _doc.get("items"), - uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None, + class_ = _load_field( + _doc.get("class"), + uri_CommandLineTool_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("items") + lc=_doc.get("class") ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) + raise e + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` 
field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("inputs") is None: + raise ValidationException("missing required field `inputs`", None, []) + + inputs = _load_field( + _doc.get("inputs"), + idmap_inputs_array_of_CommandInputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) else: - val = _doc.get("items") + val = _doc.get("inputs") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `items` field is not 
valid because:", - SourceLine(_doc, "items", str), + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13369,29 +13519,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), [e], - detailed_message=f"the `items` field with value `{val}` " + detailed_message=f"the `inputs` field with value `{val}` " "is not valid because:", ) ) try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("outputs") is None: + raise ValidationException("missing required field `outputs`", None, []) - type_ = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, + outputs = _load_field( + _doc.get("outputs"), + idmap_outputs_array_of_CommandOutputParameterLoader, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("outputs") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `outputs`": _errors__.append( ValidationException( str(e), @@ -13399,13 +13549,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("outputs") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13417,28 +13567,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - 
SourceLine(_doc, "type", str), + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `outputs` field with value `{val}` " "is not valid because:", ) ) - label = None - if "label" in _doc: + requirements = None + if "requirements" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + requirements = _load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("requirements") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `requirements`": _errors__.append( ValidationException( str(e), @@ -13446,13 +13596,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("requirements") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13464,28 +13614,28 @@ def fromDoc( else: _errors__.append( 
ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `requirements` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: + hints = None + if "hints" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + hints = _load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("hints") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `hints`": _errors__.append( ValidationException( str(e), @@ -13493,13 +13643,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("hints") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13511,210 +13661,42 @@ def 
fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `hints` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: + cwlVersion = None + if "cwlVersion" in _doc: + try: + cwlVersion = _load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("cwlVersion") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `cwlVersion`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - items=items, - type_=type_, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, 
True, None, relative_uris) - r["name"] = u - if self.items is not None: - u = save_relative_uri(self.items, self.name, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "doc", "name"]) - - -class CommandInputParameter(InputParameter): - """ - An input parameter for a CommandLineTool. - """ - - id: str - - def __init__( - self, - id: Any, - type_: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - default: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - self.default = default - self.type_ = type_ - self.inputBinding = inputBinding 
- - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.default == other.default - and self.type_ == other.type_ - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.loadContents, - self.loadListing, - self.default, - self.type_, - self.inputBinding, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputParameter": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None + str(e), + None ) ) else: - val = _doc.get("id") + val = _doc.get("cwlVersion") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13726,37 +13708,28 @@ def fromDoc( else: _errors__.append( 
ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), [e], - detailed_message=f"the `id` field with value `{val}` " + detailed_message=f"the `cwlVersion` field with value `{val}` " "is not valid because:", ) ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - label = None - if "label" in _doc: + baseCommand = None + if "baseCommand" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + baseCommand = _load_field( + _doc.get("baseCommand"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("baseCommand") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `baseCommand`": _errors__.append( ValidationException( str(e), @@ -13764,13 +13737,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("baseCommand") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `baseCommand` field is not valid because:", + SourceLine(_doc, "baseCommand", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13782,28 +13755,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `baseCommand` field is not valid because:", + SourceLine(_doc, "baseCommand", str), [e], - detailed_message=f"the `label` field with value `{val}` " + 
detailed_message=f"the `baseCommand` field with value `{val}` " "is not valid because:", ) ) - secondaryFiles = None - if "secondaryFiles" in _doc: + arguments = None + if "arguments" in _doc: try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + arguments = _load_field( + _doc.get("arguments"), + union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("secondaryFiles") + lc=_doc.get("arguments") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `secondaryFiles`": + if str(e) == "missing required field `arguments`": _errors__.append( ValidationException( str(e), @@ -13811,13 +13784,13 @@ def fromDoc( ) ) else: - val = _doc.get("secondaryFiles") + val = _doc.get("arguments") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `arguments` field is not valid because:", + SourceLine(_doc, "arguments", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13829,28 +13802,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `arguments` field is not valid because:", + SourceLine(_doc, "arguments", str), [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " + detailed_message=f"the `arguments` field with value `{val}` " "is not valid because:", ) ) - streamable = None - if "streamable" in _doc: + stdin = None + if "stdin" in _doc: try: - streamable = load_field( - _doc.get("streamable"), - 
union_of_None_type_or_booltype, + stdin = _load_field( + _doc.get("stdin"), + union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("streamable") + lc=_doc.get("stdin") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `streamable`": + if str(e) == "missing required field `stdin`": _errors__.append( ValidationException( str(e), @@ -13858,13 +13831,13 @@ def fromDoc( ) ) else: - val = _doc.get("streamable") + val = _doc.get("stdin") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `stdin` field is not valid because:", + SourceLine(_doc, "stdin", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13876,28 +13849,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `stdin` field is not valid because:", + SourceLine(_doc, "stdin", str), [e], - detailed_message=f"the `streamable` field with value `{val}` " + detailed_message=f"the `stdin` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: + stderr = None + if "stderr" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + stderr = _load_field( + _doc.get("stderr"), + union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("stderr") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `stderr`": _errors__.append( ValidationException( str(e), @@ -13905,13 +13878,13 @@ def fromDoc( ) ) 
else: - val = _doc.get("doc") + val = _doc.get("stderr") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `stderr` field is not valid because:", + SourceLine(_doc, "stderr", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13923,28 +13896,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `stderr` field is not valid because:", + SourceLine(_doc, "stderr", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `stderr` field with value `{val}` " "is not valid because:", ) ) - format = None - if "format" in _doc: + stdout = None + if "stdout" in _doc: try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + stdout = _load_field( + _doc.get("stdout"), + union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("format") + lc=_doc.get("stdout") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `format`": + if str(e) == "missing required field `stdout`": _errors__.append( ValidationException( str(e), @@ -13952,60 +13925,13 @@ def fromDoc( ) ) else: - val = _doc.get("format") + val = _doc.get("stdout") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} 
for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [e], - detailed_message=f"the `format` field with value `{val}` " - "is not valid because:", - ) - ) - loadContents = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("loadContents") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `loadContents`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("loadContents") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `stdout` field is not valid because:", + SourceLine(_doc, "stdout", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14017,28 +13943,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `stdout` field is not valid because:", + SourceLine(_doc, "stdout", str), [e], - detailed_message=f"the `loadContents` field with value `{val}` " + detailed_message=f"the `stdout` field with value `{val}` " "is not valid because:", ) ) - loadListing = None - if "loadListing" in _doc: + successCodes = None + if "successCodes" in _doc: try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, + successCodes = _load_field( + _doc.get("successCodes"), + union_of_None_type_or_array_of_inttype, baseuri, loadingOptions, - lc=_doc.get("loadListing") + 
lc=_doc.get("successCodes") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadListing`": + if str(e) == "missing required field `successCodes`": _errors__.append( ValidationException( str(e), @@ -14046,13 +13972,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadListing") + val = _doc.get("successCodes") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `successCodes` field is not valid because:", + SourceLine(_doc, "successCodes", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14064,28 +13990,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `successCodes` field is not valid because:", + SourceLine(_doc, "successCodes", str), [e], - detailed_message=f"the `loadListing` field with value `{val}` " + detailed_message=f"the `successCodes` field with value `{val}` " "is not valid because:", ) ) - default = None - if "default" in _doc: + temporaryFailCodes = None + if "temporaryFailCodes" in _doc: try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_CWLObjectTypeLoader, + temporaryFailCodes = _load_field( + _doc.get("temporaryFailCodes"), + union_of_None_type_or_array_of_inttype, baseuri, loadingOptions, - lc=_doc.get("default") + lc=_doc.get("temporaryFailCodes") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `default`": + if str(e) == "missing required field `temporaryFailCodes`": _errors__.append( ValidationException( str(e), @@ -14093,13 +14019,13 @@ def fromDoc( ) ) else: - val = _doc.get("default") + val = 
_doc.get("temporaryFailCodes") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), + "the `temporaryFailCodes` field is not valid because:", + SourceLine(_doc, "temporaryFailCodes", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14111,76 +14037,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), + "the `temporaryFailCodes` field is not valid because:", + SourceLine(_doc, "temporaryFailCodes", str), [e], - detailed_message=f"the `default` field with value `{val}` " + detailed_message=f"the `temporaryFailCodes` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - 
detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - inputBinding = None - if "inputBinding" in _doc: + permanentFailCodes = None + if "permanentFailCodes" in _doc: try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, + permanentFailCodes = _load_field( + _doc.get("permanentFailCodes"), + union_of_None_type_or_array_of_inttype, baseuri, loadingOptions, - lc=_doc.get("inputBinding") + lc=_doc.get("permanentFailCodes") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `inputBinding`": + if str(e) == "missing required field `permanentFailCodes`": _errors__.append( ValidationException( str(e), @@ -14188,13 +14066,13 @@ def fromDoc( ) ) else: - val = _doc.get("inputBinding") + val = _doc.get("permanentFailCodes") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `permanentFailCodes` field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14206,14 +14084,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `permanentFailCodes` field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), [e], - detailed_message=f"the `inputBinding` field with value `{val}` " + detailed_message=f"the 
`permanentFailCodes` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -14221,14 +14099,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( k ), SourceLine(_doc, k, str), @@ -14238,17 +14116,22 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( + id=id, label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, doc=doc, - id=id, - format=format, - loadContents=loadContents, - loadListing=loadListing, - default=default, - type_=type_, - inputBinding=inputBinding, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + baseCommand=baseCommand, + arguments=arguments, + stdin=stdin, + stderr=stderr, + stdout=stdout, + successCodes=successCodes, + temporaryFailCodes=temporaryFailCodes, + permanentFailCodes=permanentFailCodes, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -14269,56 +14152,86 @@ def save( if self.id is not None: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = 
vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, self.id, False, None, relative_uris) + r["class"] = u if self.label is not None: r["label"] = save( self.label, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) if self.doc is not None: r["doc"] = save( self.doc, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, + if self.inputs is not None: + r["inputs"] = save( + self.inputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputs is not None: + r["outputs"] = save( + self.outputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.requirements is not None: + r["requirements"] = save( + self.requirements, top=False, base_url=self.id, relative_uris=relative_uris, ) - if self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.cwlVersion is not None: + u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) + r["cwlVersion"] = u + if self.baseCommand is not None: + r["baseCommand"] = save( + self.baseCommand, top=False, base_url=self.id, relative_uris=relative_uris, ) - if self.default is not None: - r["default"] = save( - self.default, top=False, base_url=self.id, relative_uris=relative_uris + if self.arguments is not None: + 
r["arguments"] = save( + self.arguments, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.id, relative_uris=relative_uris + if self.stdin is not None: + r["stdin"] = save( + self.stdin, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, + if self.stderr is not None: + r["stderr"] = save( + self.stderr, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.stdout is not None: + r["stdout"] = save( + self.stdout, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.successCodes is not None: + r["successCodes"] = save( + self.successCodes, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.temporaryFailCodes is not None: + r["temporaryFailCodes"] = save( + self.temporaryFailCodes, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.permanentFailCodes is not None: + r["permanentFailCodes"] = save( + self.permanentFailCodes, top=False, base_url=self.id, relative_uris=relative_uris, @@ -14332,42 +14245,64 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ + "id", "label", - "secondaryFiles", - "streamable", "doc", - "id", - "format", - "loadContents", - "loadListing", - "default", - "type", - "inputBinding", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "class", + "baseCommand", + "arguments", + "stdin", + "stderr", + "stdout", + "successCodes", + "temporaryFailCodes", + "permanentFailCodes", ] ) -class CommandOutputParameter(OutputParameter): - """ - An output parameter for a CommandLineTool. 
+class DockerRequirement(ProcessRequirement): """ + Indicates that a workflow component should be run in a `Docker `__ or Docker-compatible (such as `Singularity `__ and `udocker `__) container environment and specifies how to fetch or build the image. - id: str + If a CommandLineTool lists ``DockerRequirement`` under ``hints`` (or ``requirements``), it may (or must) be run in the specified Docker container. + + The platform must first acquire or install the correct Docker image as specified by ``dockerPull``, ``dockerImport``, ``dockerLoad`` or ``dockerFile``. + + The platform must execute the tool in the container using ``docker run`` with the appropriate Docker image and tool command line. + + The workflow platform may provide input files and the designated output directory through the use of volume bind mounts. The platform should rewrite file paths in the input object to correspond to the Docker bind mounted locations. That is, the platform should rewrite values in the parameter context such as ``runtime.outdir``, ``runtime.tmpdir`` and others to be valid paths within the container. The platform must ensure that ``runtime.outdir`` and ``runtime.tmpdir`` are distinct directories. + + When running a tool contained in Docker, the workflow platform must not assume anything about the contents of the Docker container, such as the presence or absence of specific software, except to assume that the generated command line represents a valid command within the runtime environment of the container. + + A container image may specify an `ENTRYPOINT `__ and/or `CMD `__. Command line arguments will be appended after all elements of ENTRYPOINT, and will override all elements specified using CMD (in other words, CMD is only used when the CommandLineTool definition produces an empty command line). 
+ + Use of implicit ENTRYPOINT or CMD are discouraged due to reproducibility concerns of the implicit hidden execution point (For further discussion, see `https://doi.org/10.12688/f1000research.15140.1 `__). Portable CommandLineTool wrappers in which use of a container is optional must not rely on ENTRYPOINT or CMD. CommandLineTools which do rely on ENTRYPOINT or CMD must list ``DockerRequirement`` in the ``requirements`` section. + + Interaction with other requirements + ----------------------------------- + + If `EnvVarRequirement <#EnvVarRequirement>`__ is specified alongside a DockerRequirement, the environment variables must be provided to Docker using ``--env`` or ``--env-file`` and interact with the container's preexisting environment as defined by Docker. + + """ def __init__( self, - id: Any, - type_: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + dockerPull: Any | None = None, + dockerLoad: Any | None = None, + dockerFile: Any | None = None, + dockerImport: Any | None = None, + dockerImageId: Any | None = None, + dockerOutputDirectory: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -14377,40 +14312,37 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.format = format - self.type_ = type_ - self.outputBinding = outputBinding + self.class_: Final[str] = "DockerRequirement" + self.dockerPull = dockerPull + 
self.dockerLoad = dockerLoad + self.dockerFile = dockerFile + self.dockerImport = dockerImport + self.dockerImageId = dockerImageId + self.dockerOutputDirectory = dockerOutputDirectory def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputParameter): + if isinstance(other, DockerRequirement): return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.type_ == other.type_ - and self.outputBinding == other.outputBinding + self.class_ == other.class_ + and self.dockerPull == other.dockerPull + and self.dockerLoad == other.dockerLoad + and self.dockerFile == other.dockerFile + and self.dockerImport == other.dockerImport + and self.dockerImageId == other.dockerImageId + and self.dockerOutputDirectory == other.dockerOutputDirectory ) return False def __hash__(self) -> int: return hash( ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.type_, - self.outputBinding, + self.class_, + self.dockerPull, + self.dockerLoad, + self.dockerFile, + self.dockerImport, + self.dockerImageId, + self.dockerOutputDirectory, ) ) @@ -14420,29 +14352,46 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputParameter": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - id = None - if "id" in _doc: + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_DockerRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + 
raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + dockerPull = None + if "dockerPull" in _doc: try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, + dockerPull = _load_field( + _doc.get("dockerPull"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("id") + lc=_doc.get("dockerPull") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `id`": + if str(e) == "missing required field `dockerPull`": _errors__.append( ValidationException( str(e), @@ -14450,13 +14399,13 @@ def fromDoc( ) ) else: - val = _doc.get("id") + val = _doc.get("dockerPull") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `dockerPull` field is not valid because:", + SourceLine(_doc, "dockerPull", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14468,37 +14417,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `dockerPull` field is not valid because:", + SourceLine(_doc, "dockerPull", str), [e], - detailed_message=f"the `id` field with value `{val}` " + detailed_message=f"the `dockerPull` field with value `{val}` " "is not valid because:", ) ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - label = None - if "label" in _doc: + dockerLoad = None + if "dockerLoad" in _doc: try: - label = load_field( - _doc.get("label"), + dockerLoad = _load_field( + _doc.get("dockerLoad"), union_of_None_type_or_strtype, baseuri, loadingOptions, - 
lc=_doc.get("label") + lc=_doc.get("dockerLoad") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `dockerLoad`": _errors__.append( ValidationException( str(e), @@ -14506,13 +14446,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("dockerLoad") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `dockerLoad` field is not valid because:", + SourceLine(_doc, "dockerLoad", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14524,28 +14464,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `dockerLoad` field is not valid because:", + SourceLine(_doc, "dockerLoad", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `dockerLoad` field with value `{val}` " "is not valid because:", ) ) - secondaryFiles = None - if "secondaryFiles" in _doc: + dockerFile = None + if "dockerFile" in _doc: try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + dockerFile = _load_field( + _doc.get("dockerFile"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("secondaryFiles") + lc=_doc.get("dockerFile") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `secondaryFiles`": + if str(e) == "missing required field `dockerFile`": _errors__.append( ValidationException( str(e), @@ -14553,13 +14493,13 @@ def fromDoc( ) ) else: - val = 
_doc.get("secondaryFiles") + val = _doc.get("dockerFile") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `dockerFile` field is not valid because:", + SourceLine(_doc, "dockerFile", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14571,28 +14511,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `dockerFile` field is not valid because:", + SourceLine(_doc, "dockerFile", str), [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " + detailed_message=f"the `dockerFile` field with value `{val}` " "is not valid because:", ) ) - streamable = None - if "streamable" in _doc: + dockerImport = None + if "dockerImport" in _doc: try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, + dockerImport = _load_field( + _doc.get("dockerImport"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("streamable") + lc=_doc.get("dockerImport") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `streamable`": + if str(e) == "missing required field `dockerImport`": _errors__.append( ValidationException( str(e), @@ -14600,13 +14540,13 @@ def fromDoc( ) ) else: - val = _doc.get("streamable") + val = _doc.get("dockerImport") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `dockerImport` field is not valid because:", + SourceLine(_doc, "dockerImport", str), [ValidationException(f"Value is a {val_type}, 
" f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14618,28 +14558,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `dockerImport` field is not valid because:", + SourceLine(_doc, "dockerImport", str), [e], - detailed_message=f"the `streamable` field with value `{val}` " + detailed_message=f"the `dockerImport` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: + dockerImageId = None + if "dockerImageId" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + dockerImageId = _load_field( + _doc.get("dockerImageId"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("dockerImageId") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `dockerImageId`": _errors__.append( ValidationException( str(e), @@ -14647,13 +14587,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("dockerImageId") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `dockerImageId` field is not valid because:", + SourceLine(_doc, "dockerImageId", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14665,123 +14605,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `dockerImageId` field is not valid because:", + SourceLine(_doc, "dockerImageId", str), [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - format = None - 
if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, - baseuri, - loadingOptions, - lc=_doc.get("format") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `format`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("format") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [e], - detailed_message=f"the `format` field with value `{val}` " + detailed_message=f"the `dockerImageId` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - 
else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - outputBinding = None - if "outputBinding" in _doc: + dockerOutputDirectory = None + if "dockerOutputDirectory" in _doc: try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, + dockerOutputDirectory = _load_field( + _doc.get("dockerOutputDirectory"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("outputBinding") + lc=_doc.get("dockerOutputDirectory") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputBinding`": + if str(e) == "missing required field `dockerOutputDirectory`": _errors__.append( ValidationException( str(e), @@ -14789,13 +14634,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputBinding") + val = _doc.get("dockerOutputDirectory") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `dockerOutputDirectory` field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} 
{error_message}", @@ -14807,14 +14652,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `dockerOutputDirectory` field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), [e], - detailed_message=f"the `outputBinding` field with value `{val}` " + detailed_message=f"the `dockerOutputDirectory` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -14822,14 +14667,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`, `outputBinding`".format( + "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( k ), SourceLine(_doc, k, str), @@ -14839,18 +14684,15 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - type_=type_, - outputBinding=outputBinding, + dockerPull=dockerPull, + dockerLoad=dockerLoad, + dockerFile=dockerFile, + dockerImport=dockerImport, + dockerImageId=dockerImageId, + dockerOutputDirectory=dockerOutputDirectory, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -14864,43 +14706,56 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.id is not 
None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.dockerPull is not None: + r["dockerPull"] = save( + self.dockerPull, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, + if self.dockerLoad is not None: + r["dockerLoad"] = save( + self.dockerLoad, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, + if self.dockerFile is not None: + r["dockerFile"] = save( + self.dockerFile, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris + if self.dockerImport is not None: + r["dockerImport"] = save( + self.dockerImport, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.id, relative_uris=relative_uris + if self.dockerImageId is not None: + r["dockerImageId"] = save( + self.dockerImageId, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) - if self.outputBinding is not None: - r["outputBinding"] = save( - self.outputBinding, + if self.dockerOutputDirectory is not None: + 
r["dockerOutputDirectory"] = save( + self.dockerOutputDirectory, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) @@ -14912,48 +14767,30 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "type", - "outputBinding", + "class", + "dockerPull", + "dockerLoad", + "dockerFile", + "dockerImport", + "dockerImageId", + "dockerOutputDirectory", ] ) -class CommandLineTool(Process): +class SoftwareRequirement(ProcessRequirement): """ - This defines the schema of the CWL Command Line Tool Description document. + A list of software packages that should be configured in the environment of the defined process. """ - id: str - def __init__( self, - inputs: Any, - outputs: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - baseCommand: Optional[Any] = None, - arguments: Optional[Any] = None, - stdin: Optional[Any] = None, - stderr: Optional[Any] = None, - stdout: Optional[Any] = None, - successCodes: Optional[Any] = None, - temporaryFailCodes: Optional[Any] = None, - permanentFailCodes: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + packages: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -14963,69 +14800,16 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.label = label - self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - 
self.cwlVersion = cwlVersion - self.class_ = "CommandLineTool" - self.baseCommand = baseCommand - self.arguments = arguments - self.stdin = stdin - self.stderr = stderr - self.stdout = stdout - self.successCodes = successCodes - self.temporaryFailCodes = temporaryFailCodes - self.permanentFailCodes = permanentFailCodes + self.class_: Final[str] = "SoftwareRequirement" + self.packages = packages def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandLineTool): - return bool( - self.id == other.id - and self.label == other.label - and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == other.cwlVersion - and self.class_ == other.class_ - and self.baseCommand == other.baseCommand - and self.arguments == other.arguments - and self.stdin == other.stdin - and self.stderr == other.stderr - and self.stdout == other.stdout - and self.successCodes == other.successCodes - and self.temporaryFailCodes == other.temporaryFailCodes - and self.permanentFailCodes == other.permanentFailCodes - ) + if isinstance(other, SoftwareRequirement): + return bool(self.class_ == other.class_ and self.packages == other.packages) return False def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.class_, - self.baseCommand, - self.arguments, - self.stdin, - self.stderr, - self.stdout, - self.successCodes, - self.temporaryFailCodes, - self.permanentFailCodes, - ) - ) + return hash((self.class_, self.packages)) @classmethod def fromDoc( @@ -15033,196 +14817,47 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandLineTool": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = 
doc.lc.filename _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = cast(str, id) try: if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_CommandLineTool_classLoader_False_True_None_None, + uri_SoftwareRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except 
ValidationException as e: - raise e - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - 
f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) + raise e try: - if _doc.get("inputs") is None: - raise ValidationException("missing required field `inputs`", None, []) + if _doc.get("packages") is None: + raise ValidationException("missing required field `packages`", None, []) - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_CommandInputParameterLoader, + packages = _load_field( + _doc.get("packages"), + idmap_packages_array_of_SoftwarePackageLoader, baseuri, loadingOptions, - lc=_doc.get("inputs") + lc=_doc.get("packages") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `inputs`": + if str(e) == "missing required field `packages`": _errors__.append( ValidationException( str(e), @@ -15230,13 +14865,13 @@ def fromDoc( ) ) else: - val = _doc.get("inputs") + val = _doc.get("packages") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), + "the `packages` field is not valid because:", + SourceLine(_doc, "packages", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15248,123 +14883,191 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), + "the `packages` field is not valid because:", + SourceLine(_doc, "packages", str), [e], - detailed_message=f"the `inputs` field with value `{val}` " + detailed_message=f"the `packages` field with value `{val}` " "is not valid because:", ) ) - try: - if 
_doc.get("outputs") is None: - raise ValidationException("missing required field `outputs`", None, []) - - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_CommandOutputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) + extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - detailed_message=f"the `outputs` field with value `{val}` " - "is not valid because:", + "invalid field `{}`, expected one of: `class`, `packages`".format( + k + ), + SourceLine(_doc, k, str), ) ) - requirements = None - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, - baseuri, - loadingOptions, - lc=_doc.get("requirements") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + packages=packages, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed - if str(e) == "missing required field `requirements`": + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.packages is not None: + r["packages"] = save( + self.packages, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if 
self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["class", "packages"]) + + +class SoftwarePackage(Saveable): + def __init__( + self, + package: Any, + version: Any | None = None, + specs: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.package = package + self.version = version + self.specs = specs + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwarePackage): + return bool( + self.package == other.package + and self.version == other.version + and self.specs == other.specs + ) + return False + + def __hash__(self) -> int: + return hash((self.package, self.version, self.specs)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("package") is None: + raise ValidationException("missing required field `package`", None, []) + + package = _load_field( + _doc.get("package"), + strtype, + baseuri, + loadingOptions, + lc=_doc.get("package") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `package`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("package") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - 
str(e), - None + "the `package` field is not valid because:", + SourceLine(_doc, "package", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("requirements") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - detailed_message=f"the `requirements` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `package` field is not valid because:", + SourceLine(_doc, "package", str), + [e], + detailed_message=f"the `package` field with value `{val}` " + "is not valid because:", ) - hints = None - if "hints" in _doc: + ) + version = None + if "version" in _doc: try: - hints = load_field( - _doc.get("hints"), - 
idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type, + version = _load_field( + _doc.get("version"), + union_of_None_type_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("hints") + lc=_doc.get("version") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `hints`": + if str(e) == "missing required field `version`": _errors__.append( ValidationException( str(e), @@ -15372,13 +15075,13 @@ def fromDoc( ) ) else: - val = _doc.get("hints") + val = _doc.get("version") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `version` field is not valid because:", + SourceLine(_doc, "version", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15390,28 +15093,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `version` field is not valid because:", + SourceLine(_doc, "version", str), [e], - detailed_message=f"the `hints` field with value `{val}` " + detailed_message=f"the `version` field with value `{val}` " "is not valid because:", ) ) - 
cwlVersion = None - if "cwlVersion" in _doc: + specs = None + if "specs" in _doc: try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, + specs = _load_field( + _doc.get("specs"), + uri_union_of_None_type_or_array_of_strtype_False_False_None_True, baseuri, loadingOptions, - lc=_doc.get("cwlVersion") + lc=_doc.get("specs") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `cwlVersion`": + if str(e) == "missing required field `specs`": _errors__.append( ValidationException( str(e), @@ -15419,13 +15122,13 @@ def fromDoc( ) ) else: - val = _doc.get("cwlVersion") + val = _doc.get("specs") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), + "the `specs` field is not valid because:", + SourceLine(_doc, "specs", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15437,75 +15140,147 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), + "the `specs` field is not valid because:", + SourceLine(_doc, "specs", str), [e], - detailed_message=f"the `cwlVersion` field with value `{val}` " + detailed_message=f"the `specs` field with value `{val}` " "is not valid because:", ) ) - baseCommand = None - if "baseCommand" in _doc: - try: - baseCommand = load_field( - _doc.get("baseCommand"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("baseCommand") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `baseCommand`": + extension_fields: MutableMapping[str, Any] = 
{} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - str(e), - None - ) + ValidationException("mapping with implicit null key") ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] else: - val = _doc.get("baseCommand") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `baseCommand` field is not valid because:", - SourceLine(_doc, "baseCommand", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( + k + ), + SourceLine(_doc, k, str), ) - else: - _errors__.append( - ValidationException( - "the `baseCommand` field is not valid because:", - SourceLine(_doc, "baseCommand", str), - [e], - detailed_message=f"the `baseCommand` field with value `{val}` " - "is not valid because:", - ) - ) - arguments = None - if "arguments" in _doc: + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + package=package, + version=version, + specs=specs, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.package is not None: + r["package"] = save( + self.package, top=False, base_url=base_url, 
relative_uris=relative_uris + ) + if self.version is not None: + r["version"] = save( + self.version, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.specs is not None: + u = save_relative_uri(self.specs, base_url, False, None, relative_uris) + r["specs"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["package", "version", "specs"]) + + +class Dirent(Saveable): + """ + Define a file or subdirectory that must be placed in the designated output directory prior to executing the command line tool. May be the result of executing an expression, such as building a configuration file from a template. + + """ + + def __init__( + self, + entry: Any, + entryname: Any | None = None, + writable: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.entryname = entryname + self.entry = entry + self.writable = writable + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Dirent): + return bool( + self.entryname == other.entryname + and self.entry == other.entry + and self.writable == other.writable + ) + return False + + def __hash__(self) -> int: + return hash((self.entryname, self.entry, self.writable)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + entryname = None + if 
"entryname" in _doc: try: - arguments = load_field( - _doc.get("arguments"), - union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + entryname = _load_field( + _doc.get("entryname"), + union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("arguments") + lc=_doc.get("entryname") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `arguments`": + if str(e) == "missing required field `entryname`": _errors__.append( ValidationException( str(e), @@ -15513,13 +15288,13 @@ def fromDoc( ) ) else: - val = _doc.get("arguments") + val = _doc.get("entryname") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `arguments` field is not valid because:", - SourceLine(_doc, "arguments", str), + "the `entryname` field is not valid because:", + SourceLine(_doc, "entryname", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15531,75 +15306,76 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `arguments` field is not valid because:", - SourceLine(_doc, "arguments", str), + "the `entryname` field is not valid because:", + SourceLine(_doc, "entryname", str), [e], - detailed_message=f"the `arguments` field with value `{val}` " + detailed_message=f"the `entryname` field with value `{val}` " "is not valid because:", ) ) - stdin = None - if "stdin" in _doc: - try: - stdin = load_field( - _doc.get("stdin"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("stdin") - ) + try: + if _doc.get("entry") is None: + raise ValidationException("missing required field `entry`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + entry = _load_field( + 
_doc.get("entry"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("entry") + ) - if str(e) == "missing required field `stdin`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `entry`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("entry") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `entry` field is not valid because:", + SourceLine(_doc, "entry", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("stdin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `stdin` field is not valid because:", - SourceLine(_doc, "stdin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `stdin` field is not valid because:", - SourceLine(_doc, "stdin", str), - [e], - detailed_message=f"the `stdin` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `entry` field is not valid because:", + SourceLine(_doc, "entry", str), + [e], + detailed_message=f"the `entry` field with value `{val}` " + "is not valid because:", ) - stderr = None - if "stderr" in _doc: + ) + writable = None + if "writable" in _doc: try: - stderr = load_field( - _doc.get("stderr"), - 
union_of_None_type_or_strtype_or_ExpressionLoader, + writable = _load_field( + _doc.get("writable"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("stderr") + lc=_doc.get("writable") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `stderr`": + if str(e) == "missing required field `writable`": _errors__.append( ValidationException( str(e), @@ -15607,13 +15383,13 @@ def fromDoc( ) ) else: - val = _doc.get("stderr") + val = _doc.get("writable") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `stderr` field is not valid because:", - SourceLine(_doc, "stderr", str), + "the `writable` field is not valid because:", + SourceLine(_doc, "writable", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15625,246 +15401,44 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `stderr` field is not valid because:", - SourceLine(_doc, "stderr", str), + "the `writable` field is not valid because:", + SourceLine(_doc, "writable", str), [e], - detailed_message=f"the `stderr` field with value `{val}` " + detailed_message=f"the `writable` field with value `{val}` " "is not valid because:", ) ) - stdout = None - if "stdout" in _doc: - try: - stdout = load_field( - _doc.get("stdout"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("stdout") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `stdout`": + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, 
vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: _errors__.append( ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("stdout") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `stdout` field is not valid because:", - SourceLine(_doc, "stdout", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `stdout` field is not valid because:", - SourceLine(_doc, "stdout", str), - [e], - detailed_message=f"the `stdout` field with value `{val}` " - "is not valid because:", - ) - ) - successCodes = None - if "successCodes" in _doc: - try: - successCodes = load_field( - _doc.get("successCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - lc=_doc.get("successCodes") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `successCodes`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("successCodes") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `successCodes` field is not valid because:", - SourceLine(_doc, "successCodes", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `successCodes` field is not valid because:", - SourceLine(_doc, "successCodes", str), - 
[e], - detailed_message=f"the `successCodes` field with value `{val}` " - "is not valid because:", - ) - ) - temporaryFailCodes = None - if "temporaryFailCodes" in _doc: - try: - temporaryFailCodes = load_field( - _doc.get("temporaryFailCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - lc=_doc.get("temporaryFailCodes") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `temporaryFailCodes`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("temporaryFailCodes") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `temporaryFailCodes` field is not valid because:", - SourceLine(_doc, "temporaryFailCodes", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `temporaryFailCodes` field is not valid because:", - SourceLine(_doc, "temporaryFailCodes", str), - [e], - detailed_message=f"the `temporaryFailCodes` field with value `{val}` " - "is not valid because:", - ) - ) - permanentFailCodes = None - if "permanentFailCodes" in _doc: - try: - permanentFailCodes = load_field( - _doc.get("permanentFailCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - lc=_doc.get("permanentFailCodes") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `permanentFailCodes`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("permanentFailCodes") - if error_message != str(e): - val_type = 
convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `permanentFailCodes` field is not valid because:", - SourceLine(_doc, "permanentFailCodes", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `permanentFailCodes` field is not valid because:", - SourceLine(_doc, "permanentFailCodes", str), - [e], - detailed_message=f"the `permanentFailCodes` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( - k - ), - SourceLine(_doc, k, str), + "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( + k + ), + SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - id=id, - label=label, - doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - baseCommand=baseCommand, - arguments=arguments, - stdin=stdin, - stderr=stderr, - stdout=stdout, - successCodes=successCodes, - temporaryFailCodes=temporaryFailCodes, - permanentFailCodes=permanentFailCodes, + entryname=entryname, + entry=entry, + 
writable=writable, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -15878,90 +15452,20 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, self.id, False, None, relative_uris) - r["class"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.inputs is not None: - r["inputs"] = save( - self.inputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputs is not None: - r["outputs"] = save( - self.outputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.requirements is not None: - r["requirements"] = save( - self.requirements, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.cwlVersion is not None: - u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) - r["cwlVersion"] = u - if self.baseCommand is not None: - r["baseCommand"] = save( - self.baseCommand, + if self.entryname is not None: + r["entryname"] = save( + self.entryname, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.arguments is not None: - r["arguments"] = save( - self.arguments, top=False, base_url=self.id, relative_uris=relative_uris + if self.entry is 
not None: + r["entry"] = save( + self.entry, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.stdin is not None: - r["stdin"] = save( - self.stdin, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.stderr is not None: - r["stderr"] = save( - self.stderr, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.stdout is not None: - r["stdout"] = save( - self.stdout, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.successCodes is not None: - r["successCodes"] = save( - self.successCodes, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.temporaryFailCodes is not None: - r["temporaryFailCodes"] = save( - self.temporaryFailCodes, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.permanentFailCodes is not None: - r["permanentFailCodes"] = save( - self.permanentFailCodes, - top=False, - base_url=self.id, - relative_uris=relative_uris, + if self.writable is not None: + r["writable"] = save( + self.writable, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -15972,95 +15476,20 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "class", - "baseCommand", - "arguments", - "stdin", - "stderr", - "stdout", - "successCodes", - "temporaryFailCodes", - "permanentFailCodes", - ] - ) + attrs: ClassVar[Collection[str]] = frozenset(["entryname", "entry", "writable"]) -class DockerRequirement(ProcessRequirement): +class InitialWorkDirRequirement(ProcessRequirement): """ - Indicates that a workflow component should be run in a - [Docker](http://docker.com) or Docker-compatible (such as - [Singularity](https://www.sylabs.io/) and [udocker](https://github.com/indigo-dc/udocker)) container environment and - specifies how to fetch or build the image. 
- - If a CommandLineTool lists `DockerRequirement` under - `hints` (or `requirements`), it may (or must) be run in the specified Docker - container. - - The platform must first acquire or install the correct Docker image as - specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. - - The platform must execute the tool in the container using `docker run` with - the appropriate Docker image and tool command line. - - The workflow platform may provide input files and the designated output - directory through the use of volume bind mounts. The platform should rewrite - file paths in the input object to correspond to the Docker bind mounted - locations. That is, the platform should rewrite values in the parameter context - such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths - within the container. The platform must ensure that `runtime.outdir` and - `runtime.tmpdir` are distinct directories. - - When running a tool contained in Docker, the workflow platform must not - assume anything about the contents of the Docker container, such as the - presence or absence of specific software, except to assume that the - generated command line represents a valid command within the runtime - environment of the container. - - A container image may specify an - [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint) - and/or - [CMD](https://docs.docker.com/engine/reference/builder/#cmd). - Command line arguments will be appended after all elements of - ENTRYPOINT, and will override all elements specified using CMD (in - other words, CMD is only used when the CommandLineTool definition - produces an empty command line). - - Use of implicit ENTRYPOINT or CMD are discouraged due to reproducibility - concerns of the implicit hidden execution point (For further discussion, see - [https://doi.org/10.12688/f1000research.15140.1](https://doi.org/10.12688/f1000research.15140.1)). 
Portable - CommandLineTool wrappers in which use of a container is optional must not rely on ENTRYPOINT or CMD. - CommandLineTools which do rely on ENTRYPOINT or CMD must list `DockerRequirement` in the - `requirements` section. - - ## Interaction with other requirements - - If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a - DockerRequirement, the environment variables must be provided to Docker - using `--env` or `--env-file` and interact with the container's preexisting - environment as defined by Docker. + Define a list of files and subdirectories that must be created by the workflow platform in the designated output directory prior to executing the command line tool. """ def __init__( self, - dockerPull: Optional[Any] = None, - dockerLoad: Optional[Any] = None, - dockerFile: Optional[Any] = None, - dockerImport: Optional[Any] = None, - dockerImageId: Optional[Any] = None, - dockerOutputDirectory: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + listing: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -16070,39 +15499,16 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "DockerRequirement" - self.dockerPull = dockerPull - self.dockerLoad = dockerLoad - self.dockerFile = dockerFile - self.dockerImport = dockerImport - self.dockerImageId = dockerImageId - self.dockerOutputDirectory = dockerOutputDirectory + self.class_: Final[str] = "InitialWorkDirRequirement" + self.listing = listing def __eq__(self, other: Any) -> bool: - if isinstance(other, DockerRequirement): - return bool( - self.class_ == other.class_ - and self.dockerPull == other.dockerPull - and self.dockerLoad == other.dockerLoad - and self.dockerFile == other.dockerFile - and self.dockerImport == 
other.dockerImport - and self.dockerImageId == other.dockerImageId - and self.dockerOutputDirectory == other.dockerOutputDirectory - ) + if isinstance(other, InitialWorkDirRequirement): + return bool(self.class_ == other.class_ and self.listing == other.listing) return False def __hash__(self) -> int: - return hash( - ( - self.class_, - self.dockerPull, - self.dockerLoad, - self.dockerFile, - self.dockerImport, - self.dockerImageId, - self.dockerOutputDirectory, - ) - ) + return hash((self.class_, self.listing)) @classmethod def fromDoc( @@ -16110,8 +15516,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "DockerRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -16122,331 +15528,93 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_DockerRequirement_classLoader_False_True_None_None, + uri_InitialWorkDirRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e - dockerPull = None - if "dockerPull" in _doc: - try: - dockerPull = load_field( - _doc.get("dockerPull"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("dockerPull") - ) + raise e + try: + if _doc.get("listing") is None: + raise ValidationException("missing required field `listing`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + listing = _load_field( + _doc.get("listing"), + 
union_of_array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("listing") + ) - if str(e) == "missing required field `dockerPull`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `listing`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("listing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("dockerPull") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `dockerPull` field is not valid because:", - SourceLine(_doc, "dockerPull", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `dockerPull` field is not valid because:", - SourceLine(_doc, "dockerPull", str), - [e], - detailed_message=f"the `dockerPull` field with value `{val}` " - "is not valid because:", - ) - ) - dockerLoad = None - if "dockerLoad" in _doc: - try: - dockerLoad = load_field( - _doc.get("dockerLoad"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - 
lc=_doc.get("dockerLoad") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `dockerLoad`": _errors__.append( ValidationException( - str(e), - None + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + detailed_message=f"the `listing` field with value `{val}` " + "is not valid because:", ) ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] else: - val = _doc.get("dockerLoad") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `dockerLoad` field is not valid because:", - SourceLine(_doc, "dockerLoad", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `dockerLoad` field is not valid because:", - SourceLine(_doc, "dockerLoad", str), - [e], - detailed_message=f"the `dockerLoad` field with value `{val}` " - "is not valid because:", - ) - ) - dockerFile = None - if "dockerFile" in _doc: - try: - dockerFile = load_field( - _doc.get("dockerFile"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("dockerFile") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `dockerFile`": _errors__.append( ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("dockerFile") - if error_message 
!= str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `dockerFile` field is not valid because:", - SourceLine(_doc, "dockerFile", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `dockerFile` field is not valid because:", - SourceLine(_doc, "dockerFile", str), - [e], - detailed_message=f"the `dockerFile` field with value `{val}` " - "is not valid because:", - ) - ) - dockerImport = None - if "dockerImport" in _doc: - try: - dockerImport = load_field( - _doc.get("dockerImport"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("dockerImport") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `dockerImport`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("dockerImport") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `dockerImport` field is not valid because:", - SourceLine(_doc, "dockerImport", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `dockerImport` field is not valid because:", - SourceLine(_doc, "dockerImport", str), - [e], - detailed_message=f"the `dockerImport` field with value `{val}` " - "is not valid because:", - ) - ) - dockerImageId = None - if "dockerImageId" in _doc: - try: - 
dockerImageId = load_field( - _doc.get("dockerImageId"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("dockerImageId") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `dockerImageId`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("dockerImageId") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `dockerImageId` field is not valid because:", - SourceLine(_doc, "dockerImageId", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `dockerImageId` field is not valid because:", - SourceLine(_doc, "dockerImageId", str), - [e], - detailed_message=f"the `dockerImageId` field with value `{val}` " - "is not valid because:", - ) - ) - dockerOutputDirectory = None - if "dockerOutputDirectory" in _doc: - try: - dockerOutputDirectory = load_field( - _doc.get("dockerOutputDirectory"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("dockerOutputDirectory") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `dockerOutputDirectory`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("dockerOutputDirectory") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `dockerOutputDirectory` field is not valid because:", - SourceLine(_doc, "dockerOutputDirectory", str), - [ValidationException(f"Value is a {val_type}, " 
- f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `dockerOutputDirectory` field is not valid because:", - SourceLine(_doc, "dockerOutputDirectory", str), - [e], - detailed_message=f"the `dockerOutputDirectory` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( - k - ), - SourceLine(_doc, k, str), + "invalid field `{}`, expected one of: `class`, `listing`".format( + k + ), + SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - dockerPull=dockerPull, - dockerLoad=dockerLoad, - dockerFile=dockerFile, - dockerImport=dockerImport, - dockerImageId=dockerImageId, - dockerOutputDirectory=dockerOutputDirectory, + listing=listing, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -16464,54 +15632,18 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = 
save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.dockerPull is not None: - r["dockerPull"] = save( - self.dockerPull, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.dockerLoad is not None: - r["dockerLoad"] = save( - self.dockerLoad, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.dockerFile is not None: - r["dockerFile"] = save( - self.dockerFile, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.dockerImport is not None: - r["dockerImport"] = save( - self.dockerImport, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.dockerImageId is not None: - r["dockerImageId"] = save( - self.dockerImageId, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.dockerOutputDirectory is not None: - r["dockerOutputDirectory"] = save( - self.dockerOutputDirectory, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.listing is not None: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -16522,31 +15654,20 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "class", - "dockerPull", - "dockerLoad", - "dockerFile", - "dockerImport", - "dockerImageId", - "dockerOutputDirectory", - ] - ) + attrs: ClassVar[Collection[str]] = frozenset(["class", "listing"]) -class SoftwareRequirement(ProcessRequirement): +class EnvVarRequirement(ProcessRequirement): """ - A list of software packages that should be configured in the environment of - the defined process. + Define a list of environment variables which will be set in the execution environment of the tool. See ``EnvironmentDef`` for details. 
""" def __init__( self, - packages: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + envDef: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -16556,16 +15677,16 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "SoftwareRequirement" - self.packages = packages + self.class_: Final[str] = "EnvVarRequirement" + self.envDef = envDef def __eq__(self, other: Any) -> bool: - if isinstance(other, SoftwareRequirement): - return bool(self.class_ == other.class_ and self.packages == other.packages) + if isinstance(other, EnvVarRequirement): + return bool(self.class_ == other.class_ and self.envDef == other.envDef) return False def __hash__(self) -> int: - return hash((self.class_, self.packages)) + return hash((self.class_, self.envDef)) @classmethod def fromDoc( @@ -16573,8 +15694,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SoftwareRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -16585,34 +15706,35 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_SoftwareRequirement_classLoader_False_True_None_None, + uri_EnvVarRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e + raise e try: - if 
_doc.get("packages") is None: - raise ValidationException("missing required field `packages`", None, []) + if _doc.get("envDef") is None: + raise ValidationException("missing required field `envDef`", None, []) - packages = load_field( - _doc.get("packages"), - idmap_packages_array_of_SoftwarePackageLoader, + envDef = _load_field( + _doc.get("envDef"), + idmap_envDef_array_of_EnvironmentDefLoader, baseuri, loadingOptions, - lc=_doc.get("packages") + lc=_doc.get("envDef") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `packages`": + if str(e) == "missing required field `envDef`": _errors__.append( ValidationException( str(e), @@ -16620,13 +15742,13 @@ def fromDoc( ) ) else: - val = _doc.get("packages") + val = _doc.get("envDef") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `packages` field is not valid because:", - SourceLine(_doc, "packages", str), + "the `envDef` field is not valid because:", + SourceLine(_doc, "envDef", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -16638,14 +15760,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `packages` field is not valid because:", - SourceLine(_doc, "packages", str), + "the `envDef` field is not valid because:", + SourceLine(_doc, "envDef", str), [e], - detailed_message=f"the `packages` field with value `{val}` " + detailed_message=f"the `envDef` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -16653,14 +15775,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) 
extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format( + "invalid field `{}`, expected one of: `class`, `envDef`".format( k ), SourceLine(_doc, k, str), @@ -16670,7 +15792,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - packages=packages, + envDef=envDef, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -16688,16 +15810,18 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.packages is not None: - r["packages"] = save( - self.packages, top=False, base_url=base_url, relative_uris=relative_uris + if self.envDef is not None: + r["envDef"] = save( + self.envDef, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -16708,17 +15832,19 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "packages"]) + attrs: ClassVar[Collection[str]] = frozenset(["class", "envDef"]) -class SoftwarePackage(Saveable): +class ShellCommandRequirement(ProcessRequirement): + """ + Modify the behavior of CommandLineTool to generate a single string containing a shell command line. Each item in the argument list must be joined into a string separated by single spaces and quoted to prevent interpretation by the shell, unless ``CommandLineBinding`` for that argument contains ``shellQuote: false``. 
If ``shellQuote: false`` is specified, the argument is joined into the command string without quoting, which allows the use of shell metacharacters such as ``|`` for pipes. + + """ + def __init__( self, - package: Any, - version: Optional[Any] = None, - specs: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -16728,21 +15854,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.package = package - self.version = version - self.specs = specs + self.class_: Final[str] = "ShellCommandRequirement" def __eq__(self, other: Any) -> bool: - if isinstance(other, SoftwarePackage): - return bool( - self.package == other.package - and self.version == other.version - and self.specs == other.specs - ) + if isinstance(other, ShellCommandRequirement): + return bool(self.class_ == other.class_) return False def __hash__(self) -> int: - return hash((self.package, self.version, self.specs)) + return hash((self.class_)) @classmethod def fromDoc( @@ -16750,8 +15870,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SoftwarePackage": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -16759,148 +15879,23 @@ def fromDoc( _doc.lc.filename = doc.lc.filename _errors__ = [] try: - if _doc.get("package") is None: - raise ValidationException("missing required field `package`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - package = load_field( - _doc.get("package"), - strtype, + class_ = _load_field( + _doc.get("class"), + uri_ShellCommandRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, - 
lc=_doc.get("package") + lc=_doc.get("class") ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `package`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("package") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `package` field is not valid because:", - SourceLine(_doc, "package", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `package` field is not valid because:", - SourceLine(_doc, "package", str), - [e], - detailed_message=f"the `package` field with value `{val}` " - "is not valid because:", - ) - ) - version = None - if "version" in _doc: - try: - version = load_field( - _doc.get("version"), - union_of_None_type_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("version") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `version`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("version") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `version` field is not valid because:", - SourceLine(_doc, "version", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value 
`{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `version` field is not valid because:", - SourceLine(_doc, "version", str), - [e], - detailed_message=f"the `version` field with value `{val}` " - "is not valid because:", - ) - ) - specs = None - if "specs" in _doc: - try: - specs = load_field( - _doc.get("specs"), - uri_union_of_None_type_or_array_of_strtype_False_False_None_True, - baseuri, - loadingOptions, - lc=_doc.get("specs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `specs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("specs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `specs` field is not valid because:", - SourceLine(_doc, "specs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `specs` field is not valid because:", - SourceLine(_doc, "specs", str), - [e], - detailed_message=f"the `specs` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + raise e + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -16908,16 +15903,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected 
one of: `package`, `version`, `specs`".format( - k - ), + "invalid field `{}`, expected one of: `class`".format(k), SourceLine(_doc, k, str), ) ) @@ -16925,9 +15918,6 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - package=package, - version=version, - specs=specs, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -16944,17 +15934,16 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.package is not None: - r["package"] = save( - self.package, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.version is not None: - r["version"] = save( - self.version, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.specs is not None: - u = save_relative_uri(self.specs, base_url, False, None, relative_uris) - r["specs"] = u + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u # top refers to the directory level if top: @@ -16964,25 +15953,39 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["package", "version", "specs"]) + attrs: ClassVar[Collection[str]] = frozenset(["class"]) -class Dirent(Saveable): +class ResourceRequirement(ProcessRequirement): """ - Define a file or subdirectory that must be placed in the designated output - directory prior to executing the command line tool. May be the result of - executing an expression, such as building a configuration file from a - template. + Specify basic hardware resource requirements. + + "min" is the minimum amount of a resource that must be reserved to schedule a job. If "min" cannot be satisfied, the job should not be run. 
+ + "max" is the maximum amount of a resource that the job shall be permitted to use. If a node has sufficient resources, multiple jobs may be scheduled on a single node provided each job's "max" resource requirements are met. If a job attempts to exceed its "max" resource allocation, an implementation may deny additional resources, which may result in job failure. + + If "min" is specified but "max" is not, then "max" == "min" If "max" is specified by "min" is not, then "min" == "max". + + It is an error if max < min. + + It is an error if the value of any of these fields is negative. + + If neither "min" nor "max" is specified for a resource, use the default values below. """ def __init__( self, - entry: Any, - entryname: Optional[Any] = None, - writable: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + coresMin: Any | None = None, + coresMax: Any | None = None, + ramMin: Any | None = None, + ramMax: Any | None = None, + tmpdirMin: Any | None = None, + tmpdirMax: Any | None = None, + outdirMin: Any | None = None, + outdirMax: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -16992,21 +15995,45 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.entryname = entryname - self.entry = entry - self.writable = writable + self.class_: Final[str] = "ResourceRequirement" + self.coresMin = coresMin + self.coresMax = coresMax + self.ramMin = ramMin + self.ramMax = ramMax + self.tmpdirMin = tmpdirMin + self.tmpdirMax = tmpdirMax + self.outdirMin = outdirMin + self.outdirMax = outdirMax def __eq__(self, other: Any) -> bool: - if isinstance(other, Dirent): + if isinstance(other, ResourceRequirement): return bool( - self.entryname == other.entryname - and self.entry == other.entry - and 
self.writable == other.writable + self.class_ == other.class_ + and self.coresMin == other.coresMin + and self.coresMax == other.coresMax + and self.ramMin == other.ramMin + and self.ramMax == other.ramMax + and self.tmpdirMin == other.tmpdirMin + and self.tmpdirMax == other.tmpdirMax + and self.outdirMin == other.outdirMin + and self.outdirMax == other.outdirMax ) return False def __hash__(self) -> int: - return hash((self.entryname, self.entry, self.writable)) + return hash( + ( + self.class_, + self.coresMin, + self.coresMax, + self.ramMin, + self.ramMax, + self.tmpdirMin, + self.tmpdirMax, + self.outdirMin, + self.outdirMax, + ) + ) @classmethod def fromDoc( @@ -17014,29 +16041,46 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Dirent": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - entryname = None - if "entryname" in _doc: - try: - entryname = load_field( - _doc.get("entryname"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("entryname") - ) + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_ResourceRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + coresMin = None + if "coresMin" in _doc: + try: + coresMin = _load_field( + _doc.get("coresMin"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("coresMin") + ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == 
"missing required field `entryname`": + if str(e) == "missing required field `coresMin`": _errors__.append( ValidationException( str(e), @@ -17044,13 +16088,13 @@ def fromDoc( ) ) else: - val = _doc.get("entryname") + val = _doc.get("coresMin") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `entryname` field is not valid because:", - SourceLine(_doc, "entryname", str), + "the `coresMin` field is not valid because:", + SourceLine(_doc, "coresMin", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -17062,76 +16106,122 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `entryname` field is not valid because:", - SourceLine(_doc, "entryname", str), + "the `coresMin` field is not valid because:", + SourceLine(_doc, "coresMin", str), [e], - detailed_message=f"the `entryname` field with value `{val}` " + detailed_message=f"the `coresMin` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("entry") is None: - raise ValidationException("missing required field `entry`", None, []) - - entry = load_field( - _doc.get("entry"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("entry") - ) + coresMax = None + if "coresMax" in _doc: + try: + coresMax = _load_field( + _doc.get("coresMax"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("coresMax") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `entry`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("entry") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing 
required field `coresMax`": _errors__.append( ValidationException( - "the `entry` field is not valid because:", - SourceLine(_doc, "entry", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("coresMax") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `coresMax` field is not valid because:", + SourceLine(_doc, "coresMax", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `coresMax` field is not valid because:", + SourceLine(_doc, "coresMax", str), + [e], + detailed_message=f"the `coresMax` field with value `{val}` " + "is not valid because:", + ) + ) + ramMin = None + if "ramMin" in _doc: + try: + ramMin = _load_field( + _doc.get("ramMin"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("ramMin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `ramMin`": _errors__.append( ValidationException( - "the `entry` field is not valid because:", - SourceLine(_doc, "entry", str), - [e], - detailed_message=f"the `entry` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - writable = None - if "writable" in _doc: + else: + val = _doc.get("ramMin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `ramMin` 
field is not valid because:", + SourceLine(_doc, "ramMin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `ramMin` field is not valid because:", + SourceLine(_doc, "ramMin", str), + [e], + detailed_message=f"the `ramMin` field with value `{val}` " + "is not valid because:", + ) + ) + ramMax = None + if "ramMax" in _doc: try: - writable = load_field( - _doc.get("writable"), - union_of_None_type_or_booltype, + ramMax = _load_field( + _doc.get("ramMax"), + union_of_None_type_or_inttype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("writable") + lc=_doc.get("ramMax") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `writable`": + if str(e) == "missing required field `ramMax`": _errors__.append( ValidationException( str(e), @@ -17139,13 +16229,13 @@ def fromDoc( ) ) else: - val = _doc.get("writable") + val = _doc.get("ramMax") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `writable` field is not valid because:", - SourceLine(_doc, "writable", str), + "the `ramMax` field is not valid because:", + SourceLine(_doc, "ramMax", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -17157,46 +16247,239 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `writable` field is not valid because:", - SourceLine(_doc, "writable", str), + "the `ramMax` field is not valid because:", + SourceLine(_doc, "ramMax", str), [e], - detailed_message=f"the `writable` field with value `{val}` " + detailed_message=f"the `ramMax` field with value 
`{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + tmpdirMin = None + if "tmpdirMin" in _doc: + try: + tmpdirMin = _load_field( + _doc.get("tmpdirMin"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("tmpdirMin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `tmpdirMin`": _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ValidationException( + str(e), + None + ) ) - extension_fields[ex] = _doc[k] else: + val = _doc.get("tmpdirMin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `tmpdirMin` field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `tmpdirMin` field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), + [e], + detailed_message=f"the `tmpdirMin` field with value `{val}` " + "is not valid because:", + ) + ) + tmpdirMax = None + if "tmpdirMax" in _doc: + try: + tmpdirMax = _load_field( + _doc.get("tmpdirMax"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("tmpdirMax") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `tmpdirMax`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `entryname`, 
`entry`, `writable`".format( - k - ), - SourceLine(_doc, k, str), + str(e), + None ) ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - entryname=entryname, - entry=entry, - writable=writable, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - + else: + val = _doc.get("tmpdirMax") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `tmpdirMax` field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `tmpdirMax` field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), + [e], + detailed_message=f"the `tmpdirMax` field with value `{val}` " + "is not valid because:", + ) + ) + outdirMin = None + if "outdirMin" in _doc: + try: + outdirMin = _load_field( + _doc.get("outdirMin"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("outdirMin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outdirMin`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outdirMin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outdirMin` field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid 
{to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outdirMin` field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [e], + detailed_message=f"the `outdirMin` field with value `{val}` " + "is not valid because:", + ) + ) + outdirMax = None + if "outdirMax" in _doc: + try: + outdirMax = _load_field( + _doc.get("outdirMax"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("outdirMax") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outdirMax`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outdirMax") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outdirMax` field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outdirMax` field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [e], + detailed_message=f"the `outdirMax` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, 
`tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + coresMin=coresMin, + coresMax=coresMax, + ramMin=ramMin, + ramMax=ramMax, + tmpdirMin=tmpdirMin, + tmpdirMax=tmpdirMax, + outdirMin=outdirMin, + outdirMax=outdirMax, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> dict[str, Any]: @@ -17208,20 +16491,59 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.entryname is not None: - r["entryname"] = save( - self.entryname, + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.coresMin is not None: + r["coresMin"] = save( + self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.coresMax is not None: + r["coresMax"] = save( + self.coresMax, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.ramMin is not None: + r["ramMin"] = save( + self.ramMin, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.ramMax is not None: + r["ramMax"] = save( + self.ramMax, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.tmpdirMin is not None: + r["tmpdirMin"] = save( + self.tmpdirMin, top=False, base_url=base_url, relative_uris=relative_uris, ) - if self.entry is not None: - r["entry"] = save( - self.entry, top=False, base_url=base_url, relative_uris=relative_uris + if self.tmpdirMax is not None: + r["tmpdirMax"] = save( + self.tmpdirMax, + top=False, + base_url=base_url, + 
relative_uris=relative_uris, ) - if self.writable is not None: - r["writable"] = save( - self.writable, top=False, base_url=base_url, relative_uris=relative_uris + if self.outdirMin is not None: + r["outdirMin"] = save( + self.outdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.outdirMax is not None: + r["outdirMax"] = save( + self.outdirMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -17232,19 +16554,34 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["entryname", "entry", "writable"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "class", + "coresMin", + "coresMax", + "ramMin", + "ramMax", + "tmpdirMin", + "tmpdirMax", + "outdirMin", + "outdirMax", + ] + ) -class InitialWorkDirRequirement(ProcessRequirement): +class WorkReuse(ProcessRequirement): """ - Define a list of files and subdirectories that must be created by the workflow platform in the designated output directory prior to executing the command line tool. + For implementations that support reusing output from past work (on the assumption that same code and same input produce same results), control whether to enable or disable the reuse behavior for a particular tool or step (to accommodate situations where that assumption is incorrect). A reused step is not executed but instead returns the same output as the original execution. + + If ``enableReuse`` is not specified, correct tools should assume it is enabled by default. 
+ """ def __init__( self, - listing: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + enableReuse: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -17254,16 +16591,18 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "InitialWorkDirRequirement" - self.listing = listing + self.class_: Final[str] = "WorkReuse" + self.enableReuse = enableReuse def __eq__(self, other: Any) -> bool: - if isinstance(other, InitialWorkDirRequirement): - return bool(self.class_ == other.class_ and self.listing == other.listing) + if isinstance(other, WorkReuse): + return bool( + self.class_ == other.class_ and self.enableReuse == other.enableReuse + ) return False def __hash__(self) -> int: - return hash((self.class_, self.listing)) + return hash((self.class_, self.enableReuse)) @classmethod def fromDoc( @@ -17271,8 +16610,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InitialWorkDirRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -17283,34 +16622,35 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_InitialWorkDirRequirement_classLoader_False_True_None_None, + uri_WorkReuse_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as 
e: - raise e + raise e try: - if _doc.get("listing") is None: - raise ValidationException("missing required field `listing`", None, []) + if _doc.get("enableReuse") is None: + raise ValidationException("missing required field `enableReuse`", None, []) - listing = load_field( - _doc.get("listing"), - union_of_array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader_or_ExpressionLoader, + enableReuse = _load_field( + _doc.get("enableReuse"), + union_of_booltype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("listing") + lc=_doc.get("enableReuse") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `listing`": + if str(e) == "missing required field `enableReuse`": _errors__.append( ValidationException( str(e), @@ -17318,13 +16658,13 @@ def fromDoc( ) ) else: - val = _doc.get("listing") + val = _doc.get("enableReuse") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `listing` field is not valid because:", - SourceLine(_doc, "listing", str), + "the `enableReuse` field is not valid because:", + SourceLine(_doc, "enableReuse", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -17336,14 +16676,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `listing` field is not valid because:", - SourceLine(_doc, "listing", str), + "the `enableReuse` field is not valid because:", + SourceLine(_doc, "enableReuse", str), [e], - detailed_message=f"the `listing` field with value `{val}` " + detailed_message=f"the `enableReuse` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ 
-17351,14 +16691,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format( + "invalid field `{}`, expected one of: `class`, `enableReuse`".format( k ), SourceLine(_doc, k, str), @@ -17368,7 +16708,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - listing=listing, + enableReuse=enableReuse, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -17386,16 +16726,21 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.listing is not None: - r["listing"] = save( - self.listing, top=False, base_url=base_url, relative_uris=relative_uris + if self.enableReuse is not None: + r["enableReuse"] = save( + self.enableReuse, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -17406,21 +16751,26 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "listing"]) + attrs: ClassVar[Collection[str]] = frozenset(["class", "enableReuse"]) -class EnvVarRequirement(ProcessRequirement): +class NetworkAccess(ProcessRequirement): """ - Define a list of environment variables which will be set in the - execution environment of the tool. See `EnvironmentDef` for details. 
+ Indicate whether a process requires outgoing IPv4/IPv6 network access. Choice of IPv4 or IPv6 is implementation and site specific, correct tools must support both. + + If ``networkAccess`` is false or not specified, tools must not assume network access, except for localhost (the loopback device). + + If ``networkAccess`` is true, the tool must be able to make outgoing connections to network resources. Resources may be on a private subnet or the public Internet. However, implementations and sites may apply their own security policies to restrict what is accessible by the tool. + + Enabling network access does not imply a publicly routable IP address or the ability to accept inbound connections. """ def __init__( self, - envDef: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + networkAccess: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -17430,16 +16780,19 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "EnvVarRequirement" - self.envDef = envDef + self.class_: Final[str] = "NetworkAccess" + self.networkAccess = networkAccess def __eq__(self, other: Any) -> bool: - if isinstance(other, EnvVarRequirement): - return bool(self.class_ == other.class_ and self.envDef == other.envDef) + if isinstance(other, NetworkAccess): + return bool( + self.class_ == other.class_ + and self.networkAccess == other.networkAccess + ) return False def __hash__(self) -> int: - return hash((self.class_, self.envDef)) + return hash((self.class_, self.networkAccess)) @classmethod def fromDoc( @@ -17447,8 +16800,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "EnvVarRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, 
"lc"): @@ -17459,34 +16812,35 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_EnvVarRequirement_classLoader_False_True_None_None, + uri_NetworkAccess_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e + raise e try: - if _doc.get("envDef") is None: - raise ValidationException("missing required field `envDef`", None, []) + if _doc.get("networkAccess") is None: + raise ValidationException("missing required field `networkAccess`", None, []) - envDef = load_field( - _doc.get("envDef"), - idmap_envDef_array_of_EnvironmentDefLoader, + networkAccess = _load_field( + _doc.get("networkAccess"), + union_of_booltype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("envDef") + lc=_doc.get("networkAccess") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `envDef`": + if str(e) == "missing required field `networkAccess`": _errors__.append( ValidationException( str(e), @@ -17494,13 +16848,13 @@ def fromDoc( ) ) else: - val = _doc.get("envDef") + val = _doc.get("networkAccess") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `envDef` field is not valid because:", - SourceLine(_doc, "envDef", str), + "the `networkAccess` field is not valid because:", + SourceLine(_doc, "networkAccess", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", 
@@ -17512,14 +16866,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `envDef` field is not valid because:", - SourceLine(_doc, "envDef", str), + "the `networkAccess` field is not valid because:", + SourceLine(_doc, "networkAccess", str), [e], - detailed_message=f"the `envDef` field with value `{val}` " + detailed_message=f"the `networkAccess` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -17527,14 +16881,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format( + "invalid field `{}`, expected one of: `class`, `networkAccess`".format( k ), SourceLine(_doc, k, str), @@ -17544,7 +16898,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - envDef=envDef, + networkAccess=networkAccess, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -17562,142 +16916,23 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.envDef is not None: - r["envDef"] = save( - self.envDef, top=False, base_url=base_url, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if 
self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "envDef"]) - - -class ShellCommandRequirement(ProcessRequirement): - """ - Modify the behavior of CommandLineTool to generate a single string - containing a shell command line. Each item in the argument list must be - joined into a string separated by single spaces and quoted to prevent - intepretation by the shell, unless `CommandLineBinding` for that argument - contains `shellQuote: false`. If `shellQuote: false` is specified, the - argument is joined into the command string without quoting, which allows - the use of shell metacharacters such as `|` for pipes. - - """ - - def __init__( - self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ShellCommandRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ShellCommandRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ShellCommandRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_ShellCommandRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") + if self.networkAccess 
is not None: + r["networkAccess"] = save( + self.networkAccess, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - # top refers to the directory level if top: if self.loadingOptions.namespaces: @@ -17706,46 +16941,28 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class"]) + attrs: ClassVar[Collection[str]] = frozenset(["class", "networkAccess"]) -class ResourceRequirement(ProcessRequirement): +class InplaceUpdateRequirement(ProcessRequirement): """ - Specify basic hardware 
resource requirements. - - "min" is the minimum amount of a resource that must be reserved to schedule - a job. If "min" cannot be satisfied, the job should not be run. - - "max" is the maximum amount of a resource that the job shall be permitted - to use. If a node has sufficient resources, multiple jobs may be scheduled - on a single node provided each job's "max" resource requirements are - met. If a job attempts to exceed its "max" resource allocation, an - implementation may deny additional resources, which may result in job - failure. + If ``inplaceUpdate`` is true, then an implementation supporting this feature may permit tools to directly update files with ``writable: true`` in InitialWorkDirRequirement. That is, as an optimization, files may be destructively modified in place as opposed to copied and updated. - If "min" is specified but "max" is not, then "max" == "min" - If "max" is specified by "min" is not, then "min" == "max". + An implementation must ensure that only one workflow step may access a writable file at a time. It is an error if a file which is writable by one workflow step file is accessed (for reading or writing) by any other workflow step running independently. However, a file which has been updated in a previous completed step may be used as input to multiple steps, provided it is read-only in every step. - It is an error if max < min. + Workflow steps which modify a file must produce the modified file as output. Downstream steps which further process the file must use the output of previous steps, and not refer to a common input (this is necessary for both ordering and correctness). - It is an error if the value of any of these fields is negative. + Workflow authors should provide this in the ``hints`` section. 
The intent of this feature is that workflows produce the same results whether or not InplaceUpdateRequirement is supported by the implementation, and this feature is primarily available as an optimization for particular environments. - If neither "min" nor "max" is specified for a resource, use the default values below. + Users and implementers should be aware that workflows that destructively modify inputs may not be repeatable or reproducible. In particular, enabling this feature implies that WorkReuse should not be enabled. """ def __init__( self, - coresMin: Optional[Any] = None, - coresMax: Optional[Any] = None, - ramMin: Optional[Any] = None, - ramMax: Optional[Any] = None, - tmpdirMin: Optional[Any] = None, - tmpdirMax: Optional[Any] = None, - outdirMin: Optional[Any] = None, - outdirMax: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + inplaceUpdate: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -17755,45 +16972,19 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "ResourceRequirement" - self.coresMin = coresMin - self.coresMax = coresMax - self.ramMin = ramMin - self.ramMax = ramMax - self.tmpdirMin = tmpdirMin - self.tmpdirMax = tmpdirMax - self.outdirMin = outdirMin - self.outdirMax = outdirMax + self.class_: Final[str] = "InplaceUpdateRequirement" + self.inplaceUpdate = inplaceUpdate def __eq__(self, other: Any) -> bool: - if isinstance(other, ResourceRequirement): + if isinstance(other, InplaceUpdateRequirement): return bool( self.class_ == other.class_ - and self.coresMin == other.coresMin - and self.coresMax == other.coresMax - and self.ramMin == other.ramMin - and self.ramMax == other.ramMax - and self.tmpdirMin == other.tmpdirMin - and self.tmpdirMax == 
other.tmpdirMax - and self.outdirMin == other.outdirMin - and self.outdirMax == other.outdirMax + and self.inplaceUpdate == other.inplaceUpdate ) return False def __hash__(self) -> int: - return hash( - ( - self.class_, - self.coresMin, - self.coresMax, - self.ramMin, - self.ramMax, - self.tmpdirMin, - self.tmpdirMax, - self.outdirMin, - self.outdirMax, - ) - ) + return hash((self.class_, self.inplaceUpdate)) @classmethod def fromDoc( @@ -17801,8 +16992,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ResourceRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -17812,2339 +17003,36 @@ def fromDoc( try: if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_ResourceRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - coresMin = None - if "coresMin" in _doc: - try: - coresMin = load_field( - _doc.get("coresMin"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("coresMin") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `coresMin`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("coresMin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `coresMin` field is not valid because:", - SourceLine(_doc, "coresMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - 
detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `coresMin` field is not valid because:", - SourceLine(_doc, "coresMin", str), - [e], - detailed_message=f"the `coresMin` field with value `{val}` " - "is not valid because:", - ) - ) - coresMax = None - if "coresMax" in _doc: - try: - coresMax = load_field( - _doc.get("coresMax"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("coresMax") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `coresMax`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("coresMax") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `coresMax` field is not valid because:", - SourceLine(_doc, "coresMax", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `coresMax` field is not valid because:", - SourceLine(_doc, "coresMax", str), - [e], - detailed_message=f"the `coresMax` field with value `{val}` " - "is not valid because:", - ) - ) - ramMin = None - if "ramMin" in _doc: - try: - ramMin = load_field( - _doc.get("ramMin"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("ramMin") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `ramMin`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - 
else: - val = _doc.get("ramMin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `ramMin` field is not valid because:", - SourceLine(_doc, "ramMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `ramMin` field is not valid because:", - SourceLine(_doc, "ramMin", str), - [e], - detailed_message=f"the `ramMin` field with value `{val}` " - "is not valid because:", - ) - ) - ramMax = None - if "ramMax" in _doc: - try: - ramMax = load_field( - _doc.get("ramMax"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("ramMax") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `ramMax`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("ramMax") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `ramMax` field is not valid because:", - SourceLine(_doc, "ramMax", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `ramMax` field is not valid because:", - SourceLine(_doc, "ramMax", str), - [e], - detailed_message=f"the `ramMax` field with value `{val}` " - "is not valid because:", - ) - ) - tmpdirMin = None - if "tmpdirMin" in _doc: - try: - tmpdirMin = load_field( - 
_doc.get("tmpdirMin"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("tmpdirMin") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `tmpdirMin`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("tmpdirMin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `tmpdirMin` field is not valid because:", - SourceLine(_doc, "tmpdirMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `tmpdirMin` field is not valid because:", - SourceLine(_doc, "tmpdirMin", str), - [e], - detailed_message=f"the `tmpdirMin` field with value `{val}` " - "is not valid because:", - ) - ) - tmpdirMax = None - if "tmpdirMax" in _doc: - try: - tmpdirMax = load_field( - _doc.get("tmpdirMax"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("tmpdirMax") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `tmpdirMax`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("tmpdirMax") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `tmpdirMax` field is not valid because:", - SourceLine(_doc, "tmpdirMax", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " 
- f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `tmpdirMax` field is not valid because:", - SourceLine(_doc, "tmpdirMax", str), - [e], - detailed_message=f"the `tmpdirMax` field with value `{val}` " - "is not valid because:", - ) - ) - outdirMin = None - if "outdirMin" in _doc: - try: - outdirMin = load_field( - _doc.get("outdirMin"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("outdirMin") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outdirMin`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outdirMin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outdirMin` field is not valid because:", - SourceLine(_doc, "outdirMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outdirMin` field is not valid because:", - SourceLine(_doc, "outdirMin", str), - [e], - detailed_message=f"the `outdirMin` field with value `{val}` " - "is not valid because:", - ) - ) - outdirMax = None - if "outdirMax" in _doc: - try: - outdirMax = load_field( - _doc.get("outdirMax"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("outdirMax") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outdirMax`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = 
_doc.get("outdirMax") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outdirMax` field is not valid because:", - SourceLine(_doc, "outdirMax", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outdirMax` field is not valid because:", - SourceLine(_doc, "outdirMax", str), - [e], - detailed_message=f"the `outdirMax` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - coresMin=coresMin, - coresMax=coresMax, - ramMin=ramMin, - ramMax=ramMax, - tmpdirMin=tmpdirMin, - tmpdirMax=tmpdirMax, - outdirMin=outdirMin, - outdirMax=outdirMax, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = 
self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.coresMin is not None: - r["coresMin"] = save( - self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.coresMax is not None: - r["coresMax"] = save( - self.coresMax, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.ramMin is not None: - r["ramMin"] = save( - self.ramMin, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.ramMax is not None: - r["ramMax"] = save( - self.ramMax, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.tmpdirMin is not None: - r["tmpdirMin"] = save( - self.tmpdirMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.tmpdirMax is not None: - r["tmpdirMax"] = save( - self.tmpdirMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.outdirMin is not None: - r["outdirMin"] = save( - self.outdirMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.outdirMax is not None: - r["outdirMax"] = save( - self.outdirMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "class", - "coresMin", - "coresMax", - "ramMin", - "ramMax", - "tmpdirMin", - "tmpdirMax", - "outdirMin", - "outdirMax", - ] - ) - - -class WorkReuse(ProcessRequirement): - """ - For implementations that support reusing output from past work (on - the assumption that same code and same input produce same - results), 
control whether to enable or disable the reuse behavior - for a particular tool or step (to accomodate situations where that - assumption is incorrect). A reused step is not executed but - instead returns the same output as the original execution. - - If `enableReuse` is not specified, correct tools should assume it - is enabled by default. - - """ - - def __init__( - self, - enableReuse: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "WorkReuse" - self.enableReuse = enableReuse - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkReuse): - return bool( - self.class_ == other.class_ and self.enableReuse == other.enableReuse - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.enableReuse)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkReuse": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_WorkReuse_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("enableReuse") is None: - raise ValidationException("missing required field `enableReuse`", None, []) - - enableReuse = load_field( - _doc.get("enableReuse"), - 
union_of_booltype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("enableReuse") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `enableReuse`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("enableReuse") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `enableReuse` field is not valid because:", - SourceLine(_doc, "enableReuse", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `enableReuse` field is not valid because:", - SourceLine(_doc, "enableReuse", str), - [e], - detailed_message=f"the `enableReuse` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `enableReuse`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - enableReuse=enableReuse, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - 
r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.enableReuse is not None: - r["enableReuse"] = save( - self.enableReuse, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "enableReuse"]) - - -class NetworkAccess(ProcessRequirement): - """ - Indicate whether a process requires outgoing IPv4/IPv6 network - access. Choice of IPv4 or IPv6 is implementation and site - specific, correct tools must support both. - - If `networkAccess` is false or not specified, tools must not - assume network access, except for localhost (the loopback device). - - If `networkAccess` is true, the tool must be able to make outgoing - connections to network resources. Resources may be on a private - subnet or the public Internet. However, implementations and sites - may apply their own security policies to restrict what is - accessible by the tool. - - Enabling network access does not imply a publically routable IP - address or the ability to accept inbound connections. 
- - """ - - def __init__( - self, - networkAccess: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "NetworkAccess" - self.networkAccess = networkAccess - - def __eq__(self, other: Any) -> bool: - if isinstance(other, NetworkAccess): - return bool( - self.class_ == other.class_ - and self.networkAccess == other.networkAccess - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.networkAccess)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "NetworkAccess": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_NetworkAccess_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("networkAccess") is None: - raise ValidationException("missing required field `networkAccess`", None, []) - - networkAccess = load_field( - _doc.get("networkAccess"), - union_of_booltype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("networkAccess") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `networkAccess`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - 
else: - val = _doc.get("networkAccess") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `networkAccess` field is not valid because:", - SourceLine(_doc, "networkAccess", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `networkAccess` field is not valid because:", - SourceLine(_doc, "networkAccess", str), - [e], - detailed_message=f"the `networkAccess` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `networkAccess`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - networkAccess=networkAccess, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = 
f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.networkAccess is not None: - r["networkAccess"] = save( - self.networkAccess, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "networkAccess"]) - - -class InplaceUpdateRequirement(ProcessRequirement): - """ - - If `inplaceUpdate` is true, then an implementation supporting this - feature may permit tools to directly update files with `writable: - true` in InitialWorkDirRequirement. That is, as an optimization, - files may be destructively modified in place as opposed to copied - and updated. - - An implementation must ensure that only one workflow step may - access a writable file at a time. It is an error if a file which - is writable by one workflow step file is accessed (for reading or - writing) by any other workflow step running independently. - However, a file which has been updated in a previous completed - step may be used as input to multiple steps, provided it is - read-only in every step. - - Workflow steps which modify a file must produce the modified file - as output. Downstream steps which futher process the file must - use the output of previous steps, and not refer to a common input - (this is necessary for both ordering and correctness). - - Workflow authors should provide this in the `hints` section. The - intent of this feature is that workflows produce the same results - whether or not InplaceUpdateRequirement is supported by the - implementation, and this feature is primarily available as an - optimization for particular environments. 
- - Users and implementers should be aware that workflows that - destructively modify inputs may not be repeatable or reproducible. - In particular, enabling this feature implies that WorkReuse should - not be enabled. - - """ - - def __init__( - self, - inplaceUpdate: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "InplaceUpdateRequirement" - self.inplaceUpdate = inplaceUpdate - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InplaceUpdateRequirement): - return bool( - self.class_ == other.class_ - and self.inplaceUpdate == other.inplaceUpdate - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.inplaceUpdate)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InplaceUpdateRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_InplaceUpdateRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("inplaceUpdate") is None: - raise ValidationException("missing required field `inplaceUpdate`", None, []) - - inplaceUpdate = load_field( - _doc.get("inplaceUpdate"), - booltype, - baseuri, - loadingOptions, - 
lc=_doc.get("inplaceUpdate") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inplaceUpdate`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inplaceUpdate") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inplaceUpdate` field is not valid because:", - SourceLine(_doc, "inplaceUpdate", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inplaceUpdate` field is not valid because:", - SourceLine(_doc, "inplaceUpdate", str), - [e], - detailed_message=f"the `inplaceUpdate` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `inplaceUpdate`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - inplaceUpdate=inplaceUpdate, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = 
self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.inplaceUpdate is not None: - r["inplaceUpdate"] = save( - self.inplaceUpdate, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "inplaceUpdate"]) - - -class ToolTimeLimit(ProcessRequirement): - """ - Set an upper limit on the execution time of a CommandLineTool. - A CommandLineTool whose execution duration exceeds the time - limit may be preemptively terminated and considered failed. - May also be used by batch systems to make scheduling decisions. - The execution duration excludes external operations, such as - staging of files, pulling a docker image etc, and only counts - wall-time for the execution of the command line itself. 
- - """ - - def __init__( - self, - timelimit: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ToolTimeLimit" - self.timelimit = timelimit - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ToolTimeLimit): - return bool( - self.class_ == other.class_ and self.timelimit == other.timelimit - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.timelimit)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ToolTimeLimit": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_ToolTimeLimit_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("timelimit") is None: - raise ValidationException("missing required field `timelimit`", None, []) - - timelimit = load_field( - _doc.get("timelimit"), - union_of_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("timelimit") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `timelimit`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("timelimit") - if 
error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `timelimit` field is not valid because:", - SourceLine(_doc, "timelimit", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `timelimit` field is not valid because:", - SourceLine(_doc, "timelimit", str), - [e], - detailed_message=f"the `timelimit` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `timelimit`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - timelimit=timelimit, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, 
base_url, False, None, relative_uris) - r["class"] = u - if self.timelimit is not None: - r["timelimit"] = save( - self.timelimit, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "timelimit"]) - - -class ExpressionToolOutputParameter(OutputParameter): - id: str - - def __init__( - self, - id: Any, - type_: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.format = format - self.type_ = type_ - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ExpressionToolOutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.type_ == other.type_ - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.type_, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> 
"ExpressionToolOutputParameter": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = 
convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - secondaryFiles = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - lc=_doc.get("secondaryFiles") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `secondaryFiles`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("secondaryFiles") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " - "is not valid because:", - ) - ) - streamable 
= None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("streamable") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `streamable`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("streamable") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - detailed_message=f"the `streamable` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a 
{val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - format = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, - baseuri, - loadingOptions, - lc=_doc.get("format") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `format`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("format") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [e], - detailed_message=f"the `format` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - - except 
ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if 
relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.id, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] - ) - - -class WorkflowInputParameter(InputParameter): - id: str - - def __init__( - self, - id: Any, - type_: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - default: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if 
extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - self.default = default - self.type_ = type_ - self.inputBinding = inputBinding - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowInputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.default == other.default - and self.type_ == other.type_ - and self.inputBinding == other.inputBinding - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.loadContents, - self.loadListing, - self.default, - self.type_, - self.inputBinding, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowInputParameter": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( 
- str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", 
str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - secondaryFiles = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - lc=_doc.get("secondaryFiles") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `secondaryFiles`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("secondaryFiles") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " - "is not valid because:", - ) - ) - streamable = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("streamable") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `streamable`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("streamable") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - 
ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - detailed_message=f"the `streamable` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - format = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, - baseuri, - loadingOptions, - 
lc=_doc.get("format") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `format`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("format") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [e], - detailed_message=f"the `format` field with value `{val}` " - "is not valid because:", - ) - ) - loadContents = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("loadContents") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `loadContents`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("loadContents") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - 
ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - detailed_message=f"the `loadContents` field with value `{val}` " - "is not valid because:", - ) - ) - loadListing = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - lc=_doc.get("loadListing") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `loadListing`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("loadListing") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - detailed_message=f"the `loadListing` field with value `{val}` " - "is not valid because:", - ) - ) - default = None - if "default" in _doc: - try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_CWLObjectTypeLoader, - baseuri, - loadingOptions, - lc=_doc.get("default") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `default`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("default") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - 
_errors__.append( - ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), - [e], - detailed_message=f"the `default` field with value `{val}` " - "is not valid because:", - ) - ) + + class_ = _load_field( + _doc.get("class"), + uri_InplaceUpdateRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("inplaceUpdate") is None: + raise ValidationException("missing required field `inplaceUpdate`", None, []) - type_ = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + inplaceUpdate = _load_field( + _doc.get("inplaceUpdate"), + booltype, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("inplaceUpdate") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `inplaceUpdate`": _errors__.append( ValidationException( str(e), @@ -20152,13 +17040,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + 
val = _doc.get("inplaceUpdate") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `inplaceUpdate` field is not valid because:", + SourceLine(_doc, "inplaceUpdate", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -20170,61 +17058,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `inplaceUpdate` field is not valid because:", + SourceLine(_doc, "inplaceUpdate", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `inplaceUpdate` field with value `{val}` " "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_InputBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputBinding") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - 
detailed_message=f"the `inputBinding` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -20232,14 +17073,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( + "invalid field `{}`, expected one of: `class`, `inplaceUpdate`".format( k ), SourceLine(_doc, k, str), @@ -20249,21 +17090,10 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - loadContents=loadContents, - loadListing=loadListing, - default=default, - type_=type_, - inputBinding=inputBinding, + inplaceUpdate=inplaceUpdate, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -20277,61 +17107,204 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.id, - 
relative_uris=relative_uris, - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.inplaceUpdate is not None: + r["inplaceUpdate"] = save( + self.inplaceUpdate, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=self.id, - relative_uris=relative_uris, + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["class", "inplaceUpdate"]) + + +class ToolTimeLimit(ProcessRequirement): + """ + Set an upper limit on the execution time of a CommandLineTool. A CommandLineTool whose execution duration exceeds the time limit may be preemptively terminated and considered failed. May also be used by batch systems to make scheduling decisions. The execution duration excludes external operations, such as staging of files, pulling a docker image etc, and only counts wall-time for the execution of the command line itself. 
+ + """ + + def __init__( + self, + timelimit: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_: Final[str] = "ToolTimeLimit" + self.timelimit = timelimit + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ToolTimeLimit): + return bool( + self.class_ == other.class_ and self.timelimit == other.timelimit ) - if self.default is not None: - r["default"] = save( - self.default, top=False, base_url=self.id, relative_uris=relative_uris + return False + + def __hash__(self) -> int: + return hash((self.class_, self.timelimit)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_ToolTimeLimit_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.id, relative_uris=relative_uris + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + try: + if _doc.get("timelimit") is None: + raise ValidationException("missing required field `timelimit`", None, []) + + timelimit = _load_field( + _doc.get("timelimit"), + union_of_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("timelimit") ) - if 
self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `timelimit`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("timelimit") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `timelimit` field is not valid because:", + SourceLine(_doc, "timelimit", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `timelimit` field is not valid because:", + SourceLine(_doc, "timelimit", str), + [e], + detailed_message=f"the `timelimit` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `timelimit`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + timelimit=timelimit, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, 
self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.timelimit is not None: + r["timelimit"] = save( + self.timelimit, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) @@ -20343,50 +17316,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "loadContents", - "loadListing", - "default", - "type", - "inputBinding", - ] - ) - - -class ExpressionTool(Process): - """ - An ExpressionTool is a type of Process object that can be run by itself - or as a Workflow step. It executes a pure Javascript expression that has - access to the same input parameters as a workflow. It is meant to be used - sparingly as a way to isolate complex Javascript expressions that need to - operate on input data and produce some result; perhaps just a - rearrangement of the inputs. No Docker software container is required - or allowed. 
+ attrs: ClassVar[Collection[str]] = frozenset(["class", "timelimit"]) - """ +class ExpressionToolOutputParameter(OutputParameter): id: str def __init__( self, - inputs: Any, - outputs: Any, - expression: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + type_: Any, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -20396,46 +17342,37 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.cwlVersion = cwlVersion - self.class_ = "ExpressionTool" - self.expression = expression + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, ExpressionTool): + if isinstance(other, ExpressionToolOutputParameter): return bool( - self.id == other.id - and self.label == other.label + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == 
other.cwlVersion - and self.class_ == other.class_ - and self.expression == other.expression + and self.id == other.id + and self.format == other.format + and self.type_ == other.type_ ) return False def __hash__(self) -> int: return hash( ( - self.id, self.label, + self.secondaryFiles, + self.streamable, self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.class_, - self.expression, + self.id, + self.format, + self.type_, ) ) @@ -20445,8 +17382,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ExpressionTool": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -20456,9 +17393,9 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None_None, + uri_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("id") @@ -20506,29 +17443,13 @@ def fromDoc( if docRoot is not None: id = docRoot else: - id = "_:" + str(_uuid__.uuid4()) + _errors__.append(ValidationException("missing id")) if not __original_id_is_none: baseuri = cast(str, id) - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_ExpressionTool_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -20572,21 +17493,21 @@ def fromDoc( "is not valid because:", ) ) - doc = None - if "doc" in _doc: + secondaryFiles = None + if "secondaryFiles" in _doc: try: - doc = load_field( - _doc.get("doc"), - 
union_of_None_type_or_strtype_or_array_of_strtype, + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("secondaryFiles") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( str(e), @@ -20594,13 +17515,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("secondaryFiles") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -20612,124 +17533,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("inputs") is None: - raise ValidationException("missing required field `inputs`", None, []) - - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_WorkflowInputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputs") - if error_message != str(e): - val_type = 
convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - detailed_message=f"the `inputs` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("outputs") is None: - raise ValidationException("missing required field `outputs`", None, []) - - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_ExpressionToolOutputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - detailed_message=f"the `outputs` field with value `{val}` " - "is not valid because:", + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is 
not valid because:", + ) ) - ) - requirements = None - if "requirements" in _doc: + streamable = None + if "streamable" in _doc: try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("requirements") + lc=_doc.get("streamable") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `requirements`": + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( str(e), @@ -20737,13 +17562,13 @@ def fromDoc( ) ) else: - val = _doc.get("requirements") + val = _doc.get("streamable") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -20755,28 +17580,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", 
str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [e], - detailed_message=f"the `requirements` field with value `{val}` " + detailed_message=f"the `streamable` field with value `{val}` " "is not valid because:", ) ) - hints = None - if "hints" in _doc: + doc = None + if "doc" in _doc: try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("hints") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `hints`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -20784,13 +17609,13 @@ def fromDoc( ) ) else: - val = _doc.get("hints") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -20802,28 +17627,28 @@ def fromDoc( else: 
_errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `hints` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - cwlVersion = None - if "cwlVersion" in _doc: + format = None + if "format" in _doc: try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, - lc=_doc.get("cwlVersion") + lc=_doc.get("format") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `cwlVersion`": + if str(e) == "missing required field `format`": _errors__.append( ValidationException( str(e), @@ -20831,13 +17656,13 @@ def fromDoc( ) ) else: - val = _doc.get("cwlVersion") + val = _doc.get("format") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -20849,29 +17674,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [e], - detailed_message=f"the `cwlVersion` field with value `{val}` " + detailed_message=f"the `format` field with value `{val}` " "is not valid because:", ) ) try: - if _doc.get("expression") is None: - raise 
ValidationException("missing required field `expression`", None, []) + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - expression = load_field( - _doc.get("expression"), - ExpressionLoader, + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, - lc=_doc.get("expression") + lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `expression`": + if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), @@ -20879,13 +17704,13 @@ def fromDoc( ) ) else: - val = _doc.get("expression") + val = _doc.get("type") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `expression` field is not valid because:", - SourceLine(_doc, "expression", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -20897,14 +17722,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `expression` field is not valid because:", - SourceLine(_doc, "expression", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [e], - detailed_message=f"the `expression` field with value `{val}` " + detailed_message=f"the `type` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -20912,14 +17737,14 @@ def fromDoc( ValidationException("mapping with 
implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `class`, `expression`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( k ), SourceLine(_doc, k, str), @@ -20929,15 +17754,13 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - id=id, label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - expression=expression, + id=id, + format=format, + type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -20958,51 +17781,35 @@ def save( if self.id is not None: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, self.id, False, None, relative_uris) - r["class"] = u if self.label is not None: r["label"] = save( self.label, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.inputs is not None: - r["inputs"] = save( - self.inputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputs is not None: - r["outputs"] = save( - self.outputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.requirements is not None: - r["requirements"] = save( - self.requirements, + if self.secondaryFiles 
is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, top=False, base_url=self.id, relative_uris=relative_uris, ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.cwlVersion is not None: - u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) - r["cwlVersion"] = u - if self.expression is not None: - r["expression"] = save( - self.expression, + if self.streamable is not None: + r["streamable"] = save( + self.streamable, top=False, base_url=self.id, relative_uris=relative_uris, ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) # top refers to the directory level if top: @@ -21012,46 +17819,29 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "class", - "expression", - ] + attrs: ClassVar[Collection[str]] = frozenset( + ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] ) -class WorkflowOutputParameter(OutputParameter): - """ - Describe an output parameter of a workflow. The parameter must be - connected to one or more parameters defined in the workflow that - will provide the value of the output parameter. It is legal to - connect a WorkflowInputParameter to a WorkflowOutputParameter. 
- - """ - +class WorkflowInputParameter(InputParameter): id: str def __init__( self, id: Any, type_: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - outputSource: Optional[Any] = None, - linkMerge: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + default: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -21067,12 +17857,14 @@ def __init__( self.doc = doc self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) self.format = format - self.outputSource = outputSource - self.linkMerge = linkMerge + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default self.type_ = type_ + self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowOutputParameter): + if isinstance(other, WorkflowInputParameter): return bool( self.label == other.label and self.secondaryFiles == other.secondaryFiles @@ -21080,9 +17872,11 @@ def __eq__(self, other: Any) -> bool: and self.doc == other.doc and self.id == other.id and self.format == other.format - and self.outputSource == other.outputSource - and self.linkMerge == other.linkMerge + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default and self.type_ == other.type_ + and self.inputBinding == other.inputBinding ) return False @@ -21095,9 +17889,11 @@ def __hash__(self) -> int: 
self.doc, self.id, self.format, - self.outputSource, - self.linkMerge, + self.loadContents, + self.loadListing, + self.default, self.type_, + self.inputBinding, ) ) @@ -21107,8 +17903,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowOutputParameter": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -21118,7 +17914,7 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), uri_strtype_True_False_None_None, baseuri, @@ -21174,7 +17970,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -21221,7 +18017,7 @@ def fromDoc( secondaryFiles = None if "secondaryFiles" in _doc: try: - secondaryFiles = load_field( + secondaryFiles = _load_field( _doc.get("secondaryFiles"), secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, @@ -21268,7 +18064,7 @@ def fromDoc( streamable = None if "streamable" in _doc: try: - streamable = load_field( + streamable = _load_field( _doc.get("streamable"), union_of_None_type_or_booltype, baseuri, @@ -21315,7 +18111,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -21362,9 +18158,9 @@ def fromDoc( format = None if "format" in _doc: try: - format = load_field( + format = _load_field( _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, lc=_doc.get("format") @@ -21406,21 +18202,68 @@ def fromDoc( "is not valid because:", ) ) - outputSource = None - if "outputSource" in _doc: + loadContents = None + if "loadContents" in _doc: try: - 
outputSource = load_field( - _doc.get("outputSource"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None, + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("outputSource") + lc=_doc.get("loadContents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadContents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: + try: + loadListing = _load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputSource`": + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( str(e), @@ -21428,13 +18271,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputSource") + val = _doc.get("loadListing") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) 
_errors__.append( ValidationException( - "the `outputSource` field is not valid because:", - SourceLine(_doc, "outputSource", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -21446,28 +18289,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `outputSource` field is not valid because:", - SourceLine(_doc, "outputSource", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [e], - detailed_message=f"the `outputSource` field with value `{val}` " + detailed_message=f"the `loadListing` field with value `{val}` " "is not valid because:", ) ) - linkMerge = None - if "linkMerge" in _doc: + default = None + if "default" in _doc: try: - linkMerge = load_field( - _doc.get("linkMerge"), - union_of_None_type_or_LinkMergeMethodLoader, + default = _load_field( + _doc.get("default"), + union_of_None_type_or_CWLObjectTypeLoader, baseuri, loadingOptions, - lc=_doc.get("linkMerge") + lc=_doc.get("default") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `linkMerge`": + if str(e) == "missing required field `default`": _errors__.append( ValidationException( str(e), @@ -21475,13 +18318,13 @@ def fromDoc( ) ) else: - val = _doc.get("linkMerge") + val = _doc.get("default") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `linkMerge` field is not valid because:", - SourceLine(_doc, "linkMerge", str), + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -21493,10 +18336,10 @@ def fromDoc( else: _errors__.append( ValidationException( - "the 
`linkMerge` field is not valid because:", - SourceLine(_doc, "linkMerge", str), + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), [e], - detailed_message=f"the `linkMerge` field with value `{val}` " + detailed_message=f"the `default` field with value `{val}` " "is not valid because:", ) ) @@ -21504,9 +18347,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -21548,7 +18391,54 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + inputBinding = None + if "inputBinding" in _doc: + try: + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_InputBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} 
{error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -21556,14 +18446,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `outputSource`, `linkMerge`, `type`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( k ), SourceLine(_doc, k, str), @@ -21579,9 +18469,11 @@ def fromDoc( doc=doc, id=id, format=format, - outputSource=outputSource, - linkMerge=linkMerge, + loadContents=loadContents, + loadListing=loadListing, + default=default, type_=type_, + inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -21627,17 +18519,35 @@ def save( if self.format is not None: u = save_relative_uri(self.format, self.id, True, None, relative_uris) r["format"] = u - if self.outputSource is not None: - u = save_relative_uri(self.outputSource, self.id, False, 1, relative_uris) - r["outputSource"] = u - if self.linkMerge is not None: - r["linkMerge"] = save( - self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + 
base_url=self.id, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.default is not None: + r["default"] = save( + self.default, top=False, base_url=self.id, relative_uris=relative_uris ) if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.id, relative_uris=relative_uris ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) # top refers to the directory level if top: @@ -21647,7 +18557,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ "label", "secondaryFiles", @@ -21655,62 +18565,18 @@ def save( "doc", "id", "format", - "outputSource", - "linkMerge", + "loadContents", + "loadListing", + "default", "type", + "inputBinding", ] ) -class Sink(Saveable): - pass - - -class WorkflowStepInput(IdentifierRequired, Sink, LoadContents, Labeled): +class ExpressionTool(Process): """ - The input of a workflow step connects an upstream parameter (from the - workflow inputs, or the outputs of other workflows steps) with the input - parameters of the process specified by the `run` field. Only input parameters - declared by the target process will be passed through at runtime to the process - though additonal parameters may be specified (for use within `valueFrom` - expressions for instance) - unconnected or unused parameters do not represent an - error condition. - - ## Input object - - A WorkflowStepInput object must contain an `id` field in the form - `#fieldname` or `#prefix/fieldname`. When the `id` field contains a slash - `/` the field name consists of the characters following the final slash - (the prefix portion may contain one or more slashes to indicate scope). 
- This defines a field of the workflow step input object with the value of - the `source` parameter(s). - - ## Merging - - To merge multiple inbound data links, - [MultipleInputFeatureRequirement](#MultipleInputFeatureRequirement) must be specified - in the workflow or workflow step requirements. - - If the sink parameter is an array, or named in a [workflow - scatter](#WorkflowStep) operation, there may be multiple inbound data links - listed in the `source` field. The values from the input links are merged - depending on the method specified in the `linkMerge` field. If not - specified, the default method is "merge_nested". - - * **merge_nested** - - The input must be an array consisting of exactly one entry for each - input link. If "merge_nested" is specified with a single link, the value - from the link must be wrapped in a single-item list. - - * **merge_flattened** - - 1. The source and sink parameters must be compatible types, or the source - type must be compatible with single element from the "items" type of - the destination array parameter. - 2. Source parameters which are arrays are concatenated. - Source parameters which are single element types are appended as - single elements. + An ExpressionTool is a type of Process object that can be run by itself or as a Workflow step. It executes a pure Javascript expression that has access to the same input parameters as a workflow. It is meant to be used sparingly as a way to isolate complex Javascript expressions that need to operate on input data and produce some result; perhaps just a rearrangement of the inputs. No Docker software container is required or allowed. 
""" @@ -21718,16 +18584,17 @@ class WorkflowStepInput(IdentifierRequired, Sink, LoadContents, Labeled): def __init__( self, - id: Any, - source: Optional[Any] = None, - linkMerge: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - label: Optional[Any] = None, - default: Optional[Any] = None, - valueFrom: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + inputs: Any, + outputs: Any, + expression: Any, + id: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + requirements: Any | None = None, + hints: Any | None = None, + cwlVersion: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -21738,25 +18605,29 @@ def __init__( else: self.loadingOptions = LoadingOptions() self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.source = source - self.linkMerge = linkMerge - self.loadContents = loadContents - self.loadListing = loadListing self.label = label - self.default = default - self.valueFrom = valueFrom + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.class_: Final[str] = "ExpressionTool" + self.expression = expression def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStepInput): + if isinstance(other, ExpressionTool): return bool( self.id == other.id - and self.source == other.source - and self.linkMerge == other.linkMerge - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing and self.label == other.label - and self.default == other.default - and self.valueFrom == other.valueFrom + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and 
self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.class_ == other.class_ + and self.expression == other.expression ) return False @@ -21764,101 +18635,47 @@ def __hash__(self) -> int: return hash( ( self.id, - self.source, - self.linkMerge, - self.loadContents, - self.loadListing, self.label, - self.default, - self.valueFrom, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.class_, + self.expression, ) ) @classmethod def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowStepInput": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if 
docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - source = None - if "source" in _doc: + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + id = None + if "id" in _doc: try: - source = load_field( - _doc.get("source"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None, + id = _load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("source") + lc=_doc.get("id") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `source`": + if str(e) == "missing required field `id`": _errors__.append( ValidationException( str(e), @@ -21866,13 +18683,13 @@ def fromDoc( ) ) else: - val = _doc.get("source") + val = _doc.get("id") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `source` field is not valid because:", - SourceLine(_doc, "source", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -21884,28 +18701,54 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `source` field is not valid because:", - SourceLine(_doc, "source", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [e], - detailed_message=f"the `source` field with value `{val}` " + detailed_message=f"the `id` field with value `{val}` " "is not valid because:", ) ) - linkMerge = None - if "linkMerge" in _doc: + + __original_id_is_none = id is None + if id is None: + if docRoot 
is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_ExpressionTool_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + label = None + if "label" in _doc: try: - linkMerge = load_field( - _doc.get("linkMerge"), - union_of_None_type_or_LinkMergeMethodLoader, + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("linkMerge") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `linkMerge`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -21913,13 +18756,13 @@ def fromDoc( ) ) else: - val = _doc.get("linkMerge") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `linkMerge` field is not valid because:", - SourceLine(_doc, "linkMerge", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -21931,28 +18774,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `linkMerge` field is not valid because:", - SourceLine(_doc, "linkMerge", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `linkMerge` field with value `{val}` " + detailed_message=f"the 
`label` field with value `{val}` " "is not valid because:", ) ) - loadContents = None - if "loadContents" in _doc: + doc = None + if "doc" in _doc: try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("loadContents") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadContents`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -21960,13 +18803,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadContents") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -21978,122 +18821,124 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `loadContents` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - loadListing = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - lc=_doc.get("loadListing") - ) + try: + if _doc.get("inputs") is None: + raise ValidationException("missing required field `inputs`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = 
parse_errors(str(e)) + inputs = _load_field( + _doc.get("inputs"), + idmap_inputs_array_of_WorkflowInputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputs") + ) - if str(e) == "missing required field `loadListing`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("loadListing") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - detailed_message=f"the `loadListing` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + detailed_message=f"the `inputs` field with value `{val}` " + "is not valid because:", ) - label = None - if "label" in 
_doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) + ) + try: + if _doc.get("outputs") is None: + raise ValidationException("missing required field `outputs`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + outputs = _load_field( + _doc.get("outputs"), + idmap_outputs_array_of_ExpressionToolOutputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputs") + ) - if str(e) == "missing required field `label`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the 
`label` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + detailed_message=f"the `outputs` field with value `{val}` " + "is not valid because:", ) - default = None - if "default" in _doc: + ) + requirements = None + if "requirements" in _doc: try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_CWLObjectTypeLoader, + requirements = _load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, baseuri, loadingOptions, - lc=_doc.get("default") + lc=_doc.get("requirements") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `default`": + if str(e) == "missing required field `requirements`": _errors__.append( ValidationException( str(e), @@ -22101,13 +18946,13 @@ def fromDoc( ) ) else: - val = _doc.get("default") + val = _doc.get("requirements") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ 
-22119,28 +18964,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [e], - detailed_message=f"the `default` field with value `{val}` " + detailed_message=f"the `requirements` field with value `{val}` " "is not valid because:", ) ) - valueFrom = None - if "valueFrom" in _doc: + hints = None + if "hints" in _doc: try: - valueFrom = load_field( - _doc.get("valueFrom"), - union_of_None_type_or_strtype_or_ExpressionLoader, + hints = _load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, baseuri, loadingOptions, - lc=_doc.get("valueFrom") + lc=_doc.get("hints") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `valueFrom`": + if str(e) == "missing required field `hints`": _errors__.append( ValidationException( str(e), @@ -22148,13 +18993,13 @@ def fromDoc( ) ) else: - val = _doc.get("valueFrom") + val = _doc.get("hints") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", - SourceLine(_doc, "valueFrom", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [ValidationException(f"Value is a 
{val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -22166,190 +19011,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", - SourceLine(_doc, "valueFrom", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [e], - detailed_message=f"the `valueFrom` field with value `{val}` " + detailed_message=f"the `hints` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `source`, `linkMerge`, `loadContents`, `loadListing`, `label`, `default`, `valueFrom`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - id=id, - source=source, - linkMerge=linkMerge, - loadContents=loadContents, - loadListing=loadListing, - label=label, - default=default, - valueFrom=valueFrom, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.source is not None: - u = save_relative_uri(self.source, 
self.id, False, 2, relative_uris) - r["source"] = u - if self.linkMerge is not None: - r["linkMerge"] = save( - self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.default is not None: - r["default"] = save( - self.default, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.valueFrom is not None: - r["valueFrom"] = save( - self.valueFrom, top=False, base_url=self.id, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "source", - "linkMerge", - "loadContents", - "loadListing", - "label", - "default", - "valueFrom", - ] - ) - - -class WorkflowStepOutput(IdentifierRequired): - """ - Associate an output parameter of the underlying process with a workflow - parameter. The workflow parameter (given in the `id` field) be may be used - as a `source` to connect with input parameters of other workflow steps, or - with an output parameter of the process. - - A unique identifier for this workflow output parameter. This is - the identifier to use in the `source` field of `WorkflowStepInput` - to connect the output value to downstream parameters. 
- - """ - - id: str - - def __init__( - self, - id: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStepOutput): - return bool(self.id == other.id) - return False - - def __hash__(self) -> int: - return hash((self.id)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowStepOutput": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: + cwlVersion = None + if "cwlVersion" in _doc: try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, + cwlVersion = _load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("id") + lc=_doc.get("cwlVersion") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `id`": + if str(e) == "missing required field `cwlVersion`": _errors__.append( ValidationException( str(e), @@ -22357,13 +19040,13 @@ def fromDoc( ) ) else: - val = _doc.get("id") + val = _doc.get("cwlVersion") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} 
for this field " f"{verb_tensage} {error_message}", @@ -22372,26 +19055,65 @@ def fromDoc( f"{verb_tensage} {error_message}")], ) ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) + else: + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + detailed_message=f"the `cwlVersion` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("expression") is None: + raise ValidationException("missing required field `expression`", None, []) + + expression = _load_field( + _doc.get("expression"), + ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("expression") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `expression`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("expression") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `expression` field is not valid because:", + SourceLine(_doc, "expression", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `expression` field is not valid because:", + SourceLine(_doc, "expression", str), + [e], + detailed_message=f"the `expression` field with value `{val}` " + "is not valid because:", ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing 
id")) - if not __original_id_is_none: - baseuri = cast(str, id) - extension_fields: dict[str, Any] = {} + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -22399,14 +19121,16 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `id`".format(k), + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `class`, `expression`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -22415,6 +19139,14 @@ def fromDoc( raise ValidationException("", None, _errors__, "*") _constructed = cls( id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + expression=expression, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -22435,6 +19167,53 @@ def save( if self.id is not None: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, self.id, False, None, relative_uris) + r["class"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.inputs is not None: + r["inputs"] = save( + self.inputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputs is not None: + r["outputs"] = save( + 
self.outputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.requirements is not None: + r["requirements"] = save( + self.requirements, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.cwlVersion is not None: + u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) + r["cwlVersion"] = u + if self.expression is not None: + r["expression"] = save( + self.expression, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) # top refers to the directory level if top: @@ -22444,66 +19223,25 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["id"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "class", + "expression", + ] + ) -class WorkflowStep(IdentifierRequired, Labeled, Documented): +class WorkflowOutputParameter(OutputParameter): """ - A workflow step is an executable element of a workflow. It specifies the - underlying process implementation (such as `CommandLineTool` or another - `Workflow`) in the `run` field and connects the input and output parameters - of the underlying process to workflow parameters. - - # Scatter/gather - - To use scatter/gather, - [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified - in the workflow or workflow step requirements. - - A "scatter" operation specifies that the associated workflow step or - subworkflow should execute separately over a list of input elements. Each - job making up a scatter operation is independent and may be executed - concurrently. - - The `scatter` field specifies one or more input parameters which will be - scattered. An input parameter may be listed more than once. 
The declared - type of each input parameter is implicitly becomes an array of items of the - input parameter type. If a parameter is listed more than once, it becomes - a nested array. As a result, upstream parameters which are connected to - scattered parameters must be arrays. - - All output parameter types are also implicitly wrapped in arrays. Each job - in the scatter results in an entry in the output array. - - If any scattered parameter runtime value is an empty array, all outputs are - set to empty arrays and no work is done for the step, according to - applicable scattering rules. - - If `scatter` declares more than one input parameter, `scatterMethod` - describes how to decompose the input into a discrete set of jobs. - - * **dotproduct** specifies that each of the input arrays are aligned and one - element taken from each array to construct each job. It is an error - if all input arrays are not the same length. - - * **nested_crossproduct** specifies the Cartesian product of the inputs, - producing a job for every combination of the scattered inputs. The - output must be nested arrays for each level of scattering, in the - order that the input arrays are listed in the `scatter` field. - - * **flat_crossproduct** specifies the Cartesian product of the inputs, - producing a job for every combination of the scattered inputs. The - output arrays must be flattened to a single level, but otherwise listed in the - order that the input arrays are listed in the `scatter` field. - - # Subworkflows - - To specify a nested workflow as part of a workflow step, - [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be - specified in the workflow or workflow step requirements. - - It is a fatal error if a workflow directly or indirectly invokes itself as - a subworkflow (recursive workflows are not allowed). + Describe an output parameter of a workflow. 
The parameter must be connected to one or more parameters defined in the workflow that will provide the value of the output parameter. It is legal to connect a WorkflowInputParameter to a WorkflowOutputParameter. """ @@ -22512,17 +19250,16 @@ class WorkflowStep(IdentifierRequired, Labeled, Documented): def __init__( self, id: Any, - in_: Any, - out: Any, - run: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - scatter: Optional[Any] = None, - scatterMethod: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + type_: Any, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + outputSource: Any | None = None, + linkMerge: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -22532,46 +19269,43 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable self.doc = doc - self.in_ = in_ - self.out = out - self.requirements = requirements - self.hints = hints - self.run = run - self.scatter = scatter - self.scatterMethod = scatterMethod + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.outputSource = outputSource + self.linkMerge = linkMerge + self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStep): + if isinstance(other, WorkflowOutputParameter): return bool( - self.id == other.id - and self.label == other.label + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and 
self.streamable == other.streamable and self.doc == other.doc - and self.in_ == other.in_ - and self.out == other.out - and self.requirements == other.requirements - and self.hints == other.hints - and self.run == other.run - and self.scatter == other.scatter - and self.scatterMethod == other.scatterMethod + and self.id == other.id + and self.format == other.format + and self.outputSource == other.outputSource + and self.linkMerge == other.linkMerge + and self.type_ == other.type_ ) return False def __hash__(self) -> int: return hash( ( - self.id, self.label, + self.secondaryFiles, + self.streamable, self.doc, - self.in_, - self.out, - self.requirements, - self.hints, - self.run, - self.scatter, - self.scatterMethod, + self.id, + self.format, + self.outputSource, + self.linkMerge, + self.type_, ) ) @@ -22581,8 +19315,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowStep": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -22592,7 +19326,7 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), uri_strtype_True_False_None_None, baseuri, @@ -22648,18 +19382,112 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for 
this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: + try: + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") ) except ValidationException as e: error_message, 
to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( str(e), @@ -22667,13 +19495,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("streamable") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -22685,17 +19513,17 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `streamable` field with value `{val}` " "is not valid because:", ) ) doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -22739,117 +19567,21 @@ def fromDoc( "is not valid because:", ) ) - try: - if _doc.get("in") is None: - raise ValidationException("missing required field `in`", None, []) - - in_ = load_field( - _doc.get("in"), - idmap_in__array_of_WorkflowStepInputLoader, - baseuri, - loadingOptions, - lc=_doc.get("in") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `in`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("in") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `in` field is not valid 
because:", - SourceLine(_doc, "in", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `in` field is not valid because:", - SourceLine(_doc, "in", str), - [e], - detailed_message=f"the `in` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("out") is None: - raise ValidationException("missing required field `out`", None, []) - - out = load_field( - _doc.get("out"), - uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("out") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `out`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("out") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `out` field is not valid because:", - SourceLine(_doc, "out", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `out` field is not valid because:", - SourceLine(_doc, "out", str), - [e], - detailed_message=f"the `out` field with value `{val}` " - "is not valid because:", - ) - ) - requirements = None - if "requirements" in _doc: + format = None + if "format" in _doc: try: - requirements = load_field( - _doc.get("requirements"), - 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, - lc=_doc.get("requirements") + lc=_doc.get("format") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `requirements`": + if str(e) == "missing required field `format`": _errors__.append( ValidationException( str(e), @@ -22857,13 +19589,13 @@ def fromDoc( ) ) else: - val = _doc.get("requirements") + val = _doc.get("format") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -22875,28 +19607,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [e], - detailed_message=f"the `requirements` field with value `{val}` " + 
detailed_message=f"the `format` field with value `{val}` " "is not valid because:", ) ) - hints = None - if "hints" in _doc: + outputSource = None + if "outputSource" in _doc: try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_Any_type, + outputSource = _load_field( + _doc.get("outputSource"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None, baseuri, loadingOptions, - lc=_doc.get("hints") + lc=_doc.get("outputSource") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `hints`": + if str(e) == "missing required field `outputSource`": _errors__.append( ValidationException( str(e), @@ -22904,13 +19636,13 @@ def fromDoc( ) ) else: - val = _doc.get("hints") + val = _doc.get("outputSource") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `outputSource` field is not valid because:", + SourceLine(_doc, "outputSource", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -22922,78 +19654,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `outputSource` field is not valid because:", + SourceLine(_doc, "outputSource", str), [e], - detailed_message=f"the `hints` field with value `{val}` " + detailed_message=f"the `outputSource` field with value `{val}` " "is not valid because:", ) ) - - subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) - try: - if _doc.get("run") is None: - raise ValidationException("missing required field `run`", None, []) - - run = load_field( - _doc.get("run"), - 
uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_False_False_None_None, - subscope_baseuri, - loadingOptions, - lc=_doc.get("run") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `run`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("run") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `run` field is not valid because:", - SourceLine(_doc, "run", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `run` field is not valid because:", - SourceLine(_doc, "run", str), - [e], - detailed_message=f"the `run` field with value `{val}` " - "is not valid because:", - ) - ) - scatter = None - if "scatter" in _doc: + linkMerge = None + if "linkMerge" in _doc: try: - scatter = load_field( - _doc.get("scatter"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None, + linkMerge = _load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, baseuri, loadingOptions, - lc=_doc.get("scatter") + lc=_doc.get("linkMerge") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `scatter`": + if str(e) == "missing required field `linkMerge`": _errors__.append( ValidationException( str(e), @@ -23001,13 +19683,13 @@ def fromDoc( ) ) else: - val = _doc.get("scatter") + val = _doc.get("linkMerge") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( 
ValidationException( - "the `scatter` field is not valid because:", - SourceLine(_doc, "scatter", str), + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -23019,61 +19701,62 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `scatter` field is not valid because:", - SourceLine(_doc, "scatter", str), + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), [e], - detailed_message=f"the `scatter` field with value `{val}` " + detailed_message=f"the `linkMerge` field with value `{val}` " "is not valid because:", ) ) - scatterMethod = None - if "scatterMethod" in _doc: - try: - scatterMethod = load_field( - _doc.get("scatterMethod"), - uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("scatterMethod") - ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) - if str(e) == "missing required field `scatterMethod`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `type` 
field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("scatterMethod") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `scatterMethod` field is not valid because:", - SourceLine(_doc, "scatterMethod", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `scatterMethod` field is not valid because:", - SourceLine(_doc, "scatterMethod", str), - [e], - detailed_message=f"the `scatterMethod` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", ) - extension_fields: dict[str, Any] = {} + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -23081,14 +19764,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `scatter`, `scatterMethod`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, 
`id`, `format`, `outputSource`, `linkMerge`, `type`".format( k ), SourceLine(_doc, k, str), @@ -23098,16 +19781,15 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - id=id, label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, doc=doc, - in_=in_, - out=out, - requirements=requirements, - hints=hints, - run=run, - scatter=scatter, - scatterMethod=scatterMethod, + id=id, + format=format, + outputSource=outputSource, + linkMerge=linkMerge, + type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -23132,39 +19814,38 @@ def save( r["label"] = save( self.label, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.in_ is not None: - r["in"] = save( - self.in_, top=False, base_url=self.id, relative_uris=relative_uris + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, ) - if self.out is not None: - u = save_relative_uri(self.out, self.id, True, None, relative_uris) - r["out"] = u - if self.requirements is not None: - r["requirements"] = save( - self.requirements, + if self.streamable is not None: + r["streamable"] = save( + self.streamable, top=False, base_url=self.id, relative_uris=relative_uris, ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, relative_uris=relative_uris + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.run is not None: - u = save_relative_uri(self.run, self.id, False, None, relative_uris) - r["run"] = u - if self.scatter is not None: - u = save_relative_uri(self.scatter, self.id, False, 0, relative_uris) - r["scatter"] = u - if self.scatterMethod is not None: - u = save_relative_uri( - 
self.scatterMethod, self.id, False, None, relative_uris + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.outputSource is not None: + u = save_relative_uri(self.outputSource, self.id, False, 1, relative_uris) + r["outputSource"] = u + if self.linkMerge is not None: + r["linkMerge"] = save( + self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris ) - r["scatterMethod"] = u # top refers to the directory level if top: @@ -23174,69 +19855,49 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ - "id", "label", + "secondaryFiles", + "streamable", "doc", - "in", - "out", - "requirements", - "hints", - "run", - "scatter", - "scatterMethod", + "id", + "format", + "outputSource", + "linkMerge", + "type", ] ) -class Workflow(Process): - """ - A workflow describes a set of **steps** and the **dependencies** between - those steps. When a step produces output that will be consumed by a - second step, the first step is a dependency of the second step. +class Sink(Saveable): + pass - When there is a dependency, the workflow engine must execute the preceding - step and wait for it to successfully produce output before executing the - dependent step. If two steps are defined in the workflow graph that - are not directly or indirectly dependent, these steps are **independent**, - and may execute in any order or execute concurrently. A workflow is - complete when all steps have been executed. - Dependencies between parameters are expressed using the `source` field on - [workflow step input parameters](#WorkflowStepInput) and [workflow output - parameters](#WorkflowOutputParameter). 
+class WorkflowStepInput(IdentifierRequired, Sink, LoadContents, Labeled): + """ + The input of a workflow step connects an upstream parameter (from the workflow inputs, or the outputs of other workflows steps) with the input parameters of the process specified by the ``run`` field. Only input parameters declared by the target process will be passed through at runtime to the process though additional parameters may be specified (for use within ``valueFrom`` expressions for instance) - unconnected or unused parameters do not represent an error condition. - The `source` field expresses the dependency of one parameter on another - such that when a value is associated with the parameter specified by - `source`, that value is propagated to the destination parameter. When all - data links inbound to a given step are fufilled, the step is ready to - execute. + Input object + ------------ - ## Workflow success and failure + A WorkflowStepInput object must contain an ``id`` field in the form ``#fieldname`` or ``#prefix/fieldname``. When the ``id`` field contains a slash ``/`` the field name consists of the characters following the final slash (the prefix portion may contain one or more slashes to indicate scope). This defines a field of the workflow step input object with the value of the ``source`` parameter(s). - A completed step must result in one of `success`, `temporaryFailure` or - `permanentFailure` states. An implementation may choose to retry a step - execution which resulted in `temporaryFailure`. An implementation may - choose to either continue running other steps of a workflow, or terminate - immediately upon `permanentFailure`. + Merging + ------- - * If any step of a workflow execution results in `permanentFailure`, then - the workflow status is `permanentFailure`. + To merge multiple inbound data links, `MultipleInputFeatureRequirement <#MultipleInputFeatureRequirement>`__ must be specified in the workflow or workflow step requirements. 
- * If one or more steps result in `temporaryFailure` and all other steps - complete `success` or are not executed, then the workflow status is - `temporaryFailure`. + If the sink parameter is an array, or named in a `workflow scatter <#WorkflowStep>`__ operation, there may be multiple inbound data links listed in the ``source`` field. The values from the input links are merged depending on the method specified in the ``linkMerge`` field. If not specified, the default method is "merge_nested". + + * **merge_nested** - * If all workflow steps are executed and complete with `success`, then the - workflow status is `success`. + The input must be an array consisting of exactly one entry for each input link. If "merge_nested" is specified with a single link, the value from the link must be wrapped in a single-item list. - # Extensions + * **merge_flattened** - [ScatterFeatureRequirement](#ScatterFeatureRequirement) and - [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are - available as standard [extensions](#Extensions_and_Metadata) to core - workflow semantics. + 1. The source and sink parameters must be compatible types, or the source type must be compatible with single element from the "items" type of the destination array parameter. + 2. Source parameters which are arrays are concatenated. Source parameters which are single element types are appended as single elements. 
""" @@ -23244,17 +19905,16 @@ class Workflow(Process): def __init__( self, - inputs: Any, - outputs: Any, - steps: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + source: Any | None = None, + linkMerge: Any | None = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + label: Any | None = None, + default: Any | None = None, + valueFrom: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -23265,29 +19925,25 @@ def __init__( else: self.loadingOptions = LoadingOptions() self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.source = source + self.linkMerge = linkMerge + self.loadContents = loadContents + self.loadListing = loadListing self.label = label - self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.cwlVersion = cwlVersion - self.class_ = "Workflow" - self.steps = steps + self.default = default + self.valueFrom = valueFrom def __eq__(self, other: Any) -> bool: - if isinstance(other, Workflow): + if isinstance(other, WorkflowStepInput): return bool( self.id == other.id + and self.source == other.source + and self.linkMerge == other.linkMerge + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing and self.label == other.label - and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == other.cwlVersion - and self.class_ == other.class_ - and self.steps == other.steps + 
and self.default == other.default + and self.valueFrom == other.valueFrom ) return False @@ -23295,15 +19951,13 @@ def __hash__(self) -> int: return hash( ( self.id, + self.source, + self.linkMerge, + self.loadContents, + self.loadListing, self.label, - self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.class_, - self.steps, + self.default, + self.valueFrom, ) ) @@ -23313,8 +19967,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Workflow": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -23324,9 +19978,9 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None_None, + uri_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("id") @@ -23374,40 +20028,71 @@ def fromDoc( if docRoot is not None: id = docRoot else: - id = "_:" + str(_uuid__.uuid4()) + _errors__.append(ValidationException("missing id")) if not __original_id_is_none: baseuri = cast(str, id) - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + source = None + if "source" in _doc: + try: + source = _load_field( + _doc.get("source"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None, + baseuri, + loadingOptions, + lc=_doc.get("source") + ) - class_ = load_field( - _doc.get("class"), - uri_Workflow_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - label = None - if "label" in _doc: + if str(e) == "missing required field `source`": + 
_errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("source") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `source` field is not valid because:", + SourceLine(_doc, "source", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `source` field is not valid because:", + SourceLine(_doc, "source", str), + [e], + detailed_message=f"the `source` field with value `{val}` " + "is not valid because:", + ) + ) + linkMerge = None + if "linkMerge" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + linkMerge = _load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("linkMerge") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `linkMerge`": _errors__.append( ValidationException( str(e), @@ -23415,13 +20100,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("linkMerge") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -23433,28 +20118,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid 
because:", - SourceLine(_doc, "label", str), + "the `linkMerge` field is not valid because:", + SourceLine(_doc, "linkMerge", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `linkMerge` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: + loadContents = None + if "loadContents" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("loadContents") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( str(e), @@ -23462,13 +20147,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("loadContents") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -23480,124 +20165,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `loadContents` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("inputs") is None: - raise ValidationException("missing required field `inputs`", None, []) - - inputs = load_field( - _doc.get("inputs"), - 
idmap_inputs_array_of_WorkflowInputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - detailed_message=f"the `inputs` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("outputs") is None: - raise ValidationException("missing required field `outputs`", None, []) - - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_WorkflowOutputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a 
{val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - detailed_message=f"the `outputs` field with value `{val}` " - "is not valid because:", - ) - ) - requirements = None - if "requirements" in _doc: + loadListing = None + if "loadListing" in _doc: try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, + loadListing = _load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, baseuri, loadingOptions, - lc=_doc.get("requirements") + lc=_doc.get("loadListing") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `requirements`": + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( str(e), @@ -23605,13 +20194,13 @@ def fromDoc( ) ) else: - val = _doc.get("requirements") + val = _doc.get("loadListing") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), + "the `loadListing` field is not valid because:", + 
SourceLine(_doc, "loadListing", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -23623,28 +20212,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [e], - detailed_message=f"the `requirements` field with value `{val}` " + detailed_message=f"the `loadListing` field with value `{val}` " "is not valid because:", ) ) - hints = None - if "hints" in _doc: + label = None + if "label" in _doc: try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type, + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("hints") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `hints`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -23652,13 +20241,13 @@ def fromDoc( ) ) else: - val = _doc.get("hints") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( 
ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -23670,28 +20259,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `hints` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - cwlVersion = None - if "cwlVersion" in _doc: + default = None + if "default" in _doc: try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, + default = _load_field( + _doc.get("default"), + union_of_None_type_or_CWLObjectTypeLoader, baseuri, loadingOptions, - lc=_doc.get("cwlVersion") + lc=_doc.get("default") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `cwlVersion`": + if str(e) == "missing required field `default`": _errors__.append( ValidationException( str(e), @@ -23699,13 +20288,13 @@ def fromDoc( ) ) else: - val = _doc.get("cwlVersion") + val = _doc.get("default") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -23717,62 +20306,61 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", - 
SourceLine(_doc, "cwlVersion", str), + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), [e], - detailed_message=f"the `cwlVersion` field with value `{val}` " + detailed_message=f"the `default` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("steps") is None: - raise ValidationException("missing required field `steps`", None, []) - - steps = load_field( - _doc.get("steps"), - idmap_steps_union_of_array_of_WorkflowStepLoader, - baseuri, - loadingOptions, - lc=_doc.get("steps") - ) + valueFrom = None + if "valueFrom" in _doc: + try: + valueFrom = _load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("valueFrom") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `steps`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("steps") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `steps` field is not valid because:", - SourceLine(_doc, "steps", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: + if str(e) == "missing required field `valueFrom`": _errors__.append( ValidationException( - "the `steps` field is not valid because:", - SourceLine(_doc, "steps", str), - [e], - detailed_message=f"the `steps` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, Any] = {} + else: + val = _doc.get("valueFrom") + if error_message != str(e): + val_type 
= convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + detailed_message=f"the `valueFrom` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -23780,14 +20368,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `class`, `steps`".format( + "invalid field `{}`, expected one of: `id`, `source`, `linkMerge`, `loadContents`, `loadListing`, `label`, `default`, `valueFrom`".format( k ), SourceLine(_doc, k, str), @@ -23798,14 +20386,13 @@ def fromDoc( raise ValidationException("", None, _errors__, "*") _constructed = cls( id=id, + source=source, + linkMerge=linkMerge, + loadContents=loadContents, + loadListing=loadListing, label=label, - doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - steps=steps, + default=default, + valueFrom=valueFrom, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -23826,47 +20413,38 @@ def save( if self.id is not None: u = save_relative_uri(self.id, base_url, True, None, relative_uris) 
r["id"] = u - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, self.id, False, None, relative_uris) - r["class"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.inputs is not None: - r["inputs"] = save( - self.inputs, top=False, base_url=self.id, relative_uris=relative_uris + if self.source is not None: + u = save_relative_uri(self.source, self.id, False, 2, relative_uris) + r["source"] = u + if self.linkMerge is not None: + r["linkMerge"] = save( + self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.outputs is not None: - r["outputs"] = save( - self.outputs, top=False, base_url=self.id, relative_uris=relative_uris + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.id, + relative_uris=relative_uris, ) - if self.requirements is not None: - r["requirements"] = save( - self.requirements, + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, top=False, base_url=self.id, relative_uris=relative_uris, ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, relative_uris=relative_uris + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.cwlVersion is not None: - u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) - r["cwlVersion"] = u - if self.steps is not None: - r["steps"] = save( - self.steps, top=False, base_url=self.id, relative_uris=relative_uris + if self.default is not None: + r["default"] = save( + self.default, 
top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.valueFrom is not None: + r["valueFrom"] = save( + self.valueFrom, top=False, base_url=self.id, relative_uris=relative_uris ) # top refers to the directory level @@ -23877,33 +20455,35 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ "id", + "source", + "linkMerge", + "loadContents", + "loadListing", "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "class", - "steps", + "default", + "valueFrom", ] ) -class SubworkflowFeatureRequirement(ProcessRequirement): +class WorkflowStepOutput(IdentifierRequired): """ - Indicates that the workflow platform must support nested workflows in - the `run` field of [WorkflowStep](#WorkflowStep). + Associate an output parameter of the underlying process with a workflow parameter. The workflow parameter (given in the ``id`` field) be may be used as a ``source`` to connect with input parameters of other workflow steps, or with an output parameter of the process. + + A unique identifier for this workflow output parameter. This is the identifier to use in the ``source`` field of ``WorkflowStepInput`` to connect the output value to downstream parameters. 
""" + id: str + def __init__( self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -23913,15 +20493,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "SubworkflowFeatureRequirement" + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) def __eq__(self, other: Any) -> bool: - if isinstance(other, SubworkflowFeatureRequirement): - return bool(self.class_ == other.class_) + if isinstance(other, WorkflowStepOutput): + return bool(self.id == other.id) return False def __hash__(self) -> int: - return hash((self.class_)) + return hash((self.id)) @classmethod def fromDoc( @@ -23929,150 +20509,71 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SubworkflowFeatureRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + id = None + if "id" in _doc: + try: + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) - class_ = load_field( - _doc.get("class"), - uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} - 
for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: + if str(e) == "missing required field `id`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), + str(e), + None ) ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, 
relative_uris) - r["class"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class ScatterFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support the `scatter` and - `scatterMethod` fields of [WorkflowStep](#WorkflowStep). - - """ - - def __init__( - self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ScatterFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ScatterFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ScatterFeatureRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_ScatterFeatureRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} + __original_id_is_none = id is None + if id is None: + if docRoot is not 
None: + id = docRoot + else: + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -24080,14 +20581,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), + "invalid field `{}`, expected one of: `id`".format(k), SourceLine(_doc, k, str), ) ) @@ -24095,9 +20596,11 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( + id=id, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -24111,14 +20614,9 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u # top refers to the directory level if top: @@ -24128,20 +20626,59 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class"]) + attrs: ClassVar[Collection[str]] = frozenset(["id"]) -class MultipleInputFeatureRequirement(ProcessRequirement): +class WorkflowStep(IdentifierRequired, Labeled, schema_salad.metaschema.Documented): """ - Indicates that the workflow platform must support multiple inbound data links - listed in the `source` field of 
[WorkflowStepInput](#WorkflowStepInput). + A workflow step is an executable element of a workflow. It specifies the underlying process implementation (such as ``CommandLineTool`` or another ``Workflow``) in the ``run`` field and connects the input and output parameters of the underlying process to workflow parameters. + + Scatter/gather + ============== + + To use scatter/gather, `ScatterFeatureRequirement <#ScatterFeatureRequirement>`__ must be specified in the workflow or workflow step requirements. + + A "scatter" operation specifies that the associated workflow step or subworkflow should execute separately over a list of input elements. Each job making up a scatter operation is independent and may be executed concurrently. + + The ``scatter`` field specifies one or more input parameters which will be scattered. An input parameter may be listed more than once. The declared type of each input parameter is implicitly becomes an array of items of the input parameter type. If a parameter is listed more than once, it becomes a nested array. As a result, upstream parameters which are connected to scattered parameters must be arrays. + + All output parameter types are also implicitly wrapped in arrays. Each job in the scatter results in an entry in the output array. + + If any scattered parameter runtime value is an empty array, all outputs are set to empty arrays and no work is done for the step, according to applicable scattering rules. + + If ``scatter`` declares more than one input parameter, ``scatterMethod`` describes how to decompose the input into a discrete set of jobs. + + * **dotproduct** specifies that each of the input arrays are aligned and one element taken from each array to construct each job. It is an error if all input arrays are not the same length. + + * **nested_crossproduct** specifies the Cartesian product of the inputs, producing a job for every combination of the scattered inputs. 
The output must be nested arrays for each level of scattering, in the order that the input arrays are listed in the ``scatter`` field. + + * **flat_crossproduct** specifies the Cartesian product of the inputs, producing a job for every combination of the scattered inputs. The output arrays must be flattened to a single level, but otherwise listed in the order that the input arrays are listed in the ``scatter`` field. + + Subworkflows + ============ + + To specify a nested workflow as part of a workflow step, `SubworkflowFeatureRequirement <#SubworkflowFeatureRequirement>`__ must be specified in the workflow or workflow step requirements. + + It is a fatal error if a workflow directly or indirectly invokes itself as a subworkflow (recursive workflows are not allowed). """ + id: str + def __init__( self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + in_: Any, + out: Any, + run: Any, + label: Any | None = None, + doc: Any | None = None, + requirements: Any | None = None, + hints: Any | None = None, + scatter: Any | None = None, + scatterMethod: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -24151,15 +20688,48 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "MultipleInputFeatureRequirement" + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.label = label + self.doc = doc + self.in_ = in_ + self.out = out + self.requirements = requirements + self.hints = hints + self.run = run + self.scatter = scatter + self.scatterMethod = scatterMethod def __eq__(self, other: Any) -> bool: - if isinstance(other, MultipleInputFeatureRequirement): - return bool(self.class_ == other.class_) + if isinstance(other, WorkflowStep): + return bool( + self.id == other.id + and 
self.label == other.label + and self.doc == other.doc + and self.in_ == other.in_ + and self.out == other.out + and self.requirements == other.requirements + and self.hints == other.hints + and self.run == other.run + and self.scatter == other.scatter + and self.scatterMethod == other.scatterMethod + ) return False def __hash__(self) -> int: - return hash((self.class_)) + return hash( + ( + self.id, + self.label, + self.doc, + self.in_, + self.out, + self.requirements, + self.hints, + self.run, + self.scatter, + self.scatterMethod, + ) + ) @classmethod def fromDoc( @@ -24167,280 +20737,372 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "MultipleInputFeatureRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] + id = None + if "id" in _doc: + try: + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid 
because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 
"doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + if _doc.get("in") is None: + raise ValidationException("missing required field `in`", None, []) - class_ = load_field( - _doc.get("class"), - uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None, + in_ = _load_field( + _doc.get("in"), + idmap_in__array_of_WorkflowStepInputLoader, baseuri, loadingOptions, - lc=_doc.get("class") + lc=_doc.get("in") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `in`": + _errors__.append( + ValidationException( + str(e), + None ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + else: + val = _doc.get("in") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + 
detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [e], + detailed_message=f"the `in` field with value `{val}` " + "is not valid because:", ) ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class StepInputExpressionRequirement(ProcessRequirement): - """ - Indicate that the workflow platform must support the `valueFrom` field - of [WorkflowStepInput](#WorkflowStepInput). 
- - """ - - def __init__( - self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "StepInputExpressionRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, StepInputExpressionRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "StepInputExpressionRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + if _doc.get("out") is None: + raise ValidationException("missing required field `out`", None, []) - class_ = load_field( - _doc.get("class"), - uri_StepInputExpressionRequirement_classLoader_False_True_None_None, + out = _load_field( + _doc.get("out"), + uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("class") + lc=_doc.get("out") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `out`": + _errors__.append( + ValidationException( + str(e), + None 
) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + else: + val = _doc.get("out") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), + [e], + detailed_message=f"the `out` field with value `{val}` " + "is not valid because:", ) ) + requirements = None + if "requirements" in _doc: + try: + requirements = _load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + lc=_doc.get("requirements") + ) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> 
dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class Secrets(ProcessRequirement): - def __init__( - self, - secrets: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "Secrets" - self.secrets = secrets - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Secrets): - return bool(self.class_ == other.class_ and self.secrets == other.secrets) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.secrets)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Secrets": - _doc = copy.copy(doc) + if str(e) == "missing required field `requirements`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("requirements") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + detailed_message=f"the `requirements` field with value `{val}` " + "is not valid because:", + ) + ) + hints = None + if "hints" in _doc: + try: + hints = _load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_Any_type, + baseuri, + loadingOptions, + lc=_doc.get("hints") + ) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - class_ = load_field( - _doc.get("class"), - uri_strtype_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) + if str(e) == "missing required field `hints`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("hints") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + 
_errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + detailed_message=f"the `hints` field with value `{val}` " + "is not valid because:", + ) + ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e + subscope_baseuri = _expand_url('run', baseuri, loadingOptions, True) try: - if _doc.get("secrets") is None: - raise ValidationException("missing required field `secrets`", None, []) + if _doc.get("run") is None: + raise ValidationException("missing required field `run`", None, []) - secrets = load_field( - _doc.get("secrets"), - uri_array_of_strtype_False_False_0_None, - baseuri, + run = _load_field( + _doc.get("run"), + uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None_None, + subscope_baseuri, loadingOptions, - lc=_doc.get("secrets") + lc=_doc.get("run") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `secrets`": + if str(e) == "missing required field `run`": _errors__.append( ValidationException( str(e), @@ -24448,13 +21110,13 @@ def fromDoc( ) ) else: - val = _doc.get("secrets") + val = _doc.get("run") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `secrets` field is not valid because:", - SourceLine(_doc, "secrets", str), + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -24466,14 +21128,108 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `secrets` field is not valid because:", - SourceLine(_doc, "secrets", str), + "the `run` field is not valid because:", + SourceLine(_doc, "run", 
str), [e], - detailed_message=f"the `secrets` field with value `{val}` " + detailed_message=f"the `run` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + scatter = None + if "scatter" in _doc: + try: + scatter = _load_field( + _doc.get("scatter"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None, + baseuri, + loadingOptions, + lc=_doc.get("scatter") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `scatter`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("scatter") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `scatter` field is not valid because:", + SourceLine(_doc, "scatter", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `scatter` field is not valid because:", + SourceLine(_doc, "scatter", str), + [e], + detailed_message=f"the `scatter` field with value `{val}` " + "is not valid because:", + ) + ) + scatterMethod = None + if "scatterMethod" in _doc: + try: + scatterMethod = _load_field( + _doc.get("scatterMethod"), + uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("scatterMethod") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `scatterMethod`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("scatterMethod") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `scatterMethod` field is not valid because:", + SourceLine(_doc, "scatterMethod", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `scatterMethod` field is not valid because:", + SourceLine(_doc, "scatterMethod", str), + [e], + detailed_message=f"the `scatterMethod` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -24481,14 +21237,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `secrets`".format( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `scatter`, `scatterMethod`".format( k ), SourceLine(_doc, k, str), @@ -24498,10 +21254,20 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - secrets=secrets, + id=id, + label=label, + doc=doc, + in_=in_, + out=out, + requirements=requirements, + hints=hints, + run=run, + scatter=scatter, + scatterMethod=scatterMethod, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -24515,45 +21281,114 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - 
if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.secrets is not None: - u = save_relative_uri(self.secrets, base_url, False, 0, relative_uris) - r["secrets"] = u + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.in_ is not None: + r["in"] = save( + self.in_, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.out is not None: + u = save_relative_uri(self.out, self.id, True, None, relative_uris) + r["out"] = u + if self.requirements is not None: + r["requirements"] = save( + self.requirements, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.run is not None: + u = save_relative_uri(self.run, self.id, False, None, relative_uris) + r["run"] = u + if self.scatter is not None: + u = save_relative_uri(self.scatter, self.id, False, 0, relative_uris) + r["scatter"] = u + if self.scatterMethod is not None: + u = save_relative_uri( + self.scatterMethod, self.id, False, None, relative_uris + ) + r["scatterMethod"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset( + [ + "id", + "label", + "doc", + "in", + "out", + "requirements", + "hints", + "run", + "scatter", + "scatterMethod", + ] + ) + + +class 
Workflow(Process): + """ + A workflow describes a set of **steps** and the **dependencies** between those steps. When a step produces output that will be consumed by a second step, the first step is a dependency of the second step. + + When there is a dependency, the workflow engine must execute the preceding step and wait for it to successfully produce output before executing the dependent step. If two steps are defined in the workflow graph that are not directly or indirectly dependent, these steps are **independent**, and may execute in any order or execute concurrently. A workflow is complete when all steps have been executed. + + Dependencies between parameters are expressed using the ``source`` field on `workflow step input parameters <#WorkflowStepInput>`__ and `workflow output parameters <#WorkflowOutputParameter>`__. + + The ``source`` field expresses the dependency of one parameter on another such that when a value is associated with the parameter specified by ``source``, that value is propagated to the destination parameter. When all data links inbound to a given step are fulfilled, the step is ready to execute. + + Workflow success and failure + ---------------------------- - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r + A completed step must result in one of ``success``, ``temporaryFailure`` or ``permanentFailure`` states. An implementation may choose to retry a step execution which resulted in ``temporaryFailure``. An implementation may choose to either continue running other steps of a workflow, or terminate immediately upon ``permanentFailure``. + + * If any step of a workflow execution results in ``permanentFailure``, then the workflow status is ``permanentFailure``. 
- attrs = frozenset(["class", "secrets"]) + * If one or more steps result in ``temporaryFailure`` and all other steps complete ``success`` or are not executed, then the workflow status is ``temporaryFailure``. + * If all workflow steps are executed and complete with ``success``, then the workflow status is ``success``. + + Extensions + ========== + + `ScatterFeatureRequirement <#ScatterFeatureRequirement>`__ and `SubworkflowFeatureRequirement <#SubworkflowFeatureRequirement>`__ are available as standard `extensions <#Extensions_and_Metadata>`__ to core workflow semantics. + + """ -class ProcessGenerator(Process): id: str def __init__( self, inputs: Any, outputs: Any, - run: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + steps: Any, + id: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + requirements: Any | None = None, + hints: Any | None = None, + cwlVersion: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -24571,11 +21406,11 @@ def __init__( self.requirements = requirements self.hints = hints self.cwlVersion = cwlVersion - self.class_ = "ProcessGenerator" - self.run = run + self.class_: Final[str] = "Workflow" + self.steps = steps def __eq__(self, other: Any) -> bool: - if isinstance(other, ProcessGenerator): + if isinstance(other, Workflow): return bool( self.id == other.id and self.label == other.label @@ -24586,7 +21421,7 @@ def __eq__(self, other: Any) -> bool: and self.hints == other.hints and self.cwlVersion == other.cwlVersion and self.class_ == other.class_ - and self.run == other.run + and self.steps == other.steps ) return 
False @@ -24602,7 +21437,7 @@ def __hash__(self) -> int: self.hints, self.cwlVersion, self.class_, - self.run, + self.steps, ) ) @@ -24612,8 +21447,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ProcessGenerator": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -24623,7 +21458,7 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -24680,22 +21515,23 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_strtype_False_True_None_None, + uri_Workflow_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e + raise e label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -24742,7 +21578,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -24790,9 +21626,9 @@ def fromDoc( if _doc.get("inputs") is None: raise ValidationException("missing required field `inputs`", None, []) - inputs = load_field( + inputs = _load_field( _doc.get("inputs"), - idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader, + idmap_inputs_array_of_WorkflowInputParameterLoader, baseuri, loadingOptions, lc=_doc.get("inputs") @@ -24838,9 +21674,9 @@ 
def fromDoc( if _doc.get("outputs") is None: raise ValidationException("missing required field `outputs`", None, []) - outputs = load_field( + outputs = _load_field( _doc.get("outputs"), - idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader, + idmap_outputs_array_of_WorkflowOutputParameterLoader, baseuri, loadingOptions, lc=_doc.get("outputs") @@ -24885,9 +21721,9 @@ def fromDoc( requirements = None if "requirements" in _doc: try: - requirements = load_field( + requirements = _load_field( _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, baseuri, loadingOptions, lc=_doc.get("requirements") @@ -24932,9 +21768,9 @@ def fromDoc( hints = None if "hints" in _doc: try: - 
hints = load_field( + hints = _load_field( _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type, + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, baseuri, loadingOptions, lc=_doc.get("hints") @@ -24979,7 +21815,7 @@ def fromDoc( cwlVersion = None if "cwlVersion" in _doc: try: - cwlVersion = load_field( + cwlVersion = _load_field( _doc.get("cwlVersion"), uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, baseuri, @@ -25023,24 +21859,22 @@ def fromDoc( "is not valid because:", ) ) - - subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) try: - if _doc.get("run") is None: - raise ValidationException("missing required field `run`", None, []) + if _doc.get("steps") is None: + raise ValidationException("missing required field `steps`", None, []) - run = load_field( - 
_doc.get("run"), - uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_False_False_None_None, - subscope_baseuri, + steps = _load_field( + _doc.get("steps"), + idmap_steps_union_of_array_of_WorkflowStepLoader, + baseuri, loadingOptions, - lc=_doc.get("run") + lc=_doc.get("steps") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `run`": + if str(e) == "missing required field `steps`": _errors__.append( ValidationException( str(e), @@ -25048,13 +21882,13 @@ def fromDoc( ) ) else: - val = _doc.get("run") + val = _doc.get("steps") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `run` field is not valid because:", - SourceLine(_doc, "run", str), + "the `steps` field is not valid because:", + SourceLine(_doc, "steps", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -25066,14 +21900,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `run` field is not valid because:", - SourceLine(_doc, "run", str), + "the `steps` field is not valid because:", + SourceLine(_doc, "steps", str), [e], - detailed_message=f"the `run` field with value `{val}` " + detailed_message=f"the `steps` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -25081,14 +21915,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, 
`hints`, `cwlVersion`, `class`, `run`".format( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `class`, `steps`".format( k ), SourceLine(_doc, k, str), @@ -25106,7 +21940,7 @@ def fromDoc( requirements=requirements, hints=hints, cwlVersion=cwlVersion, - run=run, + steps=steps, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -25128,8 +21962,10 @@ def save( u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ @@ -25165,9 +22001,10 @@ def save( if self.cwlVersion is not None: u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) r["cwlVersion"] = u - if self.run is not None: - u = save_relative_uri(self.run, self.id, False, None, relative_uris) - r["run"] = u + if self.steps is not None: + r["steps"] = save( + self.steps, top=False, base_url=self.id, relative_uris=relative_uris + ) # top refers to the directory level if top: @@ -25177,7 +22014,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ "id", "label", @@ -25188,22 +22025,21 @@ def save( "hints", "cwlVersion", "class", - "run", + "steps", ] ) -class MPIRequirement(ProcessRequirement): +class SubworkflowFeatureRequirement(ProcessRequirement): """ - Indicates that a process requires an MPI runtime. + Indicates that the workflow platform must support nested workflows in the ``run`` field of `WorkflowStep <#WorkflowStep>`__. 
""" def __init__( self, - processes: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -25213,18 +22049,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "MPIRequirement" - self.processes = processes + self.class_: Final[str] = "SubworkflowFeatureRequirement" def __eq__(self, other: Any) -> bool: - if isinstance(other, MPIRequirement): - return bool( - self.class_ == other.class_ and self.processes == other.processes - ) + if isinstance(other, SubworkflowFeatureRequirement): + return bool(self.class_ == other.class_) return False def __hash__(self) -> int: - return hash((self.class_, self.processes)) + return hash((self.class_)) @classmethod def fromDoc( @@ -25232,8 +22065,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "MPIRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -25244,67 +22077,20 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_strtype_False_True_None_None, + uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("processes") is None: - raise ValidationException("missing required field `processes`", None, []) - - processes = load_field( - _doc.get("processes"), - union_of_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - 
lc=_doc.get("processes") - ) - + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `processes`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("processes") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `processes` field is not valid because:", - SourceLine(_doc, "processes", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `processes` field is not valid because:", - SourceLine(_doc, "processes", str), - [e], - detailed_message=f"the `processes` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + raise e + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -25312,16 +22098,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `processes`".format( - k - ), + "invalid field `{}`, expected one of: `class`".format(k), SourceLine(_doc, k, str), ) ) @@ -25329,7 +22113,6 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - processes=processes, extension_fields=extension_fields, loadingOptions=loadingOptions, ) 
@@ -25347,20 +22130,15 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.processes is not None: - r["processes"] = save( - self.processes, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) # top refers to the directory level if top: @@ -25370,23 +22148,19 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "processes"]) + attrs: ClassVar[Collection[str]] = frozenset(["class"]) -class CUDARequirement(ProcessRequirement): +class ScatterFeatureRequirement(ProcessRequirement): """ - Require support for NVIDA CUDA (GPU hardware acceleration). + Indicates that the workflow platform must support the ``scatter`` and ``scatterMethod`` fields of `WorkflowStep <#WorkflowStep>`__. 
""" def __init__( self, - cudaComputeCapability: Any, - cudaVersionMin: Any, - cudaDeviceCountMax: Optional[Any] = None, - cudaDeviceCountMin: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -25396,33 +22170,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "CUDARequirement" - self.cudaComputeCapability = cudaComputeCapability - self.cudaDeviceCountMax = cudaDeviceCountMax - self.cudaDeviceCountMin = cudaDeviceCountMin - self.cudaVersionMin = cudaVersionMin + self.class_: Final[str] = "ScatterFeatureRequirement" def __eq__(self, other: Any) -> bool: - if isinstance(other, CUDARequirement): - return bool( - self.class_ == other.class_ - and self.cudaComputeCapability == other.cudaComputeCapability - and self.cudaDeviceCountMax == other.cudaDeviceCountMax - and self.cudaDeviceCountMin == other.cudaDeviceCountMin - and self.cudaVersionMin == other.cudaVersionMin - ) + if isinstance(other, ScatterFeatureRequirement): + return bool(self.class_ == other.class_) return False def __hash__(self) -> int: - return hash( - ( - self.class_, - self.cudaComputeCapability, - self.cudaDeviceCountMax, - self.cudaDeviceCountMin, - self.cudaVersionMin, - ) - ) + return hash((self.class_)) @classmethod def fromDoc( @@ -25430,8 +22186,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CUDARequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -25442,209 +22198,141 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - 
uri_strtype_False_True_None_None, + uri_ScatterFeatureRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("cudaComputeCapability") is None: - raise ValidationException("missing required field `cudaComputeCapability`", None, []) - - cudaComputeCapability = load_field( - _doc.get("cudaComputeCapability"), - union_of_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("cudaComputeCapability") - ) - + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `cudaComputeCapability`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cudaComputeCapability") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + raise e + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - "the `cudaComputeCapability` field is not valid because:", - SourceLine(_doc, "cudaComputeCapability", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) + extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "the `cudaComputeCapability` field 
is not valid because:", - SourceLine(_doc, "cudaComputeCapability", str), - [e], - detailed_message=f"the `cudaComputeCapability` field with value `{val}` " - "is not valid because:", + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), ) ) - cudaDeviceCountMax = None - if "cudaDeviceCountMax" in _doc: - try: - cudaDeviceCountMax = load_field( - _doc.get("cudaDeviceCountMax"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("cudaDeviceCountMax") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed - if str(e) == "missing required field `cudaDeviceCountMax`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cudaDeviceCountMax") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `cudaDeviceCountMax` field is not valid because:", - SourceLine(_doc, "cudaDeviceCountMax", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `cudaDeviceCountMax` field is not valid because:", - SourceLine(_doc, "cudaDeviceCountMax", str), - [e], - detailed_message=f"the `cudaDeviceCountMax` field with value `{val}` " - "is not valid because:", - ) - ) - cudaDeviceCountMin = None - if "cudaDeviceCountMin" in _doc: - try: - cudaDeviceCountMin = load_field( - _doc.get("cudaDeviceCountMin"), - union_of_None_type_or_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - 
lc=_doc.get("cudaDeviceCountMin") - ) + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["class"]) + + +class MultipleInputFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support multiple inbound data links listed in the ``source`` field of `WorkflowStepInput <#WorkflowStepInput>`__. 
+ + """ + + def __init__( + self, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_: Final[str] = "MultipleInputFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, MultipleInputFeatureRequirement): + return bool(self.class_ == other.class_) + return False - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + def __hash__(self) -> int: + return hash((self.class_)) - if str(e) == "missing required field `cudaDeviceCountMin`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cudaDeviceCountMin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `cudaDeviceCountMin` field is not valid because:", - SourceLine(_doc, "cudaDeviceCountMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `cudaDeviceCountMin` field is not valid because:", - SourceLine(_doc, "cudaDeviceCountMin", str), - [e], - detailed_message=f"the `cudaDeviceCountMin` field with value `{val}` " - "is not valid because:", - ) - ) + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] try: - if 
_doc.get("cudaVersionMin") is None: - raise ValidationException("missing required field `cudaVersionMin`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - cudaVersionMin = load_field( - _doc.get("cudaVersionMin"), - strtype, + class_ = _load_field( + _doc.get("class"), + uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("cudaVersionMin") + lc=_doc.get("class") ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `cudaVersionMin`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cudaVersionMin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `cudaVersionMin` field is not valid because:", - SourceLine(_doc, "cudaVersionMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `cudaVersionMin` field is not valid because:", - SourceLine(_doc, "cudaVersionMin", str), - [e], - detailed_message=f"the `cudaVersionMin` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + raise e + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -25652,16 +22340,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, 
scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `cudaComputeCapability`, `cudaDeviceCountMax`, `cudaDeviceCountMin`, `cudaVersionMin`".format( - k - ), + "invalid field `{}`, expected one of: `class`".format(k), SourceLine(_doc, k, str), ) ) @@ -25669,10 +22355,6 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - cudaComputeCapability=cudaComputeCapability, - cudaDeviceCountMax=cudaDeviceCountMax, - cudaDeviceCountMin=cudaDeviceCountMin, - cudaVersionMin=cudaVersionMin, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -25690,41 +22372,15 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.cudaComputeCapability is not None: - r["cudaComputeCapability"] = save( - self.cudaComputeCapability, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.cudaDeviceCountMax is not None: - r["cudaDeviceCountMax"] = save( - self.cudaDeviceCountMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.cudaDeviceCountMin is not None: - r["cudaDeviceCountMin"] = save( - self.cudaDeviceCountMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.cudaVersionMin is not None: - r["cudaVersionMin"] = save( - self.cudaVersionMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) # top refers to the directory level if top: @@ -25734,23 
+22390,19 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "class", - "cudaComputeCapability", - "cudaDeviceCountMax", - "cudaDeviceCountMin", - "cudaVersionMin", - ] - ) + attrs: ClassVar[Collection[str]] = frozenset(["class"]) + + +class StepInputExpressionRequirement(ProcessRequirement): + """ + Indicate that the workflow platform must support the ``valueFrom`` field of `WorkflowStepInput <#WorkflowStepInput>`__. + """ -class ShmSize(ProcessRequirement): def __init__( self, - shmSize: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -25760,16 +22412,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "ShmSize" - self.shmSize = shmSize + self.class_: Final[str] = "StepInputExpressionRequirement" def __eq__(self, other: Any) -> bool: - if isinstance(other, ShmSize): - return bool(self.class_ == other.class_ and self.shmSize == other.shmSize) + if isinstance(other, StepInputExpressionRequirement): + return bool(self.class_ == other.class_) return False def __hash__(self) -> int: - return hash((self.class_, self.shmSize)) + return hash((self.class_)) @classmethod def fromDoc( @@ -25777,8 +22428,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ShmSize": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -25789,67 +22440,20 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_strtype_False_True_None_None, + uri_StepInputExpressionRequirement_classLoader_False_True_None_None, baseuri, 
loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("shmSize") is None: - raise ValidationException("missing required field `shmSize`", None, []) - - shmSize = load_field( - _doc.get("shmSize"), - strtype, - baseuri, - loadingOptions, - lc=_doc.get("shmSize") - ) - + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `shmSize`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("shmSize") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `shmSize` field is not valid because:", - SourceLine(_doc, "shmSize", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `shmSize` field is not valid because:", - SourceLine(_doc, "shmSize", str), - [e], - detailed_message=f"the `shmSize` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + raise e + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -25857,16 +22461,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: 
_errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `shmSize`".format( - k - ), + "invalid field `{}`, expected one of: `class`".format(k), SourceLine(_doc, k, str), ) ) @@ -25874,7 +22476,6 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - shmSize=shmSize, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -25892,17 +22493,15 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.shmSize is not None: - r["shmSize"] = save( - self.shmSize, top=False, base_url=base_url, relative_uris=relative_uris - ) # top refers to the directory level if top: @@ -25912,13 +22511,12 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "shmSize"]) + attrs: ClassVar[Collection[str]] = frozenset(["class"]) -_vocab = { +_vocab.update({ "Any": "https://w3id.org/cwl/salad#Any", "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", - "CUDARequirement": "http://commonwl.org/cwltool#CUDARequirement", "CWLArraySchema": "https://w3id.org/cwl/cwl#CWLArraySchema", "CWLInputFile": "https://w3id.org/cwl/cwl#CWLInputFile", "CWLObjectType": "https://w3id.org/cwl/cwl#CWLObjectType", @@ -25973,7 +22571,6 @@ def save( "LoadContents": "https://w3id.org/cwl/cwl#LoadContents", "LoadListingEnum": "https://w3id.org/cwl/cwl#LoadListingEnum", "LoadListingRequirement": "https://w3id.org/cwl/cwl#LoadListingRequirement", - "MPIRequirement": "http://commonwl.org/cwltool#MPIRequirement", 
"MapSchema": "https://w3id.org/cwl/salad#MapSchema", "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement", "NetworkAccess": "https://w3id.org/cwl/cwl#NetworkAccess", @@ -25987,7 +22584,6 @@ def save( "Parameter": "https://w3id.org/cwl/cwl#Parameter", "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", "Process": "https://w3id.org/cwl/cwl#Process", - "ProcessGenerator": "http://commonwl.org/cwltool#ProcessGenerator", "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement", "RecordField": "https://w3id.org/cwl/salad#RecordField", "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", @@ -25996,9 +22592,7 @@ def save( "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod", "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement", "SecondaryFileSchema": "https://w3id.org/cwl/cwl#SecondaryFileSchema", - "Secrets": "http://commonwl.org/cwltool#Secrets", "ShellCommandRequirement": "https://w3id.org/cwl/cwl#ShellCommandRequirement", - "ShmSize": "http://commonwl.org/cwltool#ShmSize", "Sink": "https://w3id.org/cwl/cwl#Sink", "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage", "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement", @@ -26018,16 +22612,6 @@ def save( "deep_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing", "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct", "double": "http://www.w3.org/2001/XMLSchema#double", - "draft-2": "https://w3id.org/cwl/cwl#draft-2", - "draft-3": "https://w3id.org/cwl/cwl#draft-3", - "draft-3.dev1": "https://w3id.org/cwl/cwl#draft-3.dev1", - "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2", - "draft-3.dev3": "https://w3id.org/cwl/cwl#draft-3.dev3", - "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4", - "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5", - "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1", - "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2", - 
"draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3", "enum": "https://w3id.org/cwl/salad#enum", "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct", "float": "http://www.w3.org/2001/XMLSchema#float", @@ -26046,15 +22630,11 @@ def save( "stdout": "https://w3id.org/cwl/cwl#stdout", "string": "http://www.w3.org/2001/XMLSchema#string", "union": "https://w3id.org/cwl/salad#union", - "v1.0": "https://w3id.org/cwl/cwl#v1.0", - "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4", "v1.1": "https://w3id.org/cwl/cwl#v1.1", - "v1.1.0-dev1": "https://w3id.org/cwl/cwl#v1.1.0-dev1", -} -_rvocab = { +}) +_rvocab.update({ "https://w3id.org/cwl/salad#Any": "Any", "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", - "http://commonwl.org/cwltool#CUDARequirement": "CUDARequirement", "https://w3id.org/cwl/cwl#CWLArraySchema": "CWLArraySchema", "https://w3id.org/cwl/cwl#CWLInputFile": "CWLInputFile", "https://w3id.org/cwl/cwl#CWLObjectType": "CWLObjectType", @@ -26109,7 +22689,6 @@ def save( "https://w3id.org/cwl/cwl#LoadContents": "LoadContents", "https://w3id.org/cwl/cwl#LoadListingEnum": "LoadListingEnum", "https://w3id.org/cwl/cwl#LoadListingRequirement": "LoadListingRequirement", - "http://commonwl.org/cwltool#MPIRequirement": "MPIRequirement", "https://w3id.org/cwl/salad#MapSchema": "MapSchema", "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", "https://w3id.org/cwl/cwl#NetworkAccess": "NetworkAccess", @@ -26123,7 +22702,6 @@ def save( "https://w3id.org/cwl/cwl#Parameter": "Parameter", "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", "https://w3id.org/cwl/cwl#Process": "Process", - "http://commonwl.org/cwltool#ProcessGenerator": "ProcessGenerator", "https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", "https://w3id.org/cwl/salad#RecordField": "RecordField", "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", @@ -26132,9 +22710,7 @@ def save( 
"https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", "https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", "https://w3id.org/cwl/cwl#SecondaryFileSchema": "SecondaryFileSchema", - "http://commonwl.org/cwltool#Secrets": "Secrets", "https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", - "http://commonwl.org/cwltool#ShmSize": "ShmSize", "https://w3id.org/cwl/cwl#Sink": "Sink", "https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", "https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", @@ -26154,16 +22730,6 @@ def save( "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing": "deep_listing", "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", "http://www.w3.org/2001/XMLSchema#double": "double", - "https://w3id.org/cwl/cwl#draft-2": "draft-2", - "https://w3id.org/cwl/cwl#draft-3": "draft-3", - "https://w3id.org/cwl/cwl#draft-3.dev1": "draft-3.dev1", - "https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", - "https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", - "https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", - "https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", - "https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", - "https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", - "https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", "https://w3id.org/cwl/salad#enum": "enum", "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", "http://www.w3.org/2001/XMLSchema#float": "float", @@ -26182,19 +22748,16 @@ def save( "https://w3id.org/cwl/cwl#stdout": "stdout", "http://www.w3.org/2001/XMLSchema#string": "string", "https://w3id.org/cwl/salad#union": "union", - "https://w3id.org/cwl/cwl#v1.0": "v1.0", - "https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", "https://w3id.org/cwl/cwl#v1.1": "v1.1", - "https://w3id.org/cwl/cwl#v1.1.0-dev1": "v1.1.0-dev1", -} - -strtype = _PrimitiveLoader(str) -inttype = _PrimitiveLoader(int) -floattype = 
_PrimitiveLoader(float) -booltype = _PrimitiveLoader(bool) -None_type = _PrimitiveLoader(type(None)) -Any_type = _AnyLoader() -PrimitiveTypeLoader = _EnumLoader( +}) + +strtype: Final = _PrimitiveLoader(str) +inttype: Final = _PrimitiveLoader(int) +floattype: Final = _PrimitiveLoader(float) +booltype: Final = _PrimitiveLoader(bool) +None_type: Final = _PrimitiveLoader(type(None)) +Any_type: Final = _AnyLoader() +PrimitiveTypeLoader: Final = _EnumLoader( ( "null", "boolean", @@ -26209,28 +22772,41 @@ def save( """ Names of salad data types (based on Avro schema declarations). -Refer to the [Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for -detailed information. +Refer to the `Avro schema declaration documentation `__ for detailed information. null: no value + boolean: a binary value + int: 32-bit signed integer + long: 64-bit signed integer + float: single precision (32-bit) IEEE 754 floating-point number + double: double precision (64-bit) IEEE 754 floating-point number + string: Unicode character sequence """ -AnyLoader = _EnumLoader(("Any",), "Any") +AnyLoader: Final = _EnumLoader(("Any",), "Any") """ The **Any** type validates for any non-null value. 
""" -RecordFieldLoader = _RecordLoader(RecordField, None, None) -RecordSchemaLoader = _RecordLoader(RecordSchema, None, None) -EnumSchemaLoader = _RecordLoader(EnumSchema, None, None) -ArraySchemaLoader = _RecordLoader(ArraySchema, None, None) -MapSchemaLoader = _RecordLoader(MapSchema, None, None) -UnionSchemaLoader = _RecordLoader(UnionSchema, None, None) -CWLTypeLoader = _EnumLoader( +RecordFieldLoader: Final = _RecordLoader( + schema_salad.metaschema.RecordField, None, None +) +RecordSchemaLoader: Final = _RecordLoader( + schema_salad.metaschema.RecordSchema, None, None +) +EnumSchemaLoader: Final = _RecordLoader(schema_salad.metaschema.EnumSchema, None, None) +ArraySchemaLoader: Final = _RecordLoader( + schema_salad.metaschema.ArraySchema, None, None +) +MapSchemaLoader: Final = _RecordLoader(schema_salad.metaschema.MapSchema, None, None) +UnionSchemaLoader: Final = _RecordLoader( + schema_salad.metaschema.UnionSchema, None, None +) +CWLTypeLoader: Final = _EnumLoader( ( "null", "boolean", @@ -26246,57 +22822,65 @@ def save( ) """ Extends primitive types with the concept of a file and directory as a builtin type. 
+ File: A File object + Directory: A Directory object """ -CWLArraySchemaLoader = _RecordLoader(CWLArraySchema, None, None) -CWLRecordFieldLoader = _RecordLoader(CWLRecordField, None, None) -CWLRecordSchemaLoader = _RecordLoader(CWLRecordSchema, None, None) -FileLoader = _RecordLoader(File, None, None) -DirectoryLoader = _RecordLoader(Directory, None, None) -CWLObjectTypeLoader = _UnionLoader((), "CWLObjectTypeLoader") -union_of_None_type_or_CWLObjectTypeLoader = _UnionLoader( +CWLArraySchemaLoader: Final = _RecordLoader(CWLArraySchema, None, None) +CWLRecordFieldLoader: Final = _RecordLoader(CWLRecordField, None, None) +CWLRecordSchemaLoader: Final = _RecordLoader(CWLRecordSchema, None, None) +FileLoader: Final = _RecordLoader(File, None, None) +DirectoryLoader: Final = _RecordLoader(Directory, None, None) +CWLObjectTypeLoader: Final = _UnionLoader((), "CWLObjectTypeLoader") +union_of_None_type_or_CWLObjectTypeLoader: Final = _UnionLoader( ( None_type, CWLObjectTypeLoader, ) ) -array_of_union_of_None_type_or_CWLObjectTypeLoader = _ArrayLoader( +array_of_union_of_None_type_or_CWLObjectTypeLoader: Final = _ArrayLoader( union_of_None_type_or_CWLObjectTypeLoader ) -map_of_union_of_None_type_or_CWLObjectTypeLoader = _MapLoader( +map_of_union_of_None_type_or_CWLObjectTypeLoader: Final = _MapLoader( union_of_None_type_or_CWLObjectTypeLoader, "None", None, None ) -InlineJavascriptRequirementLoader = _RecordLoader( +InlineJavascriptRequirementLoader: Final = _RecordLoader( InlineJavascriptRequirement, None, None ) -SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement, None, None) -LoadListingRequirementLoader = _RecordLoader(LoadListingRequirement, None, None) -DockerRequirementLoader = _RecordLoader(DockerRequirement, None, None) -SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement, None, None) -InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement, None, None) -EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement, None, None) 
-ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement, None, None) -ResourceRequirementLoader = _RecordLoader(ResourceRequirement, None, None) -WorkReuseLoader = _RecordLoader(WorkReuse, None, None) -NetworkAccessLoader = _RecordLoader(NetworkAccess, None, None) -InplaceUpdateRequirementLoader = _RecordLoader(InplaceUpdateRequirement, None, None) -ToolTimeLimitLoader = _RecordLoader(ToolTimeLimit, None, None) -SubworkflowFeatureRequirementLoader = _RecordLoader( +SchemaDefRequirementLoader: Final = _RecordLoader(SchemaDefRequirement, None, None) +LoadListingRequirementLoader: Final = _RecordLoader(LoadListingRequirement, None, None) +DockerRequirementLoader: Final = _RecordLoader(DockerRequirement, None, None) +SoftwareRequirementLoader: Final = _RecordLoader(SoftwareRequirement, None, None) +InitialWorkDirRequirementLoader: Final = _RecordLoader( + InitialWorkDirRequirement, None, None +) +EnvVarRequirementLoader: Final = _RecordLoader(EnvVarRequirement, None, None) +ShellCommandRequirementLoader: Final = _RecordLoader( + ShellCommandRequirement, None, None +) +ResourceRequirementLoader: Final = _RecordLoader(ResourceRequirement, None, None) +WorkReuseLoader: Final = _RecordLoader(WorkReuse, None, None) +NetworkAccessLoader: Final = _RecordLoader(NetworkAccess, None, None) +InplaceUpdateRequirementLoader: Final = _RecordLoader( + InplaceUpdateRequirement, None, None +) +ToolTimeLimitLoader: Final = _RecordLoader(ToolTimeLimit, None, None) +SubworkflowFeatureRequirementLoader: Final = _RecordLoader( SubworkflowFeatureRequirement, None, None ) -ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement, None, None) -MultipleInputFeatureRequirementLoader = _RecordLoader( +ScatterFeatureRequirementLoader: Final = _RecordLoader( + ScatterFeatureRequirement, None, None +) +MultipleInputFeatureRequirementLoader: Final = _RecordLoader( MultipleInputFeatureRequirement, None, None ) -StepInputExpressionRequirementLoader = _RecordLoader( 
+StepInputExpressionRequirementLoader: Final = _RecordLoader( StepInputExpressionRequirement, None, None ) -SecretsLoader = _RecordLoader(Secrets, None, None) -MPIRequirementLoader = _RecordLoader(MPIRequirement, None, None) -CUDARequirementLoader = _RecordLoader(CUDARequirement, None, None) -ShmSizeLoader = _RecordLoader(ShmSize, None, None) -union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader = _UnionLoader( +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader: ( + Final +) = _UnionLoader( ( InlineJavascriptRequirementLoader, SchemaDefRequirementLoader, @@ -26315,52 +22899,38 @@ def save( ScatterFeatureRequirementLoader, MultipleInputFeatureRequirementLoader, StepInputExpressionRequirementLoader, - SecretsLoader, - MPIRequirementLoader, - CUDARequirementLoader, - ShmSizeLoader, ) ) 
-array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader = _ArrayLoader( - union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader +array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader: ( + Final +) = _ArrayLoader( + 
union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader ) -union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_CWLObjectTypeLoader = _UnionLoader( +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_CWLObjectTypeLoader: ( + Final +) = _UnionLoader( ( None_type, - 
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, CWLObjectTypeLoader, ) ) -map_of_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_CWLObjectTypeLoader = _MapLoader( - 
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_CWLObjectTypeLoader, +map_of_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_CWLObjectTypeLoader: ( + Final +) = _MapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_CWLObjectTypeLoader, "CWLInputFile", "@list", True, ) -CWLInputFileLoader = 
map_of_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_CWLObjectTypeLoader -CWLVersionLoader = _EnumLoader( - ( - "draft-2", - "draft-3.dev1", - "draft-3.dev2", - "draft-3.dev3", - "draft-3.dev4", - "draft-3.dev5", - "draft-3", - "draft-4.dev1", - "draft-4.dev2", - "draft-4.dev3", - "v1.0.dev4", - "v1.0", - "v1.1.0-dev1", - "v1.1", - ), - "CWLVersion", +CWLInputFileLoader: Final = ( + map_of_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_CWLObjectTypeLoader ) +CWLVersionLoader: Final = _EnumLoader(("v1.1",), "CWLVersion") """ -Version symbols for published CWL document versions. +Current version symbol for CWL documents. 
""" -LoadListingEnumLoader = _EnumLoader( +LoadListingEnumLoader: Final = _EnumLoader( ( "no_listing", "shallow_listing", @@ -26369,156 +22939,187 @@ def save( "LoadListingEnum", ) """ -Specify the desired behavior for loading the `listing` field of -a Directory object for use by expressions. +Specify the desired behavior for loading the ``listing`` field of a Directory object for use by expressions. no_listing: Do not load the directory listing. + shallow_listing: Only load the top level listing, do not recurse into subdirectories. + deep_listing: Load the directory listing and recursively load all subdirectories as well. """ -ExpressionLoader = _ExpressionLoader(str) -InputBindingLoader = _RecordLoader(InputBinding, None, None) -InputRecordFieldLoader = _RecordLoader(InputRecordField, None, None) -InputRecordSchemaLoader = _RecordLoader(InputRecordSchema, None, None) -InputEnumSchemaLoader = _RecordLoader(InputEnumSchema, None, None) -InputArraySchemaLoader = _RecordLoader(InputArraySchema, None, None) -OutputRecordFieldLoader = _RecordLoader(OutputRecordField, None, None) -OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema, None, None) -OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema, None, None) -OutputArraySchemaLoader = _RecordLoader(OutputArraySchema, None, None) -SecondaryFileSchemaLoader = _RecordLoader(SecondaryFileSchema, None, None) -EnvironmentDefLoader = _RecordLoader(EnvironmentDef, None, None) -CommandLineBindingLoader = _RecordLoader(CommandLineBinding, None, None) -CommandOutputBindingLoader = _RecordLoader(CommandOutputBinding, None, None) -CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField, None, None) -CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema, None, None) -CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema, None, None) -CommandInputArraySchemaLoader = _RecordLoader(CommandInputArraySchema, None, None) -CommandOutputRecordFieldLoader = 
_RecordLoader(CommandOutputRecordField, None, None) -CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema, None, None) -CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema, None, None) -CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema, None, None) -CommandInputParameterLoader = _RecordLoader(CommandInputParameter, None, None) -CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter, None, None) -stdinLoader = _EnumLoader(("stdin",), "stdin") +ExpressionLoader: Final = _ExpressionLoader(str) +InputBindingLoader: Final = _RecordLoader(InputBinding, None, None) +InputRecordFieldLoader: Final = _RecordLoader(InputRecordField, None, None) +InputRecordSchemaLoader: Final = _RecordLoader(InputRecordSchema, None, None) +InputEnumSchemaLoader: Final = _RecordLoader(InputEnumSchema, None, None) +InputArraySchemaLoader: Final = _RecordLoader(InputArraySchema, None, None) +OutputRecordFieldLoader: Final = _RecordLoader(OutputRecordField, None, None) +OutputRecordSchemaLoader: Final = _RecordLoader(OutputRecordSchema, None, None) +OutputEnumSchemaLoader: Final = _RecordLoader(OutputEnumSchema, None, None) +OutputArraySchemaLoader: Final = _RecordLoader(OutputArraySchema, None, None) +SecondaryFileSchemaLoader: Final = _RecordLoader(SecondaryFileSchema, None, None) +EnvironmentDefLoader: Final = _RecordLoader(EnvironmentDef, None, None) +CommandLineBindingLoader: Final = _RecordLoader(CommandLineBinding, None, None) +CommandOutputBindingLoader: Final = _RecordLoader(CommandOutputBinding, None, None) +CommandInputRecordFieldLoader: Final = _RecordLoader( + CommandInputRecordField, None, None +) +CommandInputRecordSchemaLoader: Final = _RecordLoader( + CommandInputRecordSchema, None, None +) +CommandInputEnumSchemaLoader: Final = _RecordLoader(CommandInputEnumSchema, None, None) +CommandInputArraySchemaLoader: Final = _RecordLoader( + CommandInputArraySchema, None, None +) +CommandOutputRecordFieldLoader: 
Final = _RecordLoader( + CommandOutputRecordField, None, None +) +CommandOutputRecordSchemaLoader: Final = _RecordLoader( + CommandOutputRecordSchema, None, None +) +CommandOutputEnumSchemaLoader: Final = _RecordLoader( + CommandOutputEnumSchema, None, None +) +CommandOutputArraySchemaLoader: Final = _RecordLoader( + CommandOutputArraySchema, None, None +) +CommandInputParameterLoader: Final = _RecordLoader(CommandInputParameter, None, None) +CommandOutputParameterLoader: Final = _RecordLoader(CommandOutputParameter, None, None) +stdinLoader: Final = _EnumLoader(("stdin",), "stdin") """ -Only valid as a `type` for a `CommandLineTool` input with no -`inputBinding` set. `stdin` must not be specified at the `CommandLineTool` -level. +Only valid as a ``type`` for a ``CommandLineTool`` input with no ``inputBinding`` set. ``stdin`` must not be specified at the ``CommandLineTool`` level. The following -``` -inputs: - an_input_name: - type: stdin -``` + +:: + + inputs: + an_input_name: + type: stdin + + is equivalent to -``` -inputs: - an_input_name: - type: File - streamable: true - -stdin: ${inputs.an_input_name.path} -``` + +:: + + inputs: + an_input_name: + type: File + streamable: true + + stdin: ${inputs.an_input_name.path} """ -stdoutLoader = _EnumLoader(("stdout",), "stdout") +stdoutLoader: Final = _EnumLoader(("stdout",), "stdout") """ -Only valid as a `type` for a `CommandLineTool` output with no -`outputBinding` set. +Only valid as a ``type`` for a ``CommandLineTool`` output with no ``outputBinding`` set. The following -``` -outputs: - an_output_name: - type: stdout -stdout: a_stdout_file -``` +:: + + outputs: + an_output_name: + type: stdout + + stdout: a_stdout_file + + is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: a_stdout_file - -stdout: a_stdout_file -``` - -If there is no `stdout` name provided, a random filename will be created. 
-For example, the following -``` -outputs: - an_output_name: - type: stdout -``` + +:: + + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stdout_file + + stdout: a_stdout_file + + +If there is no ``stdout`` name provided, a random filename will be created. For example, the following + +:: + + outputs: + an_output_name: + type: stdout + + is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: random_stdout_filenameABCDEFG - -stdout: random_stdout_filenameABCDEFG -``` + +:: + + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stdout_filenameABCDEFG + + stdout: random_stdout_filenameABCDEFG """ -stderrLoader = _EnumLoader(("stderr",), "stderr") +stderrLoader: Final = _EnumLoader(("stderr",), "stderr") """ -Only valid as a `type` for a `CommandLineTool` output with no -`outputBinding` set. +Only valid as a ``type`` for a ``CommandLineTool`` output with no ``outputBinding`` set. The following -``` -outputs: - an_output_name: - type: stderr -stderr: a_stderr_file -``` +:: + + outputs: + an_output_name: + type: stderr + + stderr: a_stderr_file + + is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: a_stderr_file - -stderr: a_stderr_file -``` - -If there is no `stderr` name provided, a random filename will be created. -For example, the following -``` -outputs: - an_output_name: - type: stderr -``` + +:: + + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stderr_file + + stderr: a_stderr_file + + +If there is no ``stderr`` name provided, a random filename will be created. 
For example, the following + +:: + + outputs: + an_output_name: + type: stderr + + is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: random_stderr_filenameABCDEFG - -stderr: random_stderr_filenameABCDEFG -``` + +:: + + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stderr_filenameABCDEFG + + stderr: random_stderr_filenameABCDEFG """ -CommandLineToolLoader = _RecordLoader(CommandLineTool, None, None) -SoftwarePackageLoader = _RecordLoader(SoftwarePackage, None, None) -DirentLoader = _RecordLoader(Dirent, None, None) -ExpressionToolOutputParameterLoader = _RecordLoader( +CommandLineToolLoader: Final = _RecordLoader(CommandLineTool, None, None) +SoftwarePackageLoader: Final = _RecordLoader(SoftwarePackage, None, None) +DirentLoader: Final = _RecordLoader(Dirent, None, None) +ExpressionToolOutputParameterLoader: Final = _RecordLoader( ExpressionToolOutputParameter, None, None ) -WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter, None, None) -ExpressionToolLoader = _RecordLoader(ExpressionTool, None, None) -LinkMergeMethodLoader = _EnumLoader( +WorkflowInputParameterLoader: Final = _RecordLoader(WorkflowInputParameter, None, None) +ExpressionToolLoader: Final = _RecordLoader(ExpressionTool, None, None) +LinkMergeMethodLoader: Final = _EnumLoader( ( "merge_nested", "merge_flattened", @@ -26526,12 +23127,14 @@ def save( "LinkMergeMethod", ) """ -The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). +The input link merge method, described in `WorkflowStepInput <#WorkflowStepInput>`__. 
""" -WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter, None, None) -WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput, None, None) -WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput, None, None) -ScatterMethodLoader = _EnumLoader( +WorkflowOutputParameterLoader: Final = _RecordLoader( + WorkflowOutputParameter, None, None +) +WorkflowStepInputLoader: Final = _RecordLoader(WorkflowStepInput, None, None) +WorkflowStepOutputLoader: Final = _RecordLoader(WorkflowStepOutput, None, None) +ScatterMethodLoader: Final = _EnumLoader( ( "dotproduct", "nested_crossproduct", @@ -26540,21 +23143,22 @@ def save( "ScatterMethod", ) """ -The scatter method, as described in [workflow step scatter](#WorkflowStep). +The scatter method, as described in `workflow step scatter <#WorkflowStep>`__. """ -WorkflowStepLoader = _RecordLoader(WorkflowStep, None, None) -WorkflowLoader = _RecordLoader(Workflow, None, None) -ProcessGeneratorLoader = _RecordLoader(ProcessGenerator, None, None) -array_of_strtype = _ArrayLoader(strtype) -union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( +WorkflowStepLoader: Final = _RecordLoader(WorkflowStep, None, None) +WorkflowLoader: Final = _RecordLoader(Workflow, None, None) +array_of_strtype: Final = _ArrayLoader(strtype) +union_of_None_type_or_strtype_or_array_of_strtype: Final = _UnionLoader( ( None_type, strtype, array_of_strtype, ) ) -uri_strtype_True_False_None_None = _URILoader(strtype, True, False, None, None) -union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _UnionLoader( +uri_strtype_True_False_None_None: Final = _URILoader(strtype, True, False, None, None) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( PrimitiveTypeLoader, RecordSchemaLoader, @@ -26565,10 +23169,14 @@ def 
save( strtype, ) ) -array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype ) -union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _UnionLoader( +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( PrimitiveTypeLoader, RecordSchemaLoader, @@ -26580,51 +23188,57 @@ def save( array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, ) ) -typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2 = _TypeDSLLoader( 
+typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, 2, "v1.1", ) -array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) -union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader( +array_of_RecordFieldLoader: Final = _ArrayLoader(RecordFieldLoader) +union_of_None_type_or_array_of_RecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_RecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader( +idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader: Final = _IdMapLoader( union_of_None_type_or_array_of_RecordFieldLoader, "name", "type" ) -Record_nameLoader = _EnumLoader(("record",), "Record_name") -typedsl_Record_nameLoader_2 = _TypeDSLLoader(Record_nameLoader, 2, "v1.1") -union_of_None_type_or_strtype = _UnionLoader( +Record_nameLoader: Final = _EnumLoader(("record",), "Record_name") +typedsl_Record_nameLoader_2: Final = _TypeDSLLoader(Record_nameLoader, 2, "v1.1") +union_of_None_type_or_strtype: Final = _UnionLoader( ( None_type, strtype, ) ) -uri_union_of_None_type_or_strtype_True_False_None_None = _URILoader( +uri_union_of_None_type_or_strtype_True_False_None_None: Final = _URILoader( union_of_None_type_or_strtype, True, False, None, None ) -uri_array_of_strtype_True_False_None_None = _URILoader( +uri_array_of_strtype_True_False_None_None: Final = _URILoader( array_of_strtype, True, False, None, None ) 
-Enum_nameLoader = _EnumLoader(("enum",), "Enum_name") -typedsl_Enum_nameLoader_2 = _TypeDSLLoader(Enum_nameLoader, 2, "v1.1") -uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None = _URILoader( +Enum_nameLoader: Final = _EnumLoader(("enum",), "Enum_name") +typedsl_Enum_nameLoader_2: Final = _TypeDSLLoader(Enum_nameLoader, 2, "v1.1") +uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, False, True, 2, None, ) -Array_nameLoader = _EnumLoader(("array",), "Array_name") -typedsl_Array_nameLoader_2 = _TypeDSLLoader(Array_nameLoader, 2, "v1.1") -Map_nameLoader = _EnumLoader(("map",), "Map_name") -typedsl_Map_nameLoader_2 = _TypeDSLLoader(Map_nameLoader, 2, "v1.1") -Union_nameLoader = _EnumLoader(("union",), "Union_name") -typedsl_Union_nameLoader_2 = _TypeDSLLoader(Union_nameLoader, 2, "v1.1") -union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype = _UnionLoader( +Array_nameLoader: Final = _EnumLoader(("array",), "Array_name") +typedsl_Array_nameLoader_2: Final = _TypeDSLLoader(Array_nameLoader, 2, "v1.1") +Map_nameLoader: Final = _EnumLoader(("map",), "Map_name") 
+typedsl_Map_nameLoader_2: Final = _TypeDSLLoader(Map_nameLoader, 2, "v1.1") +Union_nameLoader: Final = _EnumLoader(("union",), "Union_name") +typedsl_Union_nameLoader_2: Final = _TypeDSLLoader(Union_nameLoader, 2, "v1.1") +union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( PrimitiveTypeLoader, CWLRecordSchemaLoader, @@ -26633,10 +23247,14 @@ def save( strtype, ) ) -array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype ) -union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype = _UnionLoader( +union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( PrimitiveTypeLoader, CWLRecordSchemaLoader, @@ -26646,57 +23264,65 @@ def save( array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype, ) ) -uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( 
+uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype, False, True, 2, None, ) -typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype, 2, "v1.1", ) -array_of_CWLRecordFieldLoader = _ArrayLoader(CWLRecordFieldLoader) -union_of_None_type_or_array_of_CWLRecordFieldLoader = _UnionLoader( +array_of_CWLRecordFieldLoader: Final = _ArrayLoader(CWLRecordFieldLoader) +union_of_None_type_or_array_of_CWLRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_CWLRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader = _IdMapLoader( +idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader: Final = _IdMapLoader( union_of_None_type_or_array_of_CWLRecordFieldLoader, "name", "type" ) -File_classLoader = _EnumLoader(("File",), "File_class") -uri_File_classLoader_False_True_None_None = 
_URILoader( +File_classLoader: Final = _EnumLoader(("File",), "File_class") +uri_File_classLoader_False_True_None_None: Final = _URILoader( File_classLoader, False, True, None, None ) -uri_union_of_None_type_or_strtype_False_False_None_None = _URILoader( +uri_union_of_None_type_or_strtype_False_False_None_None: Final = _URILoader( union_of_None_type_or_strtype, False, False, None, None ) -union_of_None_type_or_inttype = _UnionLoader( +union_of_None_type_or_inttype: Final = _UnionLoader( ( None_type, inttype, ) ) -union_of_FileLoader_or_DirectoryLoader = _UnionLoader( +union_of_FileLoader_or_DirectoryLoader: Final = _UnionLoader( ( FileLoader, DirectoryLoader, ) ) -array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( +array_of_union_of_FileLoader_or_DirectoryLoader: Final = _ArrayLoader( union_of_FileLoader_or_DirectoryLoader ) -union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( - ( - None_type, - array_of_union_of_FileLoader_or_DirectoryLoader, +union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader: Final = ( + _UnionLoader( + ( + None_type, + array_of_union_of_FileLoader_or_DirectoryLoader, + ) ) ) -secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( +secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader: ( + Final +) = _UnionLoader( ( _SecondaryDSLLoader( union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader @@ -26704,34 +23330,38 @@ def save( union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, ) ) -uri_union_of_None_type_or_strtype_True_False_None_True = _URILoader( +uri_union_of_None_type_or_strtype_True_False_None_True: Final = _URILoader( union_of_None_type_or_strtype, True, False, None, True ) -Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None_None = _URILoader( +Directory_classLoader: Final = 
_EnumLoader(("Directory",), "Directory_class") +uri_Directory_classLoader_False_True_None_None: Final = _URILoader( Directory_classLoader, False, True, None, None ) -union_of_None_type_or_booltype = _UnionLoader( +union_of_None_type_or_booltype: Final = _UnionLoader( ( None_type, booltype, ) ) -union_of_None_type_or_LoadListingEnumLoader = _UnionLoader( +union_of_None_type_or_LoadListingEnumLoader: Final = _UnionLoader( ( None_type, LoadListingEnumLoader, ) ) -array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader) -union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( +array_of_SecondaryFileSchemaLoader: Final = _ArrayLoader(SecondaryFileSchemaLoader) +union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader: ( + Final +) = _UnionLoader( ( None_type, SecondaryFileSchemaLoader, array_of_SecondaryFileSchemaLoader, ) ) -secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( +secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader: ( + Final +) = _UnionLoader( ( _SecondaryDSLLoader( union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader @@ -26739,32 +23369,40 @@ def save( union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, ) ) -union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - strtype, - array_of_strtype, - ExpressionLoader, +union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader: Final = ( + _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ExpressionLoader, + ) ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True = _URILoader( +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True: ( + Final +) = _URILoader( 
union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, True, False, None, True, ) -union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_strtype_or_ExpressionLoader: Final = _UnionLoader( ( None_type, strtype, ExpressionLoader, ) ) -uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True = _URILoader( - union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None, True +uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True: Final = ( + _URILoader( + union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None, True + ) ) -union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, InputRecordSchemaLoader, @@ -26773,10 +23411,14 @@ def save( strtype, ) ) -array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype ) -union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, InputRecordSchemaLoader, @@ -26786,29 +23428,35 @@ def save( 
array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, ) ) -typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, 2, "v1.1", ) -array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader) -union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader( +array_of_InputRecordFieldLoader: Final = _ArrayLoader(InputRecordFieldLoader) +union_of_None_type_or_array_of_InputRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_InputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader: Final = ( + _IdMapLoader(union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type") ) -uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( 
+uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, False, True, 2, None, ) -union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, OutputRecordSchemaLoader, @@ -26817,10 +23465,14 @@ def save( strtype, ) ) -array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype ) -union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, OutputRecordSchemaLoader, @@ -26830,69 +23482,89 @@ def save( 
array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, ) ) -typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, 2, "v1.1", ) -array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader) -union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader( +array_of_OutputRecordFieldLoader: Final = _ArrayLoader(OutputRecordFieldLoader) +union_of_None_type_or_array_of_OutputRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_OutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader: Final = ( + _IdMapLoader(union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type") ) -uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( 
+uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, False, True, 2, None, ) -union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _UnionLoader( - ( - CommandInputParameterLoader, - WorkflowInputParameterLoader, +union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader: Final = ( + _UnionLoader( + ( + CommandInputParameterLoader, + WorkflowInputParameterLoader, + ) ) ) -array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = ( +array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader: Final = ( _ArrayLoader(union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader) ) -idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader = _IdMapLoader( +idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader: ( + Final +) = _IdMapLoader( array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader, "id", "type", ) -union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = _UnionLoader( +union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader: ( + Final +) = _UnionLoader( ( CommandOutputParameterLoader, ExpressionToolOutputParameterLoader, WorkflowOutputParameterLoader, ) ) -array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = _ArrayLoader( 
+array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader: ( + Final +) = _ArrayLoader( union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader ) -idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader = _IdMapLoader( +idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader: ( + Final +) = _IdMapLoader( array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader, "id", "type", ) -union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader = _UnionLoader( +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader: ( + Final +) = _UnionLoader( ( None_type, - 
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, ) ) -idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader = _IdMapLoader( - 
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader, +idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader: ( + Final +) = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, "class", "None", ) 
-union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type = _UnionLoader( +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type: ( + Final +) = _UnionLoader( ( InlineJavascriptRequirementLoader, SchemaDefRequirementLoader, @@ -26911,111 +23583,121 @@ def save( ScatterFeatureRequirementLoader, MultipleInputFeatureRequirementLoader, StepInputExpressionRequirementLoader, - SecretsLoader, - MPIRequirementLoader, - CUDARequirementLoader, - ShmSizeLoader, Any_type, ) ) 
-array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type = _ArrayLoader( - union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type +array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type: ( + Final +) = _ArrayLoader( + 
union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type ) -union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type = _UnionLoader( +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type: ( + Final +) = _UnionLoader( ( None_type, - 
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, ) ) -idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type = _IdMapLoader( - 
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_ShmSizeLoader_or_Any_type, +idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type: ( + Final +) = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, "class", "None", ) -union_of_None_type_or_CWLVersionLoader = _UnionLoader( +union_of_None_type_or_CWLVersionLoader: Final = 
_UnionLoader( ( None_type, CWLVersionLoader, ) ) -uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None = _URILoader( +uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None: Final = _URILoader( union_of_None_type_or_CWLVersionLoader, False, True, None, None ) -InlineJavascriptRequirement_classLoader = _EnumLoader( +InlineJavascriptRequirement_classLoader: Final = _EnumLoader( ("InlineJavascriptRequirement",), "InlineJavascriptRequirement_class" ) -uri_InlineJavascriptRequirement_classLoader_False_True_None_None = _URILoader( +uri_InlineJavascriptRequirement_classLoader_False_True_None_None: Final = _URILoader( InlineJavascriptRequirement_classLoader, False, True, None, None ) -union_of_None_type_or_array_of_strtype = _UnionLoader( +union_of_None_type_or_array_of_strtype: Final = _UnionLoader( ( None_type, array_of_strtype, ) ) -SchemaDefRequirement_classLoader = _EnumLoader( +SchemaDefRequirement_classLoader: Final = _EnumLoader( ("SchemaDefRequirement",), "SchemaDefRequirement_class" ) -uri_SchemaDefRequirement_classLoader_False_True_None_None = _URILoader( +uri_SchemaDefRequirement_classLoader_False_True_None_None: Final = _URILoader( SchemaDefRequirement_classLoader, False, True, None, None ) -union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _UnionLoader( +union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader: ( + Final +) = _UnionLoader( ( CommandInputRecordSchemaLoader, CommandInputEnumSchemaLoader, CommandInputArraySchemaLoader, ) ) -array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _ArrayLoader( +array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader: ( + Final +) = _ArrayLoader( union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader ) 
-union_of_strtype_or_ExpressionLoader = _UnionLoader( +union_of_strtype_or_ExpressionLoader: Final = _UnionLoader( ( strtype, ExpressionLoader, ) ) -union_of_None_type_or_booltype_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_booltype_or_ExpressionLoader: Final = _UnionLoader( ( None_type, booltype, ExpressionLoader, ) ) -LoadListingRequirement_classLoader = _EnumLoader( +LoadListingRequirement_classLoader: Final = _EnumLoader( ("LoadListingRequirement",), "LoadListingRequirement_class" ) -uri_LoadListingRequirement_classLoader_False_True_None_None = _URILoader( +uri_LoadListingRequirement_classLoader_False_True_None_None: Final = _URILoader( LoadListingRequirement_classLoader, False, True, None, None ) -union_of_None_type_or_inttype_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_inttype_or_ExpressionLoader: Final = _UnionLoader( ( None_type, inttype, ExpressionLoader, ) ) -union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( - ( - None_type, - strtype, - ExpressionLoader, - array_of_strtype, +union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype: Final = ( + _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + array_of_strtype, + ) ) ) -union_of_None_type_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_ExpressionLoader: Final = _UnionLoader( ( None_type, ExpressionLoader, ) ) -union_of_None_type_or_CommandLineBindingLoader = _UnionLoader( +union_of_None_type_or_CommandLineBindingLoader: Final = _UnionLoader( ( None_type, CommandLineBindingLoader, ) ) -union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, CommandInputRecordSchemaLoader, @@ -27024,10 +23706,14 @@ def save( strtype, ) ) 
-array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype ) -union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, CommandInputRecordSchemaLoader, @@ -27037,31 +23723,39 @@ def save( array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, ) ) -typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( 
union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, 2, "v1.1", ) -array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader) -union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader( +array_of_CommandInputRecordFieldLoader: Final = _ArrayLoader( + CommandInputRecordFieldLoader +) +union_of_None_type_or_array_of_CommandInputRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_CommandInputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader: Final = ( _IdMapLoader( union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" ) ) -uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( +uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, False, True, 2, None, ) 
-union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, CommandOutputRecordSchemaLoader, @@ -27070,10 +23764,14 @@ def save( strtype, ) ) -array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype ) -union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, CommandOutputRecordSchemaLoader, @@ -27083,37 +23781,45 @@ def save( array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, ) ) 
-typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, 2, "v1.1", ) -union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader( +union_of_None_type_or_CommandOutputBindingLoader: Final = _UnionLoader( ( None_type, CommandOutputBindingLoader, ) ) -array_of_CommandOutputRecordFieldLoader = _ArrayLoader(CommandOutputRecordFieldLoader) -union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader( +array_of_CommandOutputRecordFieldLoader: Final = _ArrayLoader( + CommandOutputRecordFieldLoader +) +union_of_None_type_or_array_of_CommandOutputRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_CommandOutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader: Final = ( _IdMapLoader( union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" ) ) 
-uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( +uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, False, True, 2, None, ) -union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, stdinLoader, @@ -27124,12 +23830,16 @@ def save( array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, ) ) 
-typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, 2, "v1.1", ) -union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, stdoutLoader, @@ -27141,72 +23851,82 @@ def save( array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, ) ) 
-typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, 2, "v1.1", ) -CommandLineTool_classLoader = _EnumLoader(("CommandLineTool",), "CommandLineTool_class") -uri_CommandLineTool_classLoader_False_True_None_None = _URILoader( +CommandLineTool_classLoader: Final = _EnumLoader( + ("CommandLineTool",), "CommandLineTool_class" +) +uri_CommandLineTool_classLoader_False_True_None_None: Final = _URILoader( CommandLineTool_classLoader, False, True, None, None ) -array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader) -idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader( +array_of_CommandInputParameterLoader: Final = _ArrayLoader(CommandInputParameterLoader) +idmap_inputs_array_of_CommandInputParameterLoader: Final = _IdMapLoader( array_of_CommandInputParameterLoader, "id", "type" ) -array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader) -idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader( +array_of_CommandOutputParameterLoader: Final = 
_ArrayLoader( + CommandOutputParameterLoader +) +idmap_outputs_array_of_CommandOutputParameterLoader: Final = _IdMapLoader( array_of_CommandOutputParameterLoader, "id", "type" ) -union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( +union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader: Final = _UnionLoader( ( strtype, ExpressionLoader, CommandLineBindingLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader: Final = ( _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) ) -union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader: ( + Final +) = _UnionLoader( ( None_type, array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, ) ) -array_of_inttype = _ArrayLoader(inttype) -union_of_None_type_or_array_of_inttype = _UnionLoader( +array_of_inttype: Final = _ArrayLoader(inttype) +union_of_None_type_or_array_of_inttype: Final = _UnionLoader( ( None_type, array_of_inttype, ) ) -DockerRequirement_classLoader = _EnumLoader( +DockerRequirement_classLoader: Final = _EnumLoader( ("DockerRequirement",), "DockerRequirement_class" ) -uri_DockerRequirement_classLoader_False_True_None_None = _URILoader( +uri_DockerRequirement_classLoader_False_True_None_None: Final = _URILoader( DockerRequirement_classLoader, False, True, None, None ) -SoftwareRequirement_classLoader = _EnumLoader( +SoftwareRequirement_classLoader: Final = _EnumLoader( ("SoftwareRequirement",), "SoftwareRequirement_class" ) -uri_SoftwareRequirement_classLoader_False_True_None_None = _URILoader( +uri_SoftwareRequirement_classLoader_False_True_None_None: Final = _URILoader( SoftwareRequirement_classLoader, False, True, None, None ) -array_of_SoftwarePackageLoader = 
_ArrayLoader(SoftwarePackageLoader) -idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader( +array_of_SoftwarePackageLoader: Final = _ArrayLoader(SoftwarePackageLoader) +idmap_packages_array_of_SoftwarePackageLoader: Final = _IdMapLoader( array_of_SoftwarePackageLoader, "package", "specs" ) -uri_union_of_None_type_or_array_of_strtype_False_False_None_True = _URILoader( +uri_union_of_None_type_or_array_of_strtype_False_False_None_True: Final = _URILoader( union_of_None_type_or_array_of_strtype, False, False, None, True ) -InitialWorkDirRequirement_classLoader = _EnumLoader( +InitialWorkDirRequirement_classLoader: Final = _EnumLoader( ("InitialWorkDirRequirement",), "InitialWorkDirRequirement_class" ) -uri_InitialWorkDirRequirement_classLoader_False_True_None_None = _URILoader( +uri_InitialWorkDirRequirement_classLoader_False_True_None_None: Final = _URILoader( InitialWorkDirRequirement_classLoader, False, True, None, None ) -union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader: ( + Final +) = _UnionLoader( ( None_type, FileLoader, @@ -27216,225 +23936,240 @@ def save( ExpressionLoader, ) ) -array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader = _ArrayLoader( +array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader: ( + Final +) = _ArrayLoader( union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader ) 
-union_of_array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader_or_ExpressionLoader = _UnionLoader( +union_of_array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader_or_ExpressionLoader: ( + Final +) = _UnionLoader( ( array_of_union_of_None_type_or_FileLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader_or_DirectoryLoader_or_DirentLoader_or_ExpressionLoader, ExpressionLoader, ) ) -EnvVarRequirement_classLoader = _EnumLoader( +EnvVarRequirement_classLoader: Final = _EnumLoader( ("EnvVarRequirement",), "EnvVarRequirement_class" ) -uri_EnvVarRequirement_classLoader_False_True_None_None = _URILoader( +uri_EnvVarRequirement_classLoader_False_True_None_None: Final = _URILoader( EnvVarRequirement_classLoader, False, True, None, None ) -array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader) -idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader( +array_of_EnvironmentDefLoader: Final = _ArrayLoader(EnvironmentDefLoader) +idmap_envDef_array_of_EnvironmentDefLoader: Final = _IdMapLoader( array_of_EnvironmentDefLoader, "envName", "envValue" ) -ShellCommandRequirement_classLoader = _EnumLoader( +ShellCommandRequirement_classLoader: Final = _EnumLoader( ("ShellCommandRequirement",), "ShellCommandRequirement_class" ) -uri_ShellCommandRequirement_classLoader_False_True_None_None = _URILoader( +uri_ShellCommandRequirement_classLoader_False_True_None_None: Final = _URILoader( ShellCommandRequirement_classLoader, False, True, None, None ) -ResourceRequirement_classLoader = _EnumLoader( +ResourceRequirement_classLoader: Final = _EnumLoader( ("ResourceRequirement",), "ResourceRequirement_class" ) -uri_ResourceRequirement_classLoader_False_True_None_None = _URILoader( +uri_ResourceRequirement_classLoader_False_True_None_None: Final = _URILoader( ResourceRequirement_classLoader, 
False, True, None, None ) -WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") -uri_WorkReuse_classLoader_False_True_None_None = _URILoader( +WorkReuse_classLoader: Final = _EnumLoader(("WorkReuse",), "WorkReuse_class") +uri_WorkReuse_classLoader_False_True_None_None: Final = _URILoader( WorkReuse_classLoader, False, True, None, None ) -union_of_booltype_or_ExpressionLoader = _UnionLoader( +union_of_booltype_or_ExpressionLoader: Final = _UnionLoader( ( booltype, ExpressionLoader, ) ) -NetworkAccess_classLoader = _EnumLoader(("NetworkAccess",), "NetworkAccess_class") -uri_NetworkAccess_classLoader_False_True_None_None = _URILoader( +NetworkAccess_classLoader: Final = _EnumLoader( + ("NetworkAccess",), "NetworkAccess_class" +) +uri_NetworkAccess_classLoader_False_True_None_None: Final = _URILoader( NetworkAccess_classLoader, False, True, None, None ) -InplaceUpdateRequirement_classLoader = _EnumLoader( +InplaceUpdateRequirement_classLoader: Final = _EnumLoader( ("InplaceUpdateRequirement",), "InplaceUpdateRequirement_class" ) -uri_InplaceUpdateRequirement_classLoader_False_True_None_None = _URILoader( +uri_InplaceUpdateRequirement_classLoader_False_True_None_None: Final = _URILoader( InplaceUpdateRequirement_classLoader, False, True, None, None ) -ToolTimeLimit_classLoader = _EnumLoader(("ToolTimeLimit",), "ToolTimeLimit_class") -uri_ToolTimeLimit_classLoader_False_True_None_None = _URILoader( +ToolTimeLimit_classLoader: Final = _EnumLoader( + ("ToolTimeLimit",), "ToolTimeLimit_class" +) +uri_ToolTimeLimit_classLoader_False_True_None_None: Final = _URILoader( ToolTimeLimit_classLoader, False, True, None, None ) -union_of_inttype_or_ExpressionLoader = _UnionLoader( +union_of_inttype_or_ExpressionLoader: Final = _UnionLoader( ( inttype, ExpressionLoader, ) ) -union_of_None_type_or_InputBindingLoader = _UnionLoader( +union_of_None_type_or_InputBindingLoader: Final = _UnionLoader( ( None_type, InputBindingLoader, ) ) -ExpressionTool_classLoader = 
_EnumLoader(("ExpressionTool",), "ExpressionTool_class") -uri_ExpressionTool_classLoader_False_True_None_None = _URILoader( +ExpressionTool_classLoader: Final = _EnumLoader( + ("ExpressionTool",), "ExpressionTool_class" +) +uri_ExpressionTool_classLoader_False_True_None_None: Final = _URILoader( ExpressionTool_classLoader, False, True, None, None ) -array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader) -idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( +array_of_WorkflowInputParameterLoader: Final = _ArrayLoader( + WorkflowInputParameterLoader +) +idmap_inputs_array_of_WorkflowInputParameterLoader: Final = _IdMapLoader( array_of_WorkflowInputParameterLoader, "id", "type" ) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( +array_of_ExpressionToolOutputParameterLoader: Final = _ArrayLoader( ExpressionToolOutputParameterLoader ) -idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( +idmap_outputs_array_of_ExpressionToolOutputParameterLoader: Final = _IdMapLoader( array_of_ExpressionToolOutputParameterLoader, "id", "type" ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1, None +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None: Final = ( + _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1, None) ) -union_of_None_type_or_LinkMergeMethodLoader = _UnionLoader( +union_of_None_type_or_LinkMergeMethodLoader: Final = _UnionLoader( ( None_type, LinkMergeMethodLoader, ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2, None +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None: Final = ( + _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2, None) ) -array_of_WorkflowStepInputLoader = 
_ArrayLoader(WorkflowStepInputLoader) -idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader( +array_of_WorkflowStepInputLoader: Final = _ArrayLoader(WorkflowStepInputLoader) +idmap_in__array_of_WorkflowStepInputLoader: Final = _IdMapLoader( array_of_WorkflowStepInputLoader, "id", "source" ) -union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( +union_of_strtype_or_WorkflowStepOutputLoader: Final = _UnionLoader( ( strtype, WorkflowStepOutputLoader, ) ) -array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader( +array_of_union_of_strtype_or_WorkflowStepOutputLoader: Final = _ArrayLoader( union_of_strtype_or_WorkflowStepOutputLoader ) -union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( +union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader: Final = _UnionLoader( (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) ) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None = _URILoader( +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None: ( + Final +) = _URILoader( union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, True, False, None, None, ) -array_of_Any_type = _ArrayLoader(Any_type) -union_of_None_type_or_array_of_Any_type = _UnionLoader( +array_of_Any_type: Final = _ArrayLoader(Any_type) +union_of_None_type_or_array_of_Any_type: Final = _UnionLoader( ( None_type, array_of_Any_type, ) ) -idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( +idmap_hints_union_of_None_type_or_array_of_Any_type: Final = _IdMapLoader( union_of_None_type_or_array_of_Any_type, "class", "None" ) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader = _UnionLoader( +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader: ( + Final +) = _UnionLoader( ( strtype, CommandLineToolLoader, ExpressionToolLoader, WorkflowLoader, - ProcessGeneratorLoader, ) 
) -uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_False_False_None_None = _URILoader( - union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader, +uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_False_False_None_None: ( + Final +) = _URILoader( + union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, False, False, None, None, ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0, None +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None: Final = ( + _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0, None) ) -union_of_None_type_or_ScatterMethodLoader = _UnionLoader( +union_of_None_type_or_ScatterMethodLoader: Final = _UnionLoader( ( None_type, ScatterMethodLoader, ) ) -uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None = _URILoader( +uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None: Final = _URILoader( union_of_None_type_or_ScatterMethodLoader, False, True, None, None ) -Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") -uri_Workflow_classLoader_False_True_None_None = _URILoader( +Workflow_classLoader: Final = _EnumLoader(("Workflow",), "Workflow_class") +uri_Workflow_classLoader_False_True_None_None: Final = _URILoader( Workflow_classLoader, False, True, None, None ) -array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) -idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( +array_of_WorkflowOutputParameterLoader: Final = _ArrayLoader( + WorkflowOutputParameterLoader +) +idmap_outputs_array_of_WorkflowOutputParameterLoader: Final = _IdMapLoader( array_of_WorkflowOutputParameterLoader, "id", "type" ) -array_of_WorkflowStepLoader = 
_ArrayLoader(WorkflowStepLoader) -union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,)) -idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader( +array_of_WorkflowStepLoader: Final = _ArrayLoader(WorkflowStepLoader) +union_of_array_of_WorkflowStepLoader: Final = _UnionLoader( + (array_of_WorkflowStepLoader,) +) +idmap_steps_union_of_array_of_WorkflowStepLoader: Final = _IdMapLoader( union_of_array_of_WorkflowStepLoader, "id", "None" ) -SubworkflowFeatureRequirement_classLoader = _EnumLoader( +SubworkflowFeatureRequirement_classLoader: Final = _EnumLoader( ("SubworkflowFeatureRequirement",), "SubworkflowFeatureRequirement_class" ) -uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None = _URILoader( +uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None: Final = _URILoader( SubworkflowFeatureRequirement_classLoader, False, True, None, None ) -ScatterFeatureRequirement_classLoader = _EnumLoader( +ScatterFeatureRequirement_classLoader: Final = _EnumLoader( ("ScatterFeatureRequirement",), "ScatterFeatureRequirement_class" ) -uri_ScatterFeatureRequirement_classLoader_False_True_None_None = _URILoader( +uri_ScatterFeatureRequirement_classLoader_False_True_None_None: Final = _URILoader( ScatterFeatureRequirement_classLoader, False, True, None, None ) -MultipleInputFeatureRequirement_classLoader = _EnumLoader( +MultipleInputFeatureRequirement_classLoader: Final = _EnumLoader( ("MultipleInputFeatureRequirement",), "MultipleInputFeatureRequirement_class" ) -uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None = _URILoader( - MultipleInputFeatureRequirement_classLoader, False, True, None, None +uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None: Final = ( + _URILoader(MultipleInputFeatureRequirement_classLoader, False, True, None, None) ) -StepInputExpressionRequirement_classLoader = _EnumLoader( +StepInputExpressionRequirement_classLoader: Final = _EnumLoader( 
("StepInputExpressionRequirement",), "StepInputExpressionRequirement_class" ) -uri_StepInputExpressionRequirement_classLoader_False_True_None_None = _URILoader( +uri_StepInputExpressionRequirement_classLoader_False_True_None_None: Final = _URILoader( StepInputExpressionRequirement_classLoader, False, True, None, None ) -uri_strtype_False_True_None_None = _URILoader(strtype, False, True, None, None) -uri_array_of_strtype_False_False_0_None = _URILoader( - array_of_strtype, False, False, 0, None -) -union_of_strtype_or_array_of_strtype = _UnionLoader( - ( - strtype, - array_of_strtype, - ) -) -union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader = _UnionLoader( - ( - CommandLineToolLoader, - ExpressionToolLoader, - WorkflowLoader, - ProcessGeneratorLoader, +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader: Final = ( + _UnionLoader( + ( + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + ) ) ) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader = _ArrayLoader( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader: ( + Final +) = _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader ) -union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader = _UnionLoader( +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader: ( + Final +) = _UnionLoader( ( CommandLineToolLoader, ExpressionToolLoader, WorkflowLoader, - ProcessGeneratorLoader, - 
array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader, + array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, ) ) @@ -27454,15 +24189,15 @@ def save( def load_document( doc: Any, - baseuri: Optional[str] = None, - loadingOptions: Optional[LoadingOptions] = None, + baseuri: str | None = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: if baseuri is None: baseuri = file_uri(os.getcwd()) + "/" if loadingOptions is None: loadingOptions = LoadingOptions() result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader, + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, doc, baseuri, loadingOptions, @@ -27472,16 +24207,16 @@ def load_document( def load_document_with_metadata( doc: Any, - baseuri: Optional[str] = None, - loadingOptions: Optional[LoadingOptions] = None, - addl_metadata_fields: Optional[MutableSequence[str]] = None, + baseuri: str | None = None, + loadingOptions: LoadingOptions | None = None, + addl_metadata_fields: MutableSequence[str] | None = None, ) -> Any: if baseuri is None: baseuri = file_uri(os.getcwd()) + "/" if loadingOptions is None: loadingOptions = LoadingOptions(fileuri=baseuri) return _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader, + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, doc, baseuri, loadingOptions, @@ -27492,7 +24227,7 @@ def load_document_with_metadata( def 
load_document_by_string( string: Any, uri: str, - loadingOptions: Optional[LoadingOptions] = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: yaml = yaml_no_ts() result = yaml.load(string) @@ -27502,7 +24237,7 @@ def load_document_by_string( loadingOptions = LoadingOptions(fileuri=uri) result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader, + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, result, uri, loadingOptions, @@ -27513,7 +24248,7 @@ def load_document_by_string( def load_document_by_yaml( yaml: Any, uri: str, - loadingOptions: Optional[LoadingOptions] = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: """ Shortcut to load via a YAML object. @@ -27525,7 +24260,7 @@ def load_document_by_yaml( loadingOptions = LoadingOptions(fileuri=uri) result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_ProcessGeneratorLoader, + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader, yaml, uri, loadingOptions, diff --git a/src/cwl_utils/parser/cwl_v1_1_utils.py b/src/cwl_utils/parser/cwl_v1_1_utils.py index d1aa7169..df8b790c 100644 --- a/src/cwl_utils/parser/cwl_v1_1_utils.py +++ b/src/cwl_utils/parser/cwl_v1_1_utils.py @@ -9,6 +9,8 @@ from urllib.parse import urldefrag from schema_salad.exceptions import ValidationException +from schema_salad.metaschema import RecordSchema, ArraySchema +from schema_salad.runtime import shortname, LoadingOptions, save, file_uri from 
schema_salad.sourceline import SourceLine, add_lc_filename from schema_salad.utils import aslist, json_dumps, yaml_no_ts @@ -26,7 +28,7 @@ def _compare_records( - src: cwl.RecordSchema, sink: cwl.RecordSchema, strict: bool = False + src: RecordSchema, sink: RecordSchema, strict: bool = False ) -> bool: """ Compare two records, ensuring they have compatible fields. @@ -34,10 +36,8 @@ def _compare_records( This handles normalizing record names, which will be relative to workflow step, so that they can be compared. """ - srcfields = {cwl.shortname(field.name): field.type_ for field in (src.fields or {})} - sinkfields = { - cwl.shortname(field.name): field.type_ for field in (sink.fields or {}) - } + srcfields = {shortname(field.name): field.type_ for field in (src.fields or {})} + sinkfields = {shortname(field.name): field.type_ for field in (sink.fields or {})} for key in sinkfields.keys(): if ( not can_assign_src_to_sink( @@ -60,14 +60,14 @@ def _compare_records( def _compare_type(type1: Any, type2: Any) -> bool: match (type1, type1): - case cwl.ArraySchema() as t1, cwl.ArraySchema() as t2: + case ArraySchema() as t1, ArraySchema() as t2: return _compare_type(t1.items, t2.items) - case cwl.RecordSchema(), cwl.RecordSchema(): + case RecordSchema(), RecordSchema(): fields1 = { - cwl.shortname(field.name): field.type_ for field in (type1.fields or {}) + shortname(field.name): field.type_ for field in (type1.fields or {}) } fields2 = { - cwl.shortname(field.name): field.type_ for field in (type2.fields or {}) + shortname(field.name): field.type_ for field in (type2.fields or {}) } if fields1.keys() != fields2.keys(): return False @@ -85,9 +85,9 @@ def _compare_type(type1: Any, type2: Any) -> bool: def _inputfile_load( doc: str | MutableMapping[str, Any] | MutableSequence[Any], baseuri: str, - loadingOptions: cwl.LoadingOptions, + loadingOptions: LoadingOptions, addl_metadata_fields: MutableSequence[str] | None = None, -) -> tuple[Any, cwl.LoadingOptions]: +) -> 
tuple[Any, LoadingOptions]: loader = cwl.CWLInputFileLoader match doc: case str(): @@ -101,9 +101,7 @@ def _inputfile_load( yaml = yaml_no_ts() result = yaml.load(textIO) add_lc_filename(result, doc_url) - loadingOptions = cwl.LoadingOptions( - copyfrom=loadingOptions, fileuri=doc_url - ) + loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url) _inputfile_load( result, doc_url, @@ -117,7 +115,7 @@ def _inputfile_load( if mf in doc: addl_metadata[mf] = doc[mf] - loadingOptions = cwl.LoadingOptions( + loadingOptions = LoadingOptions( copyfrom=loadingOptions, baseuri=baseuri, addl_metadata=addl_metadata, @@ -157,9 +155,9 @@ def can_assign_src_to_sink(src: Any, sink: Any, strict: bool = False) -> bool: """ if "Any" in (src, sink): return True - if isinstance(src, cwl.ArraySchema) and isinstance(sink, cwl.ArraySchema): + if isinstance(src, ArraySchema) and isinstance(sink, ArraySchema): return can_assign_src_to_sink(src.items, sink.items, strict) - if isinstance(src, cwl.RecordSchema) and isinstance(sink, cwl.RecordSchema): + if isinstance(src, RecordSchema) and isinstance(sink, RecordSchema): return _compare_records(src, sink, strict) if isinstance(src, MutableSequence): if strict: @@ -245,7 +243,7 @@ def check_types( return "exception" if linkMerge == "merge_nested": return check_types( - cwl.ArraySchema(items=srctype, type_="array"), sinktype, None, None + ArraySchema(items=srctype, type_="array"), sinktype, None, None ) if linkMerge == "merge_flattened": return check_types(merge_flatten_type(srctype), sinktype, None, None) @@ -316,13 +314,13 @@ def convert_stdstreams_to_files(clt: cwl.CommandLineTool) -> None: def load_inputfile( doc: Any, baseuri: str | None = None, - loadingOptions: cwl.LoadingOptions | None = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: """Load a CWL v1.1 input file from a serialized YAML string or a YAML object.""" if baseuri is None: - baseuri = cwl.file_uri(str(Path.cwd())) + "/" + baseuri = 
file_uri(str(Path.cwd())) + "/" if loadingOptions is None: - loadingOptions = cwl.LoadingOptions() + loadingOptions = LoadingOptions() result, metadata = _inputfile_load( doc, @@ -335,14 +333,14 @@ def load_inputfile( def load_inputfile_by_string( string: Any, uri: str, - loadingOptions: cwl.LoadingOptions | None = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: """Load a CWL v1.1 input file from a serialized YAML string.""" result = yaml_no_ts().load(string) add_lc_filename(result, uri) if loadingOptions is None: - loadingOptions = cwl.LoadingOptions(fileuri=uri) + loadingOptions = LoadingOptions(fileuri=uri) result, metadata = _inputfile_load( result, @@ -355,13 +353,13 @@ def load_inputfile_by_string( def load_inputfile_by_yaml( yaml: Any, uri: str, - loadingOptions: cwl.LoadingOptions | None = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: """Load a CWL v1.1 input file from a YAML object.""" add_lc_filename(yaml, uri) if loadingOptions is None: - loadingOptions = cwl.LoadingOptions(fileuri=uri) + loadingOptions = LoadingOptions(fileuri=uri) result, metadata = _inputfile_load( yaml, @@ -375,9 +373,9 @@ def merge_flatten_type(src: Any) -> Any: """Return the merge flattened type of the source type.""" if isinstance(src, MutableSequence): return [merge_flatten_type(t) for t in src] - if isinstance(src, cwl.ArraySchema): + if isinstance(src, ArraySchema): return src - return cwl.ArraySchema(type_="array", items=src) + return ArraySchema(type_="array", items=src) def type_for_step_input( @@ -394,7 +392,7 @@ def type_for_step_input( if cast(str, step_input.id).split("#")[-1] == in_.id.split("#")[-1]: input_type = step_input.type_ if step.scatter is not None and in_.id in aslist(step.scatter): - input_type = cwl.ArraySchema(items=input_type, type_="array") + input_type = ArraySchema(items=input_type, type_="array") return input_type return "Any" @@ -416,16 +414,14 @@ def type_for_step_output( if step.scatter is not None: if 
step.scatterMethod == "nested_crossproduct": for _ in range(len(aslist(step.scatter))): - output_type = cwl.ArraySchema( - items=output_type, type_="array" - ) + output_type = ArraySchema(items=output_type, type_="array") else: - output_type = cwl.ArraySchema(items=output_type, type_="array") + output_type = ArraySchema(items=output_type, type_="array") return output_type raise ValidationException( "param {} not found in {}.".format( sourcename, - yaml_dumps(cwl.save(step)), + yaml_dumps(save(step)), ) ) @@ -444,11 +440,11 @@ def type_for_source( if scatter_context[0] is not None: if scatter_context[0][1] == "nested_crossproduct": for _ in range(scatter_context[0][0]): - new_type = cwl.ArraySchema(items=new_type, type_="array") + new_type = ArraySchema(items=new_type, type_="array") else: - new_type = cwl.ArraySchema(items=new_type, type_="array") + new_type = ArraySchema(items=new_type, type_="array") if linkMerge == "merge_nested": - new_type = cwl.ArraySchema(items=new_type, type_="array") + new_type = ArraySchema(items=new_type, type_="array") elif linkMerge == "merge_flattened": new_type = merge_flatten_type(new_type) return new_type @@ -466,18 +462,18 @@ def type_for_source( if sc is not None: if sc[1] == "nested_crossproduct": for _ in range(sc[0]): - cur_type = cwl.ArraySchema(items=cur_type, type_="array") + cur_type = ArraySchema(items=cur_type, type_="array") else: - cur_type = cwl.ArraySchema(items=cur_type, type_="array") + cur_type = ArraySchema(items=cur_type, type_="array") new_type.append(cur_type) if len(new_type) == 1: new_type = new_type[0] if linkMerge == "merge_nested": - return cwl.ArraySchema(items=new_type, type_="array") + return ArraySchema(items=new_type, type_="array") elif linkMerge == "merge_flattened": return merge_flatten_type(new_type) elif isinstance(sourcenames, list) and len(sourcenames) > 1: - return cwl.ArraySchema(items=new_type, type_="array") + return ArraySchema(items=new_type, type_="array") else: return new_type @@ 
-572,7 +568,7 @@ def param_for_source_id( raise WorkflowException( "param {} not found in {}\n{}.".format( sourcename, - yaml_dumps(cwl.save(process)), - (f" or\n {yaml_dumps(cwl.save(parent))}" if parent is not None else ""), + yaml_dumps(save(process)), + (f" or\n {yaml_dumps(save(parent))}" if parent is not None else ""), ) ) diff --git a/src/cwl_utils/parser/cwl_v1_2.py b/src/cwl_utils/parser/cwl_v1_2.py index 85f744fc..201d5175 100644 --- a/src/cwl_utils/parser/cwl_v1_2.py +++ b/src/cwl_utils/parser/cwl_v1_2.py @@ -2,430 +2,51 @@ # This file was autogenerated using schema-salad-tool --codegen=python # The code itself is released under the Apache 2.0 license and the help text is # subject to the license of the original schema. +from __future__ import annotations -import copy -import logging import os -import pathlib -import tempfile -import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 -import xml.sax # nosec -from abc import ABC, abstractmethod -from collections.abc import MutableMapping, MutableSequence, Sequence +import sys +import uuid as _uuid__ +from collections.abc import Collection +from typing import ClassVar + +from schema_salad.runtime import ( + Saveable, + file_uri, + parse_errors, + prefix_url, + save, + save_relative_uri, +) + +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self + +import schema_salad.metaschema + +import copy +from collections.abc import MutableSequence, Sequence, MutableMapping from io import StringIO from itertools import chain -from typing import Any, Final, Optional, Union, cast -from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit -from urllib.request import pathname2url +from typing import Any, Final, cast, Generic +from urllib.parse import urldefrag, urlsplit, urlunsplit -from rdflib import Graph -from rdflib.plugins.parsers.notation3 import BadSyntax from ruamel.yaml.comments import CommentedMap -from schema_salad.exceptions import 
SchemaSaladException, ValidationException -from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher +from schema_salad.exceptions import ValidationException, SchemaSaladException +from schema_salad.runtime import ( + LoadingOptions, + convert_typing, + extract_type, + SaveableType, +) from schema_salad.sourceline import SourceLine, add_lc_filename -from schema_salad.utils import CacheType, yaml_no_ts # requires schema-salad v8.2+ - -_vocab: dict[str, str] = {} -_rvocab: dict[str, str] = {} - -_logger: Final = logging.getLogger("salad") - - -IdxType = MutableMapping[str, tuple[Any, "LoadingOptions"]] - - -class LoadingOptions: - idx: Final[IdxType] - fileuri: Final[Optional[str]] - baseuri: Final[str] - namespaces: Final[MutableMapping[str, str]] - schemas: Final[MutableSequence[str]] - original_doc: Final[Optional[Any]] - addl_metadata: Final[MutableMapping[str, Any]] - fetcher: Final[Fetcher] - vocab: Final[dict[str, str]] - rvocab: Final[dict[str, str]] - cache: Final[CacheType] - imports: Final[list[str]] - includes: Final[list[str]] - no_link_check: Final[Optional[bool]] - container: Final[Optional[str]] - - def __init__( - self, - fetcher: Optional[Fetcher] = None, - namespaces: Optional[dict[str, str]] = None, - schemas: Optional[list[str]] = None, - fileuri: Optional[str] = None, - copyfrom: Optional["LoadingOptions"] = None, - original_doc: Optional[Any] = None, - addl_metadata: Optional[dict[str, str]] = None, - baseuri: Optional[str] = None, - idx: Optional[IdxType] = None, - imports: Optional[list[str]] = None, - includes: Optional[list[str]] = None, - no_link_check: Optional[bool] = None, - container: Optional[str] = None, - ) -> None: - """Create a LoadingOptions object.""" - self.original_doc = original_doc - - if idx is not None: - temp_idx = idx - else: - temp_idx = copyfrom.idx if copyfrom is not None else {} - self.idx = temp_idx - - if fileuri is not None: - temp_fileuri: Optional[str] = fileuri - else: - temp_fileuri = 
copyfrom.fileuri if copyfrom is not None else None - self.fileuri = temp_fileuri - - if baseuri is not None: - temp_baseuri = baseuri - else: - temp_baseuri = copyfrom.baseuri if copyfrom is not None else "" - self.baseuri = temp_baseuri - - if namespaces is not None: - temp_namespaces: MutableMapping[str, str] = namespaces - else: - temp_namespaces = copyfrom.namespaces if copyfrom is not None else {} - self.namespaces = temp_namespaces - - if schemas is not None: - temp_schemas: MutableSequence[str] = schemas - else: - temp_schemas = copyfrom.schemas if copyfrom is not None else [] - self.schemas = temp_schemas - - if addl_metadata is not None: - temp_addl_metadata: MutableMapping[str, Any] = addl_metadata - else: - temp_addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {} - self.addl_metadata = temp_addl_metadata - - if imports is not None: - temp_imports = imports - else: - temp_imports = copyfrom.imports if copyfrom is not None else [] - self.imports = temp_imports - - if includes is not None: - temp_includes = includes - else: - temp_includes = copyfrom.includes if copyfrom is not None else [] - self.includes = temp_includes - - if no_link_check is not None: - temp_no_link_check: Optional[bool] = no_link_check - else: - temp_no_link_check = copyfrom.no_link_check if copyfrom is not None else False - self.no_link_check = temp_no_link_check - - if container is not None: - temp_container: Optional[str] = container - else: - temp_container = copyfrom.container if copyfrom is not None else None - self.container = temp_container - - if fetcher is not None: - temp_fetcher = fetcher - elif copyfrom is not None: - temp_fetcher = copyfrom.fetcher - else: - import requests - from cachecontrol.caches import SeparateBodyFileCache - from cachecontrol.wrapper import CacheControl - - root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) - session = CacheControl( - requests.Session(), - cache=SeparateBodyFileCache(root / ".cache" / "salad"), - ) 
- temp_fetcher = DefaultFetcher({}, session) - self.fetcher = temp_fetcher - - self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {} - - if self.namespaces != {}: - temp_vocab = _vocab.copy() - temp_rvocab = _rvocab.copy() - for k, v in self.namespaces.items(): - temp_vocab[k] = v - temp_rvocab[v] = k - else: - temp_vocab = _vocab - temp_rvocab = _rvocab - self.vocab = temp_vocab - self.rvocab = temp_rvocab - - @property - def graph(self) -> Graph: - """Generate a merged rdflib.Graph from all entries in self.schemas.""" - graph = Graph() - if not self.schemas: - return graph - key: Final = str(hash(tuple(self.schemas))) - if key in self.cache: - return cast(Graph, self.cache[key]) - for schema in self.schemas: - fetchurl = ( - self.fetcher.urljoin(self.fileuri, schema) - if self.fileuri is not None - else pathlib.Path(schema).resolve().as_uri() - ) - if fetchurl not in self.cache or self.cache[fetchurl] is True: - _logger.debug("Getting external schema %s", fetchurl) - try: - content = self.fetcher.fetch_text(fetchurl) - except Exception as e: - _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e)) - continue - newGraph = Graph() - err_msg = "unknown error" - for fmt in ["xml", "turtle"]: - try: - newGraph.parse(data=content, format=fmt, publicID=str(fetchurl)) - self.cache[fetchurl] = newGraph - graph += newGraph - break - except (xml.sax.SAXParseException, TypeError, BadSyntax) as e: - err_msg = str(e) - else: - _logger.warning("Could not load extension schema %s: %s", fetchurl, err_msg) - self.cache[key] = graph - return graph - - -class Saveable(ABC): - """Mark classes than have a save() and fromDoc() function.""" - - @classmethod - @abstractmethod - def fromDoc( - cls, - _doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - ) -> "Saveable": - """Construct this object from the result of yaml.load().""" - - @abstractmethod - def save( - self, top: bool = False, 
base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - """Convert this object to a JSON/YAML friendly dictionary.""" - - -def load_field( - val: Union[str, dict[str, str]], - fieldtype: "_Loader", - baseuri: str, - loadingOptions: LoadingOptions, - lc: Optional[list[Any]] = None, -) -> Any: - """Load field.""" - if isinstance(val, MutableMapping): - if "$import" in val: - if loadingOptions.fileuri is None: - raise SchemaSaladException("Cannot load $import without fileuri") - url1: Final = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) - result, metadata = _document_load_by_url( - fieldtype, - url1, - loadingOptions, - ) - loadingOptions.imports.append(url1) - return result - if "$include" in val: - if loadingOptions.fileuri is None: - raise SchemaSaladException("Cannot load $import without fileuri") - url2: Final = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) - val = loadingOptions.fetcher.fetch_text(url2) - loadingOptions.includes.append(url2) - return fieldtype.load(val, baseuri, loadingOptions, lc=lc) - - -save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] - - -def extract_type(val_type: type[Any]) -> str: - """Take a type of value, and extracts the value as a string.""" - val_str: Final = str(val_type) - return val_str.split("'")[1] - - -def convert_typing(val_type: str) -> str: - """Normalize type names to schema-salad types.""" - if "None" in val_type: - return "null" - if "CommentedSeq" in val_type or "list" in val_type: - return "array" - if "CommentedMap" in val_type or "dict" in val_type: - return "object" - if "False" in val_type or "True" in val_type: - return "boolean" - return val_type - - -def parse_errors(error_message: str) -> tuple[str, str, str]: - """Parse error messages from several loaders into one error message.""" - if not error_message.startswith("Expected"): - return error_message, "", "" - vals: Final = 
error_message.split("\n")
-    if len(vals) == 1:
-        return error_message, "", ""
-    types1: Final = set()
-    for val in vals:
-        individual_vals = val.split(" ")
-        if val == "":
-            continue
-        if individual_vals[1] == "one":
-            individual_vals = val.split("(")[1].split(",")
-            for t in individual_vals:
-                types1.add(t.strip(" ").strip(")\n"))
-        elif individual_vals[2] == "<class":
-            types1.add(individual_vals[3].strip(">").replace("'", ""))
-        elif individual_vals[0] == "Value":
-            types1.add(individual_vals[-1].strip("."))
-        else:
-            types1.add(individual_vals[1].replace(",", ""))
-    types2: Final = {val for val in types1 if val != "NoneType"}
-    if "str" in types2:
-        types3 = {convert_typing(val) for val in types2 if "'" not in val}
-    else:
-        types3 = types2
-    to_print = ""
-    for val in types3:
-        if "'" in val:
-            to_print = "value" if len(types3) == 1 else "values"
-
-    if to_print == "":
-        to_print = "type" if len(types3) == 1 else "types"
-
-    verb_tensage: Final = "is" if len(types3) == 1 else "are"
-
-    return str(types3).replace("{", "(").replace("}", ")").replace("'", ""), to_print, verb_tensage
-
-
-def save(
-    val: Any,
-    top: bool = True,
-    base_url: str = "",
-    relative_uris: bool = True,
-) -> save_type:
-    if isinstance(val, Saveable):
-        return val.save(top=top, base_url=base_url, relative_uris=relative_uris)
-    if isinstance(val, MutableSequence):
-        return [save(v, top=False, base_url=base_url, relative_uris=relative_uris) for v in val]
-    if isinstance(val, MutableMapping):
-        newdict: Final = {}
-        for key in val:
-            newdict[key] = save(val[key], top=False, base_url=base_url, relative_uris=relative_uris)
-        return newdict
-    if val is None or isinstance(val, (int, float, bool, str)):
-        return val
-    raise Exception("Not Saveable: %s" % type(val))
-
-
-def save_with_metadata(
-    val: Any,
-    valLoadingOpts: LoadingOptions,
-    top: bool = True,
-    base_url: str = "",
-    relative_uris: bool = True,
-) -> save_type:
-    """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level."""
-    saved_val: Final = 
save(val, top, base_url, relative_uris) - newdict: MutableMapping[str, Any] = {} - if isinstance(saved_val, MutableSequence): - newdict = {"$graph": saved_val} - elif isinstance(saved_val, MutableMapping): - newdict = saved_val - - if valLoadingOpts.namespaces: - newdict["$namespaces"] = valLoadingOpts.namespaces - if valLoadingOpts.schemas: - newdict["$schemas"] = valLoadingOpts.schemas - if valLoadingOpts.baseuri: - newdict["$base"] = valLoadingOpts.baseuri - for k, v in valLoadingOpts.addl_metadata.items(): - if k not in newdict: - newdict[k] = v - - return newdict - - -def expand_url( - url: str, - base_url: str, - loadingOptions: LoadingOptions, - scoped_id: bool = False, - vocab_term: bool = False, - scoped_ref: Optional[int] = None, -) -> str: - if url in ("@id", "@type"): - return url - - if vocab_term and url in loadingOptions.vocab: - return url - - if bool(loadingOptions.vocab) and ":" in url: - prefix: Final = url.split(":")[0] - if prefix in loadingOptions.vocab: - url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] - - split1: Final = urlsplit(url) - - if ( - (bool(split1.scheme) and split1.scheme in loadingOptions.fetcher.supported_schemes()) - or url.startswith("$(") - or url.startswith("${") - ): - pass - elif scoped_id and not bool(split1.fragment): - splitbase1: Final = urlsplit(base_url) - frg: str - if bool(splitbase1.fragment): - frg = splitbase1.fragment + "/" + split1.path - else: - frg = split1.path - pt: Final = splitbase1.path if splitbase1.path != "" else "/" - url = urlunsplit((splitbase1.scheme, splitbase1.netloc, pt, splitbase1.query, frg)) - elif scoped_ref is not None and not bool(split1.fragment): - splitbase2: Final = urlsplit(base_url) - sp = splitbase2.fragment.split("/") - n = scoped_ref - while n > 0 and len(sp) > 0: - sp.pop() - n -= 1 - sp.append(url) - url = urlunsplit( - ( - splitbase2.scheme, - splitbase2.netloc, - splitbase2.path, - splitbase2.query, - "/".join(sp), - ) - ) - else: - url = 
loadingOptions.fetcher.urljoin(base_url, url) - - if vocab_term: - split2: Final = urlsplit(url) - if bool(split2.scheme): - if url in loadingOptions.rvocab: - return loadingOptions.rvocab[url] - else: - raise ValidationException(f"Term {url!r} not in vocabulary") +from schema_salad.utils import yaml_no_ts # requires schema-salad v8.2+ - return url +_vocab: Final[dict[str, str]] = {} +_rvocab: Final[dict[str, str]] = {} class _Loader: @@ -434,9 +55,9 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> Any | None: pass @@ -446,8 +67,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if doc is not None: return doc @@ -455,7 +76,7 @@ def load( class _PrimitiveLoader(_Loader): - def __init__(self, tp: Union[type, tuple[type[str], type[str]]]) -> None: + def __init__(self, tp: type | tuple[type[str], type[str]]) -> None: self.tp: Final = tp def load( @@ -463,8 +84,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if not isinstance(doc, self.tp): raise ValidationException(f"Expected a {self.tp} but got {doc.__class__.__name__}") @@ -483,9 +104,9 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> list[Any]: if not isinstance(doc, MutableSequence): raise ValidationException( f"Value is a {convert_typing(extract_type(type(doc)))}, " @@ -496,7 +117,7 @@ def load( fields: Final[list[str]] = [] for i in range(0, len(doc)): try: - lf = load_field( + lf = _load_field( 
doc[i], _UnionLoader([self, self.items]), baseuri, loadingOptions, lc=lc ) flatten = loadingOptions.container != "@list" @@ -535,9 +156,9 @@ class _MapLoader(_Loader): def __init__( self, values: _Loader, - name: Optional[str] = None, - container: Optional[str] = None, - no_link_check: Optional[bool] = None, + name: str | None = None, + container: str | None = None, + no_link_check: bool | None = None, ) -> None: self.values: Final = values self.name: Final = name @@ -549,9 +170,9 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> dict[str, Any]: if not isinstance(doc, MutableMapping): raise ValidationException(f"Expected a map, was {type(doc)}") if self.container is not None or self.no_link_check is not None: @@ -562,7 +183,7 @@ def load( errors: Final[list[SchemaSaladException]] = [] for k, v in doc.items(): try: - lf = load_field(v, self.values, baseuri, loadingOptions, lc) + lf = _load_field(v, self.values, baseuri, loadingOptions, lc) r[k] = lf except ValidationException as e: errors.append(e.with_sourceline(SourceLine(doc, k, str))) @@ -584,11 +205,11 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> str: if doc in self.symbols: - return doc + return cast(str, doc) raise ValidationException(f"Expected one of {self.symbols}") def __repr__(self) -> str: @@ -604,75 +225,76 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: r: Final[list[dict[str, Any]]] = [] - if isinstance(doc, MutableSequence): - for d in doc: - if isinstance(d, str): - if d.endswith("?"): - r.append({"pattern": d[:-1], "required": 
False}) - else: - r.append({"pattern": d}) - elif isinstance(d, dict): - new_dict1: dict[str, Any] = {} - dict_copy = copy.deepcopy(d) - if "pattern" in dict_copy: - new_dict1["pattern"] = dict_copy.pop("pattern") - else: - raise ValidationException( - f"Missing pattern in secondaryFiles specification entry: {d}" + match doc: + case MutableSequence() as dlist: + for d in dlist: + if isinstance(d, str): + if d.endswith("?"): + r.append({"pattern": d[:-1], "required": False}) + else: + r.append({"pattern": d}) + elif isinstance(d, dict): + new_dict1: dict[str, Any] = {} + dict_copy = copy.deepcopy(d) + if "pattern" in dict_copy: + new_dict1["pattern"] = dict_copy.pop("pattern") + else: + raise ValidationException( + f"Missing pattern in secondaryFiles specification entry: {d}" + ) + new_dict1["required"] = ( + dict_copy.pop("required") if "required" in dict_copy else None ) - new_dict1["required"] = ( - dict_copy.pop("required") if "required" in dict_copy else None - ) - if len(dict_copy): - raise ValidationException( - "Unallowed values in secondaryFiles specification entry: {}".format( - dict_copy + if len(dict_copy): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + dict_copy + ) ) - ) - r.append(new_dict1) + r.append(new_dict1) + else: + raise ValidationException( + "Expected a string or sequence of (strings or mappings)." + ) + case MutableMapping() as decl: + new_dict2 = {} + doc_copy = copy.deepcopy(decl) + if "pattern" in doc_copy: + new_dict2["pattern"] = doc_copy.pop("pattern") else: raise ValidationException( - "Expected a string or sequence of (strings or mappings)." 
+ f"Missing pattern in secondaryFiles specification entry: {decl}" ) - elif isinstance(doc, MutableMapping): - new_dict2: Final = {} - doc_copy: Final = copy.deepcopy(doc) - if "pattern" in doc_copy: - new_dict2["pattern"] = doc_copy.pop("pattern") - else: - raise ValidationException( - f"Missing pattern in secondaryFiles specification entry: {doc}" - ) - new_dict2["required"] = doc_copy.pop("required") if "required" in doc_copy else None + new_dict2["required"] = doc_copy.pop("required") if "required" in doc_copy else None - if len(doc_copy): - raise ValidationException( - f"Unallowed values in secondaryFiles specification entry: {doc_copy}" - ) - r.append(new_dict2) + if len(doc_copy): + raise ValidationException( + f"Unallowed values in secondaryFiles specification entry: {doc_copy}" + ) + r.append(new_dict2) - elif isinstance(doc, str): - if doc.endswith("?"): - r.append({"pattern": doc[:-1], "required": False}) - else: - r.append({"pattern": doc}) - else: - raise ValidationException("Expected str or sequence of str") + case str(decl): + if decl.endswith("?"): + r.append({"pattern": decl[:-1], "required": False}) + else: + r.append({"pattern": decl}) + case _: + raise ValidationException("Expected str or sequence of str") return self.inner.load(r, baseuri, loadingOptions, docRoot, lc=lc) -class _RecordLoader(_Loader): +class _RecordLoader(_Loader, Generic[SaveableType]): def __init__( self, - classtype: type[Saveable], - container: Optional[str] = None, - no_link_check: Optional[bool] = None, + classtype: type[SaveableType], + container: str | None = None, + no_link_check: bool | None = None, ) -> None: self.classtype: Final = classtype self.container: Final = container @@ -683,9 +305,9 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> SaveableType: if not isinstance(doc, MutableMapping): raise 
ValidationException( f"Value is a {convert_typing(extract_type(type(doc)))}, " @@ -710,19 +332,20 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, - ) -> Any: + docRoot: str | None = None, + lc: Any | None = None, + ) -> str: if not isinstance(doc, str): raise ValidationException( f"Value is a {convert_typing(extract_type(type(doc)))}, " f"but valid type for this field is a str." ) - return doc + else: + return doc class _UnionLoader(_Loader): - def __init__(self, alternates: Sequence[_Loader], name: Optional[str] = None) -> None: + def __init__(self, alternates: Sequence[_Loader], name: str | None = None) -> None: self.alternates = alternates self.name: Final = name @@ -734,8 +357,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: errors: Final = [] @@ -817,8 +440,8 @@ def __init__( inner: _Loader, scoped_id: bool, vocab_term: bool, - scoped_ref: Optional[int], - no_link_check: Optional[bool], + scoped_ref: int | None, + no_link_check: bool | None, ) -> None: self.inner: Final = inner self.scoped_id: Final = scoped_id @@ -831,39 +454,40 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if self.no_link_check is not None: loadingOptions = LoadingOptions( copyfrom=loadingOptions, no_link_check=self.no_link_check ) - if isinstance(doc, MutableSequence): - newdoc: Final = [] - for i in doc: - if isinstance(i, str): - newdoc.append( - expand_url( - i, - baseuri, - loadingOptions, - self.scoped_id, - self.vocab_term, - self.scoped_ref, - ) - ) - else: - newdoc.append(i) - doc = newdoc - elif isinstance(doc, str): - doc = expand_url( - doc, - baseuri, - loadingOptions, - self.scoped_id, - 
self.vocab_term, - self.scoped_ref, - ) + match doc: + case MutableSequence() as decl: + newdoc: Final = [] + for i in decl: + if isinstance(i, str): + newdoc.append( + _expand_url( + i, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + ) + else: + newdoc.append(i) + doc = newdoc + case str(decl): + doc = _expand_url( + decl, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) if isinstance(doc, str): if not loadingOptions.no_link_check: errors: Final = [] @@ -880,7 +504,12 @@ def load( class _TypeDSLLoader(_Loader): - def __init__(self, inner: _Loader, refScope: Optional[int], salad_version: str) -> None: + def __init__( + self, + inner: _Loader, + refScope: int | None, + salad_version: str, + ) -> None: self.inner: Final = inner self.refScope: Final = refScope self.salad_version: Final = salad_version @@ -890,7 +519,7 @@ def resolve( doc: str, baseuri: str, loadingOptions: LoadingOptions, - ) -> Union[list[Union[dict[str, Any], str]], dict[str, Any], str]: + ) -> list[dict[str, Any] | str] | dict[str, Any] | str: doc_ = doc optional = False if doc_.endswith("?"): @@ -899,21 +528,42 @@ def resolve( if doc_.endswith("[]"): salad_versions: Final = [int(v) for v in self.salad_version[1:].split(".")] - items: Union[list[Union[dict[str, Any], str]], dict[str, Any], str] = "" + items: list[dict[str, Any] | str] | dict[str, Any] | str = "" rest: Final = doc_[0:-2] if salad_versions < [1, 3]: if rest.endswith("[]"): # To show the error message with the original type return doc else: - items = expand_url(rest, baseuri, loadingOptions, False, True, self.refScope) + items = _expand_url( + rest, + baseuri, + loadingOptions, + False, + True, + self.refScope, + ) else: items = self.resolve(rest, baseuri, loadingOptions) if isinstance(items, str): - items = expand_url(items, baseuri, loadingOptions, False, True, self.refScope) - expanded: Union[dict[str, Any], str] = {"type": "array", "items": items} + 
items = _expand_url( + items, + baseuri, + loadingOptions, + False, + True, + self.refScope, + ) + expanded: dict[str, Any] | str = {"type": "array", "items": items} else: - expanded = expand_url(doc_, baseuri, loadingOptions, False, True, self.refScope) + expanded = _expand_url( + doc_, + baseuri, + loadingOptions, + False, + True, + self.refScope, + ) if optional: return ["null", expanded] @@ -925,8 +575,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if isinstance(doc, MutableSequence): r: Final[list[Any]] = [] @@ -950,7 +600,7 @@ def load( class _IdMapLoader(_Loader): - def __init__(self, inner: _Loader, mapSubject: str, mapPredicate: Optional[str]) -> None: + def __init__(self, inner: _Loader, mapSubject: str, mapPredicate: str | None) -> None: self.inner: Final = inner self.mapSubject: Final = mapSubject self.mapPredicate: Final = mapPredicate @@ -960,8 +610,8 @@ def load( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None, - lc: Optional[list[Any]] = None, + docRoot: str | None = None, + lc: Any | None = None, ) -> Any: if isinstance(doc, MutableMapping): r: Final[list[Any]] = [] @@ -990,10 +640,10 @@ def load( def _document_load( loader: _Loader, - doc: Union[str, MutableMapping[str, Any], MutableSequence[Any]], + doc: str | MutableMapping[str, Any] | MutableSequence[Any], baseuri: str, loadingOptions: LoadingOptions, - addl_metadata_fields: Optional[MutableSequence[str]] = None, + addl_metadata_fields: MutableSequence[str] | None = None, ) -> tuple[Any, LoadingOptions]: if isinstance(doc, str): return _document_load_by_url( @@ -1062,7 +712,7 @@ def _document_load_by_url( loader: _Loader, url: str, loadingOptions: LoadingOptions, - addl_metadata_fields: Optional[MutableSequence[str]] = None, + addl_metadata_fields: MutableSequence[str] | None = None, ) -> tuple[Any, 
LoadingOptions]: if url in loadingOptions.idx: return loadingOptions.idx[url] @@ -1089,103 +739,316 @@ def _document_load_by_url( return loadingOptions.idx[url] -def file_uri(path: str, split_frag: bool = False) -> str: - """Transform a file path into a URL with file scheme.""" - if path.startswith("file://"): - return path - if split_frag: - pathsp: Final = path.split("#", 2) - frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else "" - urlpath = pathname2url(str(pathsp[0])) - else: - urlpath = pathname2url(path) - frag = "" - if urlpath.startswith("//"): - return f"file:{urlpath}{frag}" - return f"file://{urlpath}{frag}" - - -def prefix_url(url: str, namespaces: dict[str, str]) -> str: - """Expand short forms into full URLs using the given namespace dictionary.""" - for k, v in namespaces.items(): - if url.startswith(v): - return k + ":" + url[len(v) :] - return url - - -def save_relative_uri( - uri: Any, +def _expand_url( + url: str, base_url: str, - scoped_id: bool, - ref_scope: Optional[int], - relative_uris: bool, -) -> Any: - """Convert any URI to a relative one, obeying the scoping rules.""" - if isinstance(uri, MutableSequence): - return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri] - elif isinstance(uri, str): - if not relative_uris or uri == base_url: - return uri - urisplit: Final = urlsplit(uri) - basesplit: Final = urlsplit(base_url) - if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: - if urisplit.path != basesplit.path: - p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) - if urisplit.fragment: - p = p + "#" + urisplit.fragment - return p - - basefrag = basesplit.fragment + "/" - if ref_scope: - sp = basefrag.split("/") - i = 0 - while i < ref_scope: - sp.pop() - i += 1 - basefrag = "/".join(sp) - - if urisplit.fragment.startswith(basefrag): - return urisplit.fragment[len(basefrag) :] - return urisplit.fragment - return uri - else: - return save(uri, top=False, 
base_url=base_url, relative_uris=relative_uris) - + loadingOptions: LoadingOptions, + scoped_id: bool = False, + vocab_term: bool = False, + scoped_ref: int | None = None, +) -> str: + if url in ("@id", "@type"): + return url -def shortname(inputid: str) -> str: - """ - Compute the shortname of a fully qualified identifier. + vocab = _vocab | loadingOptions.vocab + if vocab_term and url in vocab: + return url - See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names. - """ - parsed_id: Final = urlparse(inputid) - if parsed_id.fragment: - return parsed_id.fragment.split("/")[-1] - return parsed_id.path.split("/")[-1] + if bool(vocab) and ":" in url: + prefix: Final = url.split(":")[0] + if prefix in vocab: + url = vocab[prefix] + url[len(prefix) + 1 :] + split1: Final = urlsplit(url) -def parser_info() -> str: - return "org.w3id.cwl.v1_2" + if ( + (bool(split1.scheme) and split1.scheme in loadingOptions.fetcher.supported_schemes()) + or url.startswith("$(") + or url.startswith("${") + ): + pass + elif scoped_id and not bool(split1.fragment): + splitbase1: Final = urlsplit(base_url) + frg: str + if bool(splitbase1.fragment): + frg = splitbase1.fragment + "/" + split1.path + else: + frg = split1.path + pt: Final = splitbase1.path if splitbase1.path != "" else "/" + url = urlunsplit((splitbase1.scheme, splitbase1.netloc, pt, splitbase1.query, frg)) + elif scoped_ref is not None and not bool(split1.fragment): + splitbase2: Final = urlsplit(base_url) + sp = splitbase2.fragment.split("/") + n = scoped_ref + while n > 0 and len(sp) > 0: + sp.pop() + n -= 1 + sp.append(url) + url = urlunsplit( + ( + splitbase2.scheme, + splitbase2.netloc, + splitbase2.path, + splitbase2.query, + "/".join(sp), + ) + ) + else: + url = loadingOptions.fetcher.urljoin(base_url, url) + if vocab_term: + split2: Final = urlsplit(url) + if bool(split2.scheme): + if url in (rvocab := _rvocab | loadingOptions.rvocab): + return rvocab[url] + else: + raise ValidationException(f"Term {url!r} not in 
vocabulary") -class Documented(Saveable): - pass + return url -class RecordField(Documented): - """ - A field of a record. - """ +def _load_field( + val: Any | None, + fieldtype: "_Loader", + baseuri: str, + loadingOptions: LoadingOptions, + lc: Any | None = None, +) -> Any: + """Load field.""" + if isinstance(val, MutableMapping): + if "$import" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url1: Final = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]) + result, metadata = _document_load_by_url( + fieldtype, + url1, + loadingOptions, + ) + loadingOptions.imports.append(url1) + return result + if "$include" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + url2: Final = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) + val = loadingOptions.fetcher.fetch_text(url2) + loadingOptions.includes.append(url2) + return fieldtype.load(val, baseuri, loadingOptions, lc=lc) + + +def parser_info() -> str: + return "org.w3id.cwl.v1_2" + + +class CWLArraySchema(schema_salad.metaschema.ArraySchema): + def __init__( + self, + items: Any, + type_: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type_ = type_ + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CWLArraySchema): + return bool(self.items == other.items and self.type_ == other.type_) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: 
+ _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = _load_field( + _doc.get("items"), + uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + 
str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + items=items, + type_=type_, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.items is not None: + u = save_relative_uri(self.items, base_url, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, 
top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["items", "type"]) + + +class CWLRecordField(schema_salad.metaschema.RecordField): name: str def __init__( self, name: Any, type_: Any, - doc: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + doc: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -1200,7 +1063,7 @@ def __init__( self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, RecordField): + if isinstance(other, CWLRecordField): return bool( self.doc == other.doc and self.name == other.name @@ -1217,8 +1080,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "RecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -1228,7 +1091,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, @@ -1284,7 +1147,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -1332,9 +1195,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - 
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2, + typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -1376,7 +1239,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -1384,7 +1247,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -1441,16 +1304,16 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["doc", "name", "type"]) + attrs: ClassVar[Collection[str]] = frozenset(["doc", "name", "type"]) -class RecordSchema(Saveable): +class CWLRecordSchema(schema_salad.metaschema.RecordSchema): def __init__( self, type_: Any, - fields: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + fields: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -1464,7 +1327,7 @@ def __init__( self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, RecordSchema): + if isinstance(other, CWLRecordSchema): return bool(self.fields == other.fields and self.type_ == other.type_) return False @@ -1477,8 +1340,8 @@ def 
fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "RecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -1488,9 +1351,9 @@ def fromDoc( fields = None if "fields" in _doc: try: - fields = load_field( + fields = _load_field( _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, + idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader, baseuri, loadingOptions, lc=_doc.get("fields") @@ -1536,7 +1399,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Record_nameLoader_2, baseuri, @@ -1580,7 +1443,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -1588,7 +1451,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -1640,24 +1503,52 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type"]) + attrs: ClassVar[Collection[str]] = frozenset(["fields", "type"]) -class EnumSchema(Saveable): +class File(Saveable): """ - Define an enumerated type. + Represents a file (or group of files when ``secondaryFiles`` is provided) that will be accessible by tools using standard POSIX file system call API such as open(2) and read(2). - """ + Files are represented as objects with ``class`` of ``File``. File objects have a number of properties that provide metadata about the file. - name: str + The ``location`` property of a File is a IRI that uniquely identifies the file. 
Implementations must support the ``file://`` IRI scheme and may support other schemes such as ``http://`` and ``https://``. The value of ``location`` may also be a relative reference, in which case it must be resolved relative to the IRI of the document it appears in. Alternately to ``location``, implementations must also accept the ``path`` property on File, which must be a filesystem path available on the same host as the CWL runner (for inputs) or the runtime environment of a command line tool execution (for command line tool outputs). + + If no ``location`` or ``path`` is specified, a file object must specify ``contents`` with the UTF-8 text content of the file. This is a "file literal". File literals do not correspond to external resources, but are created on disk with ``contents`` with when needed for executing a tool. Where appropriate, expressions can return file literals to define new files on a runtime. The maximum size of ``contents`` is 64 kilobytes. + + The ``basename`` property defines the filename on disk where the file is staged. This may differ from the resource name. If not provided, ``basename`` must be computed from the last path part of ``location`` and made available to expressions. + + The ``secondaryFiles`` property is a list of File or Directory objects that must be staged in the same directory as the primary file. It is an error for file names to be duplicated in ``secondaryFiles``. + + The ``size`` property is the size in bytes of the File. It must be computed from the resource and made available to expressions. The ``checksum`` field contains a cryptographic hash of the file content for use it verifying file contents. Implementations may, at user option, enable or disable computation of the ``checksum`` field for performance or other reasons. However, the ability to compute output checksums is required to pass the CWL conformance test suite. 
+ + When executing a CommandLineTool, the files and secondary files may be staged to an arbitrary directory, but must use the value of ``basename`` for the filename. The ``path`` property must be file path in the context of the tool execution runtime (local to the compute node, or within the executing container). All computed properties should be available to expressions. File literals also must be staged and ``path`` must be set. + + When collecting CommandLineTool outputs, ``glob`` matching returns file paths (with the ``path`` property) and the derived properties. This can all be modified by ``outputEval``. Alternately, if the file ``cwl.output.json`` is present in the output, ``outputBinding`` is ignored. + + File objects in the output must provide either a ``location`` IRI or a ``path`` property in the context of the tool execution runtime (local to the compute node, or within the executing container). + + When evaluating an ExpressionTool, file objects must be referenced via ``location`` (the expression tool does not have access to files on disk so ``path`` is meaningless) or as file literals. It is legal to return a file object with an existing ``location`` but a different ``basename``. The ``loadContents`` field of ExpressionTool inputs behaves the same as on CommandLineTool inputs, however it is not meaningful on the outputs. + + An ExpressionTool may forward file references from input to output by using the same value for ``location``. 
+ + """ def __init__( self, - symbols: Any, - type_: Any, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + location: Any | None = None, + path: Any | None = None, + basename: Any | None = None, + dirname: Any | None = None, + nameroot: Any | None = None, + nameext: Any | None = None, + checksum: Any | None = None, + size: Any | None = None, + secondaryFiles: Any | None = None, + format: Any | None = None, + contents: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -1667,21 +1558,54 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.symbols = symbols - self.type_ = type_ + self.class_: Final[str] = "File" + self.location = location + self.path = path + self.basename = basename + self.dirname = dirname + self.nameroot = nameroot + self.nameext = nameext + self.checksum = checksum + self.size = size + self.secondaryFiles = secondaryFiles + self.format = format + self.contents = contents def __eq__(self, other: Any) -> bool: - if isinstance(other, EnumSchema): + if isinstance(other, File): return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type_ == other.type_ + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.dirname == other.dirname + and self.nameroot == other.nameroot + and self.nameext == other.nameext + and self.checksum == other.checksum + and self.size == other.size + and self.secondaryFiles == other.secondaryFiles + and self.format == other.format + and self.contents == other.contents ) return False def __hash__(self) -> int: - return hash((self.name, self.symbols, 
self.type_)) + return hash( + ( + self.class_, + self.location, + self.path, + self.basename, + self.dirname, + self.nameroot, + self.nameext, + self.checksum, + self.size, + self.secondaryFiles, + self.format, + self.contents, + ) + ) @classmethod def fromDoc( @@ -1689,29 +1613,46 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "EnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_File_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + location = None + if "location" in _doc: try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, + location = _load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("location") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `location`": _errors__.append( ValidationException( str(e), @@ -1719,13 +1660,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("location") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `location` field is not valid because:", + 
SourceLine(_doc, "location", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -1737,537 +1678,499 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `location` field with value `{val}` " "is not valid because:", ) ) + path = None + if "path" in _doc: + try: + path = _load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("path") + ) - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - try: - if _doc.get("symbols") is None: - raise ValidationException("missing required field `symbols`", None, []) - - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("symbols") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `symbols`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("symbols") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `path`": _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value 
`{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - detailed_message=f"the `symbols` field with value `{val}` " - "is not valid because:", + val = _doc.get("path") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [e], + detailed_message=f"the `path` field with value `{val}` " + "is not valid because:", + ) + ) + basename = None + if "basename" in _doc: + try: + basename = _load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("basename") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `basename`": 
_errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("basename") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + detailed_message=f"the `basename` field with value `{val}` " + "is not valid because:", + ) + ) + dirname = None + if "dirname" in _doc: + try: + dirname = _load_field( + _doc.get("dirname"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("dirname") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `dirname`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, 
vocab_term=False - ) - extension_fields[ex] = _doc[k] else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`".format( - k - ), - SourceLine(_doc, k, str), + val = _doc.get("dirname") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `dirname` field is not valid because:", + SourceLine(_doc, "dirname", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - name=name, - symbols=symbols, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) - r["symbols"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = 
frozenset(["name", "symbols", "type"]) - - -class ArraySchema(Saveable): - def __init__( - self, - items: Any, - type_: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ArraySchema): - return bool(self.items == other.items and self.type_ == other.type_) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ArraySchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) - - items = load_field( - _doc.get("items"), - uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("items") - ) + else: + _errors__.append( + ValidationException( + "the `dirname` field is not valid because:", + SourceLine(_doc, "dirname", str), + [e], + detailed_message=f"the `dirname` field with value `{val}` " + "is not valid because:", + ) + ) + nameroot = None + if "nameroot" in _doc: + try: + nameroot = _load_field( + _doc.get("nameroot"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("nameroot") + ) - 
except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("items") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `nameroot`": _errors__.append( ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [e], - detailed_message=f"the `items` field with value `{val}` " - "is not valid because:", + val = _doc.get("nameroot") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `nameroot` field is not valid because:", + SourceLine(_doc, "nameroot", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + _errors__.append( + ValidationException( + "the `nameroot` field is not valid because:", + SourceLine(_doc, "nameroot", str), + [e], 
+ detailed_message=f"the `nameroot` field with value `{val}` " + "is not valid because:", + ) + ) + nameext = None + if "nameext" in _doc: + try: + nameext = _load_field( + _doc.get("nameext"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("nameext") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `nameext`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("nameext") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `nameext` field is not valid because:", + SourceLine(_doc, "nameext", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - 
_errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( - k - ), - SourceLine(_doc, k, str), + else: + _errors__.append( + ValidationException( + "the `nameext` field is not valid because:", + SourceLine(_doc, "nameext", str), + [e], + detailed_message=f"the `nameext` field with value `{val}` " + "is not valid because:", + ) ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - items=items, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.items is not None: - u = save_relative_uri(self.items, base_url, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type"]) - - -class MapSchema(Saveable): - def __init__( - self, - type_: Any, - values: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() 
- if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.type_ = type_ - self.values = values - - def __eq__(self, other: Any) -> bool: - if isinstance(other, MapSchema): - return bool(self.type_ == other.type_ and self.values == other.values) - return False - - def __hash__(self) -> int: - return hash((self.type_, self.values)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "MapSchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Map_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + checksum = None + if "checksum" in _doc: + try: + checksum = _load_field( + _doc.get("checksum"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("checksum") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `checksum`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - 
_errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("checksum") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `checksum` field is not valid because:", + SourceLine(_doc, "checksum", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - try: - if _doc.get("values") is None: - raise ValidationException("missing required field `values`", None, []) - - values = load_field( - _doc.get("values"), - uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("values") - ) + else: + _errors__.append( + ValidationException( + "the `checksum` field is not valid because:", + SourceLine(_doc, "checksum", str), + [e], + detailed_message=f"the `checksum` field with value `{val}` " + "is not valid because:", + ) + ) + size = None + if "size" in _doc: + try: + size = _load_field( + _doc.get("size"), + union_of_None_type_or_inttype, + baseuri, + loadingOptions, + lc=_doc.get("size") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `values`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: 
- val = _doc.get("values") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `size`": _errors__.append( ValidationException( - "the `values` field is not valid because:", - SourceLine(_doc, "values", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("size") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `size` field is not valid because:", + SourceLine(_doc, "size", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `size` field is not valid because:", + SourceLine(_doc, "size", str), + [e], + detailed_message=f"the `size` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( - "the `values` field is not valid because:", - SourceLine(_doc, "values", str), - [e], - detailed_message=f"the `values` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: 
dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) + else: + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + 
else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + contents = None + if "contents" in _doc: + try: + contents = _load_field( + _doc.get("contents"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("contents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `contents`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("contents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `contents` field is not valid because:", + SourceLine(_doc, "contents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `contents` field is not valid because:", + SourceLine(_doc, "contents", str), + [e], + detailed_message=f"the `contents` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `type`, `values`".format( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, 
`format`, `contents`".format( k ), SourceLine(_doc, k, str), @@ -2277,8 +2180,17 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - type_=type_, - values=values, + location=location, + path=path, + basename=basename, + dirname=dirname, + nameroot=nameroot, + nameext=nameext, + checksum=checksum, + size=size, + secondaryFiles=secondaryFiles, + format=format, + contents=contents, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2295,13 +2207,60 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.location is not None: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + if self.path is not None: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + if self.basename is not None: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.dirname is not None: + r["dirname"] = save( + self.dirname, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.nameroot is not None: + r["nameroot"] = save( + self.nameroot, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.nameext is not None: + r["nameext"] = save( + self.nameext, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.checksum is not None: + r["checksum"] = save( + self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.size 
is not None: + r["size"] = save( + self.size, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, base_url, True, None, relative_uris) + r["format"] = u + if self.contents is not None: + r["contents"] = save( + self.contents, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.values is not None: - u = save_relative_uri(self.values, base_url, False, 2, relative_uris) - r["values"] = u # top refers to the directory level if top: @@ -2311,16 +2270,56 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["type", "values"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "class", + "location", + "path", + "basename", + "dirname", + "nameroot", + "nameext", + "checksum", + "size", + "secondaryFiles", + "format", + "contents", + ] + ) + +class Directory(Saveable): + """ + Represents a directory to present to a command line tool. + + Directories are represented as objects with ``class`` of ``Directory``. Directory objects have a number of properties that provide metadata about the directory. + + The ``location`` property of a Directory is a IRI that uniquely identifies the directory. Implementations must support the file:// IRI scheme and may support other schemes such as http://. Alternately to ``location``, implementations must also accept the ``path`` property on Directory, which must be a filesystem path available on the same host as the CWL runner (for inputs) or the runtime environment of a command line tool execution (for command line tool outputs). + + A Directory object may have a ``listing`` field. This is a list of File and Directory objects that are contained in the Directory. 
For each entry in ``listing``, the ``basename`` property defines the name of the File or Subdirectory when staged to disk. If ``listing`` is not provided, the implementation must have some way of fetching the Directory listing at runtime based on the ``location`` field. + + If a Directory does not have ``location``, it is a Directory literal. A Directory literal must provide ``listing``. Directory literals must be created on disk at runtime as needed. + + The resources in a Directory literal do not need to have any implied relationship in their ``location``. For example, a Directory listing may contain two files located on different hosts. It is the responsibility of the runtime to ensure that those files are staged to disk appropriately. Secondary files associated with files in ``listing`` must also be staged to the same Directory. + + When executing a CommandLineTool, Directories must be recursively staged first and have local values of ``path`` assigned. + + Directory objects in CommandLineTool output must provide either a ``location`` IRI or a ``path`` property in the context of the tool execution runtime (local to the compute node, or within the executing container). + + An ExpressionTool may forward file references from input to output by using the same value for ``location``. + + Name conflicts (the same ``basename`` appearing multiple times in ``listing`` or in any entry in ``secondaryFiles`` in the listing) is a fatal error. 
+ + """ -class UnionSchema(Saveable): def __init__( self, - names: Any, - type_: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + location: Any | None = None, + path: Any | None = None, + basename: Any | None = None, + listing: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -2330,16 +2329,27 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.names = names - self.type_ = type_ + self.class_: Final[str] = "Directory" + self.location = location + self.path = path + self.basename = basename + self.listing = listing def __eq__(self, other: Any) -> bool: - if isinstance(other, UnionSchema): - return bool(self.names == other.names and self.type_ == other.type_) + if isinstance(other, Directory): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.listing == other.listing + ) return False def __hash__(self) -> int: - return hash((self.names, self.type_)) + return hash( + (self.class_, self.location, self.path, self.basename, self.listing) + ) @classmethod def fromDoc( @@ -2347,8 +2357,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "UnionSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -2356,102 +2366,211 @@ def fromDoc( _doc.lc.filename = doc.lc.filename _errors__ = [] try: - if _doc.get("names") is None: - raise ValidationException("missing required field `names`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - names = load_field( - _doc.get("names"), - 
uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, + class_ = _load_field( + _doc.get("class"), + uri_Directory_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("names") + lc=_doc.get("class") ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + raise e + location = None + if "location" in _doc: + try: + location = _load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("location") + ) - if str(e) == "missing required field `names`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("names") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `location`": _errors__.append( ValidationException( - "the `names` field is not valid because:", - SourceLine(_doc, "names", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("location") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, 
"location", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `location` field is not valid because:", + SourceLine(_doc, "location", str), + [e], + detailed_message=f"the `location` field with value `{val}` " + "is not valid because:", + ) + ) + path = None + if "path" in _doc: + try: + path = _load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("path") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `path`": _errors__.append( ValidationException( - "the `names` field is not valid because:", - SourceLine(_doc, "names", str), - [e], - detailed_message=f"the `names` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Union_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + val = _doc.get("path") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `path` field is not valid because:", + SourceLine(_doc, "path", str), + [e], + 
detailed_message=f"the `path` field with value `{val}` " + "is not valid because:", + ) + ) + basename = None + if "basename" in _doc: + try: + basename = _load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("basename") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `basename`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("basename") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `basename` field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + detailed_message=f"the `basename` field with value `{val}` " + "is not valid because:", + ) + ) + listing = None + if "listing" in _doc: + try: + listing = _load_field( + 
_doc.get("listing"), + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + lc=_doc.get("listing") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `listing`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, Any] = {} + else: + val = _doc.get("listing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + detailed_message=f"the `listing` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -2459,14 +2578,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `names`, `type`".format( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( k ), SourceLine(_doc, k, str), @@ -2476,8 +2595,10 @@ def fromDoc( if _errors__: raise ValidationException("", None, 
_errors__, "*") _constructed = cls( - names=names, - type_=type_, + location=location, + path=path, + basename=basename, + listing=listing, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2494,12 +2615,29 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.names is not None: - u = save_relative_uri(self.names, base_url, False, 2, relative_uris) - r["names"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.location is not None: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + if self.path is not None: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + if self.basename is not None: + r["basename"] = save( + self.basename, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.listing is not None: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -2510,35 +2648,72 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["names", "type"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["class", "location", "path", "basename", "listing"] + ) -class CWLArraySchema(ArraySchema): - def __init__( - self, - items: Any, - type_: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if 
loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ +class Labeled(Saveable): + pass - def __eq__(self, other: Any) -> bool: - if isinstance(other, CWLArraySchema): - return bool(self.items == other.items and self.type_ == other.type_) - return False - def __hash__(self) -> int: - return hash((self.items, self.type_)) +class Identified(Saveable): + pass + + +class IdentifierRequired(Identified): + pass + + +class LoadContents(Saveable): + pass + + +class FieldBase(Labeled): + pass + + +class InputFormat(Saveable): + pass + + +class OutputFormat(Saveable): + pass + + +class Parameter(FieldBase, schema_salad.metaschema.Documented, IdentifierRequired): + """ + Define an input or output parameter to a process. + + """ + + pass + + +class InputBinding(Saveable): + def __init__( + self, + loadContents: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputBinding): + return bool(self.loadContents == other.loadContents) + return False + + def __hash__(self) -> int: + return hash((self.loadContents)) @classmethod def fromDoc( @@ -2546,111 +2721,62 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CWLArraySchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) - - items = load_field( - 
_doc.get("items"), - uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("items") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) + loadContents = None + if "loadContents" in _doc: + try: + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") ) - else: - val = _doc.get("items") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [e], - detailed_message=f"the `items` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - 
_errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - extension_fields: dict[str, Any] = {} + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -2658,14 +2784,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) 
extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`".format( + "invalid field `{}`, expected one of: `loadContents`".format( k ), SourceLine(_doc, k, str), @@ -2675,8 +2801,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - items=items, - type_=type_, + loadContents=loadContents, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -2693,12 +2818,12 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.items is not None: - u = save_relative_uri(self.items, base_url, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -2709,19 +2834,37 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["items", "type"]) + attrs: ClassVar[Collection[str]] = frozenset(["loadContents"]) -class CWLRecordField(RecordField): +class IOSchema(Labeled, schema_salad.metaschema.Documented): + pass + + +class InputSchema(IOSchema): + pass + + +class OutputSchema(IOSchema): + pass + + +class InputRecordField(CWLRecordField, FieldBase, InputFormat, LoadContents): name: str def __init__( self, name: Any, type_: Any, - doc: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + doc: Any | None = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + format: Any | None = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> 
None: if extension_fields: self.extension_fields = extension_fields @@ -2734,18 +2877,42 @@ def __init__( self.doc = doc self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) self.type_ = type_ + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing def __eq__(self, other: Any) -> bool: - if isinstance(other, CWLRecordField): + if isinstance(other, InputRecordField): return bool( self.doc == other.doc and self.name == other.name and self.type_ == other.type_ + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing ) return False def __hash__(self) -> int: - return hash((self.doc, self.name, self.type_)) + return hash( + ( + self.doc, + self.name, + self.type_, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.loadContents, + self.loadListing, + ) + ) @classmethod def fromDoc( @@ -2753,8 +2920,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CWLRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -2764,7 +2931,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, @@ -2820,7 +2987,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -2868,9 +3035,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - 
typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2, + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -2912,130 +3079,68 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": _errors__.append( - ValidationException("mapping with implicit null key") + ValidationException( + str(e), + None + ) ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( - k - ), - SourceLine(_doc, k, str), + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - - if _errors__: - raise ValidationException("", 
None, _errors__, "*") - _constructed = cls( - doc=doc, - name=name, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["doc", "name", "type"]) - - -class CWLRecordSchema(RecordSchema): - def __init__( - self, - type_: Any, - fields: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.fields = fields - self.type_ = type_ - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CWLRecordSchema): - return bool(self.fields == other.fields and self.type_ == other.type_) - return False - - def __hash__(self) -> int: - return hash((self.fields, self.type_)) - - @classmethod - def 
fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CWLRecordSchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - fields = None - if "fields" in _doc: + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader, + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, loadingOptions, - lc=_doc.get("fields") + lc=_doc.get("secondaryFiles") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `fields`": + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( str(e), @@ -3043,13 +3148,13 @@ def fromDoc( ) ) else: - val = _doc.get("fields") + val = _doc.get("secondaryFiles") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3061,327 +3166,89 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", 
str), [e], - detailed_message=f"the `fields` field with value `{val}` " + detailed_message=f"the `secondaryFiles` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Record_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + streamable = None + if "streamable" in _doc: + try: + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("streamable") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("streamable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [ValidationException(f"Value is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: + else: + _errors__.append( + ValidationException( + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - fields=fields, - type_=type_, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.fields is not None: - r["fields"] = save( - self.fields, 
top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["fields", "type"]) - - -class File(Saveable): - """ - Represents a file (or group of files when `secondaryFiles` is provided) that - will be accessible by tools using standard POSIX file system call API such as - open(2) and read(2). - - Files are represented as objects with `class` of `File`. File objects have - a number of properties that provide metadata about the file. - - The `location` property of a File is a IRI that uniquely identifies the - file. Implementations must support the `file://` IRI scheme and may support - other schemes such as `http://` and `https://`. The value of `location` may also be a - relative reference, in which case it must be resolved relative to the IRI - of the document it appears in. Alternately to `location`, implementations - must also accept the `path` property on File, which must be a filesystem - path available on the same host as the CWL runner (for inputs) or the - runtime environment of a command line tool execution (for command line tool - outputs). - - If no `location` or `path` is specified, a file object must specify - `contents` with the UTF-8 text content of the file. This is a "file - literal". File literals do not correspond to external resources, but are - created on disk with `contents` with when needed for executing a tool. - Where appropriate, expressions can return file literals to define new files - on a runtime. The maximum size of `contents` is 64 kilobytes. - - The `basename` property defines the filename on disk where the file is - staged. 
This may differ from the resource name. If not provided, - `basename` must be computed from the last path part of `location` and made - available to expressions. - - The `secondaryFiles` property is a list of File or Directory objects that - must be staged in the same directory as the primary file. It is an error - for file names to be duplicated in `secondaryFiles`. - - The `size` property is the size in bytes of the File. It must be computed - from the resource and made available to expressions. The `checksum` field - contains a cryptographic hash of the file content for use it verifying file - contents. Implementations may, at user option, enable or disable - computation of the `checksum` field for performance or other reasons. - However, the ability to compute output checksums is required to pass the - CWL conformance test suite. - - When executing a CommandLineTool, the files and secondary files may be - staged to an arbitrary directory, but must use the value of `basename` for - the filename. The `path` property must be file path in the context of the - tool execution runtime (local to the compute node, or within the executing - container). All computed properties should be available to expressions. - File literals also must be staged and `path` must be set. - - When collecting CommandLineTool outputs, `glob` matching returns file paths - (with the `path` property) and the derived properties. This can all be - modified by `outputEval`. Alternately, if the file `cwl.output.json` is - present in the output, `outputBinding` is ignored. - - File objects in the output must provide either a `location` IRI or a `path` - property in the context of the tool execution runtime (local to the compute - node, or within the executing container). - - When evaluating an ExpressionTool, file objects must be referenced via - `location` (the expression tool does not have access to files on disk so - `path` is meaningless) or as file literals. 
It is legal to return a file - object with an existing `location` but a different `basename`. The - `loadContents` field of ExpressionTool inputs behaves the same as on - CommandLineTool inputs, however it is not meaningful on the outputs. - - An ExpressionTool may forward file references from input to output by using - the same value for `location`. - - """ - - def __init__( - self, - location: Optional[Any] = None, - path: Optional[Any] = None, - basename: Optional[Any] = None, - dirname: Optional[Any] = None, - nameroot: Optional[Any] = None, - nameext: Optional[Any] = None, - checksum: Optional[Any] = None, - size: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - format: Optional[Any] = None, - contents: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "File" - self.location = location - self.path = path - self.basename = basename - self.dirname = dirname - self.nameroot = nameroot - self.nameext = nameext - self.checksum = checksum - self.size = size - self.secondaryFiles = secondaryFiles - self.format = format - self.contents = contents - - def __eq__(self, other: Any) -> bool: - if isinstance(other, File): - return bool( - self.class_ == other.class_ - and self.location == other.location - and self.path == other.path - and self.basename == other.basename - and self.dirname == other.dirname - and self.nameroot == other.nameroot - and self.nameext == other.nameext - and self.checksum == other.checksum - and self.size == other.size - and self.secondaryFiles == other.secondaryFiles - and self.format == other.format - and self.contents == other.contents - ) - return False - - def __hash__(self) -> int: - return hash( - ( - 
self.class_, - self.location, - self.path, - self.basename, - self.dirname, - self.nameroot, - self.nameext, - self.checksum, - self.size, - self.secondaryFiles, - self.format, - self.contents, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "File": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_File_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - location = None - if "location" in _doc: - try: - location = load_field( - _doc.get("location"), - uri_union_of_None_type_or_strtype_False_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("location") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `location`": - _errors__.append( - ValidationException( - str(e), - None + str(e), + None ) ) else: - val = _doc.get("location") + val = _doc.get("format") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `location` field is not valid because:", - SourceLine(_doc, "location", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3393,28 +3260,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `location` field is not valid because:", - SourceLine(_doc, 
"location", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [e], - detailed_message=f"the `location` field with value `{val}` " + detailed_message=f"the `format` field with value `{val}` " "is not valid because:", ) ) - path = None - if "path" in _doc: + loadContents = None + if "loadContents" in _doc: try: - path = load_field( - _doc.get("path"), - uri_union_of_None_type_or_strtype_False_False_None_None, + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("path") + lc=_doc.get("loadContents") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `path`": + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( str(e), @@ -3422,13 +3289,13 @@ def fromDoc( ) ) else: - val = _doc.get("path") + val = _doc.get("loadContents") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `path` field is not valid because:", - SourceLine(_doc, "path", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3440,28 +3307,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `path` field is not valid because:", - SourceLine(_doc, "path", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [e], - detailed_message=f"the `path` field with value `{val}` " + detailed_message=f"the `loadContents` field with value `{val}` " "is not valid because:", ) ) - basename = None - if "basename" in _doc: + loadListing = None + if "loadListing" in _doc: try: - basename = load_field( - _doc.get("basename"), - union_of_None_type_or_strtype, + loadListing = _load_field( 
+ _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, baseuri, loadingOptions, - lc=_doc.get("basename") + lc=_doc.get("loadListing") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `basename`": + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( str(e), @@ -3469,13 +3336,13 @@ def fromDoc( ) ) else: - val = _doc.get("basename") + val = _doc.get("loadListing") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `basename` field is not valid because:", - SourceLine(_doc, "basename", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3487,75 +3354,204 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `basename` field is not valid because:", - SourceLine(_doc, "basename", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [e], - detailed_message=f"the `basename` field with value `{val}` " + detailed_message=f"the `loadListing` field with value `{val}` " "is not valid because:", ) ) - dirname = None - if "dirname" in _doc: - try: - dirname = load_field( - _doc.get("dirname"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("dirname") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `dirname`": + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - str(e), - None - ) + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, 
scoped_id=False, vocab_term=False ) + extension_fields[ex] = _doc[k] else: - val = _doc.get("dirname") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `dirname` field is not valid because:", - SourceLine(_doc, "dirname", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`".format( + k + ), + SourceLine(_doc, k, str), ) - else: - _errors__.append( - ValidationException( - "the `dirname` field is not valid because:", - SourceLine(_doc, "dirname", str), - [e], - detailed_message=f"the `dirname` field with value `{val}` " - "is not valid because:", - ) - ) - nameroot = None - if "nameroot" in _doc: + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + doc=doc, + name=name, + type_=type_, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + loadContents=loadContents, + loadListing=loadListing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + 
r["name"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, self.name, True, None, relative_uris) + r["format"] = u + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "loadContents", + "loadListing", + ] + ) + + +class InputRecordSchema(CWLRecordSchema, InputSchema): + name: str + + def __init__( + self, + type_: Any, + fields: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = 
CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputRecordSchema): + return bool( + self.fields == other.fields + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type_, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + name = None + if "name" in _doc: try: - nameroot = load_field( - _doc.get("nameroot"), - union_of_None_type_or_strtype, + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("nameroot") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `nameroot`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -3563,13 +3559,13 @@ def fromDoc( ) ) else: - val = _doc.get("nameroot") + val = _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `nameroot` field is not valid because:", - SourceLine(_doc, "nameroot", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3581,28 +3577,37 @@ 
def fromDoc( else: _errors__.append( ValidationException( - "the `nameroot` field is not valid because:", - SourceLine(_doc, "nameroot", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `nameroot` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - nameext = None - if "nameext" in _doc: + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + fields = None + if "fields" in _doc: try: - nameext = load_field( - _doc.get("nameext"), - union_of_None_type_or_strtype, + fields = _load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, baseuri, loadingOptions, - lc=_doc.get("nameext") + lc=_doc.get("fields") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `nameext`": + if str(e) == "missing required field `fields`": _errors__.append( ValidationException( str(e), @@ -3610,13 +3615,13 @@ def fromDoc( ) ) else: - val = _doc.get("nameext") + val = _doc.get("fields") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `nameext` field is not valid because:", - SourceLine(_doc, "nameext", str), + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3628,169 +3633,76 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `nameext` field is not valid because:", - SourceLine(_doc, "nameext", str), + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), [e], - detailed_message=f"the `nameext` field with value 
`{val}` " + detailed_message=f"the `fields` field with value `{val}` " "is not valid because:", ) ) - checksum = None - if "checksum" in _doc: - try: - checksum = load_field( - _doc.get("checksum"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("checksum") - ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + type_ = _load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) - if str(e) == "missing required field `checksum`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("checksum") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `checksum` field is not valid because:", - SourceLine(_doc, "checksum", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `checksum` 
field is not valid because:", - SourceLine(_doc, "checksum", str), - [e], - detailed_message=f"the `checksum` field with value `{val}` " - "is not valid because:", - ) - ) - size = None - if "size" in _doc: - try: - size = load_field( - _doc.get("size"), - union_of_None_type_or_inttype, - baseuri, - loadingOptions, - lc=_doc.get("size") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `size`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("size") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `size` field is not valid because:", - SourceLine(_doc, "size", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `size` field is not valid because:", - SourceLine(_doc, "size", str), - [e], - detailed_message=f"the `size` field with value `{val}` " - "is not valid because:", - ) - ) - secondaryFiles = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, - baseuri, - loadingOptions, - lc=_doc.get("secondaryFiles") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( - str(e), - None + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", ) ) - else: - val = 
_doc.get("secondaryFiles") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " - "is not valid because:", - ) - ) - format = None - if "format" in _doc: + label = None + if "label" in _doc: try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_True_False_None_True, + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("format") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `format`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -3798,13 +3710,13 @@ def fromDoc( ) ) else: - val = _doc.get("format") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3816,28 +3728,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `format` field is not valid because:", - 
SourceLine(_doc, "format", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `format` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - contents = None - if "contents" in _doc: + doc = None + if "doc" in _doc: try: - contents = load_field( - _doc.get("contents"), - union_of_None_type_or_strtype, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("contents") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `contents`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -3845,13 +3757,13 @@ def fromDoc( ) ) else: - val = _doc.get("contents") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `contents` field is not valid because:", - SourceLine(_doc, "contents", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -3863,14 +3775,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `contents` field is not valid because:", - SourceLine(_doc, "contents", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `contents` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -3878,14 +3790,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = 
expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( k ), SourceLine(_doc, k, str), @@ -3895,20 +3807,15 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - location=location, - path=path, - basename=basename, - dirname=dirname, - nameroot=nameroot, - nameext=nameext, - checksum=checksum, - size=size, - secondaryFiles=secondaryFiles, - format=format, - contents=contents, + fields=fields, + type_=type_, + label=label, + doc=doc, + name=name, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -3922,57 +3829,24 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.location is not None: - u = save_relative_uri(self.location, base_url, False, None, relative_uris) - r["location"] = u - if self.path is not None: - u = save_relative_uri(self.path, base_url, False, None, relative_uris) - r["path"] = u - if self.basename is not None: - r["basename"] = save( - self.basename, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.dirname is not None: - r["dirname"] = save( - self.dirname, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.nameroot is not None: - 
r["nameroot"] = save( - self.nameroot, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.nameext is not None: - r["nameext"] = save( - self.nameext, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.checksum is not None: - r["checksum"] = save( - self.checksum, top=False, base_url=base_url, relative_uris=relative_uris + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.size is not None: - r["size"] = save( - self.size, top=False, base_url=base_url, relative_uris=relative_uris + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.format is not None: - u = save_relative_uri(self.format, base_url, True, None, relative_uris) - r["format"] = u - if self.contents is not None: - r["contents"] = save( - self.contents, top=False, base_url=base_url, relative_uris=relative_uris + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) # top refers to the directory level @@ -3983,80 +3857,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "class", - "location", - "path", - "basename", - "dirname", - "nameroot", - "nameext", - "checksum", - "size", - "secondaryFiles", - "format", - "contents", - ] + attrs: ClassVar[Collection[str]] = frozenset( + ["fields", "type", "label", "doc", "name"] ) -class Directory(Saveable): - """ - Represents a directory to present to a command line 
tool. - - Directories are represented as objects with `class` of `Directory`. Directory objects have - a number of properties that provide metadata about the directory. - - The `location` property of a Directory is a IRI that uniquely identifies - the directory. Implementations must support the file:// IRI scheme and may - support other schemes such as http://. Alternately to `location`, - implementations must also accept the `path` property on Directory, which - must be a filesystem path available on the same host as the CWL runner (for - inputs) or the runtime environment of a command line tool execution (for - command line tool outputs). - - A Directory object may have a `listing` field. This is a list of File and - Directory objects that are contained in the Directory. For each entry in - `listing`, the `basename` property defines the name of the File or - Subdirectory when staged to disk. If `listing` is not provided, the - implementation must have some way of fetching the Directory listing at - runtime based on the `location` field. - - If a Directory does not have `location`, it is a Directory literal. A - Directory literal must provide `listing`. Directory literals must be - created on disk at runtime as needed. - - The resources in a Directory literal do not need to have any implied - relationship in their `location`. For example, a Directory listing may - contain two files located on different hosts. It is the responsibility of - the runtime to ensure that those files are staged to disk appropriately. - Secondary files associated with files in `listing` must also be staged to - the same Directory. - - When executing a CommandLineTool, Directories must be recursively staged - first and have local values of `path` assigned. - - Directory objects in CommandLineTool output must provide either a - `location` IRI or a `path` property in the context of the tool execution - runtime (local to the compute node, or within the executing container). 
- - An ExpressionTool may forward file references from input to output by using - the same value for `location`. - - Name conflicts (the same `basename` appearing multiple times in `listing` - or in any entry in `secondaryFiles` in the listing) is a fatal error. - - """ +class InputEnumSchema(schema_salad.metaschema.EnumSchema, InputSchema): + name: str def __init__( self, - location: Optional[Any] = None, - path: Optional[Any] = None, - basename: Optional[Any] = None, - listing: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + symbols: Any, + type_: Any, + name: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -4066,27 +3883,25 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "Directory" - self.location = location - self.path = path - self.basename = basename - self.listing = listing + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols + self.type_ = type_ + self.label = label + self.doc = doc def __eq__(self, other: Any) -> bool: - if isinstance(other, Directory): + if isinstance(other, InputEnumSchema): return bool( - self.class_ == other.class_ - and self.location == other.location - and self.path == other.path - and self.basename == other.basename - and self.listing == other.listing + self.name == other.name + and self.symbols == other.symbols + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc ) return False def __hash__(self) -> int: - return hash( - (self.class_, self.location, self.path, self.basename, self.listing) - ) + return hash((self.name, self.symbols, self.type_, self.label, self.doc)) @classmethod def fromDoc( @@ 
-4094,45 +3909,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Directory": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_Directory_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - location = None - if "location" in _doc: + name = None + if "name" in _doc: try: - location = load_field( - _doc.get("location"), - uri_union_of_None_type_or_strtype_False_False_None_None, + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("location") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `location`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -4140,13 +3939,13 @@ def fromDoc( ) ) else: - val = _doc.get("location") + val = _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `location` field is not valid because:", - SourceLine(_doc, "location", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4158,75 +3957,133 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `location` field is not valid because:", - 
SourceLine(_doc, "location", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `location` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - path = None - if "path" in _doc: - try: - path = load_field( - _doc.get("path"), - uri_union_of_None_type_or_strtype_False_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("path") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) - if str(e) == "missing required field `path`": + symbols = _load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("symbols") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `symbols`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("symbols") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("path") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the 
`path` field is not valid because:", - SourceLine(_doc, "path", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + detailed_message=f"the `symbols` field with value `{val}` " + "is not valid because:", ) - else: - _errors__.append( - ValidationException( - "the `path` field is not valid because:", - SourceLine(_doc, "path", str), - [e], - detailed_message=f"the `path` field with value `{val}` " - "is not valid because:", - ) + ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) - basename = None - if "basename" in _doc: + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not 
valid because:", + ) + ) + label = None + if "label" in _doc: try: - basename = load_field( - _doc.get("basename"), + label = _load_field( + _doc.get("label"), union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("basename") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `basename`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -4234,13 +4091,13 @@ def fromDoc( ) ) else: - val = _doc.get("basename") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `basename` field is not valid because:", - SourceLine(_doc, "basename", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4252,28 +4109,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `basename` field is not valid because:", - SourceLine(_doc, "basename", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `basename` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - listing = None - if "listing" in _doc: + doc = None + if "doc" in _doc: try: - listing = load_field( - _doc.get("listing"), - union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("listing") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `listing`": + if str(e) == "missing required field `doc`": _errors__.append( 
ValidationException( str(e), @@ -4281,13 +4138,13 @@ def fromDoc( ) ) else: - val = _doc.get("listing") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `listing` field is not valid because:", - SourceLine(_doc, "listing", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4299,14 +4156,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `listing` field is not valid because:", - SourceLine(_doc, "listing", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `listing` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -4314,14 +4171,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( k ), SourceLine(_doc, k, str), @@ -4331,13 +4188,15 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - location=location, - path=path, - basename=basename, - listing=listing, + name=name, + symbols=symbols, + type_=type_, + label=label, + doc=doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed 
def save( @@ -4351,27 +4210,23 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.location is not None: - u = save_relative_uri(self.location, base_url, False, None, relative_uris) - r["location"] = u - if self.path is not None: - u = save_relative_uri(self.path, base_url, False, None, relative_uris) - r["path"] = u - if self.basename is not None: - r["basename"] = save( - self.basename, top=False, base_url=base_url, relative_uris=relative_uris + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.listing is not None: - r["listing"] = save( - self.listing, top=False, base_url=base_url, relative_uris=relative_uris + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) # top refers to the directory level @@ -4382,52 +4237,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "location", "path", "basename", "listing"]) - - -class Labeled(Saveable): - pass - - -class Identified(Saveable): - pass - - -class IdentifierRequired(Identified): - pass - - -class LoadContents(Saveable): - pass - - -class FieldBase(Labeled): - pass - - -class InputFormat(Saveable): - pass - - -class OutputFormat(Saveable): 
- pass - - -class Parameter(FieldBase, Documented, IdentifierRequired): - """ - Define an input or output parameter to a process. - - """ + attrs: ClassVar[Collection[str]] = frozenset( + ["name", "symbols", "type", "label", "doc"] + ) - pass +class InputArraySchema(CWLArraySchema, InputSchema): + name: str -class InputBinding(Saveable): def __init__( self, - loadContents: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + items: Any, + type_: Any, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -4437,15 +4263,25 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.loadContents = loadContents + self.items = items + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) def __eq__(self, other: Any) -> bool: - if isinstance(other, InputBinding): - return bool(self.loadContents == other.loadContents) + if isinstance(other, InputArraySchema): + return bool( + self.items == other.items + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) return False def __hash__(self) -> int: - return hash((self.loadContents)) + return hash((self.items, self.type_, self.label, self.doc, self.name)) @classmethod def fromDoc( @@ -4453,29 +4289,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputBinding": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - loadContents = None - if "loadContents" in _doc: + name = None + if 
"name" in _doc: try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("loadContents") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadContents`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -4483,13 +4319,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadContents") + val = _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -4501,108 +4337,311 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `loadContents` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) + + items = _load_field( + 
_doc.get("items"), + uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `loadContents`".format( - k - ), - SourceLine(_doc, k, str), + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", ) ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - loadContents=loadContents, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - 
for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, + type_ = _load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + lc=_doc.get("type") ) - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["loadContents"]) - - -class IOSchema(Labeled, Documented): - pass - - -class InputSchema(IOSchema): - pass - - -class OutputSchema(IOSchema): - pass - - -class InputRecordField(CWLRecordField, FieldBase, InputFormat, LoadContents): - name: str + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - def __init__( - self, - name: Any, - type_: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None 
+ ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + items=items, + type_=type_, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.name is not None: + u = save_relative_uri(self.name, base_url, 
True, None, relative_uris) + r["name"] = u + if self.items is not None: + u = save_relative_uri(self.items, self.name, False, 2, relative_uris) + r["items"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset( + ["items", "type", "label", "doc", "name"] + ) + + +class OutputRecordField(CWLRecordField, FieldBase, OutputFormat): + name: str + + def __init__( + self, + name: Any, + type_: Any, + doc: Any | None = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + format: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() @@ -4613,11 +4652,9 @@ def __init__( self.secondaryFiles = secondaryFiles self.streamable = streamable self.format = format - self.loadContents = loadContents - self.loadListing = loadListing def __eq__(self, other: Any) -> bool: - if isinstance(other, InputRecordField): + if isinstance(other, OutputRecordField): return bool( self.doc == other.doc and self.name == other.name @@ -4626,8 +4663,6 @@ def __eq__(self, other: Any) -> bool: and self.secondaryFiles == other.secondaryFiles and self.streamable == 
other.streamable and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing ) return False @@ -4641,8 +4676,6 @@ def __hash__(self) -> int: self.secondaryFiles, self.streamable, self.format, - self.loadContents, - self.loadListing, ) ) @@ -4652,8 +4685,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -4663,7 +4696,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, @@ -4719,7 +4752,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -4767,9 +4800,9 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -4814,7 +4847,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -4861,7 +4894,7 @@ def fromDoc( secondaryFiles = None if "secondaryFiles" in _doc: try: - secondaryFiles = load_field( + secondaryFiles = _load_field( 
_doc.get("secondaryFiles"), secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, @@ -4908,7 +4941,7 @@ def fromDoc( streamable = None if "streamable" in _doc: try: - streamable = load_field( + streamable = _load_field( _doc.get("streamable"), union_of_None_type_or_booltype, baseuri, @@ -4955,9 +4988,9 @@ def fromDoc( format = None if "format" in _doc: try: - format = load_field( + format = _load_field( _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, lc=_doc.get("format") @@ -4999,119 +5032,25 @@ def fromDoc( "is not valid because:", ) ) - loadContents = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("loadContents") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `loadContents`": + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: _errors__.append( ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("loadContents") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid 
{to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - detailed_message=f"the `loadContents` field with value `{val}` " - "is not valid because:", - ) - ) - loadListing = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - lc=_doc.get("loadListing") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `loadListing`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("loadListing") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - detailed_message=f"the `loadListing` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, 
`secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`".format( - k - ), - SourceLine(_doc, k, str), + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`".format( + k + ), + SourceLine(_doc, k, str), ) ) @@ -5125,8 +5064,6 @@ def fromDoc( secondaryFiles=secondaryFiles, streamable=streamable, format=format, - loadContents=loadContents, - loadListing=loadListing, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -5176,20 +5113,6 @@ def save( if self.format is not None: u = save_relative_uri(self.format, self.name, True, None, relative_uris) r["format"] = u - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) - if self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) # top refers to the directory level if top: @@ -5199,33 +5122,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "doc", - "name", - "type", - "label", - "secondaryFiles", - "streamable", - "format", - "loadContents", - "loadListing", - ] + attrs: ClassVar[Collection[str]] = frozenset( + ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] ) -class InputRecordSchema(CWLRecordSchema, InputSchema): +class OutputRecordSchema(CWLRecordSchema, OutputSchema): name: str def __init__( self, type_: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + fields: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if 
extension_fields: self.extension_fields = extension_fields @@ -5242,7 +5155,7 @@ def __init__( self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) def __eq__(self, other: Any) -> bool: - if isinstance(other, InputRecordSchema): + if isinstance(other, OutputRecordSchema): return bool( self.fields == other.fields and self.type_ == other.type_ @@ -5261,8 +5174,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputRecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -5272,7 +5185,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -5328,9 +5241,9 @@ def fromDoc( fields = None if "fields" in _doc: try: - fields = load_field( + fields = _load_field( _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, + idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, baseuri, loadingOptions, lc=_doc.get("fields") @@ -5376,7 +5289,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Record_nameLoader_2, baseuri, @@ -5423,7 +5336,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -5470,7 +5383,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -5514,7 +5427,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -5522,7 +5435,7 @@ def fromDoc( ValidationException("mapping 
with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -5589,21 +5502,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type", "label", "doc", "name"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["fields", "type", "label", "doc", "name"] + ) -class InputEnumSchema(EnumSchema, InputSchema): +class OutputEnumSchema(schema_salad.metaschema.EnumSchema, OutputSchema): name: str def __init__( self, symbols: Any, type_: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + name: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -5620,7 +5535,7 @@ def __init__( self.doc = doc def __eq__(self, other: Any) -> bool: - if isinstance(other, InputEnumSchema): + if isinstance(other, OutputEnumSchema): return bool( self.name == other.name and self.symbols == other.symbols @@ -5639,8 +5554,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputEnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -5650,7 +5565,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -5707,7 +5622,7 @@ def fromDoc( if _doc.get("symbols") is None: raise ValidationException("missing required field `symbols`", None, []) - symbols = load_field( + symbols = _load_field( _doc.get("symbols"), uri_array_of_strtype_True_False_None_None, baseuri, @@ 
-5755,7 +5670,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Enum_nameLoader_2, baseuri, @@ -5802,7 +5717,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -5849,7 +5764,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -5893,7 +5808,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -5901,7 +5816,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -5967,21 +5882,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["name", "symbols", "type", "label", "doc"] + ) -class InputArraySchema(CWLArraySchema, InputSchema): +class OutputArraySchema(CWLArraySchema, OutputSchema): name: str def __init__( self, items: Any, type_: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -5998,7 +5915,7 @@ def __init__( self.name = name if name is not None else "_:" + 
str(_uuid__.uuid4()) def __eq__(self, other: Any) -> bool: - if isinstance(other, InputArraySchema): + if isinstance(other, OutputArraySchema): return bool( self.items == other.items and self.type_ == other.type_ @@ -6017,8 +5934,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InputArraySchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -6028,7 +5945,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -6085,9 +6002,9 @@ def fromDoc( if _doc.get("items") is None: raise ValidationException("missing required field `items`", None, []) - items = load_field( + items = _load_field( _doc.get("items"), - uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None, + uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None, baseuri, loadingOptions, lc=_doc.get("items") @@ -6133,7 +6050,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_Array_nameLoader_2, baseuri, @@ -6180,7 +6097,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -6227,7 +6144,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), 
union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -6271,7 +6188,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -6279,7 +6196,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -6345,23 +6262,50 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["items", "type", "label", "doc", "name"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["items", "type", "label", "doc", "name"] + ) + +class InputParameter(Parameter, InputFormat, LoadContents): + pass -class OutputRecordField(CWLRecordField, FieldBase, OutputFormat): - name: str + +class OutputParameter(Parameter, OutputFormat): + pass + + +class ProcessRequirement(Saveable): + """ + A process requirement declares a prerequisite that may or must be fulfilled before executing a process. See ```Process.hints`` <#process>`__ and ```Process.requirements`` <#process>`__. + + Process requirements are the primary mechanism for specifying extensions to the CWL core specification. + + """ + + pass + + +class Process(Identified, Labeled, schema_salad.metaschema.Documented): + """ + The base executable type in CWL is the ``Process`` object defined by the document. Note that the ``Process`` object is abstract and cannot be directly executed. + + """ + + pass + + +class InlineJavascriptRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support inline Javascript expressions. If this requirement is not present, the workflow platform must not perform expression interpolation. 
+ + """ def __init__( self, - name: Any, - type_: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + expressionLib: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -6371,39 +6315,19 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.doc = doc - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.type_ = type_ - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.format = format + self.class_: Final[str] = "InlineJavascriptRequirement" + self.expressionLib = expressionLib def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputRecordField): + if isinstance(other, InlineJavascriptRequirement): return bool( - self.doc == other.doc - and self.name == other.name - and self.type_ == other.type_ - and self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.format == other.format + self.class_ == other.class_ + and self.expressionLib == other.expressionLib ) return False def __hash__(self) -> int: - return hash( - ( - self.doc, - self.name, - self.type_, - self.label, - self.secondaryFiles, - self.streamable, - self.format, - ) - ) + return hash((self.class_, self.expressionLib)) @classmethod def fromDoc( @@ -6411,29 +6335,46 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = 
doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_InlineJavascriptRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + expressionLib = None + if "expressionLib" in _doc: try: - name = load_field( - _doc.get("name"), - uri_strtype_True_False_None_None, + expressionLib = _load_field( + _doc.get("expressionLib"), + union_of_None_type_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("expressionLib") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `expressionLib`": _errors__.append( ValidationException( str(e), @@ -6441,13 +6382,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("expressionLib") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `expressionLib` field is not valid because:", + SourceLine(_doc, "expressionLib", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -6459,85 +6400,170 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `expressionLib` field is not valid because:", + SourceLine(_doc, "expressionLib", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `expressionLib` 
field with value `{val}` " "is not valid because:", ) ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `expressionLib`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + expressionLib=expressionLib, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" else: - _errors__.append(ValidationException("missing name")) - if not __original_name_is_none: - baseuri = cast(str, name) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.expressionLib is not None: + r["expressionLib"] = save( + self.expressionLib, + top=False, + base_url=base_url, + 
relative_uris=relative_uris, + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) + attrs: ClassVar[Collection[str]] = frozenset(["class", "expressionLib"]) + + +class CommandInputSchema(Saveable): + pass + + +class SchemaDefRequirement(ProcessRequirement): + """ + This field consists of an array of type definitions which must be used when interpreting the ``inputs`` and ``outputs`` fields. When a ``type`` field contains a IRI, the implementation must check if the type is defined in ``schemaDefs`` and use that definition. If the type is not found in ``schemaDefs``, it is an error. The entries in ``schemaDefs`` must be processed in the order listed such that later schema definitions may refer to earlier schema definitions. 
+ + - **Type definitions are allowed for ``enum`` and ``record`` types only.** + - Type definitions may be shared by defining them in a file and then ``$include``-ing them in the ``types`` field. + - A file can contain a list of type definitions + + """ + + def __init__( + self, + types: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_: Final[str] = "SchemaDefRequirement" + self.types = types + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SchemaDefRequirement): + return bool(self.class_ == other.class_ and self.types == other.types) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.types)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - type_ = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + class_ = _load_field( + _doc.get("class"), + uri_SchemaDefRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, 
vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + try: + if _doc.get("types") is None: + raise ValidationException("missing required field `types`", None, []) + + types = _load_field( + _doc.get("types"), + array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("types") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `types`": _errors__.append( ValidationException( str(e), @@ -6545,13 +6571,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("types") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `types` field is not valid because:", + SourceLine(_doc, "types", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -6563,169 +6589,199 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `types` field is not valid because:", + SourceLine(_doc, "types", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `types` field with value `{val}` " "is not valid because:", ) ) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in 
cls.attrs: + if not k: _errors__.append( - ValidationException( - str(e), - None - ) + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) + extension_fields[ex] = _doc[k] else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - secondaryFiles = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - lc=_doc.get("secondaryFiles") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( - str(e), - None + "invalid field `{}`, expected one of: `class`, `types`".format( + k + ), + SourceLine(_doc, k, str), ) ) - else: - val = _doc.get("secondaryFiles") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - 
f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " - "is not valid because:", - ) - ) - streamable = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("streamable") - ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + types=types, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed - if str(e) == "missing required field `streamable`": + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.types is not None: + r["types"] = save( + self.types, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = 
self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["class", "types"]) + + +class SecondaryFileSchema(Saveable): + """ + Secondary files are specified using the following micro-DSL for secondary files: + + * If the value is a string, it is transformed to an object with two fields ``pattern`` and ``required`` + * By default, the value of ``required`` is ``null`` (this indicates default behavior, which may be based on the context) + * If the value ends with a question mark ``?`` the question mark is stripped off and the value of the field ``required`` is set to ``False`` + * The remaining value is assigned to the field ``pattern`` + + For implementation details and examples, please see `this section `__ in the Schema Salad specification. + + """ + + def __init__( + self, + pattern: Any, + required: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.pattern = pattern + self.required = required + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SecondaryFileSchema): + return bool( + self.pattern == other.pattern and self.required == other.required + ) + return False + + def __hash__(self) -> int: + return hash((self.pattern, self.required)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("pattern") is None: + raise ValidationException("missing required field `pattern`", None, []) + + pattern = _load_field( + _doc.get("pattern"), + 
union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("pattern") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `pattern`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("pattern") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `pattern` field is not valid because:", + SourceLine(_doc, "pattern", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("streamable") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - detailed_message=f"the `streamable` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `pattern` field is not valid because:", + SourceLine(_doc, "pattern", str), + [e], + detailed_message=f"the `pattern` field with value `{val}` " + "is not valid because:", ) - format = None - if "format" in _doc: + ) + required = None + if "required" in _doc: try: - format = load_field( - _doc.get("format"), - 
uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + required = _load_field( + _doc.get("required"), + union_of_None_type_or_booltype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("format") + lc=_doc.get("required") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `format`": + if str(e) == "missing required field `required`": _errors__.append( ValidationException( str(e), @@ -6733,13 +6789,13 @@ def fromDoc( ) ) else: - val = _doc.get("format") + val = _doc.get("required") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `required` field is not valid because:", + SourceLine(_doc, "required", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -6751,14 +6807,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `required` field is not valid because:", + SourceLine(_doc, "required", str), [e], - detailed_message=f"the `format` field with value `{val}` " + detailed_message=f"the `required` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -6766,14 +6822,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`".format( + "invalid field `{}`, expected 
one of: `pattern`, `required`".format( k ), SourceLine(_doc, k, str), @@ -6783,17 +6839,11 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - doc=doc, - name=name, - type_=type_, - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, + pattern=pattern, + required=required, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -6807,38 +6857,14 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.name, - relative_uris=relative_uris, + if self.pattern is not None: + r["pattern"] = save( + self.pattern, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.name, - relative_uris=relative_uris, + if self.required is not None: + r["required"] = save( + self.required, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.format is not None: - u = save_relative_uri(self.format, self.name, True, None, relative_uris) - r["format"] = u # top refers to the directory level if top: @@ -6848,23 +6874,20 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - ["doc", "name", "type", 
"label", "secondaryFiles", "streamable", "format"] - ) + attrs: ClassVar[Collection[str]] = frozenset(["pattern", "required"]) -class OutputRecordSchema(CWLRecordSchema, OutputSchema): - name: str +class LoadListingRequirement(ProcessRequirement): + """ + Specify the desired behavior for loading the ``listing`` field of a Directory object for use by expressions. + + """ def __init__( self, - type_: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + loadListing: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -6874,25 +6897,18 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.fields = fields - self.type_ = type_ - self.label = label - self.doc = doc - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.class_: Final[str] = "LoadListingRequirement" + self.loadListing = loadListing def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputRecordSchema): + if isinstance(other, LoadListingRequirement): return bool( - self.fields == other.fields - and self.type_ == other.type_ - and self.label == other.label - and self.doc == other.doc - and self.name == other.name + self.class_ == other.class_ and self.loadListing == other.loadListing ) return False def __hash__(self) -> int: - return hash((self.fields, self.type_, self.label, self.doc, self.name)) + return hash((self.class_, self.loadListing)) @classmethod def fromDoc( @@ -6900,227 +6916,46 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputRecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, 
"lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("name") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `name`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("name") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [e], - detailed_message=f"the `name` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - fields = None - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, - baseuri, - loadingOptions, - lc=_doc.get("fields") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `fields`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("fields") - if error_message != str(e): - val_type = 
convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - detailed_message=f"the `fields` field with value `{val}` " - "is not valid because:", - ) - ) try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - type_ = load_field( - _doc.get("type"), - typedsl_Record_nameLoader_2, + class_ = _load_field( + _doc.get("class"), + uri_LoadListingRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("class") ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} 
{error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: + raise e + loadListing = None + if "loadListing" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + loadListing = _load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("loadListing") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( 
str(e), @@ -7128,13 +6963,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("loadListing") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7146,14 +6981,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `loadListing` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -7161,14 +6996,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + "invalid field `{}`, expected one of: `class`, `loadListing`".format( k ), SourceLine(_doc, k, str), @@ -7178,15 +7013,10 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - fields=fields, - type_=type_, - label=label, - doc=doc, - name=name, + loadListing=loadListing, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -7200,24 +7030,22 @@ def save( else: for ef in 
self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.fields is not None: - r["fields"] = save( - self.fields, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -7228,21 +7056,21 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type", "label", "doc", "name"]) + attrs: ClassVar[Collection[str]] = frozenset(["class", "loadListing"]) -class OutputEnumSchema(EnumSchema, OutputSchema): - name: str +class EnvironmentDef(Saveable): + """ + Define an environment variable that will be set in the runtime environment by the workflow platform when executing the command line tool. May be the result of executing an expression, such as getting a parameter from input. 
+ + """ def __init__( self, - symbols: Any, - type_: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + envName: Any, + envValue: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -7252,25 +7080,18 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.symbols = symbols - self.type_ = type_ - self.label = label - self.doc = doc + self.envName = envName + self.envValue = envValue def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputEnumSchema): + if isinstance(other, EnvironmentDef): return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type_ == other.type_ - and self.label == other.label - and self.doc == other.doc + self.envName == other.envName and self.envValue == other.envValue ) return False def __hash__(self) -> int: - return hash((self.name, self.symbols, self.type_, self.label, self.doc)) + return hash((self.envName, self.envValue)) @classmethod def fromDoc( @@ -7278,86 +7099,30 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputEnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("name") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `name`": 
- _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("name") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), - [e], - detailed_message=f"the `name` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) try: - if _doc.get("symbols") is None: - raise ValidationException("missing required field `symbols`", None, []) + if _doc.get("envName") is None: + raise ValidationException("missing required field `envName`", None, []) - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None_None, + envName = _load_field( + _doc.get("envName"), + strtype, baseuri, loadingOptions, - lc=_doc.get("symbols") + lc=_doc.get("envName") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `symbols`": + if str(e) == "missing required field `envName`": _errors__.append( ValidationException( str(e), @@ -7365,13 +7130,13 @@ def fromDoc( ) ) else: - val = _doc.get("symbols") + val = _doc.get("envName") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, 
"symbols", str), + "the `envName` field is not valid because:", + SourceLine(_doc, "envName", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7383,29 +7148,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), + "the `envName` field is not valid because:", + SourceLine(_doc, "envName", str), [e], - detailed_message=f"the `symbols` field with value `{val}` " + detailed_message=f"the `envName` field with value `{val}` " "is not valid because:", ) ) try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("envValue") is None: + raise ValidationException("missing required field `envValue`", None, []) - type_ = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, + envValue = _load_field( + _doc.get("envValue"), + union_of_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("envValue") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `envValue`": _errors__.append( ValidationException( str(e), @@ -7413,13 +7178,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("envValue") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `envValue` field is not valid because:", + SourceLine(_doc, "envValue", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7431,108 +7196,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the 
`envValue` field is not valid because:", + SourceLine(_doc, "envValue", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `envValue` field with value `{val}` " "is not valid because:", ) ) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, 
"doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -7540,14 +7211,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( + "invalid field `{}`, expected one of: `envName`, `envValue`".format( k ), SourceLine(_doc, k, str), @@ -7557,15 +7228,11 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - name=name, - symbols=symbols, - type_=type_, - label=label, - doc=doc, + envName=envName, + envValue=envValue, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -7579,23 +7246,13 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) - r["symbols"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, 
relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris + if self.envName is not None: + r["envName"] = save( + self.envName, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + if self.envValue is not None: + r["envValue"] = save( + self.envValue, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -7606,21 +7263,44 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + attrs: ClassVar[Collection[str]] = frozenset(["envName", "envValue"]) -class OutputArraySchema(CWLArraySchema, OutputSchema): - name: str +class CommandLineBinding(InputBinding): + """ + When listed under ``inputBinding`` in the input schema, the term "value" refers to the corresponding value in the input object. For binding objects listed in ``CommandLineTool.arguments``, the term "value" refers to the effective value after evaluating ``valueFrom``. + + The binding behavior when building the command line depends on the data type of the value. If there is a mismatch between the type described by the input schema and the effective value, such as resulting from an expression evaluation, an implementation must use the data type of the effective value. + + - **string**: Add ``prefix`` and the string to the command line. + + - **number**: Add ``prefix`` and decimal representation to command line. + + - **boolean**: If true, add ``prefix`` to the command line. If false, add nothing. + + - **File**: Add ``prefix`` and the value of ```File.path`` <#File>`__ to the command line. + + - **Directory**: Add ``prefix`` and the value of ```Directory.path`` <#Directory>`__ to the command line. 
+ + - **array**: If ``itemSeparator`` is specified, add ``prefix`` and the join the array into a single string with ``itemSeparator`` separating the items. Otherwise, first add ``prefix``, then recursively process individual elements. If the array is empty, it does not add anything to command line. + + - **object**: Add ``prefix`` only, and recursively add object fields for which ``inputBinding`` is specified. + + - **null**: Add nothing. + + """ def __init__( self, - items: Any, - type_: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + loadContents: Any | None = None, + position: Any | None = None, + prefix: Any | None = None, + separate: Any | None = None, + itemSeparator: Any | None = None, + valueFrom: Any | None = None, + shellQuote: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -7630,25 +7310,39 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ - self.label = label - self.doc = doc - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.loadContents = loadContents + self.position = position + self.prefix = prefix + self.separate = separate + self.itemSeparator = itemSeparator + self.valueFrom = valueFrom + self.shellQuote = shellQuote def __eq__(self, other: Any) -> bool: - if isinstance(other, OutputArraySchema): + if isinstance(other, CommandLineBinding): return bool( - self.items == other.items - and self.type_ == other.type_ - and self.label == other.label - and self.doc == other.doc - and self.name == other.name + self.loadContents == other.loadContents + and self.position == other.position + and self.prefix == other.prefix + and 
self.separate == other.separate + and self.itemSeparator == other.itemSeparator + and self.valueFrom == other.valueFrom + and self.shellQuote == other.shellQuote ) return False def __hash__(self) -> int: - return hash((self.items, self.type_, self.label, self.doc, self.name)) + return hash( + ( + self.loadContents, + self.position, + self.prefix, + self.separate, + self.itemSeparator, + self.valueFrom, + self.shellQuote, + ) + ) @classmethod def fromDoc( @@ -7656,29 +7350,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OutputArraySchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + loadContents = None + if "loadContents" in _doc: try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("loadContents") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( str(e), @@ -7686,13 +7380,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("loadContents") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7704,133 +7398,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` 
field is not valid because:", - SourceLine(_doc, "name", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `loadContents` field with value `{val}` " "is not valid because:", ) ) - - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) - - items = load_field( - _doc.get("items"), - uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("items") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("items") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [e], - detailed_message=f"the `items` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("type") 
is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Array_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - label = None - if "label" in _doc: + position = None + if "position" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + position = _load_field( + _doc.get("position"), + union_of_None_type_or_inttype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("position") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `position`": _errors__.append( ValidationException( str(e), @@ -7838,13 +7427,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("position") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid 
because:", - SourceLine(_doc, "label", str), + "the `position` field is not valid because:", + SourceLine(_doc, "position", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7856,28 +7445,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `position` field is not valid because:", + SourceLine(_doc, "position", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `position` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: + prefix = None + if "prefix" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + prefix = _load_field( + _doc.get("prefix"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("prefix") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `prefix`": _errors__.append( ValidationException( str(e), @@ -7885,13 +7474,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("prefix") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `prefix` field is not valid because:", + SourceLine(_doc, "prefix", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -7903,205 +7492,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `prefix` field is not valid because:", + SourceLine(_doc, "prefix", str), [e], - detailed_message=f"the `doc` field with 
value `{val}` " + detailed_message=f"the `prefix` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - items=items, - type_=type_, - label=label, - doc=doc, - name=name, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.items is not None: - u = save_relative_uri(self.items, self.name, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = 
self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["items", "type", "label", "doc", "name"]) - - -class InputParameter(Parameter, InputFormat, LoadContents): - pass - - -class OutputParameter(Parameter, OutputFormat): - pass - - -class ProcessRequirement(Saveable): - """ - A process requirement declares a prerequisite that may or must be fulfilled - before executing a process. See [`Process.hints`](#process) and - [`Process.requirements`](#process). - - Process requirements are the primary mechanism for specifying extensions to - the CWL core specification. - - """ - - pass - - -class Process(Identified, Labeled, Documented): - """ - - The base executable type in CWL is the `Process` object defined by the - document. Note that the `Process` object is abstract and cannot be - directly executed. - - """ - - pass - - -class InlineJavascriptRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support inline Javascript expressions. - If this requirement is not present, the workflow platform must not perform expression - interpolation. 
- - """ - - def __init__( - self, - expressionLib: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "InlineJavascriptRequirement" - self.expressionLib = expressionLib - - def __eq__(self, other: Any) -> bool: - if isinstance(other, InlineJavascriptRequirement): - return bool( - self.class_ == other.class_ - and self.expressionLib == other.expressionLib - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.expressionLib)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InlineJavascriptRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_InlineJavascriptRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - expressionLib = None - if "expressionLib" in _doc: + separate = None + if "separate" in _doc: try: - expressionLib = load_field( - _doc.get("expressionLib"), - union_of_None_type_or_array_of_strtype, + separate = _load_field( + _doc.get("separate"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("expressionLib") + lc=_doc.get("separate") ) except ValidationException as e: error_message, to_print, verb_tensage = 
parse_errors(str(e)) - if str(e) == "missing required field `expressionLib`": + if str(e) == "missing required field `separate`": _errors__.append( ValidationException( str(e), @@ -8109,13 +7521,13 @@ def fromDoc( ) ) else: - val = _doc.get("expressionLib") + val = _doc.get("separate") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `expressionLib` field is not valid because:", - SourceLine(_doc, "expressionLib", str), + "the `separate` field is not valid because:", + SourceLine(_doc, "separate", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8127,207 +7539,155 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `expressionLib` field is not valid because:", - SourceLine(_doc, "expressionLib", str), + "the `separate` field is not valid because:", + SourceLine(_doc, "separate", str), [e], - detailed_message=f"the `expressionLib` field with value `{val}` " + detailed_message=f"the `separate` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + itemSeparator = None + if "itemSeparator" in _doc: + try: + itemSeparator = _load_field( + _doc.get("itemSeparator"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("itemSeparator") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `itemSeparator`": _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ValidationException( + str(e), + None + ) ) - extension_fields[ex] = _doc[k] else: + val = _doc.get("itemSeparator") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + 
_errors__.append( + ValidationException( + "the `itemSeparator` field is not valid because:", + SourceLine(_doc, "itemSeparator", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `itemSeparator` field is not valid because:", + SourceLine(_doc, "itemSeparator", str), + [e], + detailed_message=f"the `itemSeparator` field with value `{val}` " + "is not valid because:", + ) + ) + valueFrom = None + if "valueFrom" in _doc: + try: + valueFrom = _load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("valueFrom") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `valueFrom`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `expressionLib`".format( - k - ), - SourceLine(_doc, k, str), + str(e), + None ) ) + else: + val = _doc.get("valueFrom") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `valueFrom` field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + detailed_message=f"the `valueFrom` field with value `{val}` " + "is not valid because:", + ) + ) + shellQuote = None + if 
"shellQuote" in _doc: + try: + shellQuote = _load_field( + _doc.get("shellQuote"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("shellQuote") + ) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - expressionLib=expressionLib, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.expressionLib is not None: - r["expressionLib"] = save( - self.expressionLib, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "expressionLib"]) - - -class CommandInputSchema(Saveable): - pass - - -class SchemaDefRequirement(ProcessRequirement): - """ - This field consists of an array of type definitions which must be used when - interpreting the `inputs` and `outputs` fields. When a `type` field - contains a IRI, the implementation must check if the type is defined in - `schemaDefs` and use that definition. If the type is not found in - `schemaDefs`, it is an error. 
The entries in `schemaDefs` must be - processed in the order listed such that later schema definitions may refer - to earlier schema definitions. - - - **Type definitions are allowed for `enum` and `record` types only.** - - Type definitions may be shared by defining them in a file and then - `$include`-ing them in the `types` field. - - A file can contain a list of type definitions - - """ - - def __init__( - self, - types: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "SchemaDefRequirement" - self.types = types - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SchemaDefRequirement): - return bool(self.class_ == other.class_ and self.types == other.types) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.types)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SchemaDefRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_SchemaDefRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("types") is None: - raise ValidationException("missing required field `types`", None, []) - - types = load_field( - _doc.get("types"), - 
array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader, - baseuri, - loadingOptions, - lc=_doc.get("types") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `types`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("types") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `shellQuote`": _errors__.append( ValidationException( - "the `types` field is not valid because:", - SourceLine(_doc, "types", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `types` field is not valid because:", - SourceLine(_doc, "types", str), - [e], - detailed_message=f"the `types` field with value `{val}` " - "is not valid because:", + val = _doc.get("shellQuote") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `shellQuote` field is not valid because:", + SourceLine(_doc, "shellQuote", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - extension_fields: dict[str, Any] = {} + else: + _errors__.append( + ValidationException( + "the `shellQuote` field is not valid because:", + SourceLine(_doc, "shellQuote", str), + [e], + 
detailed_message=f"the `shellQuote` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -8335,14 +7695,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `types`".format( + "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( k ), SourceLine(_doc, k, str), @@ -8352,7 +7712,13 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - types=types, + loadContents=loadContents, + position=position, + prefix=prefix, + separate=separate, + itemSeparator=itemSeparator, + valueFrom=valueFrom, + shellQuote=shellQuote, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -8369,17 +7735,45 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.types is not None: - r["types"] = save( - self.types, top=False, base_url=base_url, relative_uris=relative_uris + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.position is not None: + r["position"] = save( + self.position, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.prefix is not None: + r["prefix"] = save( + self.prefix, top=False, base_url=base_url, 
relative_uris=relative_uris + ) + if self.separate is not None: + r["separate"] = save( + self.separate, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.itemSeparator is not None: + r["itemSeparator"] = save( + self.itemSeparator, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.valueFrom is not None: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.shellQuote is not None: + r["shellQuote"] = save( + self.shellQuote, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -8390,33 +7784,40 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "types"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "loadContents", + "position", + "prefix", + "separate", + "itemSeparator", + "valueFrom", + "shellQuote", + ] + ) -class SecondaryFileSchema(Saveable): +class CommandOutputBinding(LoadContents): """ - Secondary files are specified using the following micro-DSL for secondary files: + Describes how to generate an output parameter based on the files produced by a CommandLineTool. - * If the value is a string, it is transformed to an object with two fields - `pattern` and `required` - * By default, the value of `required` is `null` - (this indicates default behavior, which may be based on the context) - * If the value ends with a question mark `?` the question mark is - stripped off and the value of the field `required` is set to `False` - * The remaining value is assigned to the field `pattern` + The output parameter value is generated by applying these operations in the following order: - For implementation details and examples, please see - [this section](SchemaSalad.html#Domain_Specific_Language_for_secondary_files) - in the Schema Salad specification. 
+ - glob + - loadContents + - outputEval + - secondaryFiles """ def __init__( self, - pattern: Any, - required: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + glob: Any | None = None, + outputEval: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -8426,18 +7827,23 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.pattern = pattern - self.required = required + self.loadContents = loadContents + self.loadListing = loadListing + self.glob = glob + self.outputEval = outputEval def __eq__(self, other: Any) -> bool: - if isinstance(other, SecondaryFileSchema): + if isinstance(other, CommandOutputBinding): return bool( - self.pattern == other.pattern and self.required == other.required + self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.glob == other.glob + and self.outputEval == other.outputEval ) return False def __hash__(self) -> int: - return hash((self.pattern, self.required)) + return hash((self.loadContents, self.loadListing, self.glob, self.outputEval)) @classmethod def fromDoc( @@ -8445,77 +7851,170 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SecondaryFileSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("pattern") is None: - raise ValidationException("missing required field `pattern`", None, []) - - pattern = load_field( - _doc.get("pattern"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("pattern") - ) + loadContents = 
None + if "loadContents" in _doc: + try: + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `pattern`": - _errors__.append( - ValidationException( - str(e), - None + if str(e) == "missing required field `loadContents`": + _errors__.append( + ValidationException( + str(e), + None + ) ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: + try: + loadListing = _load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") ) - else: - val = _doc.get("pattern") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( - "the `pattern` field is not valid because:", - 
SourceLine(_doc, "pattern", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("loadListing") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + detailed_message=f"the `loadListing` field with value `{val}` " + "is not valid because:", + ) + ) + glob = None + if "glob" in _doc: + try: + glob = _load_field( + _doc.get("glob"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("glob") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `glob`": _errors__.append( ValidationException( - "the `pattern` field is not valid because:", - SourceLine(_doc, "pattern", str), - [e], - detailed_message=f"the `pattern` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - required = None - if "required" in _doc: + else: + val = _doc.get("glob") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `glob` field is not valid because:", + SourceLine(_doc, "glob", str), + 
[ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `glob` field is not valid because:", + SourceLine(_doc, "glob", str), + [e], + detailed_message=f"the `glob` field with value `{val}` " + "is not valid because:", + ) + ) + outputEval = None + if "outputEval" in _doc: try: - required = load_field( - _doc.get("required"), - union_of_None_type_or_booltype_or_ExpressionLoader, + outputEval = _load_field( + _doc.get("outputEval"), + union_of_None_type_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("required") + lc=_doc.get("outputEval") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `required`": + if str(e) == "missing required field `outputEval`": _errors__.append( ValidationException( str(e), @@ -8523,13 +8022,13 @@ def fromDoc( ) ) else: - val = _doc.get("required") + val = _doc.get("outputEval") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `required` field is not valid because:", - SourceLine(_doc, "required", str), + "the `outputEval` field is not valid because:", + SourceLine(_doc, "outputEval", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8541,14 +8040,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `required` field is not valid because:", - SourceLine(_doc, "required", str), + "the `outputEval` field is not valid because:", + SourceLine(_doc, "outputEval", str), [e], - detailed_message=f"the `required` field with value `{val}` " + detailed_message=f"the `outputEval` field with value `{val}` " "is not 
valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -8556,14 +8055,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `pattern`, `required`".format( + "invalid field `{}`, expected one of: `loadContents`, `loadListing`, `glob`, `outputEval`".format( k ), SourceLine(_doc, k, str), @@ -8573,8 +8072,10 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - pattern=pattern, - required=required, + loadContents=loadContents, + loadListing=loadListing, + glob=glob, + outputEval=outputEval, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -8591,13 +8092,30 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.pattern is not None: - r["pattern"] = save( - self.pattern, top=False, base_url=base_url, relative_uris=relative_uris + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) - if self.required is not None: - r["required"] = save( - self.required, top=False, base_url=base_url, relative_uris=relative_uris + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.glob is not None: + r["glob"] = save( + self.glob, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.outputEval is not None: + r["outputEval"] = save( + self.outputEval, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -8608,21 +8126,32 @@ def save( r["$schemas"] = 
self.loadingOptions.schemas return r - attrs = frozenset(["pattern", "required"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["loadContents", "loadListing", "glob", "outputEval"] + ) -class LoadListingRequirement(ProcessRequirement): - """ - Specify the desired behavior for loading the `listing` field of - a Directory object for use by expressions. +class CommandLineBindable(Saveable): + pass - """ + +class CommandInputRecordField(InputRecordField, CommandLineBindable): + name: str def __init__( self, - loadListing: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + name: Any, + type_: Any, + doc: Any | None = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + format: Any | None = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -8632,18 +8161,48 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "LoadListingRequirement" + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.type_ = type_ + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.loadContents = loadContents self.loadListing = loadListing + self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, LoadListingRequirement): + if isinstance(other, CommandInputRecordField): return bool( - self.class_ == other.class_ and self.loadListing == other.loadListing + self.doc == other.doc + and self.name == other.name + and self.type_ == other.type_ + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and 
self.streamable == other.streamable + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.inputBinding == other.inputBinding ) return False def __hash__(self) -> int: - return hash((self.class_, self.loadListing)) + return hash( + ( + self.doc, + self.name, + self.type_, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.loadContents, + self.loadListing, + self.inputBinding, + ) + ) @classmethod def fromDoc( @@ -8651,45 +8210,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "LoadListingRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_LoadListingRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - loadListing = None - if "loadListing" in _doc: + name = None + if "name" in _doc: try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, + name = _load_field( + _doc.get("name"), + uri_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("loadListing") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadListing`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -8697,13 +8240,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadListing") + val = _doc.get("name") if 
error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8715,148 +8258,85 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `loadListing` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `loadListing`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - loadListing=loadListing, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: 
-len(self.class_)]): - uri = f"{p}:{self.class_}" + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "loadListing"]) - - -class EnvironmentDef(Saveable): - """ - Define an environment variable that will be set in the runtime environment - by the workflow platform when executing the command line tool. May be the - result of executing an expression, such as getting a parameter from input. - - """ - - def __init__( - self, - envName: Any, - envValue: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.envName = envName - self.envValue = envValue - - def __eq__(self, other: Any) -> bool: - if isinstance(other, EnvironmentDef): - return bool( - self.envName == other.envName and self.envValue == other.envValue - ) - return False - - def __hash__(self) -> int: - return hash((self.envName, self.envValue)) + _errors__.append(ValidationException("missing name")) + if not __original_name_is_none: + baseuri = cast(str, name) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) - 
@classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "EnvironmentDef": - _doc = copy.copy(doc) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] + if str(e) == "missing required field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) try: - if _doc.get("envName") is None: - raise ValidationException("missing required field `envName`", None, []) + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - envName = load_field( - _doc.get("envName"), - strtype, + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, - lc=_doc.get("envName") + lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = 
parse_errors(str(e)) - if str(e) == "missing required field `envName`": + if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), @@ -8864,13 +8344,13 @@ def fromDoc( ) ) else: - val = _doc.get("envName") + val = _doc.get("type") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `envName` field is not valid because:", - SourceLine(_doc, "envName", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -8882,247 +8362,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `envName` field is not valid because:", - SourceLine(_doc, "envName", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [e], - detailed_message=f"the `envName` field with value `{val}` " + detailed_message=f"the `type` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("envValue") is None: - raise ValidationException("missing required field `envValue`", None, []) - - envValue = load_field( - _doc.get("envValue"), - union_of_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("envValue") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `envValue`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("envValue") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `envValue` field is not valid because:", - SourceLine(_doc, "envValue", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid 
{to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `envValue` field is not valid because:", - SourceLine(_doc, "envValue", str), - [e], - detailed_message=f"the `envValue` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `envName`, `envValue`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - envName=envName, - envValue=envValue, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.envName is not None: - r["envName"] = save( - self.envName, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.envValue is not None: - r["envValue"] = save( - self.envValue, top=False, base_url=base_url, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["envName", "envValue"]) - - -class CommandLineBinding(InputBinding): - """ - - When listed under 
`inputBinding` in the input schema, the term - "value" refers to the corresponding value in the input object. For - binding objects listed in `CommandLineTool.arguments`, the term "value" - refers to the effective value after evaluating `valueFrom`. - - The binding behavior when building the command line depends on the data - type of the value. If there is a mismatch between the type described by - the input schema and the effective value, such as resulting from an - expression evaluation, an implementation must use the data type of the - effective value. - - - **string**: Add `prefix` and the string to the command line. - - - **number**: Add `prefix` and decimal representation to command line. - - - **boolean**: If true, add `prefix` to the command line. If false, add - nothing. - - - **File**: Add `prefix` and the value of - [`File.path`](#File) to the command line. - - - **Directory**: Add `prefix` and the value of - [`Directory.path`](#Directory) to the command line. - - - **array**: If `itemSeparator` is specified, add `prefix` and the join - the array into a single string with `itemSeparator` separating the - items. Otherwise, first add `prefix`, then recursively process - individual elements. - If the array is empty, it does not add anything to command line. - - - **object**: Add `prefix` only, and recursively add object fields for - which `inputBinding` is specified. - - - **null**: Add nothing. 
- - """ - - def __init__( - self, - loadContents: Optional[Any] = None, - position: Optional[Any] = None, - prefix: Optional[Any] = None, - separate: Optional[Any] = None, - itemSeparator: Optional[Any] = None, - valueFrom: Optional[Any] = None, - shellQuote: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.loadContents = loadContents - self.position = position - self.prefix = prefix - self.separate = separate - self.itemSeparator = itemSeparator - self.valueFrom = valueFrom - self.shellQuote = shellQuote - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandLineBinding): - return bool( - self.loadContents == other.loadContents - and self.position == other.position - and self.prefix == other.prefix - and self.separate == other.separate - and self.itemSeparator == other.itemSeparator - and self.valueFrom == other.valueFrom - and self.shellQuote == other.shellQuote - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.loadContents, - self.position, - self.prefix, - self.separate, - self.itemSeparator, - self.valueFrom, - self.shellQuote, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandLineBinding": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - loadContents = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("loadContents") - ) + label = None + if "label" in _doc: + try: + label = _load_field( + 
_doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadContents`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -9130,13 +8391,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadContents") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9148,28 +8409,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `loadContents` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - position = None - if "position" in _doc: + secondaryFiles = None + if "secondaryFiles" in _doc: try: - position = load_field( - _doc.get("position"), - union_of_None_type_or_inttype_or_ExpressionLoader, + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, loadingOptions, - lc=_doc.get("position") + lc=_doc.get("secondaryFiles") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `position`": + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( str(e), 
@@ -9177,13 +8438,13 @@ def fromDoc( ) ) else: - val = _doc.get("position") + val = _doc.get("secondaryFiles") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `position` field is not valid because:", - SourceLine(_doc, "position", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9195,28 +8456,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `position` field is not valid because:", - SourceLine(_doc, "position", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [e], - detailed_message=f"the `position` field with value `{val}` " + detailed_message=f"the `secondaryFiles` field with value `{val}` " "is not valid because:", ) ) - prefix = None - if "prefix" in _doc: + streamable = None + if "streamable" in _doc: try: - prefix = load_field( - _doc.get("prefix"), - union_of_None_type_or_strtype, + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("prefix") + lc=_doc.get("streamable") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `prefix`": + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( str(e), @@ -9224,13 +8485,13 @@ def fromDoc( ) ) else: - val = _doc.get("prefix") + val = _doc.get("streamable") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `prefix` field is not valid because:", - SourceLine(_doc, "prefix", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [ValidationException(f"Value is a {val_type}, " f"but valid 
{to_print} for this field " f"{verb_tensage} {error_message}", @@ -9242,28 +8503,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `prefix` field is not valid because:", - SourceLine(_doc, "prefix", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [e], - detailed_message=f"the `prefix` field with value `{val}` " + detailed_message=f"the `streamable` field with value `{val}` " "is not valid because:", ) ) - separate = None - if "separate" in _doc: + format = None + if "format" in _doc: try: - separate = load_field( - _doc.get("separate"), - union_of_None_type_or_booltype, + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, - lc=_doc.get("separate") + lc=_doc.get("format") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `separate`": + if str(e) == "missing required field `format`": _errors__.append( ValidationException( str(e), @@ -9271,13 +8532,13 @@ def fromDoc( ) ) else: - val = _doc.get("separate") + val = _doc.get("format") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `separate` field is not valid because:", - SourceLine(_doc, "separate", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9289,28 +8550,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `separate` field is not valid because:", - SourceLine(_doc, "separate", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [e], - detailed_message=f"the `separate` field with value `{val}` " + detailed_message=f"the `format` field with value `{val}` " "is 
not valid because:", ) ) - itemSeparator = None - if "itemSeparator" in _doc: + loadContents = None + if "loadContents" in _doc: try: - itemSeparator = load_field( - _doc.get("itemSeparator"), - union_of_None_type_or_strtype, + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("itemSeparator") + lc=_doc.get("loadContents") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `itemSeparator`": + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( str(e), @@ -9318,13 +8579,13 @@ def fromDoc( ) ) else: - val = _doc.get("itemSeparator") + val = _doc.get("loadContents") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `itemSeparator` field is not valid because:", - SourceLine(_doc, "itemSeparator", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9336,28 +8597,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `itemSeparator` field is not valid because:", - SourceLine(_doc, "itemSeparator", str), + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), [e], - detailed_message=f"the `itemSeparator` field with value `{val}` " + detailed_message=f"the `loadContents` field with value `{val}` " "is not valid because:", ) ) - valueFrom = None - if "valueFrom" in _doc: + loadListing = None + if "loadListing" in _doc: try: - valueFrom = load_field( - _doc.get("valueFrom"), - union_of_None_type_or_strtype_or_ExpressionLoader, + loadListing = _load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, baseuri, loadingOptions, - lc=_doc.get("valueFrom") 
+ lc=_doc.get("loadListing") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `valueFrom`": + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( str(e), @@ -9365,13 +8626,13 @@ def fromDoc( ) ) else: - val = _doc.get("valueFrom") + val = _doc.get("loadListing") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", - SourceLine(_doc, "valueFrom", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9383,28 +8644,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", - SourceLine(_doc, "valueFrom", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [e], - detailed_message=f"the `valueFrom` field with value `{val}` " + detailed_message=f"the `loadListing` field with value `{val}` " "is not valid because:", ) ) - shellQuote = None - if "shellQuote" in _doc: + inputBinding = None + if "inputBinding" in _doc: try: - shellQuote = load_field( - _doc.get("shellQuote"), - union_of_None_type_or_booltype, + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("shellQuote") + lc=_doc.get("inputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `shellQuote`": + if str(e) == "missing required field `inputBinding`": _errors__.append( ValidationException( str(e), @@ -9412,13 +8673,13 @@ def fromDoc( ) ) else: - val = _doc.get("shellQuote") + val = _doc.get("inputBinding") if error_message != 
str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `shellQuote` field is not valid because:", - SourceLine(_doc, "shellQuote", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9430,14 +8691,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `shellQuote` field is not valid because:", - SourceLine(_doc, "shellQuote", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [e], - detailed_message=f"the `shellQuote` field with value `{val}` " + detailed_message=f"the `inputBinding` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -9445,14 +8706,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`, `inputBinding`".format( k ), SourceLine(_doc, k, str), @@ -9462,16 +8723,20 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( + doc=doc, + name=name, + type_=type_, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, loadContents=loadContents, - position=position, - prefix=prefix, - separate=separate, - itemSeparator=itemSeparator, - valueFrom=valueFrom, - 
shellQuote=shellQuote, + loadListing=loadListing, + inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -9485,44 +8750,57 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.position is not None: - r["position"] = save( - self.position, top=False, base_url=base_url, relative_uris=relative_uris + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.prefix is not None: - r["prefix"] = save( - self.prefix, top=False, base_url=base_url, relative_uris=relative_uris + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.separate is not None: - r["separate"] = save( - self.separate, top=False, base_url=base_url, relative_uris=relative_uris + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.name, + relative_uris=relative_uris, ) - if self.itemSeparator is not None: - r["itemSeparator"] = save( - self.itemSeparator, + if self.streamable is not None: + r["streamable"] = save( + self.streamable, top=False, - base_url=base_url, + base_url=self.name, relative_uris=relative_uris, ) - if self.valueFrom is not None: - r["valueFrom"] = save( - self.valueFrom, + if self.format is not None: + u = save_relative_uri(self.format, self.name, True, None, relative_uris) + r["format"] = u + if 
self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, top=False, - base_url=base_url, + base_url=self.name, relative_uris=relative_uris, ) - if self.shellQuote is not None: - r["shellQuote"] = save( - self.shellQuote, + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, top=False, - base_url=base_url, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=self.name, relative_uris=relative_uris, ) @@ -9534,42 +8812,37 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", "loadContents", - "position", - "prefix", - "separate", - "itemSeparator", - "valueFrom", - "shellQuote", + "loadListing", + "inputBinding", ] ) -class CommandOutputBinding(LoadContents): - """ - Describes how to generate an output parameter based on the files produced - by a CommandLineTool. 
- - The output parameter value is generated by applying these operations in the - following order: - - - glob - - loadContents - - outputEval - - secondaryFiles - - """ +class CommandInputRecordSchema( + InputRecordSchema, CommandInputSchema, CommandLineBindable +): + name: str def __init__( self, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - glob: Optional[Any] = None, - outputEval: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + type_: Any, + fields: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -9579,23 +8852,36 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.loadContents = loadContents - self.loadListing = loadListing - self.glob = glob - self.outputEval = outputEval + self.fields = fields + self.type_ = type_ + self.label = label + self.doc = doc + self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputBinding): + if isinstance(other, CommandInputRecordSchema): return bool( - self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.glob == other.glob - and self.outputEval == other.outputEval + self.fields == other.fields + and self.type_ == other.type_ + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding ) return False def __hash__(self) -> int: - return hash((self.loadContents, self.loadListing, self.glob, self.outputEval)) + return hash( + ( + self.fields, + self.type_, + self.label, 
+ self.doc, + self.name, + self.inputBinding, + ) + ) @classmethod def fromDoc( @@ -9603,29 +8889,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputBinding": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - loadContents = None - if "loadContents" in _doc: + name = None + if "name" in _doc: try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, + name = _load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("loadContents") + lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadContents`": + if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), @@ -9633,13 +8919,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadContents") + val = _doc.get("name") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9651,28 +8937,37 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `name` field is not valid because:", + SourceLine(_doc, "name", str), [e], - detailed_message=f"the `loadContents` field with value `{val}` " + detailed_message=f"the `name` field with value `{val}` " "is not valid because:", ) ) - loadListing = None - if "loadListing" in _doc: - try: - 
loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = cast(str, name) + fields = None + if "fields" in _doc: + try: + fields = _load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, + baseuri, loadingOptions, - lc=_doc.get("loadListing") + lc=_doc.get("fields") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadListing`": + if str(e) == "missing required field `fields`": _errors__.append( ValidationException( str(e), @@ -9680,13 +8975,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadListing") + val = _doc.get("fields") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9698,28 +8993,76 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), [e], - detailed_message=f"the `loadListing` field with value `{val}` " + detailed_message=f"the `fields` field with value `{val}` " "is not valid because:", ) ) - glob = None - if "glob" in _doc: + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + 
loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", + ) + ) + label = None + if "label" in _doc: try: - glob = load_field( - _doc.get("glob"), - union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("glob") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `glob`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -9727,13 +9070,13 @@ def fromDoc( ) ) else: - val = _doc.get("glob") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `glob` field is not valid because:", - SourceLine(_doc, "glob", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " 
f"{verb_tensage} {error_message}", @@ -9745,28 +9088,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `glob` field is not valid because:", - SourceLine(_doc, "glob", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `glob` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - outputEval = None - if "outputEval" in _doc: + doc = None + if "doc" in _doc: try: - outputEval = load_field( - _doc.get("outputEval"), - union_of_None_type_or_ExpressionLoader, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("outputEval") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputEval`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -9774,13 +9117,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputEval") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `outputEval` field is not valid because:", - SourceLine(_doc, "outputEval", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -9792,14 +9135,61 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `outputEval` field is not valid because:", - SourceLine(_doc, "outputEval", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `outputEval` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + inputBinding = None + if "inputBinding" in _doc: + try: + 
inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + detailed_message=f"the `inputBinding` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -9807,14 +9197,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `loadContents`, `loadListing`, `glob`, `outputEval`".format( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`, `inputBinding`".format( k ), SourceLine(_doc, k, str), @@ -9824,13 +9214,16 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - loadContents=loadContents, - 
loadListing=loadListing, - glob=glob, - outputEval=outputEval, + fields=fields, + type_=type_, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( @@ -9844,29 +9237,30 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.name is not None: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.glob is not None: - r["glob"] = save( - self.glob, top=False, base_url=base_url, relative_uris=relative_uris + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.outputEval is not None: - r["outputEval"] = save( - self.outputEval, + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, top=False, - base_url=base_url, + base_url=self.name, relative_uris=relative_uris, ) @@ -9878,30 +9272,24 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["loadContents", "loadListing", "glob", "outputEval"]) - - -class CommandLineBindable(Saveable): - pass + attrs: ClassVar[Collection[str]] = frozenset( + ["fields", "type", 
"label", "doc", "name", "inputBinding"] + ) -class CommandInputRecordField(InputRecordField, CommandLineBindable): +class CommandInputEnumSchema(InputEnumSchema, CommandInputSchema, CommandLineBindable): name: str def __init__( self, - name: Any, + symbols: Any, type_: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + name: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -9911,29 +9299,21 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.doc = doc self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols self.type_ = type_ self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing + self.doc = doc self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputRecordField): + if isinstance(other, CommandInputEnumSchema): return bool( - self.doc == other.doc - and self.name == other.name + self.name == other.name + and self.symbols == other.symbols and self.type_ == other.type_ and self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing + and self.doc == 
other.doc and self.inputBinding == other.inputBinding ) return False @@ -9941,15 +9321,11 @@ def __eq__(self, other: Any) -> bool: def __hash__(self) -> int: return hash( ( - self.doc, self.name, + self.symbols, self.type_, self.label, - self.secondaryFiles, - self.streamable, - self.format, - self.loadContents, - self.loadListing, + self.doc, self.inputBinding, ) ) @@ -9960,8 +9336,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -9971,9 +9347,9 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), - uri_strtype_True_False_None_None, + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") @@ -10021,63 +9397,64 @@ def fromDoc( if docRoot is not None: name = docRoot else: - _errors__.append(ValidationException("missing name")) + name = "_:" + str(_uuid__.uuid4()) if not __original_name_is_none: baseuri = cast(str, name) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + symbols = _load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("symbols") + ) - if str(e) == "missing required field `doc`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `symbols`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("symbols") + if error_message != str(e): 
+ val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + detailed_message=f"the `symbols` field with value `{val}` " + "is not valid because:", ) + ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + typedsl_Enum_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -10122,7 +9499,7 @@ def fromDoc( label = None if 
"label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -10166,21 +9543,21 @@ def fromDoc( "is not valid because:", ) ) - secondaryFiles = None - if "secondaryFiles" in _doc: + doc = None + if "doc" in _doc: try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("secondaryFiles") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `secondaryFiles`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -10188,13 +9565,13 @@ def fromDoc( ) ) else: - val = _doc.get("secondaryFiles") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -10206,28 +9583,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - streamable = None - if "streamable" in _doc: + inputBinding = None + if "inputBinding" in _doc: try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, 
+ inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("streamable") + lc=_doc.get("inputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `streamable`": + if str(e) == "missing required field `inputBinding`": _errors__.append( ValidationException( str(e), @@ -10235,13 +9612,13 @@ def fromDoc( ) ) else: - val = _doc.get("streamable") + val = _doc.get("inputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -10253,235 +9630,43 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [e], - detailed_message=f"the `streamable` field with value `{val}` " + detailed_message=f"the `inputBinding` field with value `{val}` " "is not valid because:", ) ) - format = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, - baseuri, - loadingOptions, - lc=_doc.get("format") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `format`": + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - str(e), - None - ) + 
ValidationException("mapping with implicit null key") ) - else: - val = _doc.get("format") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [e], - detailed_message=f"the `format` field with value `{val}` " - "is not valid because:", - ) - ) - loadContents = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("loadContents") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `loadContents`": - _errors__.append( - ValidationException( - str(e), - None - ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) + extension_fields[ex] = _doc[k] else: - val = _doc.get("loadContents") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `loadContents` field is not valid 
because:", - SourceLine(_doc, "loadContents", str), - [e], - detailed_message=f"the `loadContents` field with value `{val}` " - "is not valid because:", - ) - ) - loadListing = None - if "loadListing" in _doc: - try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, - baseuri, - loadingOptions, - lc=_doc.get("loadListing") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("loadListing") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), - [e], - detailed_message=f"the `loadListing` field with value `{val}` " - "is not valid because:", - ) - ) - inputBinding = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputBinding") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - 
ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - detailed_message=f"the `inputBinding` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`, `inputBinding`".format( - k - ), - SourceLine(_doc, k, str), + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - doc=doc, name=name, + symbols=symbols, type_=type_, label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, - loadContents=loadContents, - loadListing=loadListing, + doc=doc, inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, @@ -10503,10 +9688,9 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, 
relative_uris=relative_uris - ) + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris @@ -10515,36 +9699,9 @@ def save( r["label"] = save( self.label, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) - if self.format is not None: - u = save_relative_uri(self.format, self.name, True, None, relative_uris) - r["format"] = u - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) - if self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=self.name, - relative_uris=relative_uris, + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) if self.inputBinding is not None: r["inputBinding"] = save( @@ -10562,37 +9719,26 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "doc", - "name", - "type", - "label", - "secondaryFiles", - "streamable", - "format", - "loadContents", - "loadListing", - "inputBinding", - ] + attrs: ClassVar[Collection[str]] = frozenset( + ["name", "symbols", "type", "label", "doc", "inputBinding"] ) -class CommandInputRecordSchema( - InputRecordSchema, CommandInputSchema, CommandLineBindable +class CommandInputArraySchema( + InputArraySchema, CommandInputSchema, CommandLineBindable ): name: str def __init__( self, + items: Any, type_: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: 
Optional[Any] = None, - name: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -10602,7 +9748,7 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.fields = fields + self.items = items self.type_ = type_ self.label = label self.doc = doc @@ -10610,9 +9756,9 @@ def __init__( self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputRecordSchema): + if isinstance(other, CommandInputArraySchema): return bool( - self.fields == other.fields + self.items == other.items and self.type_ == other.type_ and self.label == other.label and self.doc == other.doc @@ -10623,14 +9769,7 @@ def __eq__(self, other: Any) -> bool: def __hash__(self) -> int: return hash( - ( - self.fields, - self.type_, - self.label, - self.doc, - self.name, - self.inputBinding, - ) + (self.items, self.type_, self.label, self.doc, self.name, self.inputBinding) ) @classmethod @@ -10639,8 +9778,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputRecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -10650,7 +9789,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -10703,60 +9842,61 @@ def fromDoc( name = "_:" + str(_uuid__.uuid4()) if not __original_name_is_none: baseuri = cast(str, name) - fields = None - if "fields" in _doc: - try: - fields = 
load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, - baseuri, - loadingOptions, - lc=_doc.get("fields") - ) + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + items = _load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) - if str(e) == "missing required field `fields`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("fields") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but 
valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - detailed_message=f"the `fields` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", ) + ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_Record_nameLoader_2, + typedsl_Array_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -10801,7 +9941,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -10848,7 +9988,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -10895,7 +10035,7 @@ def fromDoc( inputBinding = None if "inputBinding" in _doc: try: - inputBinding = load_field( + inputBinding = _load_field( _doc.get("inputBinding"), union_of_None_type_or_CommandLineBindingLoader, baseuri, @@ -10939,7 +10079,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -10947,14 +10087,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`, 
`inputBinding`".format( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`, `inputBinding`".format( k ), SourceLine(_doc, k, str), @@ -10964,7 +10104,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - fields=fields, + items=items, type_=type_, label=label, doc=doc, @@ -10990,10 +10130,9 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.fields is not None: - r["fields"] = save( - self.fields, top=False, base_url=self.name, relative_uris=relative_uris - ) + if self.items is not None: + u = save_relative_uri(self.items, self.name, False, 2, relative_uris) + r["items"] = u if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris @@ -11022,22 +10161,26 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type", "label", "doc", "name", "inputBinding"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["items", "type", "label", "doc", "name", "inputBinding"] + ) -class CommandInputEnumSchema(InputEnumSchema, CommandInputSchema, CommandLineBindable): +class CommandOutputRecordField(OutputRecordField): name: str def __init__( self, - symbols: Any, + name: Any, type_: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + doc: Any | None = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + format: Any | None = None, + outputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -11047,34 +10190,40 @@ def __init__( 
self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() + self.doc = doc self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.symbols = symbols self.type_ = type_ self.label = label - self.doc = doc - self.inputBinding = inputBinding + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.outputBinding = outputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputEnumSchema): + if isinstance(other, CommandOutputRecordField): return bool( - self.name == other.name - and self.symbols == other.symbols + self.doc == other.doc + and self.name == other.name and self.type_ == other.type_ and self.label == other.label - and self.doc == other.doc - and self.inputBinding == other.inputBinding + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.outputBinding == other.outputBinding ) return False def __hash__(self) -> int: return hash( ( + self.doc, self.name, - self.symbols, self.type_, self.label, - self.doc, - self.inputBinding, + self.secondaryFiles, + self.streamable, + self.format, + self.outputBinding, ) ) @@ -11084,8 +10233,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputEnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -11095,9 +10244,9 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, + uri_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") @@ -11145,64 +10294,63 @@ def fromDoc( if docRoot is not None: name = docRoot else: - name = "_:" + str(_uuid__.uuid4()) + _errors__.append(ValidationException("missing name")) if not __original_name_is_none: baseuri = cast(str, name) - try: - if 
_doc.get("symbols") is None: - raise ValidationException("missing required field `symbols`", None, []) - - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("symbols") - ) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `symbols`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("symbols") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [e], - detailed_message=f"the `symbols` field with value `{val}` " - "is not valid because:", + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this 
field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) ) - ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_Enum_nameLoader_2, + typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -11247,7 +10395,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -11291,21 +10439,21 @@ def fromDoc( "is not valid because:", ) ) - doc = None - if "doc" in _doc: + secondaryFiles = None + if "secondaryFiles" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("secondaryFiles") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( str(e), @@ -11313,13 +10461,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("secondaryFiles") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not 
valid because:", - SourceLine(_doc, "doc", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -11331,28 +10479,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `secondaryFiles` field with value `{val}` " "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: + streamable = None + if "streamable" in _doc: try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("inputBinding") + lc=_doc.get("streamable") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `inputBinding`": + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( str(e), @@ -11360,13 +10508,13 @@ def fromDoc( ) ) else: - val = _doc.get("inputBinding") + val = _doc.get("streamable") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -11378,14 +10526,108 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `inputBinding` field is not valid because:", - 
SourceLine(_doc, "inputBinding", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [e], - detailed_message=f"the `inputBinding` field with value `{val}` " + detailed_message=f"the `streamable` field with value `{val}` " + "is not valid because:", + ) + ) + format = None + if "format" in _doc: + try: + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + outputBinding = None + if "outputBinding" in _doc: + try: + outputBinding = _load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputBinding") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputBinding`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = 
_doc.get("outputBinding") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + detailed_message=f"the `outputBinding` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -11393,14 +10635,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`, `inputBinding`".format( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `outputBinding`".format( k ), SourceLine(_doc, k, str), @@ -11410,12 +10652,14 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( + doc=doc, name=name, - symbols=symbols, type_=type_, label=label, - doc=doc, - inputBinding=inputBinding, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + outputBinding=outputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -11436,9 +10680,10 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, 
relative_uris) r["name"] = u - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) - r["symbols"] = u + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris + ) if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris @@ -11447,13 +10692,26 @@ def save( r["label"] = save( self.label, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.name, + relative_uris=relative_uris, ) - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.name, + relative_uris=relative_uris, + ) + if self.format is not None: + u = save_relative_uri(self.format, self.name, True, None, relative_uris) + r["format"] = u + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, top=False, base_url=self.name, relative_uris=relative_uris, @@ -11467,24 +10725,32 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["name", "symbols", "type", "label", "doc", "inputBinding"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "outputBinding", + ] + ) -class CommandInputArraySchema( - InputArraySchema, CommandInputSchema, CommandLineBindable -): +class CommandOutputRecordSchema(OutputRecordSchema): name: str def __init__( self, - items: Any, type_: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: 
Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + fields: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -11494,29 +10760,25 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.items = items + self.fields = fields self.type_ = type_ self.label = label self.doc = doc self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputArraySchema): + if isinstance(other, CommandOutputRecordSchema): return bool( - self.items == other.items + self.fields == other.fields and self.type_ == other.type_ and self.label == other.label and self.doc == other.doc and self.name == other.name - and self.inputBinding == other.inputBinding ) return False def __hash__(self) -> int: - return hash( - (self.items, self.type_, self.label, self.doc, self.name, self.inputBinding) - ) + return hash((self.fields, self.type_, self.label, self.doc, self.name)) @classmethod def fromDoc( @@ -11524,8 +10786,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputArraySchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -11535,7 +10797,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -11588,61 +10850,60 @@ def fromDoc( name = "_:" + str(_uuid__.uuid4()) if not __original_name_is_none: baseuri = cast(str, name) - try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) - - 
items = load_field( - _doc.get("items"), - uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("items") - ) + fields = None + if "fields" in _doc: + try: + fields = _load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, + baseuri, + loadingOptions, + lc=_doc.get("fields") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("items") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `fields`": _errors__.append( ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [e], - detailed_message=f"the `items` field with value `{val}` " - "is not valid because:", + val = _doc.get("fields") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [ValidationException(f"Value is 
a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + detailed_message=f"the `fields` field with value `{val}` " + "is not valid because:", + ) ) - ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_Array_nameLoader_2, + typedsl_Record_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -11687,7 +10948,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -11734,7 +10995,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -11778,54 +11039,7 @@ def fromDoc( "is not valid because:", ) ) - inputBinding = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputBinding") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - 
f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - detailed_message=f"the `inputBinding` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -11833,14 +11047,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( k ), SourceLine(_doc, k, str), @@ -11850,12 +11064,11 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - items=items, + fields=fields, type_=type_, label=label, doc=doc, name=name, - inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -11876,9 +11089,10 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.items is not None: - u = save_relative_uri(self.items, self.name, False, 2, relative_uris) - r["items"] = u + if self.fields is not None: + r["fields"] = save( + self.fields, top=False, base_url=self.name, relative_uris=relative_uris + ) if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris @@ -11891,13 +11105,6 @@ def save( r["doc"] = save( self.doc, top=False, base_url=self.name, 
relative_uris=relative_uris ) - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) # top refers to the directory level if top: @@ -11907,24 +11114,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["items", "type", "label", "doc", "name", "inputBinding"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["fields", "type", "label", "doc", "name"] + ) -class CommandOutputRecordField(OutputRecordField): +class CommandOutputEnumSchema(OutputEnumSchema): name: str def __init__( self, - name: Any, + symbols: Any, type_: Any, - doc: Optional[Any] = None, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - format: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + name: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -11934,42 +11140,25 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.doc = doc self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) + self.symbols = symbols self.type_ = type_ self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.format = format - self.outputBinding = outputBinding + self.doc = doc def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputRecordField): + if isinstance(other, CommandOutputEnumSchema): return bool( - self.doc == other.doc - and self.name == other.name + self.name == other.name + and self.symbols == other.symbols and self.type_ == other.type_ and self.label == other.label - and 
self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.format == other.format - and self.outputBinding == other.outputBinding + and self.doc == other.doc ) return False def __hash__(self) -> int: - return hash( - ( - self.doc, - self.name, - self.type_, - self.label, - self.secondaryFiles, - self.streamable, - self.format, - self.outputBinding, - ) - ) + return hash((self.name, self.symbols, self.type_, self.label, self.doc)) @classmethod def fromDoc( @@ -11977,8 +11166,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputRecordField": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -11988,9 +11177,9 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), - uri_strtype_True_False_None_None, + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") @@ -12038,63 +11227,64 @@ def fromDoc( if docRoot is not None: name = docRoot else: - _errors__.append(ValidationException("missing name")) + name = "_:" + str(_uuid__.uuid4()) if not __original_name_is_none: baseuri = cast(str, name) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) + try: + if _doc.get("symbols") is None: + raise ValidationException("missing required field `symbols`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + symbols = _load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("symbols") + ) - if str(e) == "missing required field `doc`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field 
`symbols`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("symbols") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + detailed_message=f"the `symbols` field with value `{val}` " + "is not valid because:", ) + ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - 
typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + typedsl_Enum_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -12139,7 +11329,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -12183,162 +11373,21 @@ def fromDoc( "is not valid because:", ) ) - secondaryFiles = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - lc=_doc.get("secondaryFiles") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `secondaryFiles`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("secondaryFiles") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), - [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " - "is not valid because:", - ) - ) - streamable = None - if "streamable" in _doc: - try: - 
streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("streamable") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `streamable`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("streamable") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - detailed_message=f"the `streamable` field with value `{val}` " - "is not valid because:", - ) - ) - format = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, - baseuri, - loadingOptions, - lc=_doc.get("format") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `format`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("format") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a 
{val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), - [e], - detailed_message=f"the `format` field with value `{val}` " - "is not valid because:", - ) - ) - outputBinding = None - if "outputBinding" in _doc: + doc = None + if "doc" in _doc: try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("outputBinding") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputBinding`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -12346,13 +11395,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputBinding") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12364,14 +11413,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `outputBinding` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in 
_doc.keys(): if k not in cls.attrs: if not k: @@ -12379,14 +11428,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `outputBinding`".format( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( k ), SourceLine(_doc, k, str), @@ -12396,14 +11445,11 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - doc=doc, name=name, + symbols=symbols, type_=type_, label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - format=format, - outputBinding=outputBinding, + doc=doc, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -12424,10 +11470,9 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris - ) + if self.symbols is not None: + u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + r["symbols"] = u if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris @@ -12436,29 +11481,9 @@ def save( r["label"] = save( self.label, top=False, base_url=self.name, relative_uris=relative_uris ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.name, - relative_uris=relative_uris, - ) - if self.format is not None: - u = save_relative_uri(self.format, self.name, True, 
None, relative_uris) - r["format"] = u - if self.outputBinding is not None: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=self.name, - relative_uris=relative_uris, + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) # top refers to the directory level @@ -12469,32 +11494,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "doc", - "name", - "type", - "label", - "secondaryFiles", - "streamable", - "format", - "outputBinding", - ] + attrs: ClassVar[Collection[str]] = frozenset( + ["name", "symbols", "type", "label", "doc"] ) -class CommandOutputRecordSchema(OutputRecordSchema): +class CommandOutputArraySchema(OutputArraySchema): name: str def __init__( self, + items: Any, type_: Any, - fields: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + doc: Any | None = None, + name: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -12504,16 +11520,16 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.fields = fields + self.items = items self.type_ = type_ self.label = label self.doc = doc self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputRecordSchema): + if isinstance(other, CommandOutputArraySchema): return bool( - self.fields == other.fields + self.items == other.items and self.type_ == other.type_ and self.label == other.label and self.doc == other.doc @@ -12522,7 +11538,7 @@ def __eq__(self, other: Any) -> bool: return False def __hash__(self) 
-> int: - return hash((self.fields, self.type_, self.label, self.doc, self.name)) + return hash((self.items, self.type_, self.label, self.doc, self.name)) @classmethod def fromDoc( @@ -12530,8 +11546,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputRecordSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -12541,7 +11557,7 @@ def fromDoc( name = None if "name" in _doc: try: - name = load_field( + name = _load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -12594,60 +11610,61 @@ def fromDoc( name = "_:" + str(_uuid__.uuid4()) if not __original_name_is_none: baseuri = cast(str, name) - fields = None - if "fields" in _doc: - try: - fields = load_field( - _doc.get("fields"), - idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, - baseuri, - loadingOptions, - lc=_doc.get("fields") - ) + try: + if _doc.get("items") is None: + raise ValidationException("missing required field `items`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + items = _load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None, + baseuri, + loadingOptions, + lc=_doc.get("items") + ) - if str(e) == "missing required field `fields`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `items`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("items") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) 
_errors__.append( ValidationException( - str(e), - None + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("fields") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `fields` field is not valid because:", - SourceLine(_doc, "fields", str), - [e], - detailed_message=f"the `fields` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, "items", str), + [e], + detailed_message=f"the `items` field with value `{val}` " + "is not valid because:", ) + ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_Record_nameLoader_2, + typedsl_Array_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -12692,7 +11709,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -12739,7 +11756,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, 
baseuri, @@ -12783,7 +11800,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -12791,14 +11808,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( k ), SourceLine(_doc, k, str), @@ -12808,7 +11825,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - fields=fields, + items=items, type_=type_, label=label, doc=doc, @@ -12833,10 +11850,9 @@ def save( if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.fields is not None: - r["fields"] = save( - self.fields, top=False, base_url=self.name, relative_uris=relative_uris - ) + if self.items is not None: + u = save_relative_uri(self.items, self.name, False, 2, relative_uris) + r["items"] = u if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris @@ -12858,21 +11874,34 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["fields", "type", "label", "doc", "name"]) + attrs: ClassVar[Collection[str]] = frozenset( + ["items", "type", "label", "doc", "name"] + ) -class CommandOutputEnumSchema(OutputEnumSchema): - name: str +class CommandInputParameter(InputParameter): + """ + An input parameter for a CommandLineTool. 
+ + """ + + id: str def __init__( self, - symbols: Any, + id: Any, type_: Any, - name: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + default: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -12882,25 +11911,51 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - self.symbols = symbols - self.type_ = type_ self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type_ = type_ + self.inputBinding = inputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputEnumSchema): + if isinstance(other, CommandInputParameter): return bool( - self.name == other.name - and self.symbols == other.symbols - and self.type_ == other.type_ - and self.label == other.label + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type_ == other.type_ + and self.inputBinding == 
other.inputBinding ) return False def __hash__(self) -> int: - return hash((self.name, self.symbols, self.type_, self.label, self.doc)) + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type_, + self.inputBinding, + ) + ) @classmethod def fromDoc( @@ -12908,29 +11963,29 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputEnumSchema": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - name = None - if "name" in _doc: + id = None + if "id" in _doc: try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("name") + lc=_doc.get("id") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `id`": _errors__.append( ValidationException( str(e), @@ -12938,13 +11993,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("id") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -12956,133 +12011,131 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), [e], - 
detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `id` field with value `{val}` " "is not valid because:", ) ) - __original_name_is_none = name is None - if name is None: + __original_id_is_none = id is None + if id is None: if docRoot is not None: - name = docRoot + id = docRoot else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - try: - if _doc.get("symbols") is None: - raise ValidationException("missing required field `symbols`", None, []) - - symbols = load_field( - _doc.get("symbols"), - uri_array_of_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("symbols") - ) + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `symbols`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("symbols") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `label`": _errors__.append( ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `symbols` field is not valid because:", - SourceLine(_doc, "symbols", str), - 
[e], - detailed_message=f"the `symbols` field with value `{val}` " - "is not valid because:", + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_Enum_nameLoader_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is 
a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", + val = _doc.get("secondaryFiles") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - label = None - if "label" in _doc: + else: + _errors__.append( + ValidationException( + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + detailed_message=f"the `secondaryFiles` field with value `{val}` " + "is not valid because:", + ) + ) + streamable = None + if "streamable" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("streamable") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( str(e), @@ -13090,13 +12143,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("streamable") if error_message != str(e): val_type = 
convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13108,17 +12161,17 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `streamable` field with value `{val}` " "is not valid because:", ) ) doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -13162,153 +12215,115 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + format = None + if "format" in _doc: + try: + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ValidationException( + str(e), + None + ) ) - extension_fields[ex] = _doc[k] else: + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + 
[ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) + ) + loadContents = None + if "loadContents" in _doc: + try: + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( - k - ), - SourceLine(_doc, k, str), + str(e), + None ) ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - name=name, - symbols=symbols, - type_=type_, - label=label, - doc=doc, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) - r["symbols"] = u - if 
self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["name", "symbols", "type", "label", "doc"]) - - -class CommandOutputArraySchema(OutputArraySchema): - name: str - - def __init__( - self, - items: Any, - type_: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - name: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.items = items - self.type_ = type_ - self.label = label - self.doc = doc - self.name = name if name is not None else "_:" + str(_uuid__.uuid4()) - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandOutputArraySchema): - return bool( - self.items == other.items - and self.type_ == other.type_ - and self.label == other.label - and self.doc == other.doc - and self.name == other.name - ) - return False - - def __hash__(self) -> int: - return hash((self.items, self.type_, self.label, self.doc, self.name)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputArraySchema": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data 
- _doc.lc.filename = doc.lc.filename - _errors__ = [] - name = None - if "name" in _doc: - try: - name = load_field( - _doc.get("name"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("name") - ) + else: + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: + try: + loadListing = _load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + lc=_doc.get("loadListing") + ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `name`": + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( str(e), @@ -13316,13 +12331,13 @@ def fromDoc( ) ) else: - val = _doc.get("name") + val = _doc.get("loadListing") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} 
{error_message}", @@ -13334,77 +12349,67 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `name` field is not valid because:", - SourceLine(_doc, "name", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [e], - detailed_message=f"the `name` field with value `{val}` " + detailed_message=f"the `loadListing` field with value `{val}` " "is not valid because:", ) ) + default = None + if "default" in _doc: + try: + default = _load_field( + _doc.get("default"), + union_of_None_type_or_CWLObjectTypeLoader, + baseuri, + loadingOptions, + lc=_doc.get("default") + ) - __original_name_is_none = name is None - if name is None: - if docRoot is not None: - name = docRoot - else: - name = "_:" + str(_uuid__.uuid4()) - if not __original_name_is_none: - baseuri = cast(str, name) - try: - if _doc.get("items") is None: - raise ValidationException("missing required field `items`", None, []) - - items = load_field( - _doc.get("items"), - uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None, - baseuri, - loadingOptions, - lc=_doc.get("items") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `items`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("items") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `default`": _errors__.append( ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [ValidationException(f"Value is a 
{val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `items` field is not valid because:", - SourceLine(_doc, "items", str), - [e], - detailed_message=f"the `items` field with value `{val}` " - "is not valid because:", + val = _doc.get("default") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), + [e], + detailed_message=f"the `default` field with value `{val}` " + "is not valid because:", + ) ) - ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), - typedsl_Array_nameLoader_2, + typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") @@ -13446,21 +12451,21 @@ def fromDoc( "is not valid because:", ) ) - label = None - if "label" in _doc: + inputBinding = None + if "inputBinding" in _doc: try: - label = load_field( - _doc.get("label"), - 
union_of_None_type_or_strtype, + inputBinding = _load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("inputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `inputBinding`": _errors__.append( ValidationException( str(e), @@ -13468,13 +12473,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("inputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -13486,76 +12491,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `inputBinding` field is not valid because:", + SourceLine(_doc, "inputBinding", str), [e], - detailed_message=f"the `label` field with value `{val}` " + detailed_message=f"the `inputBinding` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid 
because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( k ), SourceLine(_doc, k, str), @@ -13565,15 +12523,21 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - items=items, - type_=type_, label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, doc=doc, - name=name, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type_=type_, + inputBinding=inputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -13587,23 +12551,62 @@ def save( else: for ef in 
self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: - u = save_relative_uri(self.name, base_url, True, None, relative_uris) - r["name"] = u - if self.items is not None: - u = save_relative_uri(self.items, self.name, False, 2, relative_uris) - r["items"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris - ) + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u if self.label is not None: r["label"] = save( - self.label, top=False, base_url=self.name, relative_uris=relative_uris + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, ) if self.doc is not None: r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.default is not None: + r["default"] = save( + self.default, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.inputBinding is not None: + r["inputBinding"] = save( + self.inputBinding, + top=False, + 
base_url=self.id, + relative_uris=relative_uris, ) # top refers to the directory level @@ -13614,12 +12617,27 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["items", "type", "label", "doc", "name"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + "inputBinding", + ] + ) -class CommandInputParameter(InputParameter): +class CommandOutputParameter(OutputParameter): """ - An input parameter for a CommandLineTool. + An output parameter for a CommandLineTool. + """ id: str @@ -13628,17 +12646,14 @@ def __init__( self, id: Any, type_: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - default: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + outputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -13654,14 +12669,11 @@ def __init__( self.doc = doc self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - self.default = default self.type_ = type_ - self.inputBinding = inputBinding + self.outputBinding = outputBinding def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandInputParameter): + if isinstance(other, CommandOutputParameter): return bool( self.label == 
other.label and self.secondaryFiles == other.secondaryFiles @@ -13669,11 +12681,8 @@ def __eq__(self, other: Any) -> bool: and self.doc == other.doc and self.id == other.id and self.format == other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.default == other.default and self.type_ == other.type_ - and self.inputBinding == other.inputBinding + and self.outputBinding == other.outputBinding ) return False @@ -13686,11 +12695,8 @@ def __hash__(self) -> int: self.doc, self.id, self.format, - self.loadContents, - self.loadListing, - self.default, self.type_, - self.inputBinding, + self.outputBinding, ) ) @@ -13700,8 +12706,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandInputParameter": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -13711,7 +12717,7 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), uri_strtype_True_False_None_None, baseuri, @@ -13767,7 +12773,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -13814,7 +12820,7 @@ def fromDoc( secondaryFiles = None if "secondaryFiles" in _doc: try: - secondaryFiles = load_field( + secondaryFiles = _load_field( _doc.get("secondaryFiles"), secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, @@ -13861,7 +12867,7 @@ def fromDoc( streamable = None if "streamable" in _doc: try: - streamable = load_field( + streamable = _load_field( _doc.get("streamable"), union_of_None_type_or_booltype, baseuri, @@ -13908,7 +12914,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -13955,9 +12961,9 @@ def fromDoc( format = 
None if "format" in _doc: try: - format = load_field( + format = _load_field( _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, lc=_doc.get("format") @@ -13999,68 +13005,69 @@ def fromDoc( "is not valid because:", ) ) - loadContents = None - if "loadContents" in _doc: - try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("loadContents") - ) + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) - if str(e) == "missing required field `loadContents`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} 
{error_message}")], ) ) else: - val = _doc.get("loadContents") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), - [e], - detailed_message=f"the `loadContents` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` field with value `{val}` " + "is not valid because:", ) - loadListing = None - if "loadListing" in _doc: + ) + outputBinding = None + if "outputBinding" in _doc: try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, + outputBinding = _load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, baseuri, loadingOptions, - lc=_doc.get("loadListing") + lc=_doc.get("outputBinding") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadListing`": + if str(e) == "missing required field `outputBinding`": _errors__.append( ValidationException( str(e), @@ -14068,13 +13075,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadListing") + val = _doc.get("outputBinding") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, 
"loadListing", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14086,156 +13093,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `outputBinding` field is not valid because:", + SourceLine(_doc, "outputBinding", str), [e], - detailed_message=f"the `loadListing` field with value `{val}` " + detailed_message=f"the `outputBinding` field with value `{val}` " "is not valid because:", ) ) - default = None - if "default" in _doc: - try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_CWLObjectTypeLoader, - baseuri, - loadingOptions, - lc=_doc.get("default") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `default`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("default") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), - [e], - detailed_message=f"the `default` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - 
_doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - inputBinding = None - if "inputBinding" in _doc: - try: - inputBinding = load_field( - _doc.get("inputBinding"), - union_of_None_type_or_CommandLineBindingLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputBinding") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputBinding`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputBinding") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid 
because:", - SourceLine(_doc, "inputBinding", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputBinding` field is not valid because:", - SourceLine(_doc, "inputBinding", str), - [e], - detailed_message=f"the `inputBinding` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -14243,14 +13108,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`, `outputBinding`".format( k ), SourceLine(_doc, k, str), @@ -14266,11 +13131,8 @@ def fromDoc( doc=doc, id=id, format=format, - loadContents=loadContents, - loadListing=loadListing, - default=default, type_=type_, - inputBinding=inputBinding, + outputBinding=outputBinding, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -14316,31 +13178,13 @@ def save( if self.format is not None: u = save_relative_uri(self.format, self.id, True, None, relative_uris) r["format"] = u - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.loadListing is not None: - r["loadListing"] = 
save( - self.loadListing, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.default is not None: - r["default"] = save( - self.default, top=False, base_url=self.id, relative_uris=relative_uris - ) if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.inputBinding is not None: - r["inputBinding"] = save( - self.inputBinding, + if self.outputBinding is not None: + r["outputBinding"] = save( + self.outputBinding, top=False, base_url=self.id, relative_uris=relative_uris, @@ -14354,7 +13198,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ "label", "secondaryFiles", @@ -14362,34 +13206,41 @@ def save( "doc", "id", "format", - "loadContents", - "loadListing", - "default", "type", - "inputBinding", + "outputBinding", ] ) -class CommandOutputParameter(OutputParameter): +class CommandLineTool(Process): """ - An output parameter for a CommandLineTool. + This defines the schema of the CWL Command Line Tool Description document. 
+ """ id: str def __init__( self, - id: Any, - type_: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - outputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + inputs: Any, + outputs: Any, + id: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + requirements: Any | None = None, + hints: Any | None = None, + cwlVersion: Any | None = None, + intent: Any | None = None, + baseCommand: Any | None = None, + arguments: Any | None = None, + stdin: Any | None = None, + stderr: Any | None = None, + stdout: Any | None = None, + successCodes: Any | None = None, + temporaryFailCodes: Any | None = None, + permanentFailCodes: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -14399,40 +13250,70 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.format = format - self.type_ = type_ - self.outputBinding = outputBinding + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_: Final[str] = "CommandLineTool" + self.baseCommand = baseCommand + self.arguments = arguments + self.stdin = stdin + self.stderr = stderr + self.stdout = stdout + self.successCodes = successCodes + self.temporaryFailCodes = temporaryFailCodes + self.permanentFailCodes = permanentFailCodes def __eq__(self, other: Any) -> bool: - if 
isinstance(other, CommandOutputParameter): + if isinstance(other, CommandLineTool): return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable + self.id == other.id + and self.label == other.label and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.type_ == other.type_ - and self.outputBinding == other.outputBinding + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.baseCommand == other.baseCommand + and self.arguments == other.arguments + and self.stdin == other.stdin + and self.stderr == other.stderr + and self.stdout == other.stdout + and self.successCodes == other.successCodes + and self.temporaryFailCodes == other.temporaryFailCodes + and self.permanentFailCodes == other.permanentFailCodes ) return False def __hash__(self) -> int: return hash( ( + self.id, self.label, - self.secondaryFiles, - self.streamable, self.doc, - self.id, - self.format, - self.type_, - self.outputBinding, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.baseCommand, + self.arguments, + self.stdin, + self.stderr, + self.stdout, + self.successCodes, + self.temporaryFailCodes, + self.permanentFailCodes, ) ) @@ -14442,8 +13323,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandOutputParameter": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -14453,9 +13334,9 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), - uri_strtype_True_False_None_None, + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, 
lc=_doc.get("id") @@ -14503,13 +13384,30 @@ def fromDoc( if docRoot is not None: id = docRoot else: - _errors__.append(ValidationException("missing id")) + id = "_:" + str(_uuid__.uuid4()) if not __original_id_is_none: baseuri = cast(str, id) - label = None - if "label" in _doc: - try: - label = load_field( + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_CommandLineTool_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + label = None + if "label" in _doc: + try: + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -14553,21 +13451,21 @@ def fromDoc( "is not valid because:", ) ) - secondaryFiles = None - if "secondaryFiles" in _doc: + doc = None + if "doc" in _doc: try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("secondaryFiles") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `secondaryFiles`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -14575,13 +13473,13 @@ def fromDoc( ) ) else: - val = _doc.get("secondaryFiles") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `doc` 
field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14593,28 +13491,124 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - streamable = None - if "streamable" in _doc: + try: + if _doc.get("inputs") is None: + raise ValidationException("missing required field `inputs`", None, []) + + inputs = _load_field( + _doc.get("inputs"), + idmap_inputs_array_of_CommandInputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + detailed_message=f"the `inputs` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("outputs") is None: + raise ValidationException("missing required field `outputs`", 
None, []) + + outputs = _load_field( + _doc.get("outputs"), + idmap_outputs_array_of_CommandOutputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + detailed_message=f"the `outputs` field with value `{val}` " + "is not valid because:", + ) + ) + requirements = None + if "requirements" in _doc: try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, + requirements = _load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, baseuri, loadingOptions, - lc=_doc.get("streamable") + 
lc=_doc.get("requirements") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `streamable`": + if str(e) == "missing required field `requirements`": _errors__.append( ValidationException( str(e), @@ -14622,13 +13616,13 @@ def fromDoc( ) ) else: - val = _doc.get("streamable") + val = _doc.get("requirements") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14640,28 +13634,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [e], - detailed_message=f"the `streamable` field with value `{val}` " + detailed_message=f"the `requirements` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: + hints = None + if "hints" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + hints = _load_field( + _doc.get("hints"), + 
idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("hints") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `hints`": _errors__.append( ValidationException( str(e), @@ -14669,13 +13663,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("hints") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14687,28 +13681,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `hints` field with value `{val}` " "is not valid because:", ) ) - format = None - if "format" in _doc: + cwlVersion = None + if "cwlVersion" in _doc: try: - format = load_field( - _doc.get("format"), - 
uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + cwlVersion = _load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("format") + lc=_doc.get("cwlVersion") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `format`": + if str(e) == "missing required field `cwlVersion`": _errors__.append( ValidationException( str(e), @@ -14716,13 +13710,13 @@ def fromDoc( ) ) else: - val = _doc.get("format") + val = _doc.get("cwlVersion") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14734,76 +13728,75 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), [e], - detailed_message=f"the `format` field with value `{val}` " + detailed_message=f"the `cwlVersion` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) + intent 
= None + if "intent" in _doc: + try: + intent = _load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("intent") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `intent`": _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - outputBinding = None - if "outputBinding" in _doc: + val = _doc.get("intent") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", 
str), + [e], + detailed_message=f"the `intent` field with value `{val}` " + "is not valid because:", + ) + ) + baseCommand = None + if "baseCommand" in _doc: try: - outputBinding = load_field( - _doc.get("outputBinding"), - union_of_None_type_or_CommandOutputBindingLoader, + baseCommand = _load_field( + _doc.get("baseCommand"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("outputBinding") + lc=_doc.get("baseCommand") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputBinding`": + if str(e) == "missing required field `baseCommand`": _errors__.append( ValidationException( str(e), @@ -14811,13 +13804,13 @@ def fromDoc( ) ) else: - val = _doc.get("outputBinding") + val = _doc.get("baseCommand") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `baseCommand` field is not valid because:", + SourceLine(_doc, "baseCommand", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -14829,259 +13822,216 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `outputBinding` field is not valid because:", - SourceLine(_doc, "outputBinding", str), + "the `baseCommand` field is not valid because:", + SourceLine(_doc, "baseCommand", str), [e], - detailed_message=f"the `outputBinding` field with value `{val}` " + detailed_message=f"the `baseCommand` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + arguments = None + if "arguments" in _doc: + try: + arguments = _load_field( + _doc.get("arguments"), + 
union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + lc=_doc.get("arguments") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `arguments`": _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ValidationException( + str(e), + None + ) ) - extension_fields[ex] = _doc[k] else: + val = _doc.get("arguments") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `arguments` field is not valid because:", + SourceLine(_doc, "arguments", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `arguments` field is not valid because:", + SourceLine(_doc, "arguments", str), + [e], + detailed_message=f"the `arguments` field with value `{val}` " + "is not valid because:", + ) + ) + stdin = None + if "stdin" in _doc: + try: + stdin = _load_field( + _doc.get("stdin"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("stdin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `stdin`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`, `outputBinding`".format( - k - ), - SourceLine(_doc, k, str), + str(e), + None ) ) + else: + val = _doc.get("stdin") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `stdin` field is not valid because:", + SourceLine(_doc, "stdin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `stdin` field is not valid because:", + SourceLine(_doc, "stdin", str), + [e], + detailed_message=f"the `stdin` field with value `{val}` " + "is not valid because:", + ) + ) + stderr = None + if "stderr" in _doc: + try: + stderr = _load_field( + _doc.get("stderr"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("stderr") + ) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, - doc=doc, - id=id, - format=format, - type_=type_, - outputBinding=outputBinding, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} + if str(e) == "missing required field `stderr`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("stderr") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `stderr` field is not valid because:", + SourceLine(_doc, "stderr", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} 
{error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `stderr` field is not valid because:", + SourceLine(_doc, "stderr", str), + [e], + detailed_message=f"the `stderr` field with value `{val}` " + "is not valid because:", + ) + ) + stdout = None + if "stdout" in _doc: + try: + stdout = _load_field( + _doc.get("stdout"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("stdout") + ) - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputBinding is not None: - r["outputBinding"] = save( - self.outputBinding, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = 
self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "label", - "secondaryFiles", - "streamable", - "doc", - "id", - "format", - "type", - "outputBinding", - ] - ) - - -class CommandLineTool(Process): - """ - This defines the schema of the CWL Command Line Tool Description document. - - """ - - id: str - - def __init__( - self, - inputs: Any, - outputs: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - intent: Optional[Any] = None, - baseCommand: Optional[Any] = None, - arguments: Optional[Any] = None, - stdin: Optional[Any] = None, - stderr: Optional[Any] = None, - stdout: Optional[Any] = None, - successCodes: Optional[Any] = None, - temporaryFailCodes: Optional[Any] = None, - permanentFailCodes: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.label = label - self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.cwlVersion = cwlVersion - self.intent = intent - self.class_ = "CommandLineTool" - self.baseCommand = baseCommand - self.arguments = arguments - self.stdin = stdin - self.stderr = stderr - self.stdout = stdout - self.successCodes = successCodes - self.temporaryFailCodes = temporaryFailCodes - self.permanentFailCodes = permanentFailCodes - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CommandLineTool): - return bool( - self.id == other.id - and 
self.label == other.label - and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == other.cwlVersion - and self.intent == other.intent - and self.class_ == other.class_ - and self.baseCommand == other.baseCommand - and self.arguments == other.arguments - and self.stdin == other.stdin - and self.stderr == other.stderr - and self.stdout == other.stdout - and self.successCodes == other.successCodes - and self.temporaryFailCodes == other.temporaryFailCodes - and self.permanentFailCodes == other.permanentFailCodes - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.intent, - self.class_, - self.baseCommand, - self.arguments, - self.stdin, - self.stderr, - self.stdout, - self.successCodes, - self.temporaryFailCodes, - self.permanentFailCodes, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CommandLineTool": - _doc = copy.copy(doc) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: + if str(e) == "missing required field `stdout`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("stdout") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `stdout` field is not valid because:", + SourceLine(_doc, "stdout", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a 
{val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `stdout` field is not valid because:", + SourceLine(_doc, "stdout", str), + [e], + detailed_message=f"the `stdout` field with value `{val}` " + "is not valid because:", + ) + ) + successCodes = None + if "successCodes" in _doc: try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None_None, + successCodes = _load_field( + _doc.get("successCodes"), + union_of_None_type_or_array_of_inttype, baseuri, loadingOptions, - lc=_doc.get("id") + lc=_doc.get("successCodes") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `id`": + if str(e) == "missing required field `successCodes`": _errors__.append( ValidationException( str(e), @@ -15089,13 +14039,13 @@ def fromDoc( ) ) else: - val = _doc.get("id") + val = _doc.get("successCodes") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `successCodes` field is not valid because:", + SourceLine(_doc, "successCodes", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15107,53 +14057,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), + "the `successCodes` field is not valid because:", + SourceLine(_doc, "successCodes", str), [e], - detailed_message=f"the `id` field with value `{val}` " + detailed_message=f"the `successCodes` field with value `{val}` " "is not valid because:", ) ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not 
__original_id_is_none: - baseuri = cast(str, id) - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_CommandLineTool_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - label = None - if "label" in _doc: + temporaryFailCodes = None + if "temporaryFailCodes" in _doc: try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, + temporaryFailCodes = _load_field( + _doc.get("temporaryFailCodes"), + union_of_None_type_or_array_of_inttype, baseuri, loadingOptions, - lc=_doc.get("label") + lc=_doc.get("temporaryFailCodes") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `label`": + if str(e) == "missing required field `temporaryFailCodes`": _errors__.append( ValidationException( str(e), @@ -15161,13 +14086,13 @@ def fromDoc( ) ) else: - val = _doc.get("label") + val = _doc.get("temporaryFailCodes") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `temporaryFailCodes` field is not valid because:", + SourceLine(_doc, "temporaryFailCodes", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15179,28 +14104,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), + "the `temporaryFailCodes` field is not valid because:", + SourceLine(_doc, "temporaryFailCodes", str), [e], - detailed_message=f"the `label` field with value `{val}` 
" + detailed_message=f"the `temporaryFailCodes` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: + permanentFailCodes = None + if "permanentFailCodes" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + permanentFailCodes = _load_field( + _doc.get("permanentFailCodes"), + union_of_None_type_or_array_of_inttype, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("permanentFailCodes") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `permanentFailCodes`": _errors__.append( ValidationException( str(e), @@ -15208,13 +14133,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("permanentFailCodes") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `permanentFailCodes` field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15226,171 +14151,319 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `permanentFailCodes` field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `permanentFailCodes` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("inputs") is None: - raise ValidationException("missing required field `inputs`", None, []) - - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_CommandInputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputs") - ) - - except 
ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) + ValidationException("mapping with implicit null key") ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - detailed_message=f"the `inputs` field with value `{val}` " - "is not valid because:", + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( + k + ), + SourceLine(_doc, k, str), ) ) - try: - if _doc.get("outputs") is None: - raise ValidationException("missing required field `outputs`", None, []) - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_CommandOutputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputs") - ) + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + 
requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + baseCommand=baseCommand, + arguments=arguments, + stdin=stdin, + stderr=stderr, + stdout=stdout, + successCodes=successCodes, + temporaryFailCodes=temporaryFailCodes, + permanentFailCodes=permanentFailCodes, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) + return _constructed - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} - if str(e) == "missing required field `outputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" else: - val = _doc.get("outputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", 
str), - [e], - detailed_message=f"the `outputs` field with value `{val}` " - "is not valid because:", - ) - ) - requirements = None - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader, - baseuri, - loadingOptions, - lc=_doc.get("requirements") - ) + uri = self.class_ + u = save_relative_uri(uri, self.id, False, None, relative_uris) + r["class"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.inputs is not None: + r["inputs"] = save( + self.inputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputs is not None: + r["outputs"] = save( + self.outputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.requirements is not None: + r["requirements"] = save( + self.requirements, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.cwlVersion is not None: + u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) + r["cwlVersion"] = u + if 
self.intent is not None: + u = save_relative_uri(self.intent, self.id, True, None, relative_uris) + r["intent"] = u + if self.baseCommand is not None: + r["baseCommand"] = save( + self.baseCommand, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.arguments is not None: + r["arguments"] = save( + self.arguments, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.stdin is not None: + r["stdin"] = save( + self.stdin, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.stderr is not None: + r["stderr"] = save( + self.stderr, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.stdout is not None: + r["stdout"] = save( + self.stdout, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.successCodes is not None: + r["successCodes"] = save( + self.successCodes, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.temporaryFailCodes is not None: + r["temporaryFailCodes"] = save( + self.temporaryFailCodes, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.permanentFailCodes is not None: + r["permanentFailCodes"] = save( + self.permanentFailCodes, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r - if str(e) == "missing required field `requirements`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("requirements") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - 
[ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - detailed_message=f"the `requirements` field with value `{val}` " - "is not valid because:", - ) - ) - hints = None - if "hints" in _doc: + attrs: ClassVar[Collection[str]] = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "baseCommand", + "arguments", + "stdin", + "stderr", + "stdout", + "successCodes", + "temporaryFailCodes", + "permanentFailCodes", + ] + ) + + +class DockerRequirement(ProcessRequirement): + """ + Indicates that a workflow component should be run in a `Docker `__ or Docker-compatible (such as `Singularity `__ and `udocker `__) container environment and specifies how to fetch or build the image. + + If a CommandLineTool lists ``DockerRequirement`` under ``hints`` (or ``requirements``), it may (or must) be run in the specified Docker container. + + The platform must first acquire or install the correct Docker image as specified by ``dockerPull``, ``dockerImport``, ``dockerLoad`` or ``dockerFile``. + + The platform must execute the tool in the container using ``docker run`` with the appropriate Docker image and tool command line. + + The workflow platform may provide input files and the designated output directory through the use of volume bind mounts. The platform should rewrite file paths in the input object to correspond to the Docker bind mounted locations. That is, the platform should rewrite values in the parameter context such as ``runtime.outdir``, ``runtime.tmpdir`` and others to be valid paths within the container. 
The platform must ensure that ``runtime.outdir`` and ``runtime.tmpdir`` are distinct directories. + + When running a tool contained in Docker, the workflow platform must not assume anything about the contents of the Docker container, such as the presence or absence of specific software, except to assume that the generated command line represents a valid command within the runtime environment of the container. + + A container image may specify an `ENTRYPOINT `__ and/or `CMD `__. Command line arguments will be appended after all elements of ENTRYPOINT, and will override all elements specified using CMD (in other words, CMD is only used when the CommandLineTool definition produces an empty command line). + + Use of implicit ENTRYPOINT or CMD are discouraged due to reproducibility concerns of the implicit hidden execution point (For further discussion, see `https://doi.org/10.12688/f1000research.15140.1 `__). Portable CommandLineTool wrappers in which use of a container is optional must not rely on ENTRYPOINT or CMD. CommandLineTools which do rely on ENTRYPOINT or CMD must list ``DockerRequirement`` in the ``requirements`` section. + + Interaction with other requirements + ----------------------------------- + + If `EnvVarRequirement <#EnvVarRequirement>`__ is specified alongside a DockerRequirement, the environment variables must be provided to Docker using ``--env`` or ``--env-file`` and interact with the container's preexisting environment as defined by Docker. 
+ + """ + + def __init__( + self, + dockerPull: Any | None = None, + dockerLoad: Any | None = None, + dockerFile: Any | None = None, + dockerImport: Any | None = None, + dockerImageId: Any | None = None, + dockerOutputDirectory: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_: Final[str] = "DockerRequirement" + self.dockerPull = dockerPull + self.dockerLoad = dockerLoad + self.dockerFile = dockerFile + self.dockerImport = dockerImport + self.dockerImageId = dockerImageId + self.dockerOutputDirectory = dockerOutputDirectory + + def __eq__(self, other: Any) -> bool: + if isinstance(other, DockerRequirement): + return bool( + self.class_ == other.class_ + and self.dockerPull == other.dockerPull + and self.dockerLoad == other.dockerLoad + and self.dockerFile == other.dockerFile + and self.dockerImport == other.dockerImport + and self.dockerImageId == other.dockerImageId + and self.dockerOutputDirectory == other.dockerOutputDirectory + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.dockerPull, + self.dockerLoad, + self.dockerFile, + self.dockerImport, + self.dockerImageId, + self.dockerOutputDirectory, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_DockerRequirement_classLoader_False_True_None_None, + baseuri, 
+ loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + dockerPull = None + if "dockerPull" in _doc: try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_Any_type, + dockerPull = _load_field( + _doc.get("dockerPull"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("hints") + lc=_doc.get("dockerPull") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `hints`": + if str(e) == "missing required field `dockerPull`": _errors__.append( ValidationException( str(e), @@ -15398,13 +14471,13 @@ def fromDoc( ) ) else: - val = _doc.get("hints") + val = _doc.get("dockerPull") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `dockerPull` field is not valid because:", + SourceLine(_doc, "dockerPull", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15416,28 +14489,28 @@ def 
fromDoc( else: _errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `dockerPull` field is not valid because:", + SourceLine(_doc, "dockerPull", str), [e], - detailed_message=f"the `hints` field with value `{val}` " + detailed_message=f"the `dockerPull` field with value `{val}` " "is not valid because:", ) ) - cwlVersion = None - if "cwlVersion" in _doc: + dockerLoad = None + if "dockerLoad" in _doc: try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, + dockerLoad = _load_field( + _doc.get("dockerLoad"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("cwlVersion") + lc=_doc.get("dockerLoad") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `cwlVersion`": + if str(e) == "missing required field `dockerLoad`": _errors__.append( ValidationException( str(e), @@ -15445,13 +14518,13 @@ def fromDoc( ) ) else: - val = _doc.get("cwlVersion") + val = _doc.get("dockerLoad") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), + "the `dockerLoad` field is not valid because:", + SourceLine(_doc, "dockerLoad", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15463,28 +14536,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), + "the `dockerLoad` field is not valid because:", + SourceLine(_doc, "dockerLoad", str), [e], - detailed_message=f"the `cwlVersion` field with value `{val}` " + detailed_message=f"the `dockerLoad` field with value `{val}` " "is not valid because:", ) ) - intent = None - if 
"intent" in _doc: + dockerFile = None + if "dockerFile" in _doc: try: - intent = load_field( - _doc.get("intent"), - uri_union_of_None_type_or_array_of_strtype_True_False_None_None, + dockerFile = _load_field( + _doc.get("dockerFile"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("intent") + lc=_doc.get("dockerFile") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `intent`": + if str(e) == "missing required field `dockerFile`": _errors__.append( ValidationException( str(e), @@ -15492,13 +14565,13 @@ def fromDoc( ) ) else: - val = _doc.get("intent") + val = _doc.get("dockerFile") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `intent` field is not valid because:", - SourceLine(_doc, "intent", str), + "the `dockerFile` field is not valid because:", + SourceLine(_doc, "dockerFile", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15510,28 +14583,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `intent` field is not valid because:", - SourceLine(_doc, "intent", str), + "the `dockerFile` field is not valid because:", + SourceLine(_doc, "dockerFile", str), [e], - detailed_message=f"the `intent` field with value `{val}` " + detailed_message=f"the `dockerFile` field with value `{val}` " "is not valid because:", ) ) - baseCommand = None - if "baseCommand" in _doc: + dockerImport = None + if "dockerImport" in _doc: try: - baseCommand = load_field( - _doc.get("baseCommand"), - union_of_None_type_or_strtype_or_array_of_strtype, + dockerImport = _load_field( + _doc.get("dockerImport"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("baseCommand") + lc=_doc.get("dockerImport") ) except ValidationException as e: error_message, to_print, verb_tensage = 
parse_errors(str(e)) - if str(e) == "missing required field `baseCommand`": + if str(e) == "missing required field `dockerImport`": _errors__.append( ValidationException( str(e), @@ -15539,13 +14612,13 @@ def fromDoc( ) ) else: - val = _doc.get("baseCommand") + val = _doc.get("dockerImport") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `baseCommand` field is not valid because:", - SourceLine(_doc, "baseCommand", str), + "the `dockerImport` field is not valid because:", + SourceLine(_doc, "dockerImport", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15557,28 +14630,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `baseCommand` field is not valid because:", - SourceLine(_doc, "baseCommand", str), + "the `dockerImport` field is not valid because:", + SourceLine(_doc, "dockerImport", str), [e], - detailed_message=f"the `baseCommand` field with value `{val}` " + detailed_message=f"the `dockerImport` field with value `{val}` " "is not valid because:", ) ) - arguments = None - if "arguments" in _doc: + dockerImageId = None + if "dockerImageId" in _doc: try: - arguments = load_field( - _doc.get("arguments"), - union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + dockerImageId = _load_field( + _doc.get("dockerImageId"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("arguments") + lc=_doc.get("dockerImageId") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `arguments`": + if str(e) == "missing required field `dockerImageId`": _errors__.append( ValidationException( str(e), @@ -15586,13 +14659,13 @@ def fromDoc( ) ) else: - val = _doc.get("arguments") + val = _doc.get("dockerImageId") if error_message != str(e): val_type = 
convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `arguments` field is not valid because:", - SourceLine(_doc, "arguments", str), + "the `dockerImageId` field is not valid because:", + SourceLine(_doc, "dockerImageId", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -15604,28 +14677,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `arguments` field is not valid because:", - SourceLine(_doc, "arguments", str), + "the `dockerImageId` field is not valid because:", + SourceLine(_doc, "dockerImageId", str), [e], - detailed_message=f"the `arguments` field with value `{val}` " + detailed_message=f"the `dockerImageId` field with value `{val}` " "is not valid because:", ) ) - stdin = None - if "stdin" in _doc: + dockerOutputDirectory = None + if "dockerOutputDirectory" in _doc: try: - stdin = load_field( - _doc.get("stdin"), - union_of_None_type_or_strtype_or_ExpressionLoader, + dockerOutputDirectory = _load_field( + _doc.get("dockerOutputDirectory"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("stdin") + lc=_doc.get("dockerOutputDirectory") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `stdin`": + if str(e) == "missing required field `dockerOutputDirectory`": _errors__.append( ValidationException( str(e), @@ -15633,13 +14706,13 @@ def fromDoc( ) ) else: - val = _doc.get("stdin") + val = _doc.get("dockerOutputDirectory") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `stdin` field is not valid because:", - SourceLine(_doc, "stdin", str), + "the `dockerOutputDirectory` field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " 
f"{verb_tensage} {error_message}", @@ -15651,249 +14724,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `stdin` field is not valid because:", - SourceLine(_doc, "stdin", str), + "the `dockerOutputDirectory` field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), [e], - detailed_message=f"the `stdin` field with value `{val}` " - "is not valid because:", - ) - ) - stderr = None - if "stderr" in _doc: - try: - stderr = load_field( - _doc.get("stderr"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("stderr") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `stderr`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("stderr") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `stderr` field is not valid because:", - SourceLine(_doc, "stderr", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `stderr` field is not valid because:", - SourceLine(_doc, "stderr", str), - [e], - detailed_message=f"the `stderr` field with value `{val}` " - "is not valid because:", - ) - ) - stdout = None - if "stdout" in _doc: - try: - stdout = load_field( - _doc.get("stdout"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("stdout") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `stdout`": - _errors__.append( - ValidationException( - str(e), - None - ) - 
) - else: - val = _doc.get("stdout") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `stdout` field is not valid because:", - SourceLine(_doc, "stdout", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `stdout` field is not valid because:", - SourceLine(_doc, "stdout", str), - [e], - detailed_message=f"the `stdout` field with value `{val}` " - "is not valid because:", - ) - ) - successCodes = None - if "successCodes" in _doc: - try: - successCodes = load_field( - _doc.get("successCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - lc=_doc.get("successCodes") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `successCodes`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("successCodes") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `successCodes` field is not valid because:", - SourceLine(_doc, "successCodes", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `successCodes` field is not valid because:", - SourceLine(_doc, "successCodes", str), - [e], - detailed_message=f"the `successCodes` field with value `{val}` " - "is not valid because:", - ) - ) - 
temporaryFailCodes = None - if "temporaryFailCodes" in _doc: - try: - temporaryFailCodes = load_field( - _doc.get("temporaryFailCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - lc=_doc.get("temporaryFailCodes") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `temporaryFailCodes`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("temporaryFailCodes") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `temporaryFailCodes` field is not valid because:", - SourceLine(_doc, "temporaryFailCodes", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `temporaryFailCodes` field is not valid because:", - SourceLine(_doc, "temporaryFailCodes", str), - [e], - detailed_message=f"the `temporaryFailCodes` field with value `{val}` " - "is not valid because:", - ) - ) - permanentFailCodes = None - if "permanentFailCodes" in _doc: - try: - permanentFailCodes = load_field( - _doc.get("permanentFailCodes"), - union_of_None_type_or_array_of_inttype, - baseuri, - loadingOptions, - lc=_doc.get("permanentFailCodes") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `permanentFailCodes`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("permanentFailCodes") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `permanentFailCodes` field is not 
valid because:", - SourceLine(_doc, "permanentFailCodes", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `permanentFailCodes` field is not valid because:", - SourceLine(_doc, "permanentFailCodes", str), - [e], - detailed_message=f"the `permanentFailCodes` field with value `{val}` " + detailed_message=f"the `dockerOutputDirectory` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -15901,14 +14739,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( + "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( k ), SourceLine(_doc, k, str), @@ -15918,28 +14756,16 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - id=id, - label=label, - doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - intent=intent, - baseCommand=baseCommand, - arguments=arguments, - stdin=stdin, - stderr=stderr, - stdout=stdout, - successCodes=successCodes, - 
temporaryFailCodes=temporaryFailCodes, - permanentFailCodes=permanentFailCodes, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed + dockerPull=dockerPull, + dockerLoad=dockerLoad, + dockerFile=dockerFile, + dockerImport=dockerImport, + dockerImageId=dockerImageId, + dockerOutputDirectory=dockerOutputDirectory, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True @@ -15952,92 +14778,56 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ - u = save_relative_uri(uri, self.id, False, None, relative_uris) + u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.inputs is not None: - r["inputs"] = save( - self.inputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputs is not None: - r["outputs"] = save( - self.outputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.requirements is not None: - r["requirements"] = save( - self.requirements, + if self.dockerPull is not None: + r["dockerPull"] = save( + 
self.dockerPull, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.cwlVersion is not None: - u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) - r["cwlVersion"] = u - if self.intent is not None: - u = save_relative_uri(self.intent, self.id, True, None, relative_uris) - r["intent"] = u - if self.baseCommand is not None: - r["baseCommand"] = save( - self.baseCommand, + if self.dockerLoad is not None: + r["dockerLoad"] = save( + self.dockerLoad, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.arguments is not None: - r["arguments"] = save( - self.arguments, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.stdin is not None: - r["stdin"] = save( - self.stdin, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.stderr is not None: - r["stderr"] = save( - self.stderr, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.stdout is not None: - r["stdout"] = save( - self.stdout, top=False, base_url=self.id, relative_uris=relative_uris + if self.dockerFile is not None: + r["dockerFile"] = save( + self.dockerFile, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) - if self.successCodes is not None: - r["successCodes"] = save( - self.successCodes, + if self.dockerImport is not None: + r["dockerImport"] = save( + self.dockerImport, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.temporaryFailCodes is not None: - r["temporaryFailCodes"] = save( - self.temporaryFailCodes, + if self.dockerImageId is not None: + r["dockerImageId"] = save( + self.dockerImageId, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) - if self.permanentFailCodes is not None: - r["permanentFailCodes"] = save( - 
self.permanentFailCodes, + if self.dockerOutputDirectory is not None: + r["dockerOutputDirectory"] = save( + self.dockerOutputDirectory, top=False, - base_url=self.id, + base_url=base_url, relative_uris=relative_uris, ) @@ -16049,96 +14839,30 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "intent", "class", - "baseCommand", - "arguments", - "stdin", - "stderr", - "stdout", - "successCodes", - "temporaryFailCodes", - "permanentFailCodes", + "dockerPull", + "dockerLoad", + "dockerFile", + "dockerImport", + "dockerImageId", + "dockerOutputDirectory", ] ) -class DockerRequirement(ProcessRequirement): +class SoftwareRequirement(ProcessRequirement): """ - Indicates that a workflow component should be run in a - [Docker](https://docker.com) or Docker-compatible (such as - [Singularity](https://www.sylabs.io/) and [udocker](https://github.com/indigo-dc/udocker)) container environment and - specifies how to fetch or build the image. - - If a CommandLineTool lists `DockerRequirement` under - `hints` (or `requirements`), it may (or must) be run in the specified Docker - container. - - The platform must first acquire or install the correct Docker image as - specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. - - The platform must execute the tool in the container using `docker run` with - the appropriate Docker image and tool command line. - - The workflow platform may provide input files and the designated output - directory through the use of volume bind mounts. The platform should rewrite - file paths in the input object to correspond to the Docker bind mounted - locations. That is, the platform should rewrite values in the parameter context - such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths - within the container. 
The platform must ensure that `runtime.outdir` and - `runtime.tmpdir` are distinct directories. - - When running a tool contained in Docker, the workflow platform must not - assume anything about the contents of the Docker container, such as the - presence or absence of specific software, except to assume that the - generated command line represents a valid command within the runtime - environment of the container. - - A container image may specify an - [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint) - and/or - [CMD](https://docs.docker.com/engine/reference/builder/#cmd). - Command line arguments will be appended after all elements of - ENTRYPOINT, and will override all elements specified using CMD (in - other words, CMD is only used when the CommandLineTool definition - produces an empty command line). - - Use of implicit ENTRYPOINT or CMD are discouraged due to reproducibility - concerns of the implicit hidden execution point (For further discussion, see - [https://doi.org/10.12688/f1000research.15140.1](https://doi.org/10.12688/f1000research.15140.1)). Portable - CommandLineTool wrappers in which use of a container is optional must not rely on ENTRYPOINT or CMD. - CommandLineTools which do rely on ENTRYPOINT or CMD must list `DockerRequirement` in the - `requirements` section. - - ## Interaction with other requirements - - If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a - DockerRequirement, the environment variables must be provided to Docker - using `--env` or `--env-file` and interact with the container's preexisting - environment as defined by Docker. + A list of software packages that should be configured in the environment of the defined process. 
""" def __init__( self, - dockerPull: Optional[Any] = None, - dockerLoad: Optional[Any] = None, - dockerFile: Optional[Any] = None, - dockerImport: Optional[Any] = None, - dockerImageId: Optional[Any] = None, - dockerOutputDirectory: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + packages: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -16148,39 +14872,16 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "DockerRequirement" - self.dockerPull = dockerPull - self.dockerLoad = dockerLoad - self.dockerFile = dockerFile - self.dockerImport = dockerImport - self.dockerImageId = dockerImageId - self.dockerOutputDirectory = dockerOutputDirectory + self.class_: Final[str] = "SoftwareRequirement" + self.packages = packages def __eq__(self, other: Any) -> bool: - if isinstance(other, DockerRequirement): - return bool( - self.class_ == other.class_ - and self.dockerPull == other.dockerPull - and self.dockerLoad == other.dockerLoad - and self.dockerFile == other.dockerFile - and self.dockerImport == other.dockerImport - and self.dockerImageId == other.dockerImageId - and self.dockerOutputDirectory == other.dockerOutputDirectory - ) + if isinstance(other, SoftwareRequirement): + return bool(self.class_ == other.class_ and self.packages == other.packages) return False def __hash__(self) -> int: - return hash( - ( - self.class_, - self.dockerPull, - self.dockerLoad, - self.dockerFile, - self.dockerImport, - self.dockerImageId, - self.dockerOutputDirectory, - ) - ) + return hash((self.class_, self.packages)) @classmethod def fromDoc( @@ -16188,8 +14889,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "DockerRequirement": + docRoot: str 
| None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -16200,174 +14901,245 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_DockerRequirement_classLoader_False_True_None_None, + uri_SoftwareRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e - dockerPull = None - if "dockerPull" in _doc: - try: - dockerPull = load_field( - _doc.get("dockerPull"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("dockerPull") - ) + raise e + try: + if _doc.get("packages") is None: + raise ValidationException("missing required field `packages`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + packages = _load_field( + _doc.get("packages"), + idmap_packages_array_of_SoftwarePackageLoader, + baseuri, + loadingOptions, + lc=_doc.get("packages") + ) - if str(e) == "missing required field `dockerPull`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `packages`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("packages") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `packages` field is not valid because:", + SourceLine(_doc, "packages", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + 
f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("dockerPull") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `dockerPull` field is not valid because:", - SourceLine(_doc, "dockerPull", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `dockerPull` field is not valid because:", - SourceLine(_doc, "dockerPull", str), - [e], - detailed_message=f"the `dockerPull` field with value `{val}` " - "is not valid because:", - ) - ) - dockerLoad = None - if "dockerLoad" in _doc: - try: - dockerLoad = load_field( - _doc.get("dockerLoad"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("dockerLoad") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `dockerLoad`": _errors__.append( ValidationException( - str(e), - None + "the `packages` field is not valid because:", + SourceLine(_doc, "packages", str), + [e], + detailed_message=f"the `packages` field with value `{val}` " + "is not valid because:", ) ) - else: - val = _doc.get("dockerLoad") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `dockerLoad` field is not valid because:", - SourceLine(_doc, "dockerLoad", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid 
{to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `dockerLoad` field is not valid because:", - SourceLine(_doc, "dockerLoad", str), - [e], - detailed_message=f"the `dockerLoad` field with value `{val}` " - "is not valid because:", - ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `packages`".format( + k + ), + SourceLine(_doc, k, str), ) - dockerFile = None - if "dockerFile" in _doc: - try: - dockerFile = load_field( - _doc.get("dockerFile"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("dockerFile") - ) + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + packages=packages, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed - if str(e) == "missing required field `dockerFile`": + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, 
base_url, False, None, relative_uris) + r["class"] = u + if self.packages is not None: + r["packages"] = save( + self.packages, top=False, base_url=base_url, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset(["class", "packages"]) + + +class SoftwarePackage(Saveable): + def __init__( + self, + package: Any, + version: Any | None = None, + specs: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.package = package + self.version = version + self.specs = specs + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwarePackage): + return bool( + self.package == other.package + and self.version == other.version + and self.specs == other.specs + ) + return False + + def __hash__(self) -> int: + return hash((self.package, self.version, self.specs)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("package") is None: + raise ValidationException("missing required field `package`", None, []) + + package = _load_field( + _doc.get("package"), + strtype, + baseuri, + loadingOptions, + lc=_doc.get("package") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field 
`package`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("package") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `package` field is not valid because:", + SourceLine(_doc, "package", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("dockerFile") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `dockerFile` field is not valid because:", - SourceLine(_doc, "dockerFile", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `dockerFile` field is not valid because:", - SourceLine(_doc, "dockerFile", str), - [e], - detailed_message=f"the `dockerFile` field with value `{val}` " - "is not valid because:", - ) + _errors__.append( + ValidationException( + "the `package` field is not valid because:", + SourceLine(_doc, "package", str), + [e], + detailed_message=f"the `package` field with value `{val}` " + "is not valid because:", ) - dockerImport = None - if "dockerImport" in _doc: + ) + version = None + if "version" in _doc: try: - dockerImport = load_field( - _doc.get("dockerImport"), - union_of_None_type_or_strtype, + version = _load_field( + _doc.get("version"), + union_of_None_type_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("dockerImport") + lc=_doc.get("version") ) except ValidationException as e: 
error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `dockerImport`": + if str(e) == "missing required field `version`": _errors__.append( ValidationException( str(e), @@ -16375,13 +15147,13 @@ def fromDoc( ) ) else: - val = _doc.get("dockerImport") + val = _doc.get("version") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `dockerImport` field is not valid because:", - SourceLine(_doc, "dockerImport", str), + "the `version` field is not valid because:", + SourceLine(_doc, "version", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -16393,28 +15165,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `dockerImport` field is not valid because:", - SourceLine(_doc, "dockerImport", str), + "the `version` field is not valid because:", + SourceLine(_doc, "version", str), [e], - detailed_message=f"the `dockerImport` field with value `{val}` " + detailed_message=f"the `version` field with value `{val}` " "is not valid because:", ) ) - dockerImageId = None - if "dockerImageId" in _doc: + specs = None + if "specs" in _doc: try: - dockerImageId = load_field( - _doc.get("dockerImageId"), - union_of_None_type_or_strtype, + specs = _load_field( + _doc.get("specs"), + uri_union_of_None_type_or_array_of_strtype_False_False_None_True, baseuri, loadingOptions, - lc=_doc.get("dockerImageId") + lc=_doc.get("specs") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `dockerImageId`": + if str(e) == "missing required field `specs`": _errors__.append( ValidationException( str(e), @@ -16422,13 +15194,13 @@ def fromDoc( ) ) else: - val = _doc.get("dockerImageId") + val = _doc.get("specs") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( 
ValidationException( - "the `dockerImageId` field is not valid because:", - SourceLine(_doc, "dockerImageId", str), + "the `specs` field is not valid because:", + SourceLine(_doc, "specs", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -16440,61 +15212,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `dockerImageId` field is not valid because:", - SourceLine(_doc, "dockerImageId", str), + "the `specs` field is not valid because:", + SourceLine(_doc, "specs", str), [e], - detailed_message=f"the `dockerImageId` field with value `{val}` " + detailed_message=f"the `specs` field with value `{val}` " "is not valid because:", ) ) - dockerOutputDirectory = None - if "dockerOutputDirectory" in _doc: - try: - dockerOutputDirectory = load_field( - _doc.get("dockerOutputDirectory"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("dockerOutputDirectory") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `dockerOutputDirectory`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("dockerOutputDirectory") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `dockerOutputDirectory` field is not valid because:", - SourceLine(_doc, "dockerOutputDirectory", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `dockerOutputDirectory` field is not valid because:", - SourceLine(_doc, "dockerOutputDirectory", str), - [e], - detailed_message=f"the 
`dockerOutputDirectory` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -16502,14 +15227,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( + "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( k ), SourceLine(_doc, k, str), @@ -16519,12 +15244,9 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - dockerPull=dockerPull, - dockerLoad=dockerLoad, - dockerFile=dockerFile, - dockerImport=dockerImport, - dockerImageId=dockerImageId, - dockerOutputDirectory=dockerOutputDirectory, + package=package, + version=version, + specs=specs, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -16541,56 +15263,17 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.dockerPull is not None: - r["dockerPull"] = save( - self.dockerPull, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.dockerLoad is not None: - r["dockerLoad"] = save( - self.dockerLoad, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.dockerFile is not None: - r["dockerFile"] = save( - self.dockerFile, - top=False, - 
base_url=base_url, - relative_uris=relative_uris, - ) - if self.dockerImport is not None: - r["dockerImport"] = save( - self.dockerImport, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.dockerImageId is not None: - r["dockerImageId"] = save( - self.dockerImageId, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.package is not None: + r["package"] = save( + self.package, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.dockerOutputDirectory is not None: - r["dockerOutputDirectory"] = save( - self.dockerOutputDirectory, - top=False, - base_url=base_url, - relative_uris=relative_uris, + if self.version is not None: + r["version"] = save( + self.version, top=False, base_url=base_url, relative_uris=relative_uris ) + if self.specs is not None: + u = save_relative_uri(self.specs, base_url, False, None, relative_uris) + r["specs"] = u # top refers to the directory level if top: @@ -16600,31 +15283,24 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "class", - "dockerPull", - "dockerLoad", - "dockerFile", - "dockerImport", - "dockerImageId", - "dockerOutputDirectory", - ] - ) + attrs: ClassVar[Collection[str]] = frozenset(["package", "version", "specs"]) -class SoftwareRequirement(ProcessRequirement): +class Dirent(Saveable): """ - A list of software packages that should be configured in the environment of - the defined process. + Define a file or subdirectory that must be staged to a particular place prior to executing the command line tool. May be the result of executing an expression, such as building a configuration file from a template. + + Usually files are staged within the `designated output directory <#Runtime_environment>`__. However, under certain circumstances, files may be staged at arbitrary locations, see discussion for ``entryname``. 
""" def __init__( self, - packages: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + entry: Any, + entryname: Any | None = None, + writable: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -16634,16 +15310,21 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "SoftwareRequirement" - self.packages = packages + self.entryname = entryname + self.entry = entry + self.writable = writable def __eq__(self, other: Any) -> bool: - if isinstance(other, SoftwareRequirement): - return bool(self.class_ == other.class_ and self.packages == other.packages) + if isinstance(other, Dirent): + return bool( + self.entryname == other.entryname + and self.entry == other.entry + and self.writable == other.writable + ) return False def __hash__(self) -> int: - return hash((self.class_, self.packages)) + return hash((self.entryname, self.entry, self.writable)) @classmethod def fromDoc( @@ -16651,46 +15332,77 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SoftwareRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + entryname = None + if "entryname" in _doc: + try: + entryname = _load_field( + _doc.get("entryname"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("entryname") + ) - class_ = load_field( - _doc.get("class"), - uri_SoftwareRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) + except ValidationException as e: 
+ error_message, to_print, verb_tensage = parse_errors(str(e)) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e + if str(e) == "missing required field `entryname`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("entryname") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `entryname` field is not valid because:", + SourceLine(_doc, "entryname", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `entryname` field is not valid because:", + SourceLine(_doc, "entryname", str), + [e], + detailed_message=f"the `entryname` field with value `{val}` " + "is not valid because:", + ) + ) try: - if _doc.get("packages") is None: - raise ValidationException("missing required field `packages`", None, []) + if _doc.get("entry") is None: + raise ValidationException("missing required field `entry`", None, []) - packages = load_field( - _doc.get("packages"), - idmap_packages_array_of_SoftwarePackageLoader, + entry = _load_field( + _doc.get("entry"), + union_of_strtype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("packages") + lc=_doc.get("entry") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `packages`": + if str(e) == "missing required field `entry`": _errors__.append( ValidationException( str(e), @@ -16698,13 +15410,13 @@ def fromDoc( ) ) else: - val = _doc.get("packages") + val = _doc.get("entry") if error_message != str(e): val_type = 
convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `packages` field is not valid because:", - SourceLine(_doc, "packages", str), + "the `entry` field is not valid because:", + SourceLine(_doc, "entry", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -16716,14 +15428,61 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `packages` field is not valid because:", - SourceLine(_doc, "packages", str), + "the `entry` field is not valid because:", + SourceLine(_doc, "entry", str), [e], - detailed_message=f"the `packages` field with value `{val}` " + detailed_message=f"the `entry` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + writable = None + if "writable" in _doc: + try: + writable = _load_field( + _doc.get("writable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("writable") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `writable`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("writable") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `writable` field is not valid because:", + SourceLine(_doc, "writable", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `writable` field is not valid because:", + SourceLine(_doc, "writable", str), + [e], + detailed_message=f"the `writable` field with value `{val}` " + "is not valid because:", + ) + ) + 
extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -16731,14 +15490,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `packages`".format( + "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( k ), SourceLine(_doc, k, str), @@ -16748,7 +15507,9 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - packages=packages, + entryname=entryname, + entry=entry, + writable=writable, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -16765,17 +15526,20 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.packages is not None: - r["packages"] = save( - self.packages, top=False, base_url=base_url, relative_uris=relative_uris + if self.entryname is not None: + r["entryname"] = save( + self.entryname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.entry is not None: + r["entry"] = save( + self.entry, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.writable is not None: + r["writable"] = save( + self.writable, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -16786,17 +15550,20 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "packages"]) + attrs: ClassVar[Collection[str]] = frozenset(["entryname", 
"entry", "writable"]) -class SoftwarePackage(Saveable): +class InitialWorkDirRequirement(ProcessRequirement): + """ + Define a list of files and subdirectories that must be staged by the workflow platform prior to executing the command line tool. Normally files are staged within the designated output directory. However, when running inside containers, files may be staged at arbitrary locations, see discussion for ```Dirent.entryname`` <#Dirent>`__. Together with ``DockerRequirement.dockerOutputDirectory`` it is possible to control the locations of both input and output files when running in containers. + + """ + def __init__( self, - package: Any, - version: Optional[Any] = None, - specs: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + listing: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -16806,21 +15573,16 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.package = package - self.version = version - self.specs = specs + self.class_: Final[str] = "InitialWorkDirRequirement" + self.listing = listing def __eq__(self, other: Any) -> bool: - if isinstance(other, SoftwarePackage): - return bool( - self.package == other.package - and self.version == other.version - and self.specs == other.specs - ) + if isinstance(other, InitialWorkDirRequirement): + return bool(self.class_ == other.class_ and self.listing == other.listing) return False def __hash__(self) -> int: - return hash((self.package, self.version, self.specs)) + return hash((self.class_, self.listing)) @classmethod def fromDoc( @@ -16828,8 +15590,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SoftwarePackage": + docRoot: str | None = None + ) -> Self: _doc = 
copy.copy(doc) if hasattr(doc, "lc"): @@ -16837,21 +15599,38 @@ def fromDoc( _doc.lc.filename = doc.lc.filename _errors__ = [] try: - if _doc.get("package") is None: - raise ValidationException("missing required field `package`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - package = load_field( - _doc.get("package"), - strtype, + class_ = _load_field( + _doc.get("class"), + uri_InitialWorkDirRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("package") + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + try: + if _doc.get("listing") is None: + raise ValidationException("missing required field `listing`", None, []) + + listing = _load_field( + _doc.get("listing"), + union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + lc=_doc.get("listing") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `package`": + if str(e) == "missing required field `listing`": _errors__.append( ValidationException( str(e), @@ -16859,13 +15638,13 @@ def fromDoc( ) ) else: - val = _doc.get("package") + val = _doc.get("listing") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `package` field is not valid because:", - SourceLine(_doc, "package", str), + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -16877,108 +15656,14 @@ def fromDoc( else: 
_errors__.append( ValidationException( - "the `package` field is not valid because:", - SourceLine(_doc, "package", str), + "the `listing` field is not valid because:", + SourceLine(_doc, "listing", str), [e], - detailed_message=f"the `package` field with value `{val}` " + detailed_message=f"the `listing` field with value `{val}` " "is not valid because:", ) ) - version = None - if "version" in _doc: - try: - version = load_field( - _doc.get("version"), - union_of_None_type_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("version") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `version`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("version") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `version` field is not valid because:", - SourceLine(_doc, "version", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `version` field is not valid because:", - SourceLine(_doc, "version", str), - [e], - detailed_message=f"the `version` field with value `{val}` " - "is not valid because:", - ) - ) - specs = None - if "specs" in _doc: - try: - specs = load_field( - _doc.get("specs"), - uri_union_of_None_type_or_array_of_strtype_False_False_None_True, - baseuri, - loadingOptions, - lc=_doc.get("specs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `specs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("specs") - 
if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `specs` field is not valid because:", - SourceLine(_doc, "specs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `specs` field is not valid because:", - SourceLine(_doc, "specs", str), - [e], - detailed_message=f"the `specs` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -16986,14 +15671,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( + "invalid field `{}`, expected one of: `class`, `listing`".format( k ), SourceLine(_doc, k, str), @@ -17003,9 +15688,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - package=package, - version=version, - specs=specs, + listing=listing, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -17022,17 +15705,20 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.package is not None: - r["package"] = save( - self.package, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.version is not None: - r["version"] = save( - self.version, top=False, base_url=base_url, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | 
self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.listing is not None: + r["listing"] = save( + self.listing, top=False, base_url=base_url, relative_uris=relative_uris ) - if self.specs is not None: - u = save_relative_uri(self.specs, base_url, False, None, relative_uris) - r["specs"] = u # top refers to the directory level if top: @@ -17042,29 +15728,20 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["package", "version", "specs"]) + attrs: ClassVar[Collection[str]] = frozenset(["class", "listing"]) -class Dirent(Saveable): +class EnvVarRequirement(ProcessRequirement): """ - Define a file or subdirectory that must be staged to a particular - place prior to executing the command line tool. May be the result - of executing an expression, such as building a configuration file - from a template. - - Usually files are staged within the [designated output directory](#Runtime_environment). - However, under certain circumstances, files may be staged at - arbitrary locations, see discussion for `entryname`. + Define a list of environment variables which will be set in the execution environment of the tool. See ``EnvironmentDef`` for details. 
""" def __init__( self, - entry: Any, - entryname: Optional[Any] = None, - writable: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + envDef: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -17074,21 +15751,16 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.entryname = entryname - self.entry = entry - self.writable = writable + self.class_: Final[str] = "EnvVarRequirement" + self.envDef = envDef def __eq__(self, other: Any) -> bool: - if isinstance(other, Dirent): - return bool( - self.entryname == other.entryname - and self.entry == other.entry - and self.writable == other.writable - ) + if isinstance(other, EnvVarRequirement): + return bool(self.class_ == other.class_ and self.envDef == other.envDef) return False def __hash__(self) -> int: - return hash((self.entryname, self.entry, self.writable)) + return hash((self.class_, self.envDef)) @classmethod def fromDoc( @@ -17096,77 +15768,47 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Dirent": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - entryname = None - if "entryname" in _doc: - try: - entryname = load_field( - _doc.get("entryname"), - union_of_None_type_or_strtype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("entryname") - ) + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + class_ = _load_field( + _doc.get("class"), + uri_EnvVarRequirement_classLoader_False_True_None_None, + 
baseuri, + loadingOptions, + lc=_doc.get("class") + ) - if str(e) == "missing required field `entryname`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("entryname") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `entryname` field is not valid because:", - SourceLine(_doc, "entryname", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `entryname` field is not valid because:", - SourceLine(_doc, "entryname", str), - [e], - detailed_message=f"the `entryname` field with value `{val}` " - "is not valid because:", - ) - ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e try: - if _doc.get("entry") is None: - raise ValidationException("missing required field `entry`", None, []) + if _doc.get("envDef") is None: + raise ValidationException("missing required field `envDef`", None, []) - entry = load_field( - _doc.get("entry"), - union_of_strtype_or_ExpressionLoader, + envDef = _load_field( + _doc.get("envDef"), + idmap_envDef_array_of_EnvironmentDefLoader, baseuri, loadingOptions, - lc=_doc.get("entry") + lc=_doc.get("envDef") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `entry`": + if str(e) == "missing required field `envDef`": _errors__.append( ValidationException( str(e), @@ -17174,13 +15816,13 @@ def fromDoc( ) ) else: - val = _doc.get("entry") + val = _doc.get("envDef") if error_message != str(e): val_type = 
convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `entry` field is not valid because:", - SourceLine(_doc, "entry", str), + "the `envDef` field is not valid because:", + SourceLine(_doc, "envDef", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -17192,61 +15834,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `entry` field is not valid because:", - SourceLine(_doc, "entry", str), + "the `envDef` field is not valid because:", + SourceLine(_doc, "envDef", str), [e], - detailed_message=f"the `entry` field with value `{val}` " + detailed_message=f"the `envDef` field with value `{val}` " "is not valid because:", ) ) - writable = None - if "writable" in _doc: - try: - writable = load_field( - _doc.get("writable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("writable") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `writable`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("writable") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `writable` field is not valid because:", - SourceLine(_doc, "writable", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `writable` field is not valid because:", - SourceLine(_doc, "writable", str), - [e], - detailed_message=f"the `writable` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: 
MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -17254,14 +15849,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( + "invalid field `{}`, expected one of: `class`, `envDef`".format( k ), SourceLine(_doc, k, str), @@ -17271,9 +15866,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - entryname=entryname, - entry=entry, - writable=writable, + envDef=envDef, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -17290,20 +15883,19 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.entryname is not None: - r["entryname"] = save( - self.entryname, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.entry is not None: - r["entry"] = save( - self.entry, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.writable is not None: - r["writable"] = save( - self.writable, top=False, base_url=base_url, relative_uris=relative_uris + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + if self.envDef is not None: + r["envDef"] = save( + self.envDef, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level @@ -17314,20 +15906,19 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["entryname", "entry", "writable"]) + attrs: 
ClassVar[Collection[str]] = frozenset(["class", "envDef"]) -class InitialWorkDirRequirement(ProcessRequirement): +class ShellCommandRequirement(ProcessRequirement): """ - Define a list of files and subdirectories that must be staged by the workflow platform prior to executing the command line tool. - Normally files are staged within the designated output directory. However, when running inside containers, files may be staged at arbitrary locations, see discussion for [`Dirent.entryname`](#Dirent). Together with `DockerRequirement.dockerOutputDirectory` it is possible to control the locations of both input and output files when running in containers. + Modify the behavior of CommandLineTool to generate a single string containing a shell command line. Each item in the ``arguments`` list must be joined into a string separated by single spaces and quoted to prevent interpretation by the shell, unless ``CommandLineBinding`` for that argument contains ``shellQuote: false``. If ``shellQuote: false`` is specified, the argument is joined into the command string without quoting, which allows the use of shell metacharacters such as ``|`` for pipes. 
+ """ def __init__( self, - listing: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -17337,16 +15928,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "InitialWorkDirRequirement" - self.listing = listing + self.class_: Final[str] = "ShellCommandRequirement" def __eq__(self, other: Any) -> bool: - if isinstance(other, InitialWorkDirRequirement): - return bool(self.class_ == other.class_ and self.listing == other.listing) + if isinstance(other, ShellCommandRequirement): + return bool(self.class_ == other.class_) return False def __hash__(self) -> int: - return hash((self.class_, self.listing)) + return hash((self.class_)) @classmethod def fromDoc( @@ -17354,8 +15944,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InitialWorkDirRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -17366,67 +15956,20 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_InitialWorkDirRequirement_classLoader_False_True_None_None, + uri_ShellCommandRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("listing") is None: - raise ValidationException("missing required field `listing`", None, []) - - listing = load_field( - _doc.get("listing"), - 
union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, - baseuri, - loadingOptions, - lc=_doc.get("listing") - ) - + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `listing`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("listing") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `listing` field is not valid because:", - SourceLine(_doc, "listing", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `listing` field is not valid because:", - SourceLine(_doc, "listing", str), - [e], - detailed_message=f"the `listing` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + raise e + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -17434,16 +15977,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `listing`".format( - k - ), + "invalid field `{}`, expected one of: `class`".format(k), SourceLine(_doc, k, str), ) ) @@ -17451,7 +15992,6 @@ 
def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - listing=listing, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -17469,17 +16009,15 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.listing is not None: - r["listing"] = save( - self.listing, top=False, base_url=base_url, relative_uris=relative_uris - ) # top refers to the directory level if top: @@ -17489,21 +16027,41 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "listing"]) + attrs: ClassVar[Collection[str]] = frozenset(["class"]) -class EnvVarRequirement(ProcessRequirement): +class ResourceRequirement(ProcessRequirement): """ - Define a list of environment variables which will be set in the - execution environment of the tool. See `EnvironmentDef` for details. + Specify basic hardware resource requirements. + + "min" is the minimum amount of a resource that must be reserved to schedule a job. If "min" cannot be satisfied, the job should not be run. + + "max" is the maximum amount of a resource that the job shall be allocated. If a node has sufficient resources, multiple jobs may be scheduled on a single node provided each job's "max" resource requirements are met. If a job attempts to exceed its resource allocation, an implementation may deny additional resources, which may result in job failure. 
+ + If both "min" and "max" are specified, an implementation may choose to allocate any amount between "min" and "max", with the actual allocation provided in the ``runtime`` object. + + If "min" is specified but "max" is not, then "max" == "min" If "max" is specified by "min" is not, then "min" == "max". + + It is an error if max < min. + + It is an error if the value of any of these fields is negative. + + If neither "min" nor "max" is specified for a resource, use the default values below. """ def __init__( self, - envDef: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + coresMin: Any | None = None, + coresMax: Any | None = None, + ramMin: Any | None = None, + ramMax: Any | None = None, + tmpdirMin: Any | None = None, + tmpdirMax: Any | None = None, + outdirMin: Any | None = None, + outdirMax: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -17513,16 +16071,45 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "EnvVarRequirement" - self.envDef = envDef + self.class_: Final[str] = "ResourceRequirement" + self.coresMin = coresMin + self.coresMax = coresMax + self.ramMin = ramMin + self.ramMax = ramMax + self.tmpdirMin = tmpdirMin + self.tmpdirMax = tmpdirMax + self.outdirMin = outdirMin + self.outdirMax = outdirMax def __eq__(self, other: Any) -> bool: - if isinstance(other, EnvVarRequirement): - return bool(self.class_ == other.class_ and self.envDef == other.envDef) + if isinstance(other, ResourceRequirement): + return bool( + self.class_ == other.class_ + and self.coresMin == other.coresMin + and self.coresMax == other.coresMax + and self.ramMin == other.ramMin + and self.ramMax == other.ramMax + and self.tmpdirMin == other.tmpdirMin + and self.tmpdirMax == other.tmpdirMax + and 
self.outdirMin == other.outdirMin + and self.outdirMax == other.outdirMax + ) return False def __hash__(self) -> int: - return hash((self.class_, self.envDef)) + return hash( + ( + self.class_, + self.coresMin, + self.coresMax, + self.ramMin, + self.ramMax, + self.tmpdirMin, + self.tmpdirMax, + self.outdirMin, + self.outdirMax, + ) + ) @classmethod def fromDoc( @@ -17530,8 +16117,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "EnvVarRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -17542,93 +16129,429 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_EnvVarRequirement_classLoader_False_True_None_None, + uri_ResourceRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e - try: - if _doc.get("envDef") is None: - raise ValidationException("missing required field `envDef`", None, []) + raise e + coresMin = None + if "coresMin" in _doc: + try: + coresMin = _load_field( + _doc.get("coresMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("coresMin") + ) - envDef = load_field( - _doc.get("envDef"), - idmap_envDef_array_of_EnvironmentDefLoader, - baseuri, - loadingOptions, - lc=_doc.get("envDef") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = 
parse_errors(str(e)) - if str(e) == "missing required field `envDef`": - _errors__.append( - ValidationException( - str(e), - None + if str(e) == "missing required field `coresMin`": + _errors__.append( + ValidationException( + str(e), + None + ) ) + else: + val = _doc.get("coresMin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `coresMin` field is not valid because:", + SourceLine(_doc, "coresMin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `coresMin` field is not valid because:", + SourceLine(_doc, "coresMin", str), + [e], + detailed_message=f"the `coresMin` field with value `{val}` " + "is not valid because:", + ) + ) + coresMax = None + if "coresMax" in _doc: + try: + coresMax = _load_field( + _doc.get("coresMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("coresMax") ) - else: - val = _doc.get("envDef") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `coresMax`": _errors__.append( ValidationException( - "the `envDef` field is not valid because:", - SourceLine(_doc, "envDef", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("coresMax") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `coresMax` field is not valid because:", + SourceLine(_doc, "coresMax", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `coresMax` field is not valid because:", + SourceLine(_doc, "coresMax", str), + [e], + detailed_message=f"the `coresMax` field with value `{val}` " + "is not valid because:", + ) + ) + ramMin = None + if "ramMin" in _doc: + try: + ramMin = _load_field( + _doc.get("ramMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("ramMin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `ramMin`": _errors__.append( ValidationException( - "the `envDef` field is not valid because:", - SourceLine(_doc, "envDef", str), - [e], - detailed_message=f"the `envDef` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: + else: + val = _doc.get("ramMin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `ramMin` field is not valid because:", + SourceLine(_doc, "ramMin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `ramMin` field is not valid because:", + 
SourceLine(_doc, "ramMin", str), + [e], + detailed_message=f"the `ramMin` field with value `{val}` " + "is not valid because:", + ) + ) + ramMax = None + if "ramMax" in _doc: + try: + ramMax = _load_field( + _doc.get("ramMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("ramMax") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `ramMax`": _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False + ValidationException( + str(e), + None + ) ) - extension_fields[ex] = _doc[k] else: + val = _doc.get("ramMax") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `ramMax` field is not valid because:", + SourceLine(_doc, "ramMax", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `ramMax` field is not valid because:", + SourceLine(_doc, "ramMax", str), + [e], + detailed_message=f"the `ramMax` field with value `{val}` " + "is not valid because:", + ) + ) + tmpdirMin = None + if "tmpdirMin" in _doc: + try: + tmpdirMin = _load_field( + _doc.get("tmpdirMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("tmpdirMin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `tmpdirMin`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `envDef`".format( - k - 
), - SourceLine(_doc, k, str), + str(e), + None ) ) + else: + val = _doc.get("tmpdirMin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `tmpdirMin` field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `tmpdirMin` field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), + [e], + detailed_message=f"the `tmpdirMin` field with value `{val}` " + "is not valid because:", + ) + ) + tmpdirMax = None + if "tmpdirMax" in _doc: + try: + tmpdirMax = _load_field( + _doc.get("tmpdirMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("tmpdirMax") + ) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - envDef=envDef, - extension_fields=extension_fields, + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `tmpdirMax`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("tmpdirMax") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `tmpdirMax` field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the 
`tmpdirMax` field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), + [e], + detailed_message=f"the `tmpdirMax` field with value `{val}` " + "is not valid because:", + ) + ) + outdirMin = None + if "outdirMin" in _doc: + try: + outdirMin = _load_field( + _doc.get("outdirMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("outdirMin") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outdirMin`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outdirMin") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outdirMin` field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outdirMin` field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [e], + detailed_message=f"the `outdirMin` field with value `{val}` " + "is not valid because:", + ) + ) + outdirMax = None + if "outdirMax" in _doc: + try: + outdirMax = _load_field( + _doc.get("outdirMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("outdirMax") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outdirMax`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outdirMax") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( 
+ ValidationException( + "the `outdirMax` field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outdirMax` field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [e], + detailed_message=f"the `outdirMax` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + coresMin=coresMin, + coresMax=coresMax, + ramMin=ramMin, + ramMax=ramMax, + tmpdirMin=tmpdirMin, + tmpdirMax=tmpdirMax, + outdirMin=outdirMin, + outdirMax=outdirMax, + extension_fields=extension_fields, loadingOptions=loadingOptions, ) return _constructed @@ -17645,16 +16568,58 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: 
uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.envDef is not None: - r["envDef"] = save( - self.envDef, top=False, base_url=base_url, relative_uris=relative_uris + if self.coresMin is not None: + r["coresMin"] = save( + self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.coresMax is not None: + r["coresMax"] = save( + self.coresMax, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.ramMin is not None: + r["ramMin"] = save( + self.ramMin, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.ramMax is not None: + r["ramMax"] = save( + self.ramMax, top=False, base_url=base_url, relative_uris=relative_uris + ) + if self.tmpdirMin is not None: + r["tmpdirMin"] = save( + self.tmpdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.tmpdirMax is not None: + r["tmpdirMax"] = save( + self.tmpdirMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.outdirMin is not None: + r["outdirMin"] = save( + self.outdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + ) + if self.outdirMax is not None: + r["outdirMax"] = save( + self.outdirMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, ) # top refers to the directory level @@ -17665,25 +16630,34 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "envDef"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "class", + "coresMin", + "coresMax", + "ramMin", + "ramMax", + "tmpdirMin", + "tmpdirMax", + "outdirMin", + "outdirMax", + ] + ) -class ShellCommandRequirement(ProcessRequirement): +class WorkReuse(ProcessRequirement): """ - Modify the behavior of CommandLineTool to generate a single string - containing a shell command line. 
Each item in the `arguments` list must - be joined into a string separated by single spaces and quoted to prevent - interpretation by the shell, unless `CommandLineBinding` for that argument - contains `shellQuote: false`. If `shellQuote: false` is specified, the - argument is joined into the command string without quoting, which allows - the use of shell metacharacters such as `|` for pipes. + For implementations that support reusing output from past work (on the assumption that same code and same input produce same results), control whether to enable or disable the reuse behavior for a particular tool or step (to accommodate situations where that assumption is incorrect). A reused step is not executed but instead returns the same output as the original execution. + + If ``WorkReuse`` is not specified, correct tools should assume it is enabled by default. """ def __init__( self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + enableReuse: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -17693,15 +16667,18 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "ShellCommandRequirement" + self.class_: Final[str] = "WorkReuse" + self.enableReuse = enableReuse def __eq__(self, other: Any) -> bool: - if isinstance(other, ShellCommandRequirement): - return bool(self.class_ == other.class_) + if isinstance(other, WorkReuse): + return bool( + self.class_ == other.class_ and self.enableReuse == other.enableReuse + ) return False def __hash__(self) -> int: - return hash((self.class_)) + return hash((self.class_, self.enableReuse)) @classmethod def fromDoc( @@ -17709,8 +16686,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ShellCommandRequirement": + 
docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -17721,41 +16698,93 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_ShellCommandRequirement_classLoader_False_True_None_None, + uri_WorkReuse_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) + raise e + try: + if _doc.get("enableReuse") is None: + raise ValidationException("missing required field `enableReuse`", None, []) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( + enableReuse = _load_field( + _doc.get("enableReuse"), + union_of_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("enableReuse") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `enableReuse`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("enableReuse") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( 
+ ValidationException( + "the `enableReuse` field is not valid because:", + SourceLine(_doc, "enableReuse", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `enableReuse` field is not valid because:", + SourceLine(_doc, "enableReuse", str), + [e], + detailed_message=f"the `enableReuse` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `enableReuse`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + enableReuse=enableReuse, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -17773,13 +16802,22 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u + if self.enableReuse is not None: + r["enableReuse"] = save( + self.enableReuse, + top=False, + base_url=base_url, + 
relative_uris=relative_uris, + ) # top refers to the directory level if top: @@ -17789,51 +16827,26 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class"]) + attrs: ClassVar[Collection[str]] = frozenset(["class", "enableReuse"]) -class ResourceRequirement(ProcessRequirement): +class NetworkAccess(ProcessRequirement): """ - Specify basic hardware resource requirements. - - "min" is the minimum amount of a resource that must be reserved to - schedule a job. If "min" cannot be satisfied, the job should not - be run. - - "max" is the maximum amount of a resource that the job shall be - allocated. If a node has sufficient resources, multiple jobs may - be scheduled on a single node provided each job's "max" resource - requirements are met. If a job attempts to exceed its resource - allocation, an implementation may deny additional resources, which - may result in job failure. - - If both "min" and "max" are specified, an implementation may - choose to allocate any amount between "min" and "max", with the - actual allocation provided in the `runtime` object. + Indicate whether a process requires outgoing IPv4/IPv6 network access. Choice of IPv4 or IPv6 is implementation and site specific, correct tools must support both. - If "min" is specified but "max" is not, then "max" == "min" - If "max" is specified by "min" is not, then "min" == "max". + If ``networkAccess`` is false or not specified, tools must not assume network access, except for localhost (the loopback device). - It is an error if max < min. - - It is an error if the value of any of these fields is negative. + If ``networkAccess`` is true, the tool must be able to make outgoing connections to network resources. Resources may be on a private subnet or the public Internet. However, implementations and sites may apply their own security policies to restrict what is accessible by the tool. 
- If neither "min" nor "max" is specified for a resource, use the default values below. + Enabling network access does not imply a publicly routable IP address or the ability to accept inbound connections. """ def __init__( self, - coresMin: Optional[Any] = None, - coresMax: Optional[Any] = None, - ramMin: Optional[Any] = None, - ramMax: Optional[Any] = None, - tmpdirMin: Optional[Any] = None, - tmpdirMax: Optional[Any] = None, - outdirMin: Optional[Any] = None, - outdirMax: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + networkAccess: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -17843,45 +16856,19 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "ResourceRequirement" - self.coresMin = coresMin - self.coresMax = coresMax - self.ramMin = ramMin - self.ramMax = ramMax - self.tmpdirMin = tmpdirMin - self.tmpdirMax = tmpdirMax - self.outdirMin = outdirMin - self.outdirMax = outdirMax + self.class_: Final[str] = "NetworkAccess" + self.networkAccess = networkAccess def __eq__(self, other: Any) -> bool: - if isinstance(other, ResourceRequirement): + if isinstance(other, NetworkAccess): return bool( self.class_ == other.class_ - and self.coresMin == other.coresMin - and self.coresMax == other.coresMax - and self.ramMin == other.ramMin - and self.ramMax == other.ramMax - and self.tmpdirMin == other.tmpdirMin - and self.tmpdirMax == other.tmpdirMax - and self.outdirMin == other.outdirMin - and self.outdirMax == other.outdirMax + and self.networkAccess == other.networkAccess ) return False def __hash__(self) -> int: - return hash( - ( - self.class_, - self.coresMin, - self.coresMax, - self.ramMin, - self.ramMax, - self.tmpdirMin, - self.tmpdirMax, - self.outdirMin, - self.outdirMax, - ) - 
) + return hash((self.class_, self.networkAccess)) @classmethod def fromDoc( @@ -17889,8 +16876,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ResourceRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -17901,395 +16888,68 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_ResourceRequirement_classLoader_False_True_None_None, + uri_NetworkAccess_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e - coresMin = None - if "coresMin" in _doc: - try: - coresMin = load_field( - _doc.get("coresMin"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("coresMin") - ) + raise e + try: + if _doc.get("networkAccess") is None: + raise ValidationException("missing required field `networkAccess`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + networkAccess = _load_field( + _doc.get("networkAccess"), + union_of_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + lc=_doc.get("networkAccess") + ) - if str(e) == "missing required field `coresMin`": - _errors__.append( - ValidationException( - str(e), - None - ) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `networkAccess`": + _errors__.append( + ValidationException( + str(e), + None ) - else: - val = 
_doc.get("coresMin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `coresMin` field is not valid because:", - SourceLine(_doc, "coresMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `coresMin` field is not valid because:", - SourceLine(_doc, "coresMin", str), - [e], - detailed_message=f"the `coresMin` field with value `{val}` " - "is not valid because:", - ) - ) - coresMax = None - if "coresMax" in _doc: - try: - coresMax = load_field( - _doc.get("coresMax"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("coresMax") ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `coresMax`": + else: + val = _doc.get("networkAccess") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `networkAccess` field is not valid because:", + SourceLine(_doc, "networkAccess", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("coresMax") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `coresMax` field is not valid because:", - SourceLine(_doc, "coresMax", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this 
field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `coresMax` field is not valid because:", - SourceLine(_doc, "coresMax", str), - [e], - detailed_message=f"the `coresMax` field with value `{val}` " - "is not valid because:", - ) - ) - ramMin = None - if "ramMin" in _doc: - try: - ramMin = load_field( - _doc.get("ramMin"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("ramMin") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `ramMin`": _errors__.append( ValidationException( - str(e), - None + "the `networkAccess` field is not valid because:", + SourceLine(_doc, "networkAccess", str), + [e], + detailed_message=f"the `networkAccess` field with value `{val}` " + "is not valid because:", ) ) - else: - val = _doc.get("ramMin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `ramMin` field is not valid because:", - SourceLine(_doc, "ramMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `ramMin` field is not valid because:", - SourceLine(_doc, "ramMin", str), - [e], - detailed_message=f"the `ramMin` field with value `{val}` " - "is not valid because:", - ) - ) - ramMax = None - if "ramMax" in _doc: - try: - ramMax = load_field( - _doc.get("ramMax"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - 
lc=_doc.get("ramMax") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `ramMax`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("ramMax") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `ramMax` field is not valid because:", - SourceLine(_doc, "ramMax", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `ramMax` field is not valid because:", - SourceLine(_doc, "ramMax", str), - [e], - detailed_message=f"the `ramMax` field with value `{val}` " - "is not valid because:", - ) - ) - tmpdirMin = None - if "tmpdirMin" in _doc: - try: - tmpdirMin = load_field( - _doc.get("tmpdirMin"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("tmpdirMin") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `tmpdirMin`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("tmpdirMin") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `tmpdirMin` field is not valid because:", - SourceLine(_doc, "tmpdirMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - 
ValidationException( - "the `tmpdirMin` field is not valid because:", - SourceLine(_doc, "tmpdirMin", str), - [e], - detailed_message=f"the `tmpdirMin` field with value `{val}` " - "is not valid because:", - ) - ) - tmpdirMax = None - if "tmpdirMax" in _doc: - try: - tmpdirMax = load_field( - _doc.get("tmpdirMax"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("tmpdirMax") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `tmpdirMax`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("tmpdirMax") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `tmpdirMax` field is not valid because:", - SourceLine(_doc, "tmpdirMax", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `tmpdirMax` field is not valid because:", - SourceLine(_doc, "tmpdirMax", str), - [e], - detailed_message=f"the `tmpdirMax` field with value `{val}` " - "is not valid because:", - ) - ) - outdirMin = None - if "outdirMin" in _doc: - try: - outdirMin = load_field( - _doc.get("outdirMin"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("outdirMin") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outdirMin`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outdirMin") - if error_message != str(e): - val_type = 
convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outdirMin` field is not valid because:", - SourceLine(_doc, "outdirMin", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outdirMin` field is not valid because:", - SourceLine(_doc, "outdirMin", str), - [e], - detailed_message=f"the `outdirMin` field with value `{val}` " - "is not valid because:", - ) - ) - outdirMax = None - if "outdirMax" in _doc: - try: - outdirMax = load_field( - _doc.get("outdirMax"), - union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("outdirMax") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outdirMax`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outdirMax") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outdirMax` field is not valid because:", - SourceLine(_doc, "outdirMax", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outdirMax` field is not valid because:", - SourceLine(_doc, "outdirMax", str), - [e], - detailed_message=f"the `outdirMax` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for 
k in _doc.keys(): if k not in cls.attrs: if not k: @@ -18297,14 +16957,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( + "invalid field `{}`, expected one of: `class`, `networkAccess`".format( k ), SourceLine(_doc, k, str), @@ -18314,14 +16974,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - coresMin=coresMin, - coresMax=coresMax, - ramMin=ramMin, - ramMax=ramMax, - tmpdirMin=tmpdirMin, - tmpdirMax=tmpdirMax, - outdirMin=outdirMin, - outdirMax=outdirMax, + networkAccess=networkAccess, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -18339,53 +16992,18 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.coresMin is not None: - r["coresMin"] = save( - self.coresMin, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.coresMax is not None: - r["coresMax"] = save( - self.coresMax, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.ramMin is not None: - r["ramMin"] = save( - self.ramMin, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.ramMax is not None: - r["ramMax"] = save( - self.ramMax, 
top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.tmpdirMin is not None: - r["tmpdirMin"] = save( - self.tmpdirMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.tmpdirMax is not None: - r["tmpdirMax"] = save( - self.tmpdirMax, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.outdirMin is not None: - r["outdirMin"] = save( - self.outdirMin, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - if self.outdirMax is not None: - r["outdirMax"] = save( - self.outdirMax, + if self.networkAccess is not None: + r["networkAccess"] = save( + self.networkAccess, top=False, base_url=base_url, relative_uris=relative_uris, @@ -18399,40 +17017,28 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "class", - "coresMin", - "coresMax", - "ramMin", - "ramMax", - "tmpdirMin", - "tmpdirMax", - "outdirMin", - "outdirMax", - ] - ) + attrs: ClassVar[Collection[str]] = frozenset(["class", "networkAccess"]) -class WorkReuse(ProcessRequirement): +class InplaceUpdateRequirement(ProcessRequirement): """ - For implementations that support reusing output from past work (on - the assumption that same code and same input produce same - results), control whether to enable or disable the reuse behavior - for a particular tool or step (to accommodate situations where that - assumption is incorrect). A reused step is not executed but - instead returns the same output as the original execution. + If ``inplaceUpdate`` is true, then an implementation supporting this feature may permit tools to directly update files with ``writable: true`` in InitialWorkDirRequirement. That is, as an optimization, files may be destructively modified in place as opposed to copied and updated. + + An implementation must ensure that only one workflow step may access a writable file at a time. 
It is an error if a file which is writable by one workflow step file is accessed (for reading or writing) by any other workflow step running independently. However, a file which has been updated in a previous completed step may be used as input to multiple steps, provided it is read-only in every step. + + Workflow steps which modify a file must produce the modified file as output. Downstream steps which further process the file must use the output of previous steps, and not refer to a common input (this is necessary for both ordering and correctness). - If `WorkReuse` is not specified, correct tools should assume it - is enabled by default. + Workflow authors should provide this in the ``hints`` section. The intent of this feature is that workflows produce the same results whether or not InplaceUpdateRequirement is supported by the implementation, and this feature is primarily available as an optimization for particular environments. + + Users and implementers should be aware that workflows that destructively modify inputs may not be repeatable or reproducible. In particular, enabling this feature implies that WorkReuse should not be enabled. 
""" def __init__( self, - enableReuse: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + inplaceUpdate: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -18442,18 +17048,19 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "WorkReuse" - self.enableReuse = enableReuse + self.class_: Final[str] = "InplaceUpdateRequirement" + self.inplaceUpdate = inplaceUpdate def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkReuse): + if isinstance(other, InplaceUpdateRequirement): return bool( - self.class_ == other.class_ and self.enableReuse == other.enableReuse + self.class_ == other.class_ + and self.inplaceUpdate == other.inplaceUpdate ) return False def __hash__(self) -> int: - return hash((self.class_, self.enableReuse)) + return hash((self.class_, self.inplaceUpdate)) @classmethod def fromDoc( @@ -18461,8 +17068,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkReuse": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -18473,34 +17080,35 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_WorkReuse_classLoader_False_True_None_None, + uri_InplaceUpdateRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as 
e: - raise e + raise e try: - if _doc.get("enableReuse") is None: - raise ValidationException("missing required field `enableReuse`", None, []) + if _doc.get("inplaceUpdate") is None: + raise ValidationException("missing required field `inplaceUpdate`", None, []) - enableReuse = load_field( - _doc.get("enableReuse"), - union_of_booltype_or_ExpressionLoader, + inplaceUpdate = _load_field( + _doc.get("inplaceUpdate"), + booltype, baseuri, loadingOptions, - lc=_doc.get("enableReuse") + lc=_doc.get("inplaceUpdate") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `enableReuse`": + if str(e) == "missing required field `inplaceUpdate`": _errors__.append( ValidationException( str(e), @@ -18508,13 +17116,13 @@ def fromDoc( ) ) else: - val = _doc.get("enableReuse") + val = _doc.get("inplaceUpdate") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `enableReuse` field is not valid because:", - SourceLine(_doc, "enableReuse", str), + "the `inplaceUpdate` field is not valid because:", + SourceLine(_doc, "inplaceUpdate", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -18526,14 +17134,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `enableReuse` field is not valid because:", - SourceLine(_doc, "enableReuse", str), + "the `inplaceUpdate` field is not valid because:", + SourceLine(_doc, "inplaceUpdate", str), [e], - detailed_message=f"the `enableReuse` field with value `{val}` " + detailed_message=f"the `inplaceUpdate` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -18541,14 +17149,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif 
":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `enableReuse`".format( + "invalid field `{}`, expected one of: `class`, `inplaceUpdate`".format( k ), SourceLine(_doc, k, str), @@ -18558,7 +17166,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - enableReuse=enableReuse, + inplaceUpdate=inplaceUpdate, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -18576,16 +17184,18 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.enableReuse is not None: - r["enableReuse"] = save( - self.enableReuse, + if self.inplaceUpdate is not None: + r["inplaceUpdate"] = save( + self.inplaceUpdate, top=False, base_url=base_url, relative_uris=relative_uris, @@ -18599,34 +17209,20 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "enableReuse"]) + attrs: ClassVar[Collection[str]] = frozenset(["class", "inplaceUpdate"]) -class NetworkAccess(ProcessRequirement): +class ToolTimeLimit(ProcessRequirement): """ - Indicate whether a process requires outgoing IPv4/IPv6 network - access. Choice of IPv4 or IPv6 is implementation and site - specific, correct tools must support both. - - If `networkAccess` is false or not specified, tools must not - assume network access, except for localhost (the loopback device). 
- - If `networkAccess` is true, the tool must be able to make outgoing - connections to network resources. Resources may be on a private - subnet or the public Internet. However, implementations and sites - may apply their own security policies to restrict what is - accessible by the tool. - - Enabling network access does not imply a publicly routable IP - address or the ability to accept inbound connections. + Set an upper limit on the execution time of a CommandLineTool. A CommandLineTool whose execution duration exceeds the time limit may be preemptively terminated and considered failed. May also be used by batch systems to make scheduling decisions. The execution duration excludes external operations, such as staging of files, pulling a docker image etc, and only counts wall-time for the execution of the command line itself. """ def __init__( self, - networkAccess: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + timelimit: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -18636,19 +17232,18 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "NetworkAccess" - self.networkAccess = networkAccess + self.class_: Final[str] = "ToolTimeLimit" + self.timelimit = timelimit def __eq__(self, other: Any) -> bool: - if isinstance(other, NetworkAccess): + if isinstance(other, ToolTimeLimit): return bool( - self.class_ == other.class_ - and self.networkAccess == other.networkAccess + self.class_ == other.class_ and self.timelimit == other.timelimit ) return False def __hash__(self) -> int: - return hash((self.class_, self.networkAccess)) + return hash((self.class_, self.timelimit)) @classmethod def fromDoc( @@ -18656,8 +17251,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: 
Optional[str] = None - ) -> "NetworkAccess": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -18668,34 +17263,35 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_NetworkAccess_classLoader_False_True_None_None, + uri_ToolTimeLimit_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e + raise e try: - if _doc.get("networkAccess") is None: - raise ValidationException("missing required field `networkAccess`", None, []) + if _doc.get("timelimit") is None: + raise ValidationException("missing required field `timelimit`", None, []) - networkAccess = load_field( - _doc.get("networkAccess"), - union_of_booltype_or_ExpressionLoader, + timelimit = _load_field( + _doc.get("timelimit"), + union_of_inttype_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("networkAccess") + lc=_doc.get("timelimit") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `networkAccess`": + if str(e) == "missing required field `timelimit`": _errors__.append( ValidationException( str(e), @@ -18703,13 +17299,13 @@ def fromDoc( ) ) else: - val = _doc.get("networkAccess") + val = _doc.get("timelimit") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `networkAccess` field is not valid because:", - SourceLine(_doc, "networkAccess", str), + "the `timelimit` field is not valid because:", + SourceLine(_doc, 
"timelimit", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -18721,14 +17317,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `networkAccess` field is not valid because:", - SourceLine(_doc, "networkAccess", str), + "the `timelimit` field is not valid because:", + SourceLine(_doc, "timelimit", str), [e], - detailed_message=f"the `networkAccess` field with value `{val}` " + detailed_message=f"the `timelimit` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -18736,14 +17332,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `networkAccess`".format( + "invalid field `{}`, expected one of: `class`, `timelimit`".format( k ), SourceLine(_doc, k, str), @@ -18753,7 +17349,7 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - networkAccess=networkAccess, + timelimit=timelimit, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -18771,16 +17367,18 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.networkAccess is not 
None: - r["networkAccess"] = save( - self.networkAccess, + if self.timelimit is not None: + r["timelimit"] = save( + self.timelimit, top=False, base_url=base_url, relative_uris=relative_uris, @@ -18794,49 +17392,23 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "networkAccess"]) + attrs: ClassVar[Collection[str]] = frozenset(["class", "timelimit"]) -class InplaceUpdateRequirement(ProcessRequirement): - """ - - If `inplaceUpdate` is true, then an implementation supporting this - feature may permit tools to directly update files with `writable: - true` in InitialWorkDirRequirement. That is, as an optimization, - files may be destructively modified in place as opposed to copied - and updated. - - An implementation must ensure that only one workflow step may - access a writable file at a time. It is an error if a file which - is writable by one workflow step file is accessed (for reading or - writing) by any other workflow step running independently. - However, a file which has been updated in a previous completed - step may be used as input to multiple steps, provided it is - read-only in every step. - - Workflow steps which modify a file must produce the modified file - as output. Downstream steps which further process the file must - use the output of previous steps, and not refer to a common input - (this is necessary for both ordering and correctness). - - Workflow authors should provide this in the `hints` section. The - intent of this feature is that workflows produce the same results - whether or not InplaceUpdateRequirement is supported by the - implementation, and this feature is primarily available as an - optimization for particular environments. - - Users and implementers should be aware that workflows that - destructively modify inputs may not be repeatable or reproducible. - In particular, enabling this feature implies that WorkReuse should - not be enabled. 
- - """ +class ExpressionToolOutputParameter(OutputParameter): + id: str def __init__( self, - inplaceUpdate: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + type_: Any, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -18846,19 +17418,39 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "InplaceUpdateRequirement" - self.inplaceUpdate = inplaceUpdate + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, InplaceUpdateRequirement): + if isinstance(other, ExpressionToolOutputParameter): return bool( - self.class_ == other.class_ - and self.inplaceUpdate == other.inplaceUpdate + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type_ == other.type_ ) return False def __hash__(self) -> int: - return hash((self.class_, self.inplaceUpdate)) + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type_, + ) + ) @classmethod def fromDoc( @@ -18866,526 +17458,136 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "InplaceUpdateRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = 
doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_InplaceUpdateRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("inplaceUpdate") is None: - raise ValidationException("missing required field `inplaceUpdate`", None, []) - - inplaceUpdate = load_field( - _doc.get("inplaceUpdate"), - booltype, - baseuri, - loadingOptions, - lc=_doc.get("inplaceUpdate") - ) + id = None + if "id" in _doc: + try: + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `inplaceUpdate`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inplaceUpdate") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `id`": _errors__.append( ValidationException( - "the `inplaceUpdate` field is not valid because:", - SourceLine(_doc, "inplaceUpdate", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `inplaceUpdate` field is not valid because:", - SourceLine(_doc, "inplaceUpdate", 
str), - [e], - detailed_message=f"the `inplaceUpdate` field with value `{val}` " - "is not valid because:", + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `inplaceUpdate`".format( - k - ), - SourceLine(_doc, k, str), + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - inplaceUpdate=inplaceUpdate, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: 
-len(self.class_)]): - uri = f"{p}:{self.class_}" + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.inplaceUpdate is not None: - r["inplaceUpdate"] = save( - self.inplaceUpdate, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "inplaceUpdate"]) - - -class ToolTimeLimit(ProcessRequirement): - """ - Set an upper limit on the execution time of a CommandLineTool. - A CommandLineTool whose execution duration exceeds the time - limit may be preemptively terminated and considered failed. - May also be used by batch systems to make scheduling decisions. - The execution duration excludes external operations, such as - staging of files, pulling a docker image etc, and only counts - wall-time for the execution of the command line itself. 
- - """ - - def __init__( - self, - timelimit: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ToolTimeLimit" - self.timelimit = timelimit - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ToolTimeLimit): - return bool( - self.class_ == other.class_ and self.timelimit == other.timelimit - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.timelimit)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ToolTimeLimit": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_ToolTimeLimit_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("timelimit") is None: - raise ValidationException("missing required field `timelimit`", None, []) - - timelimit = load_field( - _doc.get("timelimit"), - union_of_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("timelimit") - ) + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + 
lc=_doc.get("label") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `timelimit`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("timelimit") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `label`": _errors__.append( ValidationException( - "the `timelimit` field is not valid because:", - SourceLine(_doc, "timelimit", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + secondaryFiles = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + lc=_doc.get("secondaryFiles") + ) + + except ValidationException as e: + 
error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( - "the `timelimit` field is not valid because:", - SourceLine(_doc, "timelimit", str), - [e], - detailed_message=f"the `timelimit` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `timelimit`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - timelimit=timelimit, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.timelimit is not None: - r["timelimit"] = save( - self.timelimit, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - 
r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "timelimit"]) - - -class ExpressionToolOutputParameter(OutputParameter): - id: str - - def __init__( - self, - id: Any, - type_: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable - self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.format = format - self.type_ = type_ - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ExpressionToolOutputParameter): - return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable - and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.type_ == other.type_ - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.label, - self.secondaryFiles, - self.streamable, - self.doc, - self.id, - self.format, - self.type_, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ExpressionToolOutputParameter": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - 
error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} 
{error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - secondaryFiles = None - if "secondaryFiles" in _doc: - try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, - baseuri, - loadingOptions, - lc=_doc.get("secondaryFiles") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `secondaryFiles`": - _errors__.append( - ValidationException( - str(e), - None + str(e), + None ) ) else: @@ -19417,7 +17619,7 @@ def fromDoc( streamable = None if "streamable" in _doc: try: - streamable = load_field( + streamable = _load_field( _doc.get("streamable"), union_of_None_type_or_booltype, baseuri, @@ -19464,7 +17666,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -19511,7 +17713,7 @@ def fromDoc( format = None if "format" in _doc: try: - format = load_field( + format = _load_field( _doc.get("format"), uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, baseuri, @@ -19559,7 +17761,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, baseuri, @@ -19603,7 +17805,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + 
extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -19611,7 +17813,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -19693,7 +17895,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] ) @@ -19705,17 +17907,17 @@ def __init__( self, id: Any, type_: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - default: Optional[Any] = None, - inputBinding: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + default: Any | None = None, + inputBinding: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -19777,8 +17979,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowInputParameter": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -19788,7 +17990,7 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), uri_strtype_True_False_None_None, baseuri, @@ -19844,7 +18046,7 @@ def fromDoc( label = None if "label" in 
_doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -19891,7 +18093,7 @@ def fromDoc( secondaryFiles = None if "secondaryFiles" in _doc: try: - secondaryFiles = load_field( + secondaryFiles = _load_field( _doc.get("secondaryFiles"), secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, @@ -19938,7 +18140,7 @@ def fromDoc( streamable = None if "streamable" in _doc: try: - streamable = load_field( + streamable = _load_field( _doc.get("streamable"), union_of_None_type_or_booltype, baseuri, @@ -19985,7 +18187,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -20032,7 +18234,7 @@ def fromDoc( format = None if "format" in _doc: try: - format = load_field( + format = _load_field( _doc.get("format"), uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, baseuri, @@ -20079,7 +18281,7 @@ def fromDoc( loadContents = None if "loadContents" in _doc: try: - loadContents = load_field( + loadContents = _load_field( _doc.get("loadContents"), union_of_None_type_or_booltype, baseuri, @@ -20126,7 +18328,7 @@ def fromDoc( loadListing = None if "loadListing" in _doc: try: - loadListing = load_field( + loadListing = _load_field( _doc.get("loadListing"), union_of_None_type_or_LoadListingEnumLoader, baseuri, @@ -20173,7 +18375,7 @@ def fromDoc( default = None if "default" in _doc: try: - default = load_field( + default = _load_field( _doc.get("default"), union_of_None_type_or_CWLObjectTypeLoader, baseuri, @@ -20221,7 +18423,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), 
typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, baseuri, @@ -20268,7 +18470,7 @@ def fromDoc( inputBinding = None if "inputBinding" in _doc: try: - inputBinding = load_field( + inputBinding = _load_field( _doc.get("inputBinding"), union_of_None_type_or_InputBindingLoader, baseuri, @@ -20312,7 +18514,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -20320,7 +18522,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -20431,7 +18633,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ "label", "secondaryFiles", @@ -20450,13 +18652,7 @@ def save( class ExpressionTool(Process): """ - An ExpressionTool is a type of Process object that can be run by itself - or as a Workflow step. It executes a pure Javascript expression that has - access to the same input parameters as a workflow. It is meant to be used - sparingly as a way to isolate complex Javascript expressions that need to - operate on input data and produce some result; perhaps just a - rearrangement of the inputs. No Docker software container is required - or allowed. + An ExpressionTool is a type of Process object that can be run by itself or as a Workflow step. It executes a pure Javascript expression that has access to the same input parameters as a workflow. 
It is meant to be used sparingly as a way to isolate complex Javascript expressions that need to operate on input data and produce some result; perhaps just a rearrangement of the inputs. No Docker software container is required or allowed. """ @@ -20467,15 +18663,15 @@ def __init__( inputs: Any, outputs: Any, expression: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - intent: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + requirements: Any | None = None, + hints: Any | None = None, + cwlVersion: Any | None = None, + intent: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -20494,7 +18690,7 @@ def __init__( self.hints = hints self.cwlVersion = cwlVersion self.intent = intent - self.class_ = "ExpressionTool" + self.class_: Final[str] = "ExpressionTool" self.expression = expression def __eq__(self, other: Any) -> bool: @@ -20537,8 +18733,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ExpressionTool": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -20548,7 +18744,7 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, @@ -20605,7 +18801,7 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), uri_ExpressionTool_classLoader_False_True_None_None, baseuri, @@ -20613,14 
+18809,15 @@ def fromDoc( lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e + raise e label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -20667,7 +18864,7 @@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -20715,7 +18912,7 @@ def fromDoc( if _doc.get("inputs") is None: raise ValidationException("missing required field `inputs`", None, []) - inputs = load_field( + inputs = _load_field( _doc.get("inputs"), idmap_inputs_array_of_WorkflowInputParameterLoader, baseuri, @@ -20763,7 +18960,7 @@ def fromDoc( if _doc.get("outputs") is None: raise ValidationException("missing required field `outputs`", None, []) - outputs = load_field( + outputs = _load_field( _doc.get("outputs"), idmap_outputs_array_of_ExpressionToolOutputParameterLoader, baseuri, @@ -20810,9 +19007,9 @@ def fromDoc( requirements = None if "requirements" in _doc: try: - requirements = load_field( + requirements = _load_field( _doc.get("requirements"), - 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader, + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, baseuri, loadingOptions, lc=_doc.get("requirements") @@ -20857,9 +19054,9 @@ def fromDoc( hints = None if "hints" in _doc: try: - hints = load_field( + hints = _load_field( _doc.get("hints"), - 
idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_Any_type, + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, baseuri, loadingOptions, lc=_doc.get("hints") @@ -20904,7 +19101,7 @@ def fromDoc( cwlVersion = None if "cwlVersion" in _doc: try: - cwlVersion = load_field( + cwlVersion = _load_field( _doc.get("cwlVersion"), uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, baseuri, @@ -20951,7 +19148,7 @@ def fromDoc( intent = None if "intent" in _doc: try: - intent = load_field( + intent = _load_field( _doc.get("intent"), uri_union_of_None_type_or_array_of_strtype_True_False_None_None, baseuri, @@ -20999,7 +19196,7 @@ def fromDoc( if _doc.get("expression") is None: raise ValidationException("missing required field `expression`", None, []) - expression = load_field( + expression = _load_field( 
_doc.get("expression"), ExpressionLoader, baseuri, @@ -21043,7 +19240,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -21051,7 +19248,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -21099,8 +19296,10 @@ def save( u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ @@ -21155,7 +19354,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ "id", "label", @@ -21174,13 +19373,9 @@ def save( class WorkflowOutputParameter(OutputParameter): """ - Describe an output parameter of a workflow. The parameter must be - connected to one or more parameters defined in the workflow that - will provide the value of the output parameter. It is legal to - connect a WorkflowInputParameter to a WorkflowOutputParameter. + Describe an output parameter of a workflow. The parameter must be connected to one or more parameters defined in the workflow that will provide the value of the output parameter. It is legal to connect a WorkflowInputParameter to a WorkflowOutputParameter. - See [WorkflowStepInput](#WorkflowStepInput) for discussion of - `linkMerge` and `pickValue`. + See `WorkflowStepInput <#WorkflowStepInput>`__ for discussion of ``linkMerge`` and ``pickValue``. 
""" @@ -21190,16 +19385,16 @@ def __init__( self, id: Any, type_: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - outputSource: Optional[Any] = None, - linkMerge: Optional[Any] = None, - pickValue: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + outputSource: Any | None = None, + linkMerge: Any | None = None, + pickValue: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -21258,8 +19453,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowOutputParameter": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -21269,7 +19464,7 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), uri_strtype_True_False_None_None, baseuri, @@ -21325,7 +19520,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -21372,7 +19567,7 @@ def fromDoc( secondaryFiles = None if "secondaryFiles" in _doc: try: - secondaryFiles = load_field( + secondaryFiles = _load_field( _doc.get("secondaryFiles"), secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, @@ -21419,7 +19614,7 @@ def fromDoc( streamable = None if "streamable" in _doc: try: - streamable = load_field( + streamable = _load_field( _doc.get("streamable"), union_of_None_type_or_booltype, baseuri, @@ -21466,7 +19661,7 
@@ def fromDoc( doc = None if "doc" in _doc: try: - doc = load_field( + doc = _load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, @@ -21513,7 +19708,7 @@ def fromDoc( format = None if "format" in _doc: try: - format = load_field( + format = _load_field( _doc.get("format"), uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, baseuri, @@ -21560,7 +19755,7 @@ def fromDoc( outputSource = None if "outputSource" in _doc: try: - outputSource = load_field( + outputSource = _load_field( _doc.get("outputSource"), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None, baseuri, @@ -21607,7 +19802,7 @@ def fromDoc( linkMerge = None if "linkMerge" in _doc: try: - linkMerge = load_field( + linkMerge = _load_field( _doc.get("linkMerge"), union_of_None_type_or_LinkMergeMethodLoader, baseuri, @@ -21654,7 +19849,7 @@ def fromDoc( pickValue = None if "pickValue" in _doc: try: - pickValue = load_field( + pickValue = _load_field( _doc.get("pickValue"), union_of_None_type_or_PickValueMethodLoader, baseuri, @@ -21702,7 +19897,7 @@ def fromDoc( if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) - type_ = load_field( + type_ = _load_field( _doc.get("type"), typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, baseuri, @@ -21746,7 +19941,7 @@ def fromDoc( "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -21754,7 +19949,7 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] @@ -21850,7 
+20045,7 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ "label", "secondaryFiles", @@ -21872,114 +20067,77 @@ class Sink(Saveable): class WorkflowStepInput(IdentifierRequired, Sink, LoadContents, Labeled): """ - The input of a workflow step connects an upstream parameter (from the - workflow inputs, or the outputs of other workflows steps) with the input - parameters of the process specified by the `run` field. Only input parameters - declared by the target process will be passed through at runtime to the process - though additional parameters may be specified (for use within `valueFrom` - expressions for instance) - unconnected or unused parameters do not represent an - error condition. - - # Input object - - A WorkflowStepInput object must contain an `id` field in the form - `#fieldname` or `#prefix/fieldname`. When the `id` field contains a slash - `/` the field name consists of the characters following the final slash - (the prefix portion may contain one or more slashes to indicate scope). - This defines a field of the workflow step input object with the value of - the `source` parameter(s). - - # Merging multiple inbound data links - - To merge multiple inbound data links, - [MultipleInputFeatureRequirement](#MultipleInputFeatureRequirement) must be specified - in the workflow or workflow step requirements. - - If the sink parameter is an array, or named in a [workflow - scatter](#WorkflowStep) operation, there may be multiple inbound - data links listed in the `source` field. The values from the - input links are merged depending on the method specified in the - `linkMerge` field. If both `linkMerge` and `pickValue` are null - or not specified, and there is more than one element in the - `source` array, the default method is "merge_nested". 
- - If both `linkMerge` and `pickValue` are null or not specified, and - there is only a single element in the `source`, then the input - parameter takes the scalar value from the single input link (it is - *not* wrapped in a single-list). + The input of a workflow step connects an upstream parameter (from the workflow inputs, or the outputs of other workflows steps) with the input parameters of the process specified by the ``run`` field. Only input parameters declared by the target process will be passed through at runtime to the process though additional parameters may be specified (for use within ``valueFrom`` expressions for instance) - unconnected or unused parameters do not represent an error condition. + + Input object + ============ + + A WorkflowStepInput object must contain an ``id`` field in the form ``#fieldname`` or ``#prefix/fieldname``. When the ``id`` field contains a slash ``/`` the field name consists of the characters following the final slash (the prefix portion may contain one or more slashes to indicate scope). This defines a field of the workflow step input object with the value of the ``source`` parameter(s). + + Merging multiple inbound data links + =================================== + + To merge multiple inbound data links, `MultipleInputFeatureRequirement <#MultipleInputFeatureRequirement>`__ must be specified in the workflow or workflow step requirements. + + If the sink parameter is an array, or named in a `workflow scatter <#WorkflowStep>`__ operation, there may be multiple inbound data links listed in the ``source`` field. The values from the input links are merged depending on the method specified in the ``linkMerge`` field. If both ``linkMerge`` and ``pickValue`` are null or not specified, and there is more than one element in the ``source`` array, the default method is "merge_nested". 
+ + If both ``linkMerge`` and ``pickValue`` are null or not specified, and there is only a single element in the ``source``, then the input parameter takes the scalar value from the single input link (it is *not* wrapped in a single-list). * **merge_nested** - The input must be an array consisting of exactly one entry for each - input link. If "merge_nested" is specified with a single link, the value - from the link must be wrapped in a single-item list. + The input must be an array consisting of exactly one entry for each input link. If "merge_nested" is specified with a single link, the value from the link must be wrapped in a single-item list. * **merge_flattened** - 1. The source and sink parameters must be compatible types, or the source - type must be compatible with single element from the "items" type of - the destination array parameter. - 2. Source parameters which are arrays are concatenated. - Source parameters which are single element types are appended as - single elements. + 1. The source and sink parameters must be compatible types, or the source type must be compatible with single element from the "items" type of the destination array parameter. + 2. Source parameters which are arrays are concatenated. Source parameters which are single element types are appended as single elements. - # Picking non-null values among inbound data links + Picking non-null values among inbound data links + ================================================ - If present, `pickValue` specifies how to pick non-null values among inbound data links. + If present, ``pickValue`` specifies how to pick non-null values among inbound data links. - `pickValue` is evaluated - 1. Once all source values from upstream step or parameters are available. - 2. After `linkMerge`. - 3. Before `scatter` or `valueFrom`. 
+ ``pickValue`` is evaluated - This is specifically intended to be useful in combination with - [conditional execution](#WorkflowStep), where several upstream - steps may be connected to a single input (`source` is a list), and - skipped steps produce null values. + 1. Once all source values from upstream step or parameters are available. + 2. After ``linkMerge``. + 3. Before ``scatter`` or ``valueFrom``. - Static type checkers should check for type consistency after inferring what the type - will be after `pickValue` is applied, just as they do currently for `linkMerge`. + This is specifically intended to be useful in combination with `conditional execution <#WorkflowStep>`__, where several upstream steps may be connected to a single input (``source`` is a list), and skipped steps produce null values. + + Static type checkers should check for type consistency after inferring what the type will be after ``pickValue`` is applied, just as they do currently for ``linkMerge``. * **first_non_null** - For the first level of a list input, pick the first non-null element. The result is a scalar. - It is an error if there is no non-null element. Examples: - * `[null, x, null, y] -> x` - * `[null, [null], null, y] -> [null]` - * `[null, null, null] -> Runtime Error` + For the first level of a list input, pick the first non-null element. The result is a scalar. It is an error if there is no non-null element. Examples: + + * ``[null, x, null, y] -> x`` + * ``[null, [null], null, y] -> [null]`` + * ``[null, null, null] -> Runtime Error`` - *Intended use case*: If-else pattern where the - value comes either from a conditional step or from a default or - fallback value. The conditional step(s) should be placed first in - the list. + *Intended use case*: If-else pattern where the value comes either from a conditional step or from a default or fallback value. The conditional step(s) should be placed first in the list. 
* **the_only_non_null** - For the first level of a list input, pick the single non-null element. The result is a scalar. - It is an error if there is more than one non-null element. Examples: + For the first level of a list input, pick the single non-null element. The result is a scalar. It is an error if there is more than one non-null element. Examples: - * `[null, x, null] -> x` - * `[null, x, null, y] -> Runtime Error` - * `[null, [null], null] -> [null]` - * `[null, null, null] -> Runtime Error` + * ``[null, x, null] -> x`` + * ``[null, x, null, y] -> Runtime Error`` + * ``[null, [null], null] -> [null]`` + * ``[null, null, null] -> Runtime Error`` - *Intended use case*: Switch type patterns where developer considers - more than one active code path as a workflow error - (possibly indicating an error in writing `when` condition expressions). + *Intended use case*: Switch type patterns where developer considers more than one active code path as a workflow error (possibly indicating an error in writing ``when`` condition expressions). * **all_non_null** - For the first level of a list input, pick all non-null values. - The result is a list, which may be empty. Examples: + For the first level of a list input, pick all non-null values. The result is a list, which may be empty. Examples: - * `[null, x, null] -> [x]` - * `[x, null, y] -> [x, y]` - * `[null, [x], [null]] -> [[x], [null]]` - * `[null, null, null] -> []` + * ``[null, x, null] -> [x]`` + * ``[x, null, y] -> [x, y]`` + * ``[null, [x], [null]] -> [[x], [null]]`` + * ``[null, null, null] -> []`` - *Intended use case*: It is valid to have more than one source, but - sources are conditional, so null sources (from skipped steps) - should be filtered out. + *Intended use case*: It is valid to have more than one source, but sources are conditional, so null sources (from skipped steps) should be filtered out. 
""" @@ -21988,16 +20146,16 @@ class WorkflowStepInput(IdentifierRequired, Sink, LoadContents, Labeled): def __init__( self, id: Any, - source: Optional[Any] = None, - linkMerge: Optional[Any] = None, - pickValue: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - label: Optional[Any] = None, - default: Optional[Any] = None, - valueFrom: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + source: Any | None = None, + linkMerge: Any | None = None, + pickValue: Any | None = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + label: Any | None = None, + default: Any | None = None, + valueFrom: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -22053,8 +20211,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowStepInput": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -22064,7 +20222,7 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), uri_strtype_True_False_None_None, baseuri, @@ -22120,7 +20278,7 @@ def fromDoc( source = None if "source" in _doc: try: - source = load_field( + source = _load_field( _doc.get("source"), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None, baseuri, @@ -22167,7 +20325,7 @@ def fromDoc( linkMerge = None if "linkMerge" in _doc: try: - linkMerge = load_field( + linkMerge = _load_field( _doc.get("linkMerge"), union_of_None_type_or_LinkMergeMethodLoader, baseuri, @@ -22214,7 +20372,7 @@ def fromDoc( pickValue = None if "pickValue" in _doc: try: - pickValue = load_field( + pickValue = _load_field( _doc.get("pickValue"), union_of_None_type_or_PickValueMethodLoader, 
baseuri, @@ -22261,7 +20419,7 @@ def fromDoc( loadContents = None if "loadContents" in _doc: try: - loadContents = load_field( + loadContents = _load_field( _doc.get("loadContents"), union_of_None_type_or_booltype, baseuri, @@ -22308,7 +20466,7 @@ def fromDoc( loadListing = None if "loadListing" in _doc: try: - loadListing = load_field( + loadListing = _load_field( _doc.get("loadListing"), union_of_None_type_or_LoadListingEnumLoader, baseuri, @@ -22355,7 +20513,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -22402,7 +20560,7 @@ def fromDoc( default = None if "default" in _doc: try: - default = load_field( + default = _load_field( _doc.get("default"), union_of_None_type_or_CWLObjectTypeLoader, baseuri, @@ -22449,7 +20607,7 @@ def fromDoc( valueFrom = None if "valueFrom" in _doc: try: - valueFrom = load_field( + valueFrom = _load_field( _doc.get("valueFrom"), union_of_None_type_or_strtype_or_ExpressionLoader, baseuri, @@ -22488,2060 +20646,12 @@ def fromDoc( ValidationException( "the `valueFrom` field is not valid because:", SourceLine(_doc, "valueFrom", str), - [e], - detailed_message=f"the `valueFrom` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `source`, `linkMerge`, `pickValue`, `loadContents`, `loadListing`, `label`, `default`, `valueFrom`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - id=id, - source=source, - 
linkMerge=linkMerge, - pickValue=pickValue, - loadContents=loadContents, - loadListing=loadListing, - label=label, - default=default, - valueFrom=valueFrom, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.source is not None: - u = save_relative_uri(self.source, self.id, False, 2, relative_uris) - r["source"] = u - if self.linkMerge is not None: - r["linkMerge"] = save( - self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.pickValue is not None: - r["pickValue"] = save( - self.pickValue, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.default is not None: - r["default"] = save( - self.default, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.valueFrom is not None: - r["valueFrom"] = save( - self.valueFrom, top=False, base_url=self.id, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = 
self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "source", - "linkMerge", - "pickValue", - "loadContents", - "loadListing", - "label", - "default", - "valueFrom", - ] - ) - - -class WorkflowStepOutput(IdentifierRequired): - """ - Associate an output parameter of the underlying process with a workflow - parameter. The workflow parameter (given in the `id` field) be may be used - as a `source` to connect with input parameters of other workflow steps, or - with an output parameter of the process. - - A unique identifier for this workflow output parameter. This is - the identifier to use in the `source` field of `WorkflowStepInput` - to connect the output value to downstream parameters. - - """ - - id: str - - def __init__( - self, - id: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStepOutput): - return bool(self.id == other.id) - return False - - def __hash__(self) -> int: - return hash((self.id)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowStepOutput": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = 
parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`".format(k), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - id=id, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - 
r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["id"]) - - -class WorkflowStep(IdentifierRequired, Labeled, Documented): - """ - A workflow step is an executable element of a workflow. It specifies the - underlying process implementation (such as `CommandLineTool` or another - `Workflow`) in the `run` field and connects the input and output parameters - of the underlying process to workflow parameters. - - # Scatter/gather - - To use scatter/gather, - [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified - in the workflow or workflow step requirements. - - A "scatter" operation specifies that the associated workflow step or - subworkflow should execute separately over a list of input elements. Each - job making up a scatter operation is independent and may be executed - concurrently. - - The `scatter` field specifies one or more input parameters which will be - scattered. An input parameter may be listed more than once. The declared - type of each input parameter implicitly becomes an array of items of the - input parameter type. If a parameter is listed more than once, it becomes - a nested array. As a result, upstream parameters which are connected to - scattered parameters must be arrays. - - All output parameter types are also implicitly wrapped in arrays. Each job - in the scatter results in an entry in the output array. 
- - If any scattered parameter runtime value is an empty array, all outputs are - set to empty arrays and no work is done for the step, according to - applicable scattering rules. - - If `scatter` declares more than one input parameter, `scatterMethod` - describes how to decompose the input into a discrete set of jobs. - - * **dotproduct** specifies that each of the input arrays are aligned and one - element taken from each array to construct each job. It is an error - if all input arrays are not the same length. - - * **nested_crossproduct** specifies the Cartesian product of the inputs, - producing a job for every combination of the scattered inputs. The - output must be nested arrays for each level of scattering, in the - order that the input arrays are listed in the `scatter` field. - - * **flat_crossproduct** specifies the Cartesian product of the inputs, - producing a job for every combination of the scattered inputs. The - output arrays must be flattened to a single level, but otherwise listed in the - order that the input arrays are listed in the `scatter` field. - - # Conditional execution (Optional) - - Conditional execution makes execution of a step conditional on an - expression. A step that is not executed is "skipped". A skipped - step produces `null` for all output parameters. - - The condition is evaluated after `scatter`, using the input object - of each individual scatter job. This means over a set of scatter - jobs, some may be executed and some may be skipped. When the - results are gathered, skipped steps must be `null` in the output - arrays. - - The `when` field controls conditional execution. This is an - expression that must be evaluated with `inputs` bound to the step - input object (or individual scatter job), and returns a boolean - value. It is an error if this expression returns a value other - than `true` or `false`. 
- - Conditionals in CWL are an optional feature and are not required - to be implemented by all consumers of CWL documents. An - implementation that does not support conditionals must return a - fatal error when attempting to execute a workflow that uses - conditional constructs the implementation does not support. - - # Subworkflows - - To specify a nested workflow as part of a workflow step, - [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be - specified in the workflow or workflow step requirements. - - It is a fatal error if a workflow directly or indirectly invokes itself as - a subworkflow (recursive workflows are not allowed). - - """ - - id: str - - def __init__( - self, - id: Any, - in_: Any, - out: Any, - run: Any, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - when: Optional[Any] = None, - scatter: Optional[Any] = None, - scatterMethod: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.label = label - self.doc = doc - self.in_ = in_ - self.out = out - self.requirements = requirements - self.hints = hints - self.run = run - self.when = when - self.scatter = scatter - self.scatterMethod = scatterMethod - - def __eq__(self, other: Any) -> bool: - if isinstance(other, WorkflowStep): - return bool( - self.id == other.id - and self.label == other.label - and self.doc == other.doc - and self.in_ == other.in_ - and self.out == other.out - and self.requirements == other.requirements - and self.hints == other.hints - and self.run == other.run - and self.when == other.when - and 
self.scatter == other.scatter - and self.scatterMethod == other.scatterMethod - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.in_, - self.out, - self.requirements, - self.hints, - self.run, - self.when, - self.scatter, - self.scatterMethod, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "WorkflowStep": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - _errors__.append(ValidationException("missing id")) - if not __original_id_is_none: - baseuri = cast(str, id) - label = None - if "label" in _doc: - try: - label = load_field( - 
_doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - 
_errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("in") is None: - raise ValidationException("missing required field `in`", None, []) - - in_ = load_field( - _doc.get("in"), - idmap_in__array_of_WorkflowStepInputLoader, - baseuri, - loadingOptions, - lc=_doc.get("in") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `in`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("in") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `in` field is not valid because:", - SourceLine(_doc, "in", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `in` field is not valid because:", - SourceLine(_doc, "in", str), - [e], - detailed_message=f"the `in` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("out") is None: - raise ValidationException("missing required field `out`", None, []) - - out = load_field( - _doc.get("out"), - uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("out") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `out`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("out") - if error_message != str(e): - val_type = 
convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `out` field is not valid because:", - SourceLine(_doc, "out", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `out` field is not valid because:", - SourceLine(_doc, "out", str), - [e], - detailed_message=f"the `out` field with value `{val}` " - "is not valid because:", - ) - ) - requirements = None - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader, - baseuri, - loadingOptions, - lc=_doc.get("requirements") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `requirements`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("requirements") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, 
"requirements", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - detailed_message=f"the `requirements` field with value `{val}` " - "is not valid because:", - ) - ) - hints = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_Any_type, - baseuri, - loadingOptions, - lc=_doc.get("hints") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `hints`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("hints") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - detailed_message=f"the `hints` field with value `{val}` " - "is not valid because:", - ) - ) - - subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) - try: - if _doc.get("run") is None: - raise ValidationException("missing required field `run`", None, []) - - run = load_field( - _doc.get("run"), - 
uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader_False_False_None_None, - subscope_baseuri, - loadingOptions, - lc=_doc.get("run") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `run`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("run") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `run` field is not valid because:", - SourceLine(_doc, "run", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `run` field is not valid because:", - SourceLine(_doc, "run", str), - [e], - detailed_message=f"the `run` field with value `{val}` " - "is not valid because:", - ) - ) - when = None - if "when" in _doc: - try: - when = load_field( - _doc.get("when"), - union_of_None_type_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("when") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `when`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("when") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `when` field is not valid because:", - SourceLine(_doc, "when", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid 
{to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `when` field is not valid because:", - SourceLine(_doc, "when", str), - [e], - detailed_message=f"the `when` field with value `{val}` " - "is not valid because:", - ) - ) - scatter = None - if "scatter" in _doc: - try: - scatter = load_field( - _doc.get("scatter"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None, - baseuri, - loadingOptions, - lc=_doc.get("scatter") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `scatter`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("scatter") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `scatter` field is not valid because:", - SourceLine(_doc, "scatter", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `scatter` field is not valid because:", - SourceLine(_doc, "scatter", str), - [e], - detailed_message=f"the `scatter` field with value `{val}` " - "is not valid because:", - ) - ) - scatterMethod = None - if "scatterMethod" in _doc: - try: - scatterMethod = load_field( - _doc.get("scatterMethod"), - uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("scatterMethod") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `scatterMethod`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - 
val = _doc.get("scatterMethod") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `scatterMethod` field is not valid because:", - SourceLine(_doc, "scatterMethod", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `scatterMethod` field is not valid because:", - SourceLine(_doc, "scatterMethod", str), - [e], - detailed_message=f"the `scatterMethod` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `when`, `scatter`, `scatterMethod`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - id=id, - label=label, - doc=doc, - in_=in_, - out=out, - requirements=requirements, - hints=hints, - run=run, - when=when, - scatter=scatter, - scatterMethod=scatterMethod, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, 
self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.in_ is not None: - r["in"] = save( - self.in_, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.out is not None: - u = save_relative_uri(self.out, self.id, True, None, relative_uris) - r["out"] = u - if self.requirements is not None: - r["requirements"] = save( - self.requirements, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.run is not None: - u = save_relative_uri(self.run, self.id, False, None, relative_uris) - r["run"] = u - if self.when is not None: - r["when"] = save( - self.when, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.scatter is not None: - u = save_relative_uri(self.scatter, self.id, False, 0, relative_uris) - r["scatter"] = u - if self.scatterMethod is not None: - u = save_relative_uri( - self.scatterMethod, self.id, False, None, relative_uris - ) - r["scatterMethod"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "label", - "doc", - "in", - "out", - "requirements", - "hints", - "run", - "when", - "scatter", - "scatterMethod", - ] - ) - - -class Workflow(Process): - """ - A workflow describes a set of **steps** and the **dependencies** 
between - those steps. When a step produces output that will be consumed by a - second step, the first step is a dependency of the second step. - - When there is a dependency, the workflow engine must execute the preceding - step and wait for it to successfully produce output before executing the - dependent step. If two steps are defined in the workflow graph that - are not directly or indirectly dependent, these steps are **independent**, - and may execute in any order or execute concurrently. A workflow is - complete when all steps have been executed. - - Dependencies between parameters are expressed using the `source` - field on [workflow step input parameters](#WorkflowStepInput) and - `outputSource` field on [workflow output - parameters](#WorkflowOutputParameter). - - The `source` field on each workflow step input parameter expresses - the data links that contribute to the value of the step input - parameter (the "sink"). A workflow step can only begin execution - when every data link connected to a step has been fulfilled. - - The `outputSource` field on each workflow step input parameter - expresses the data links that contribute to the value of the - workflow output parameter (the "sink"). Workflow execution cannot - complete successfully until every data link connected to an output - parameter has been fulfilled. - - ## Workflow success and failure - - A completed step must result in one of `success`, `temporaryFailure` or - `permanentFailure` states. An implementation may choose to retry a step - execution which resulted in `temporaryFailure`. An implementation may - choose to either continue running other steps of a workflow, or terminate - immediately upon `permanentFailure`. - - * If any step of a workflow execution results in `permanentFailure`, then - the workflow status is `permanentFailure`. 
- - * If one or more steps result in `temporaryFailure` and all other steps - complete `success` or are not executed, then the workflow status is - `temporaryFailure`. - - * If all workflow steps are executed and complete with `success`, then the - workflow status is `success`. - - # Extensions - - [ScatterFeatureRequirement](#ScatterFeatureRequirement) and - [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are - available as standard [extensions](#Extensions_and_Metadata) to core - workflow semantics. - - """ - - id: str - - def __init__( - self, - inputs: Any, - outputs: Any, - steps: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - intent: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.label = label - self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.cwlVersion = cwlVersion - self.intent = intent - self.class_ = "Workflow" - self.steps = steps - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Workflow): - return bool( - self.id == other.id - and self.label == other.label - and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == other.cwlVersion - and self.intent == other.intent - and self.class_ == other.class_ - and self.steps == other.steps - ) - return False - - def __hash__(self) -> 
int: - return hash( - ( - self.id, - self.label, - self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.intent, - self.class_, - self.steps, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Workflow": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = cast(str, id) - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - 
uri_Workflow_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` 
field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("inputs") is None: - raise ValidationException("missing required field `inputs`", None, []) - - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_WorkflowInputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - detailed_message=f"the `inputs` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("outputs") is None: - raise ValidationException("missing required field `outputs`", None, []) - - outputs = load_field( - _doc.get("outputs"), - 
idmap_outputs_array_of_WorkflowOutputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - detailed_message=f"the `outputs` field with value `{val}` " - "is not valid because:", - ) - ) - requirements = None - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader, - baseuri, - loadingOptions, - lc=_doc.get("requirements") - ) - - except ValidationException as e: - 
error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `requirements`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("requirements") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - detailed_message=f"the `requirements` field with value `{val}` " - "is not valid because:", - ) - ) - hints = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_Any_type, - baseuri, - loadingOptions, - lc=_doc.get("hints") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `hints`": - _errors__.append( - 
ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("hints") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - detailed_message=f"the `hints` field with value `{val}` " - "is not valid because:", - ) - ) - cwlVersion = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("cwlVersion") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `cwlVersion`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cwlVersion") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - detailed_message=f"the `cwlVersion` field with value `{val}` " - "is not 
valid because:", - ) - ) - intent = None - if "intent" in _doc: - try: - intent = load_field( - _doc.get("intent"), - uri_union_of_None_type_or_array_of_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("intent") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `intent`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("intent") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `intent` field is not valid because:", - SourceLine(_doc, "intent", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `intent` field is not valid because:", - SourceLine(_doc, "intent", str), - [e], - detailed_message=f"the `intent` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("steps") is None: - raise ValidationException("missing required field `steps`", None, []) - - steps = load_field( - _doc.get("steps"), - idmap_steps_union_of_array_of_WorkflowStepLoader, - baseuri, - loadingOptions, - lc=_doc.get("steps") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `steps`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("steps") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `steps` field is not valid because:", - SourceLine(_doc, "steps", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid 
{to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `steps` field is not valid because:", - SourceLine(_doc, "steps", str), - [e], - detailed_message=f"the `steps` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `steps`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - id=id, - label=label, - doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - intent=intent, - steps=steps, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := 
self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, self.id, False, None, relative_uris) - r["class"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.inputs is not None: - r["inputs"] = save( - self.inputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputs is not None: - r["outputs"] = save( - self.outputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.requirements is not None: - r["requirements"] = save( - self.requirements, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.cwlVersion is not None: - u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) - r["cwlVersion"] = u - if self.intent is not None: - u = save_relative_uri(self.intent, self.id, True, None, relative_uris) - r["intent"] = u - if self.steps is not None: - r["steps"] = save( - self.steps, top=False, base_url=self.id, relative_uris=relative_uris - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "intent", - "class", - "steps", - ] - ) - - -class SubworkflowFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support nested workflows in - the `run` field of [WorkflowStep](#WorkflowStep). 
- - """ - - def __init__( - self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "SubworkflowFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, SubworkflowFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "SubworkflowFeatureRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - extension_fields=extension_fields, 
- loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class ScatterFeatureRequirement(ProcessRequirement): - """ - Indicates that the workflow platform must support the `scatter` and - `scatterMethod` fields of [WorkflowStep](#WorkflowStep). 
- - """ - - def __init__( - self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ScatterFeatureRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ScatterFeatureRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ScatterFeatureRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_ScatterFeatureRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - extension_fields: dict[str, Any] = {} + [e], + detailed_message=f"the `valueFrom` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -24549,14 +20659,16 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), + "invalid 
field `{}`, expected one of: `id`, `source`, `linkMerge`, `pickValue`, `loadContents`, `loadListing`, `label`, `default`, `valueFrom`".format( + k + ), SourceLine(_doc, k, str), ) ) @@ -24564,9 +20676,19 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( + id=id, + source=source, + linkMerge=linkMerge, + pickValue=pickValue, + loadContents=loadContents, + loadListing=loadListing, + label=label, + default=default, + valueFrom=valueFrom, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -24580,14 +20702,46 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.source is not None: + u = save_relative_uri(self.source, self.id, False, 2, relative_uris) + r["source"] = u + if self.linkMerge is not None: + r["linkMerge"] = save( + self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.pickValue is not None: + r["pickValue"] = save( + self.pickValue, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if 
self.default is not None: + r["default"] = save( + self.default, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.valueFrom is not None: + r["valueFrom"] = save( + self.valueFrom, top=False, base_url=self.id, relative_uris=relative_uris + ) # top refers to the directory level if top: @@ -24597,20 +20751,36 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "id", + "source", + "linkMerge", + "pickValue", + "loadContents", + "loadListing", + "label", + "default", + "valueFrom", + ] + ) -class MultipleInputFeatureRequirement(ProcessRequirement): +class WorkflowStepOutput(IdentifierRequired): """ - Indicates that the workflow platform must support multiple inbound data links - listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput). + Associate an output parameter of the underlying process with a workflow parameter. The workflow parameter (given in the ``id`` field) be may be used as a ``source`` to connect with input parameters of other workflow steps, or with an output parameter of the process. + + A unique identifier for this workflow output parameter. This is the identifier to use in the ``source`` field of ``WorkflowStepInput`` to connect the output value to downstream parameters. 
""" + id: str + def __init__( self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -24620,15 +20790,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "MultipleInputFeatureRequirement" + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) def __eq__(self, other: Any) -> bool: - if isinstance(other, MultipleInputFeatureRequirement): - return bool(self.class_ == other.class_) + if isinstance(other, WorkflowStepOutput): + return bool(self.id == other.id) return False def __hash__(self) -> int: - return hash((self.class_)) + return hash((self.id)) @classmethod def fromDoc( @@ -24636,150 +20806,71 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "MultipleInputFeatureRequirement": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) + id = None + if "id" in _doc: + try: + id = _load_field( + _doc.get("id"), + uri_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) - class_ = load_field( - _doc.get("class"), - uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - extension_fields: dict[str, 
Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: + if str(e) == "missing required field `id`": _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), - SourceLine(_doc, k, str), + str(e), + None ) ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" + __original_id_is_none = id is None + if id is None: + if docRoot is not None: 
+ id = docRoot else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class"]) - - -class StepInputExpressionRequirement(ProcessRequirement): - """ - Indicate that the workflow platform must support the `valueFrom` field - of [WorkflowStepInput](#WorkflowStepInput). - - """ - - def __init__( - self, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "StepInputExpressionRequirement" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, StepInputExpressionRequirement): - return bool(self.class_ == other.class_) - return False - - def __hash__(self) -> int: - return hash((self.class_)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "StepInputExpressionRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_StepInputExpressionRequirement_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - 
extension_fields: dict[str, Any] = {} + _errors__.append(ValidationException("missing id")) + if not __original_id_is_none: + baseuri = cast(str, id) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -24787,14 +20878,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`".format(k), + "invalid field `{}`, expected one of: `id`".format(k), SourceLine(_doc, k, str), ) ) @@ -24802,9 +20893,11 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( + id=id, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -24818,14 +20911,9 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u # top refers to the directory level if top: @@ -24835,12 +20923,51 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class"]) + attrs: ClassVar[Collection[str]] = frozenset(["id"]) -class OperationInputParameter(InputParameter): +class WorkflowStep(IdentifierRequired, Labeled, schema_salad.metaschema.Documented): """ - Describe an input parameter of an operation. + A workflow step is an executable element of a workflow. 
It specifies the underlying process implementation (such as ``CommandLineTool`` or another ``Workflow``) in the ``run`` field and connects the input and output parameters of the underlying process to workflow parameters. + + Scatter/gather + ============== + + To use scatter/gather, `ScatterFeatureRequirement <#ScatterFeatureRequirement>`__ must be specified in the workflow or workflow step requirements. + + A "scatter" operation specifies that the associated workflow step or subworkflow should execute separately over a list of input elements. Each job making up a scatter operation is independent and may be executed concurrently. + + The ``scatter`` field specifies one or more input parameters which will be scattered. An input parameter may be listed more than once. The declared type of each input parameter implicitly becomes an array of items of the input parameter type. If a parameter is listed more than once, it becomes a nested array. As a result, upstream parameters which are connected to scattered parameters must be arrays. + + All output parameter types are also implicitly wrapped in arrays. Each job in the scatter results in an entry in the output array. + + If any scattered parameter runtime value is an empty array, all outputs are set to empty arrays and no work is done for the step, according to applicable scattering rules. + + If ``scatter`` declares more than one input parameter, ``scatterMethod`` describes how to decompose the input into a discrete set of jobs. + + * **dotproduct** specifies that each of the input arrays are aligned and one element taken from each array to construct each job. It is an error if all input arrays are not the same length. + + * **nested_crossproduct** specifies the Cartesian product of the inputs, producing a job for every combination of the scattered inputs. The output must be nested arrays for each level of scattering, in the order that the input arrays are listed in the ``scatter`` field. 
+ + * **flat_crossproduct** specifies the Cartesian product of the inputs, producing a job for every combination of the scattered inputs. The output arrays must be flattened to a single level, but otherwise listed in the order that the input arrays are listed in the ``scatter`` field. + + Conditional execution (Optional) + ================================ + + Conditional execution makes execution of a step conditional on an expression. A step that is not executed is "skipped". A skipped step produces ``null`` for all output parameters. + + The condition is evaluated after ``scatter``, using the input object of each individual scatter job. This means over a set of scatter jobs, some may be executed and some may be skipped. When the results are gathered, skipped steps must be ``null`` in the output arrays. + + The ``when`` field controls conditional execution. This is an expression that must be evaluated with ``inputs`` bound to the step input object (or individual scatter job), and returns a boolean value. It is an error if this expression returns a value other than ``true`` or ``false``. + + Conditionals in CWL are an optional feature and are not required to be implemented by all consumers of CWL documents. An implementation that does not support conditionals must return a fatal error when attempting to execute a workflow that uses conditional constructs the implementation does not support. + + Subworkflows + ============ + + To specify a nested workflow as part of a workflow step, `SubworkflowFeatureRequirement <#SubworkflowFeatureRequirement>`__ must be specified in the workflow or workflow step requirements. + + It is a fatal error if a workflow directly or indirectly invokes itself as a subworkflow (recursive workflows are not allowed). 
""" @@ -24849,17 +20976,18 @@ class OperationInputParameter(InputParameter): def __init__( self, id: Any, - type_: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - loadContents: Optional[Any] = None, - loadListing: Optional[Any] = None, - default: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + in_: Any, + out: Any, + run: Any, + label: Any | None = None, + doc: Any | None = None, + requirements: Any | None = None, + hints: Any | None = None, + when: Any | None = None, + scatter: Any | None = None, + scatterMethod: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -24869,46 +20997,49 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.format = format - self.loadContents = loadContents - self.loadListing = loadListing - self.default = default - self.type_ = type_ + self.in_ = in_ + self.out = out + self.requirements = requirements + self.hints = hints + self.run = run + self.when = when + self.scatter = scatter + self.scatterMethod = scatterMethod def __eq__(self, other: Any) -> bool: - if isinstance(other, OperationInputParameter): + if isinstance(other, WorkflowStep): return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable + self.id == other.id + and self.label == other.label and self.doc == other.doc - and self.id == other.id - and self.format == 
other.format - and self.loadContents == other.loadContents - and self.loadListing == other.loadListing - and self.default == other.default - and self.type_ == other.type_ + and self.in_ == other.in_ + and self.out == other.out + and self.requirements == other.requirements + and self.hints == other.hints + and self.run == other.run + and self.when == other.when + and self.scatter == other.scatter + and self.scatterMethod == other.scatterMethod ) return False def __hash__(self) -> int: return hash( ( + self.id, self.label, - self.secondaryFiles, - self.streamable, self.doc, - self.id, - self.format, - self.loadContents, - self.loadListing, - self.default, - self.type_, + self.in_, + self.out, + self.requirements, + self.hints, + self.run, + self.when, + self.scatter, + self.scatterMethod, ) ) @@ -24918,8 +21049,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OperationInputParameter": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -24929,7 +21060,7 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), uri_strtype_True_False_None_None, baseuri, @@ -24985,7 +21116,7 @@ def fromDoc( label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -25029,21 +21160,21 @@ def fromDoc( "is not valid because:", ) ) - secondaryFiles = None - if "secondaryFiles" in _doc: + doc = None + if "doc" in _doc: try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("secondaryFiles") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if 
str(e) == "missing required field `secondaryFiles`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -25051,13 +21182,13 @@ def fromDoc( ) ) else: - val = _doc.get("secondaryFiles") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -25069,75 +21200,124 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - streamable = None - if "streamable" in _doc: - try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, - baseuri, - loadingOptions, - lc=_doc.get("streamable") - ) + try: + if _doc.get("in") is None: + raise ValidationException("missing required field `in`", None, []) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + in_ = _load_field( + _doc.get("in"), + idmap_in__array_of_WorkflowStepInputLoader, + baseuri, + loadingOptions, + lc=_doc.get("in") + ) - if str(e) == "missing required field `streamable`": + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `in`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("in") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - str(e), - None + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) ) else: - val = _doc.get("streamable") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) + _errors__.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, "in", str), + [e], + detailed_message=f"the `in` field with value `{val}` " + "is not valid because:", ) - else: - _errors__.append( - ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), - [e], - detailed_message=f"the `streamable` field with value `{val}` " - "is not valid because:", - ) + ) + try: + if _doc.get("out") is None: + raise ValidationException("missing required field `out`", None, []) + + out = _load_field( + _doc.get("out"), + uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("out") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `out`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("out") + if error_message != str(e): + val_type = 
convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], ) - doc = None - if "doc" in _doc: + ) + else: + _errors__.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, "out", str), + [e], + detailed_message=f"the `out` field with value `{val}` " + "is not valid because:", + ) + ) + requirements = None + if "requirements" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + requirements = _load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("requirements") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `requirements`": _errors__.append( ValidationException( str(e), @@ -25145,13 +21325,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("requirements") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) 
_errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -25163,28 +21343,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `requirements` field with value `{val}` " "is not valid because:", ) ) - format = None - if "format" in _doc: - try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + hints = None + if "hints" in _doc: + try: + hints = _load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_Any_type, baseuri, loadingOptions, - lc=_doc.get("format") + lc=_doc.get("hints") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `format`": + if str(e) == "missing required field `hints`": _errors__.append( ValidationException( str(e), @@ -25192,13 +21372,13 @@ def fromDoc( ) ) else: - val = _doc.get("format") + val = _doc.get("hints") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -25210,28 +21390,78 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `format` field is 
not valid because:", - SourceLine(_doc, "format", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [e], - detailed_message=f"the `format` field with value `{val}` " + detailed_message=f"the `hints` field with value `{val}` " "is not valid because:", ) ) - loadContents = None - if "loadContents" in _doc: + + subscope_baseuri = _expand_url('run', baseuri, loadingOptions, True) + try: + if _doc.get("run") is None: + raise ValidationException("missing required field `run`", None, []) + + run = _load_field( + _doc.get("run"), + uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None_None, + subscope_baseuri, + loadingOptions, + lc=_doc.get("run") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `run`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("run") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `run` field is not valid because:", + SourceLine(_doc, "run", str), + [e], + detailed_message=f"the `run` field with value `{val}` " + "is not valid because:", + ) + ) + when = None + if "when" in _doc: try: - loadContents = load_field( - _doc.get("loadContents"), - union_of_None_type_or_booltype, + when = _load_field( + _doc.get("when"), + union_of_None_type_or_ExpressionLoader, baseuri, loadingOptions, - lc=_doc.get("loadContents") + lc=_doc.get("when") ) 
except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadContents`": + if str(e) == "missing required field `when`": _errors__.append( ValidationException( str(e), @@ -25239,13 +21469,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadContents") + val = _doc.get("when") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `when` field is not valid because:", + SourceLine(_doc, "when", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -25257,28 +21487,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadContents` field is not valid because:", - SourceLine(_doc, "loadContents", str), + "the `when` field is not valid because:", + SourceLine(_doc, "when", str), [e], - detailed_message=f"the `loadContents` field with value `{val}` " + detailed_message=f"the `when` field with value `{val}` " "is not valid because:", ) ) - loadListing = None - if "loadListing" in _doc: + scatter = None + if "scatter" in _doc: try: - loadListing = load_field( - _doc.get("loadListing"), - union_of_None_type_or_LoadListingEnumLoader, + scatter = _load_field( + _doc.get("scatter"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None, baseuri, loadingOptions, - lc=_doc.get("loadListing") + lc=_doc.get("scatter") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loadListing`": + if str(e) == "missing required field `scatter`": _errors__.append( ValidationException( str(e), @@ -25286,13 +21516,13 @@ def fromDoc( ) ) else: - val = _doc.get("loadListing") + val = _doc.get("scatter") if error_message != str(e): val_type = 
convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `scatter` field is not valid because:", + SourceLine(_doc, "scatter", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -25304,28 +21534,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loadListing` field is not valid because:", - SourceLine(_doc, "loadListing", str), + "the `scatter` field is not valid because:", + SourceLine(_doc, "scatter", str), [e], - detailed_message=f"the `loadListing` field with value `{val}` " + detailed_message=f"the `scatter` field with value `{val}` " "is not valid because:", ) ) - default = None - if "default" in _doc: + scatterMethod = None + if "scatterMethod" in _doc: try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_CWLObjectTypeLoader, + scatterMethod = _load_field( + _doc.get("scatterMethod"), + uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("default") + lc=_doc.get("scatterMethod") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `default`": + if str(e) == "missing required field `scatterMethod`": _errors__.append( ValidationException( str(e), @@ -25333,13 +21563,13 @@ def fromDoc( ) ) else: - val = _doc.get("default") + val = _doc.get("scatterMethod") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), + "the `scatterMethod` field is not valid because:", + SourceLine(_doc, "scatterMethod", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -25351,62 
+21581,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), + "the `scatterMethod` field is not valid because:", + SourceLine(_doc, "scatterMethod", str), [e], - detailed_message=f"the `default` field with value `{val}` " + detailed_message=f"the `scatterMethod` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) - - type_ = load_field( - _doc.get("type"), - typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, - baseuri, - loadingOptions, - lc=_doc.get("type") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `type`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("type") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), - [e], - detailed_message=f"the `type` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -25414,14 +21596,14 @@ def 
fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`".format( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `when`, `scatter`, `scatterMethod`".format( k ), SourceLine(_doc, k, str), @@ -25431,16 +21613,17 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( + id=id, label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, doc=doc, - id=id, - format=format, - loadContents=loadContents, - loadListing=loadListing, - default=default, - type_=type_, + in_=in_, + out=out, + requirements=requirements, + hints=hints, + run=run, + when=when, + scatter=scatter, + scatterMethod=scatterMethod, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -25465,49 +21648,43 @@ def save( r["label"] = save( self.label, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) if self.doc is not None: r["doc"] = save( self.doc, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u - if self.loadContents is not None: - r["loadContents"] = save( - self.loadContents, - top=False, - base_url=self.id, - relative_uris=relative_uris, + if self.in_ is not None: + r["in"] = save( + 
self.in_, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.loadListing is not None: - r["loadListing"] = save( - self.loadListing, + if self.out is not None: + u = save_relative_uri(self.out, self.id, True, None, relative_uris) + r["out"] = u + if self.requirements is not None: + r["requirements"] = save( + self.requirements, top=False, base_url=self.id, relative_uris=relative_uris, ) - if self.default is not None: - r["default"] = save( - self.default, top=False, base_url=self.id, relative_uris=relative_uris + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.id, relative_uris=relative_uris + if self.run is not None: + u = save_relative_uri(self.run, self.id, False, None, relative_uris) + r["run"] = u + if self.when is not None: + r["when"] = save( + self.when, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.scatter is not None: + u = save_relative_uri(self.scatter, self.id, False, 0, relative_uris) + r["scatter"] = u + if self.scatterMethod is not None: + u = save_relative_uri( + self.scatterMethod, self.id, False, None, relative_uris ) + r["scatterMethod"] = u # top refers to the directory level if top: @@ -25517,25 +21694,50 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ + "id", "label", - "secondaryFiles", - "streamable", "doc", - "id", - "format", - "loadContents", - "loadListing", - "default", - "type", + "in", + "out", + "requirements", + "hints", + "run", + "when", + "scatter", + "scatterMethod", ] ) -class OperationOutputParameter(OutputParameter): +class Workflow(Process): """ - Describe an output parameter of an operation. + A workflow describes a set of **steps** and the **dependencies** between those steps. 
When a step produces output that will be consumed by a second step, the first step is a dependency of the second step. + + When there is a dependency, the workflow engine must execute the preceding step and wait for it to successfully produce output before executing the dependent step. If two steps are defined in the workflow graph that are not directly or indirectly dependent, these steps are **independent**, and may execute in any order or execute concurrently. A workflow is complete when all steps have been executed. + + Dependencies between parameters are expressed using the ``source`` field on `workflow step input parameters <#WorkflowStepInput>`__ and ``outputSource`` field on `workflow output parameters <#WorkflowOutputParameter>`__. + + The ``source`` field on each workflow step input parameter expresses the data links that contribute to the value of the step input parameter (the "sink"). A workflow step can only begin execution when every data link connected to a step has been fulfilled. + + The ``outputSource`` field on each workflow step input parameter expresses the data links that contribute to the value of the workflow output parameter (the "sink"). Workflow execution cannot complete successfully until every data link connected to an output parameter has been fulfilled. + + Workflow success and failure + ---------------------------- + + A completed step must result in one of ``success``, ``temporaryFailure`` or ``permanentFailure`` states. An implementation may choose to retry a step execution which resulted in ``temporaryFailure``. An implementation may choose to either continue running other steps of a workflow, or terminate immediately upon ``permanentFailure``. + + * If any step of a workflow execution results in ``permanentFailure``, then the workflow status is ``permanentFailure``. 
+ + * If one or more steps result in ``temporaryFailure`` and all other steps complete ``success`` or are not executed, then the workflow status is ``temporaryFailure``. + + * If all workflow steps are executed and complete with ``success``, then the workflow status is ``success``. + + Extensions + ========== + + `ScatterFeatureRequirement <#ScatterFeatureRequirement>`__ and `SubworkflowFeatureRequirement <#SubworkflowFeatureRequirement>`__ are available as standard `extensions <#Extensions_and_Metadata>`__ to core workflow semantics. """ @@ -25543,15 +21745,18 @@ class OperationOutputParameter(OutputParameter): def __init__( self, - id: Any, - type_: Any, - label: Optional[Any] = None, - secondaryFiles: Optional[Any] = None, - streamable: Optional[Any] = None, - doc: Optional[Any] = None, - format: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + inputs: Any, + outputs: Any, + steps: Any, + id: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + requirements: Any | None = None, + hints: Any | None = None, + cwlVersion: Any | None = None, + intent: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -25561,37 +21766,49 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) self.label = label - self.secondaryFiles = secondaryFiles - self.streamable = streamable self.doc = doc - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.format = format - self.type_ = type_ + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_: Final[str] = "Workflow" + self.steps = steps def 
__eq__(self, other: Any) -> bool: - if isinstance(other, OperationOutputParameter): + if isinstance(other, Workflow): return bool( - self.label == other.label - and self.secondaryFiles == other.secondaryFiles - and self.streamable == other.streamable + self.id == other.id + and self.label == other.label and self.doc == other.doc - and self.id == other.id - and self.format == other.format - and self.type_ == other.type_ + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.steps == other.steps ) return False def __hash__(self) -> int: return hash( ( + self.id, self.label, - self.secondaryFiles, - self.streamable, self.doc, - self.id, - self.format, - self.type_, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.steps, ) ) @@ -25601,8 +21818,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "OperationOutputParameter": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -25612,9 +21829,9 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), - uri_strtype_True_False_None_None, + uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("id") @@ -25662,13 +21879,30 @@ def fromDoc( if docRoot is not None: id = docRoot else: - _errors__.append(ValidationException("missing id")) + id = "_:" + str(_uuid__.uuid4()) if not __original_id_is_none: baseuri = cast(str, id) + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_Workflow_classLoader_False_True_None_None, + baseuri, + loadingOptions, + 
lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -25712,21 +21946,21 @@ def fromDoc( "is not valid because:", ) ) - secondaryFiles = None - if "secondaryFiles" in _doc: + doc = None + if "doc" in _doc: try: - secondaryFiles = load_field( - _doc.get("secondaryFiles"), - secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("secondaryFiles") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `secondaryFiles`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -25734,13 +21968,156 @@ def fromDoc( ) ) else: - val = _doc.get("secondaryFiles") + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("inputs") is None: + raise 
ValidationException("missing required field `inputs`", None, []) + + inputs = _load_field( + _doc.get("inputs"), + idmap_inputs_array_of_WorkflowInputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("inputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `inputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("inputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + detailed_message=f"the `inputs` field with value `{val}` " + "is not valid because:", + ) + ) + try: + if _doc.get("outputs") is None: + raise ValidationException("missing required field `outputs`", None, []) + + outputs = _load_field( + _doc.get("outputs"), + idmap_outputs_array_of_WorkflowOutputParameterLoader, + baseuri, + loadingOptions, + lc=_doc.get("outputs") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `outputs`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("outputs") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [ValidationException(f"Value is a {val_type}, " + f"but 
valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + detailed_message=f"the `outputs` field with value `{val}` " + "is not valid because:", + ) + ) + requirements = None + if "requirements" in _doc: + try: + requirements = _load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + lc=_doc.get("requirements") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `requirements`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("requirements") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -25752,28 +22129,28 @@ def fromDoc( else: _errors__.append( 
ValidationException( - "the `secondaryFiles` field is not valid because:", - SourceLine(_doc, "secondaryFiles", str), + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), [e], - detailed_message=f"the `secondaryFiles` field with value `{val}` " + detailed_message=f"the `requirements` field with value `{val}` " "is not valid because:", ) ) - streamable = None - if "streamable" in _doc: + hints = None + if "hints" in _doc: try: - streamable = load_field( - _doc.get("streamable"), - union_of_None_type_or_booltype, + hints = _load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, baseuri, loadingOptions, - lc=_doc.get("streamable") + lc=_doc.get("hints") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `streamable`": + if str(e) == "missing required field `hints`": _errors__.append( ValidationException( str(e), @@ -25781,13 +22158,13 @@ def fromDoc( ) ) else: - val = _doc.get("streamable") + val = _doc.get("hints") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " 
f"{verb_tensage} {error_message}", @@ -25799,28 +22176,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `streamable` field is not valid because:", - SourceLine(_doc, "streamable", str), + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), [e], - detailed_message=f"the `streamable` field with value `{val}` " + detailed_message=f"the `hints` field with value `{val}` " "is not valid because:", ) ) - doc = None - if "doc" in _doc: + cwlVersion = None + if "cwlVersion" in _doc: try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, + cwlVersion = _load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("doc") + lc=_doc.get("cwlVersion") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `doc`": + if str(e) == "missing required field `cwlVersion`": _errors__.append( ValidationException( str(e), @@ -25828,13 +22205,13 @@ def fromDoc( ) ) else: - val = _doc.get("doc") + val = _doc.get("cwlVersion") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -25846,28 +22223,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), [e], - detailed_message=f"the `doc` field with value `{val}` " + detailed_message=f"the `cwlVersion` field with value `{val}` " "is not valid because:", ) ) - format = None - if 
"format" in _doc: + intent = None + if "intent" in _doc: try: - format = load_field( - _doc.get("format"), - uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, + intent = _load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None_None, baseuri, loadingOptions, - lc=_doc.get("format") + lc=_doc.get("intent") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `format`": + if str(e) == "missing required field `intent`": _errors__.append( ValidationException( str(e), @@ -25875,13 +22252,13 @@ def fromDoc( ) ) else: - val = _doc.get("format") + val = _doc.get("intent") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -25893,29 +22270,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `format` field is not valid because:", - SourceLine(_doc, "format", str), + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), [e], - detailed_message=f"the `format` field with value `{val}` " + detailed_message=f"the `intent` field with value `{val}` " "is not valid because:", ) ) try: - if _doc.get("type") is None: - raise ValidationException("missing required field `type`", None, []) + if _doc.get("steps") is None: + raise ValidationException("missing required field `steps`", None, []) - type_ = load_field( - _doc.get("type"), - 
typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + steps = _load_field( + _doc.get("steps"), + idmap_steps_union_of_array_of_WorkflowStepLoader, baseuri, loadingOptions, - lc=_doc.get("type") + lc=_doc.get("steps") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `type`": + if str(e) == "missing required field `steps`": _errors__.append( ValidationException( str(e), @@ -25923,13 +22300,13 @@ def fromDoc( ) ) else: - val = _doc.get("type") + val = _doc.get("steps") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `steps` field is not valid because:", + SourceLine(_doc, "steps", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -25941,14 +22318,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `type` field is not valid because:", - SourceLine(_doc, "type", str), + "the `steps` field is not valid because:", + SourceLine(_doc, "steps", str), [e], - detailed_message=f"the `type` field with value `{val}` " + detailed_message=f"the `steps` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -25956,14 +22333,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field 
`{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `steps`".format( k ), SourceLine(_doc, k, str), @@ -25973,13 +22350,16 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( + id=id, label=label, - secondaryFiles=secondaryFiles, - streamable=streamable, doc=doc, - id=id, - format=format, - type_=type_, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + steps=steps, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -26000,36 +22380,189 @@ def save( if self.id is not None: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, self.id, False, None, relative_uris) + r["class"] = u if self.label is not None: r["label"] = save( self.label, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.secondaryFiles is not None: - r["secondaryFiles"] = save( - self.secondaryFiles, - top=False, - base_url=self.id, - relative_uris=relative_uris, + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.streamable is not None: - r["streamable"] = save( - self.streamable, + if self.inputs is not None: + r["inputs"] = save( + self.inputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputs is not None: + r["outputs"] = save( + self.outputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.requirements is not None: + 
r["requirements"] = save( + self.requirements, top=False, base_url=self.id, relative_uris=relative_uris, ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.format is not None: - u = save_relative_uri(self.format, self.id, True, None, relative_uris) - r["format"] = u - if self.type_ is not None: - r["type"] = save( - self.type_, top=False, base_url=self.id, relative_uris=relative_uris + if self.cwlVersion is not None: + u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) + r["cwlVersion"] = u + if self.intent is not None: + u = save_relative_uri(self.intent, self.id, True, None, relative_uris) + r["intent"] = u + if self.steps is not None: + r["steps"] = save( + self.steps, top=False, base_url=self.id, relative_uris=relative_uris + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs: ClassVar[Collection[str]] = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "steps", + ] + ) + + +class SubworkflowFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support nested workflows in the ``run`` field of `WorkflowStep <#WorkflowStep>`__. 
+ + """ + + def __init__( + self, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_: Final[str] = "SubworkflowFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SubworkflowFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: + _errors__.append( + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u + # top refers to the directory level if top: if self.loadingOptions.namespaces: @@ -26038,38 +22571,19 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] - ) + attrs: ClassVar[Collection[str]] = frozenset(["class"]) -class Operation(Process): +class ScatterFeatureRequirement(ProcessRequirement): """ - This record describes an abstract operation. It is a potential - step of a workflow that has not yet been bound to a concrete - implementation. It specifies an input and output signature, but - does not provide enough information to be executed. An - implementation (or other tooling) may provide a means of binding - an Operation to a concrete process (such as Workflow, - CommandLineTool, or ExpressionTool) with a compatible signature. + Indicates that the workflow platform must support the ``scatter`` and ``scatterMethod`` fields of `WorkflowStep <#WorkflowStep>`__. 
""" - id: str - def __init__( self, - inputs: Any, - outputs: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - intent: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -26079,48 +22593,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.label = label - self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.cwlVersion = cwlVersion - self.intent = intent - self.class_ = "Operation" + self.class_: Final[str] = "ScatterFeatureRequirement" def __eq__(self, other: Any) -> bool: - if isinstance(other, Operation): - return bool( - self.id == other.id - and self.label == other.label - and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs == other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == other.cwlVersion - and self.intent == other.intent - and self.class_ == other.class_ - ) + if isinstance(other, ScatterFeatureRequirement): + return bool(self.class_ == other.class_) return False def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.intent, - self.class_, - ) - ) + return hash((self.class_)) @classmethod def fromDoc( @@ -26128,465 +22609,153 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Operation": - 
_doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - id = None - if "id" in _doc: - try: - id = load_field( - _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("id") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `id`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("id") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `id` field is not valid because:", - SourceLine(_doc, "id", str), - [e], - detailed_message=f"the `id` field with value `{val}` " - "is not valid because:", - ) - ) - - __original_id_is_none = id is None - if id is None: - if docRoot is not None: - id = docRoot - else: - id = "_:" + str(_uuid__.uuid4()) - if not __original_id_is_none: - baseuri = cast(str, id) - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_Operation_classLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - label = None - if "label" in _doc: - try: - label = load_field( - _doc.get("label"), - 
union_of_None_type_or_strtype, - baseuri, - loadingOptions, - lc=_doc.get("label") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `label`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("label") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `label` field is not valid because:", - SourceLine(_doc, "label", str), - [e], - detailed_message=f"the `label` field with value `{val}` " - "is not valid because:", - ) - ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - 
ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("inputs") is None: - raise ValidationException("missing required field `inputs`", None, []) - - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_OperationInputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) - if str(e) == "missing required field `inputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - detailed_message=f"the `inputs` field with value `{val}` " - "is not valid because:", - ) - ) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] try: - if _doc.get("outputs") is None: - raise ValidationException("missing required field `outputs`", None, []) + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_OperationOutputParameterLoader, + class_ = _load_field( + _doc.get("class"), + 
uri_ScatterFeatureRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, - lc=_doc.get("outputs") + lc=_doc.get("class") ) + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + raise e + extension_fields: MutableMapping[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if not k: _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) + ValidationException("mapping with implicit null key") + ) + elif ":" in k: + ex = _expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False ) + extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - detailed_message=f"the `outputs` field with value `{val}` " - "is not valid because:", + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), ) ) - requirements = None - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - 
idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader, - baseuri, - loadingOptions, - lc=_doc.get("requirements") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `requirements`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("requirements") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - detailed_message=f"the `requirements` field with value `{val}` " - "is not valid because:", - ) - ) - hints = None - if "hints" in _doc: - try: - hints = load_field( - _doc.get("hints"), - 
idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_Any_type, - baseuri, - loadingOptions, - lc=_doc.get("hints") - ) + if _errors__: + raise ValidationException("", None, _errors__, "*") + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + def save( + self, top: bool = False, base_url: str = "", relative_uris: bool = True + ) -> dict[str, Any]: + r: dict[str, Any] = {} - if str(e) == "missing required field `hints`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("hints") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), - [e], - detailed_message=f"the `hints` field with value `{val}` " - "is 
not valid because:", - ) - ) - cwlVersion = None - if "cwlVersion" in _doc: - try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("cwlVersion") - ) + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + if self.class_ is not None: + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): + uri = f"{p}:{self.class_}" + else: + uri = self.class_ + u = save_relative_uri(uri, base_url, False, None, relative_uris) + r["class"] = u - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r - if str(e) == "missing required field `cwlVersion`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cwlVersion") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), - [e], - detailed_message=f"the `cwlVersion` field with value `{val}` " - 
"is not valid because:", - ) - ) - intent = None - if "intent" in _doc: - try: - intent = load_field( - _doc.get("intent"), - uri_union_of_None_type_or_array_of_strtype_True_False_None_None, - baseuri, - loadingOptions, - lc=_doc.get("intent") - ) + attrs: ClassVar[Collection[str]] = frozenset(["class"]) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `intent`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("intent") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `intent` field is not valid because:", - SourceLine(_doc, "intent", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `intent` field is not valid because:", - SourceLine(_doc, "intent", str), - [e], - detailed_message=f"the `intent` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} +class MultipleInputFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support multiple inbound data links listed in the ``source`` field of `WorkflowStepInput <#WorkflowStepInput>`__. 
+ + """ + + def __init__( + self, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, + ) -> None: + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_: Final[str] = "MultipleInputFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, MultipleInputFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: str | None = None + ) -> Self: + _doc = copy.copy(doc) + + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + if _doc.get("class") is None: + raise ValidationException("missing required field `class`", None, []) + + class_ = _load_field( + _doc.get("class"), + uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("class") + ) + + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") + except ValidationException as e: + raise e + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -26594,16 +22763,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`".format( - k - ), + "invalid field `{}`, 
expected one of: `class`".format(k), SourceLine(_doc, k, str), ) ) @@ -26611,19 +22778,9 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - id=id, - label=label, - doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - intent=intent, extension_fields=extension_fields, loadingOptions=loadingOptions, ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -26637,50 +22794,16 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ - u = save_relative_uri(uri, self.id, False, None, relative_uris) + u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.inputs is not None: - r["inputs"] = save( - self.inputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputs is not None: - r["outputs"] = save( - self.outputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.requirements is not None: - r["requirements"] = save( - self.requirements, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, 
relative_uris=relative_uris - ) - if self.cwlVersion is not None: - u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) - r["cwlVersion"] = u - if self.intent is not None: - u = save_relative_uri(self.intent, self.id, True, None, relative_uris) - r["intent"] = u # top refers to the directory level if top: @@ -26690,28 +22813,19 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - "cwlVersion", - "intent", - "class", - ] - ) + attrs: ClassVar[Collection[str]] = frozenset(["class"]) + + +class StepInputExpressionRequirement(ProcessRequirement): + """ + Indicate that the workflow platform must support the ``valueFrom`` field of `WorkflowStepInput <#WorkflowStepInput>`__. + """ -class Secrets(ProcessRequirement): def __init__( self, - secrets: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -26721,16 +22835,15 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "Secrets" - self.secrets = secrets + self.class_: Final[str] = "StepInputExpressionRequirement" def __eq__(self, other: Any) -> bool: - if isinstance(other, Secrets): - return bool(self.class_ == other.class_ and self.secrets == other.secrets) + if isinstance(other, StepInputExpressionRequirement): + return bool(self.class_ == other.class_) return False def __hash__(self) -> int: - return hash((self.class_, self.secrets)) + return hash((self.class_)) @classmethod def fromDoc( @@ -26738,8 +22851,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Secrets": + docRoot: str | None = None + ) -> Self: _doc = 
copy.copy(doc) if hasattr(doc, "lc"): @@ -26750,67 +22863,20 @@ def fromDoc( if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = load_field( + class_ = _load_field( _doc.get("class"), - uri_strtype_False_True_None_None, + uri_StepInputExpressionRequirement_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("secrets") is None: - raise ValidationException("missing required field `secrets`", None, []) - - secrets = load_field( - _doc.get("secrets"), - uri_array_of_strtype_False_False_0_None, - baseuri, - loadingOptions, - lc=_doc.get("secrets") - ) - + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `secrets`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("secrets") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `secrets` field is not valid because:", - SourceLine(_doc, "secrets", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `secrets` field is not valid because:", - SourceLine(_doc, "secrets", str), - [e], - detailed_message=f"the `secrets` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: 
dict[str, Any] = {} + raise e + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -26818,16 +22884,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `secrets`".format( - k - ), + "invalid field `{}`, expected one of: `class`".format(k), SourceLine(_doc, k, str), ) ) @@ -26835,7 +22899,6 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - secrets=secrets, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -26853,16 +22916,15 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ u = save_relative_uri(uri, base_url, False, None, relative_uris) r["class"] = u - if self.secrets is not None: - u = save_relative_uri(self.secrets, base_url, False, 0, relative_uris) - r["secrets"] = u # top refers to the directory level if top: @@ -26872,26 +22934,31 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "secrets"]) + attrs: ClassVar[Collection[str]] = frozenset(["class"]) + + +class OperationInputParameter(InputParameter): + """ + Describe an input parameter of an operation. 
+ """ -class ProcessGenerator(Process): id: str def __init__( self, - inputs: Any, - outputs: Any, - run: Any, - id: Optional[Any] = None, - label: Optional[Any] = None, - doc: Optional[Any] = None, - requirements: Optional[Any] = None, - hints: Optional[Any] = None, - cwlVersion: Optional[Any] = None, - intent: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + type_: Any, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + loadContents: Any | None = None, + loadListing: Any | None = None, + default: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -26901,49 +22968,46 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable self.doc = doc - self.inputs = inputs - self.outputs = outputs - self.requirements = requirements - self.hints = hints - self.cwlVersion = cwlVersion - self.intent = intent - self.class_ = "ProcessGenerator" - self.run = run + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, ProcessGenerator): + if isinstance(other, OperationInputParameter): return bool( - self.id == other.id - and self.label == other.label + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable and self.doc == other.doc - and self.inputs == other.inputs - and self.outputs 
== other.outputs - and self.requirements == other.requirements - and self.hints == other.hints - and self.cwlVersion == other.cwlVersion - and self.intent == other.intent - and self.class_ == other.class_ - and self.run == other.run + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type_ == other.type_ ) return False def __hash__(self) -> int: - return hash( - ( - self.id, - self.label, - self.doc, - self.inputs, - self.outputs, - self.requirements, - self.hints, - self.cwlVersion, - self.intent, - self.class_, - self.run, + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type_, ) ) @@ -26953,8 +23017,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ProcessGenerator": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -26964,9 +23028,9 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None_None, + uri_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("id") @@ -27014,29 +23078,13 @@ def fromDoc( if docRoot is not None: id = docRoot else: - id = "_:" + str(_uuid__.uuid4()) + _errors__.append(ValidationException("missing id")) if not __original_id_is_none: baseuri = cast(str, id) - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_strtype_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except 
ValidationException as e: - raise e label = None if "label" in _doc: try: - label = load_field( + label = _load_field( _doc.get("label"), union_of_None_type_or_strtype, baseuri, @@ -27080,211 +23128,21 @@ def fromDoc( "is not valid because:", ) ) - doc = None - if "doc" in _doc: - try: - doc = load_field( - _doc.get("doc"), - union_of_None_type_or_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("doc") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `doc`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("doc") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `doc` field is not valid because:", - SourceLine(_doc, "doc", str), - [e], - detailed_message=f"the `doc` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("inputs") is None: - raise ValidationException("missing required field `inputs`", None, []) - - inputs = load_field( - _doc.get("inputs"), - idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("inputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `inputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("inputs") - if error_message 
!= str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `inputs` field is not valid because:", - SourceLine(_doc, "inputs", str), - [e], - detailed_message=f"the `inputs` field with value `{val}` " - "is not valid because:", - ) - ) - try: - if _doc.get("outputs") is None: - raise ValidationException("missing required field `outputs`", None, []) - - outputs = load_field( - _doc.get("outputs"), - idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputs") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `outputs`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputs") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `outputs` field is not valid because:", - SourceLine(_doc, "outputs", str), - [e], - detailed_message=f"the 
`outputs` field with value `{val}` " - "is not valid because:", - ) - ) - requirements = None - if "requirements" in _doc: - try: - requirements = load_field( - _doc.get("requirements"), - idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader, - baseuri, - loadingOptions, - lc=_doc.get("requirements") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `requirements`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("requirements") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `requirements` field is not valid because:", - SourceLine(_doc, "requirements", str), - [e], - detailed_message=f"the `requirements` field with value `{val}` " - "is not valid because:", - ) - ) - hints = None - if "hints" in _doc: + 
secondaryFiles = None + if "secondaryFiles" in _doc: try: - hints = load_field( - _doc.get("hints"), - idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_Any_type, + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, loadingOptions, - lc=_doc.get("hints") + lc=_doc.get("secondaryFiles") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `hints`": + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( str(e), @@ -27292,13 +23150,13 @@ def fromDoc( ) ) else: - val = _doc.get("hints") + val = _doc.get("secondaryFiles") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -27310,28 +23168,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `hints` field is not valid because:", - SourceLine(_doc, "hints", str), + "the 
`secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [e], - detailed_message=f"the `hints` field with value `{val}` " + detailed_message=f"the `secondaryFiles` field with value `{val}` " "is not valid because:", ) ) - cwlVersion = None - if "cwlVersion" in _doc: + streamable = None + if "streamable" in _doc: try: - cwlVersion = load_field( - _doc.get("cwlVersion"), - uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("cwlVersion") + lc=_doc.get("streamable") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `cwlVersion`": + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( str(e), @@ -27339,13 +23197,13 @@ def fromDoc( ) ) else: - val = _doc.get("cwlVersion") + val = _doc.get("streamable") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -27357,28 +23215,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `cwlVersion` field is not valid because:", - SourceLine(_doc, "cwlVersion", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [e], - detailed_message=f"the `cwlVersion` field with value `{val}` " + detailed_message=f"the `streamable` field with value `{val}` " "is not valid because:", ) ) - intent = None - if "intent" in _doc: + doc = None + if "doc" in _doc: try: - intent = load_field( - _doc.get("intent"), - 
uri_union_of_None_type_or_array_of_strtype_True_False_None_None, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("intent") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `intent`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -27386,13 +23244,13 @@ def fromDoc( ) ) else: - val = _doc.get("intent") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `intent` field is not valid because:", - SourceLine(_doc, "intent", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -27404,512 +23262,122 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `intent` field is not valid because:", - SourceLine(_doc, "intent", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `intent` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - - subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) - try: - if _doc.get("run") is None: - raise ValidationException("missing required field `run`", None, []) - - run = load_field( - _doc.get("run"), - uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader_False_False_None_None, - subscope_baseuri, - loadingOptions, - lc=_doc.get("run") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `run`": - _errors__.append( - ValidationException( - 
str(e), - None - ) - ) - else: - val = _doc.get("run") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) - _errors__.append( - ValidationException( - "the `run` field is not valid because:", - SourceLine(_doc, "run", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], - ) - ) - else: - _errors__.append( - ValidationException( - "the `run` field is not valid because:", - SourceLine(_doc, "run", str), - [e], - detailed_message=f"the `run` field with value `{val}` " - "is not valid because:", - ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `run`".format( - k - ), - SourceLine(_doc, k, str), - ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - id=id, - label=label, - doc=doc, - inputs=inputs, - outputs=outputs, - requirements=requirements, - hints=hints, - cwlVersion=cwlVersion, - intent=intent, - run=run, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = 
self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.id is not None: - u = save_relative_uri(self.id, base_url, True, None, relative_uris) - r["id"] = u - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, self.id, False, None, relative_uris) - r["class"] = u - if self.label is not None: - r["label"] = save( - self.label, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.doc is not None: - r["doc"] = save( - self.doc, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.inputs is not None: - r["inputs"] = save( - self.inputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.outputs is not None: - r["outputs"] = save( - self.outputs, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.requirements is not None: - r["requirements"] = save( - self.requirements, - top=False, - base_url=self.id, - relative_uris=relative_uris, - ) - if self.hints is not None: - r["hints"] = save( - self.hints, top=False, base_url=self.id, relative_uris=relative_uris - ) - if self.cwlVersion is not None: - u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) - r["cwlVersion"] = u - if self.intent is not None: - u = save_relative_uri(self.intent, self.id, True, None, relative_uris) - r["intent"] = u - if self.run is not None: - u = save_relative_uri(self.run, self.id, False, None, relative_uris) - r["run"] = u - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset( - [ - "id", - "label", - "doc", - "inputs", - "outputs", - "requirements", - "hints", - 
"cwlVersion", - "intent", - "class", - "run", - ] - ) - - -class MPIRequirement(ProcessRequirement): - """ - Indicates that a process requires an MPI runtime. - - """ - - def __init__( - self, - processes: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "MPIRequirement" - self.processes = processes - - def __eq__(self, other: Any) -> bool: - if isinstance(other, MPIRequirement): - return bool( - self.class_ == other.class_ and self.processes == other.processes - ) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.processes)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "MPIRequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_strtype_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException as e: - raise e - try: - if _doc.get("processes") is None: - raise ValidationException("missing required field `processes`", None, []) - - processes = load_field( - _doc.get("processes"), - union_of_inttype_or_ExpressionLoader, - baseuri, - loadingOptions, - lc=_doc.get("processes") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing 
required field `processes`": - _errors__.append( - ValidationException( - str(e), - None - ) + format = None + if "format" in _doc: + try: + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True, + baseuri, + loadingOptions, + lc=_doc.get("format") ) - else: - val = _doc.get("processes") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `format`": _errors__.append( ValidationException( - "the `processes` field is not valid because:", - SourceLine(_doc, "processes", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `processes` field is not valid because:", - SourceLine(_doc, "processes", str), - [e], - detailed_message=f"the `processes` field with value `{val}` " - "is not valid because:", + val = _doc.get("format") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", 
loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] - else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `processes`".format( - k - ), - SourceLine(_doc, k, str), + else: + _errors__.append( + ValidationException( + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), + [e], + detailed_message=f"the `format` field with value `{val}` " + "is not valid because:", + ) ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - processes=processes, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.processes is not None: - r["processes"] = save( - self.processes, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "processes"]) - - -class CUDARequirement(ProcessRequirement): - """ - Require support for NVIDA CUDA (GPU hardware acceleration). 
- - """ - - def __init__( - self, - cudaComputeCapability: Any, - cudaVersionMin: Any, - cudaDeviceCountMax: Optional[Any] = None, - cudaDeviceCountMin: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "CUDARequirement" - self.cudaComputeCapability = cudaComputeCapability - self.cudaDeviceCountMax = cudaDeviceCountMax - self.cudaDeviceCountMin = cudaDeviceCountMin - self.cudaVersionMin = cudaVersionMin - - def __eq__(self, other: Any) -> bool: - if isinstance(other, CUDARequirement): - return bool( - self.class_ == other.class_ - and self.cudaComputeCapability == other.cudaComputeCapability - and self.cudaDeviceCountMax == other.cudaDeviceCountMax - and self.cudaDeviceCountMin == other.cudaDeviceCountMin - and self.cudaVersionMin == other.cudaVersionMin - ) - return False - - def __hash__(self) -> int: - return hash( - ( - self.class_, - self.cudaComputeCapability, - self.cudaDeviceCountMax, - self.cudaDeviceCountMin, - self.cudaVersionMin, - ) - ) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "CUDARequirement": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_strtype_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except ValidationException 
as e: - raise e - try: - if _doc.get("cudaComputeCapability") is None: - raise ValidationException("missing required field `cudaComputeCapability`", None, []) - - cudaComputeCapability = load_field( - _doc.get("cudaComputeCapability"), - union_of_strtype_or_array_of_strtype, - baseuri, - loadingOptions, - lc=_doc.get("cudaComputeCapability") - ) + loadContents = None + if "loadContents" in _doc: + try: + loadContents = _load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + lc=_doc.get("loadContents") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `cudaComputeCapability`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("cudaComputeCapability") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `loadContents`": _errors__.append( ValidationException( - "the `cudaComputeCapability` field is not valid because:", - SourceLine(_doc, "cudaComputeCapability", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: - _errors__.append( - ValidationException( - "the `cudaComputeCapability` field is not valid because:", - SourceLine(_doc, "cudaComputeCapability", str), - [e], - detailed_message=f"the `cudaComputeCapability` field with value `{val}` " - "is not valid because:", + val = _doc.get("loadContents") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + 
SourceLine(_doc, "loadContents", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - cudaDeviceCountMax = None - if "cudaDeviceCountMax" in _doc: + else: + _errors__.append( + ValidationException( + "the `loadContents` field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + detailed_message=f"the `loadContents` field with value `{val}` " + "is not valid because:", + ) + ) + loadListing = None + if "loadListing" in _doc: try: - cudaDeviceCountMax = load_field( - _doc.get("cudaDeviceCountMax"), - union_of_None_type_or_inttype_or_ExpressionLoader, + loadListing = _load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, baseuri, loadingOptions, - lc=_doc.get("cudaDeviceCountMax") + lc=_doc.get("loadListing") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `cudaDeviceCountMax`": + if str(e) == "missing required field `loadListing`": _errors__.append( ValidationException( str(e), @@ -27917,13 +23385,13 @@ def fromDoc( ) ) else: - val = _doc.get("cudaDeviceCountMax") + val = _doc.get("loadListing") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `cudaDeviceCountMax` field is not valid because:", - SourceLine(_doc, "cudaDeviceCountMax", str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -27935,28 +23403,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `cudaDeviceCountMax` field is not valid because:", - SourceLine(_doc, "cudaDeviceCountMax", 
str), + "the `loadListing` field is not valid because:", + SourceLine(_doc, "loadListing", str), [e], - detailed_message=f"the `cudaDeviceCountMax` field with value `{val}` " + detailed_message=f"the `loadListing` field with value `{val}` " "is not valid because:", ) ) - cudaDeviceCountMin = None - if "cudaDeviceCountMin" in _doc: + default = None + if "default" in _doc: try: - cudaDeviceCountMin = load_field( - _doc.get("cudaDeviceCountMin"), - union_of_None_type_or_inttype_or_ExpressionLoader, + default = _load_field( + _doc.get("default"), + union_of_None_type_or_CWLObjectTypeLoader, baseuri, loadingOptions, - lc=_doc.get("cudaDeviceCountMin") + lc=_doc.get("default") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `cudaDeviceCountMin`": + if str(e) == "missing required field `default`": _errors__.append( ValidationException( str(e), @@ -27964,13 +23432,13 @@ def fromDoc( ) ) else: - val = _doc.get("cudaDeviceCountMin") + val = _doc.get("default") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `cudaDeviceCountMin` field is not valid because:", - SourceLine(_doc, "cudaDeviceCountMin", str), + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -27982,29 +23450,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `cudaDeviceCountMin` field is not valid because:", - SourceLine(_doc, "cudaDeviceCountMin", str), + "the `default` field is not valid because:", + SourceLine(_doc, "default", str), [e], - detailed_message=f"the `cudaDeviceCountMin` field with value `{val}` " + detailed_message=f"the `default` field with value `{val}` " "is not valid because:", ) ) try: - if _doc.get("cudaVersionMin") is None: - raise 
ValidationException("missing required field `cudaVersionMin`", None, []) + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) - cudaVersionMin = load_field( - _doc.get("cudaVersionMin"), - strtype, + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, baseuri, loadingOptions, - lc=_doc.get("cudaVersionMin") + lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `cudaVersionMin`": + if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), @@ -28012,13 +23480,13 @@ def fromDoc( ) ) else: - val = _doc.get("cudaVersionMin") + val = _doc.get("type") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `cudaVersionMin` field is not valid because:", - SourceLine(_doc, "cudaVersionMin", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -28030,14 +23498,14 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `cudaVersionMin` field is not valid because:", - SourceLine(_doc, "cudaVersionMin", str), + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), [e], - detailed_message=f"the `cudaVersionMin` field with value `{val}` " + detailed_message=f"the `type` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -28045,14 +23513,14 @@ def fromDoc( 
ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `cudaComputeCapability`, `cudaDeviceCountMax`, `cudaDeviceCountMin`, `cudaVersionMin`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`".format( k ), SourceLine(_doc, k, str), @@ -28062,13 +23530,20 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - cudaComputeCapability=cudaComputeCapability, - cudaDeviceCountMax=cudaDeviceCountMax, - cudaDeviceCountMin=cudaDeviceCountMin, - cudaVersionMin=cudaVersionMin, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -28082,42 +23557,56 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.cudaComputeCapability is not None: - r["cudaComputeCapability"] = save( - self.cudaComputeCapability, + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if 
self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, top=False, - base_url=base_url, + base_url=self.id, relative_uris=relative_uris, ) - if self.cudaDeviceCountMax is not None: - r["cudaDeviceCountMax"] = save( - self.cudaDeviceCountMax, + if self.streamable is not None: + r["streamable"] = save( + self.streamable, top=False, - base_url=base_url, + base_url=self.id, relative_uris=relative_uris, ) - if self.cudaDeviceCountMin is not None: - r["cudaDeviceCountMin"] = save( - self.cudaDeviceCountMin, + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.loadContents is not None: + r["loadContents"] = save( + self.loadContents, top=False, - base_url=base_url, + base_url=self.id, relative_uris=relative_uris, ) - if self.cudaVersionMin is not None: - r["cudaVersionMin"] = save( - self.cudaVersionMin, + if self.loadListing is not None: + r["loadListing"] = save( + self.loadListing, top=False, - base_url=base_url, + base_url=self.id, relative_uris=relative_uris, ) + if self.default is not None: + r["default"] = save( + self.default, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris + ) # top refers to the directory level if top: @@ -28127,30 +23616,41 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( + attrs: ClassVar[Collection[str]] = frozenset( [ - "class", - "cudaComputeCapability", - "cudaDeviceCountMax", - "cudaDeviceCountMin", - "cudaVersionMin", + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", ] ) -class LoopInput(Saveable): +class OperationOutputParameter(OutputParameter): + """ + Describe 
an output parameter of an operation. + + """ + id: str def __init__( self, - default: Optional[Any] = None, - id: Optional[Any] = None, - linkMerge: Optional[Any] = None, - loopSource: Optional[Any] = None, - pickValue: Optional[Any] = None, - valueFrom: Optional[Any] = None, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + id: Any, + type_: Any, + label: Any | None = None, + secondaryFiles: Any | None = None, + streamable: Any | None = None, + doc: Any | None = None, + format: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -28160,34 +23660,37 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.default = default + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) - self.linkMerge = linkMerge - self.loopSource = loopSource - self.pickValue = pickValue - self.valueFrom = valueFrom + self.format = format + self.type_ = type_ def __eq__(self, other: Any) -> bool: - if isinstance(other, LoopInput): + if isinstance(other, OperationOutputParameter): return bool( - self.default == other.default + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc and self.id == other.id - and self.linkMerge == other.linkMerge - and self.loopSource == other.loopSource - and self.pickValue == other.pickValue - and self.valueFrom == other.valueFrom + and self.format == other.format + and self.type_ == other.type_ ) return False def __hash__(self) -> int: return hash( ( - self.default, + self.label, + self.secondaryFiles, + self.streamable, + self.doc, self.id, - self.linkMerge, - self.loopSource, - self.pickValue, - 
self.valueFrom, + self.format, + self.type_, ) ) @@ -28197,8 +23700,8 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "LoopInput": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): @@ -28208,9 +23711,9 @@ def fromDoc( id = None if "id" in _doc: try: - id = load_field( + id = _load_field( _doc.get("id"), - uri_union_of_None_type_or_strtype_True_False_None_None, + uri_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("id") @@ -28258,24 +23761,24 @@ def fromDoc( if docRoot is not None: id = docRoot else: - id = "_:" + str(_uuid__.uuid4()) + _errors__.append(ValidationException("missing id")) if not __original_id_is_none: baseuri = cast(str, id) - default = None - if "default" in _doc: + label = None + if "label" in _doc: try: - default = load_field( - _doc.get("default"), - union_of_None_type_or_Any_type, + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, baseuri, loadingOptions, - lc=_doc.get("default") + lc=_doc.get("label") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `default`": + if str(e) == "missing required field `label`": _errors__.append( ValidationException( str(e), @@ -28283,13 +23786,13 @@ def fromDoc( ) ) else: - val = _doc.get("default") + val = _doc.get("label") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -28301,28 +23804,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `default` field is not valid because:", - SourceLine(_doc, "default", str), + 
"the `label` field is not valid because:", + SourceLine(_doc, "label", str), [e], - detailed_message=f"the `default` field with value `{val}` " + detailed_message=f"the `label` field with value `{val}` " "is not valid because:", ) ) - linkMerge = None - if "linkMerge" in _doc: + secondaryFiles = None + if "secondaryFiles" in _doc: try: - linkMerge = load_field( - _doc.get("linkMerge"), - union_of_None_type_or_LinkMergeMethodLoader, + secondaryFiles = _load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, baseuri, loadingOptions, - lc=_doc.get("linkMerge") + lc=_doc.get("secondaryFiles") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `linkMerge`": + if str(e) == "missing required field `secondaryFiles`": _errors__.append( ValidationException( str(e), @@ -28330,13 +23833,13 @@ def fromDoc( ) ) else: - val = _doc.get("linkMerge") + val = _doc.get("secondaryFiles") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `linkMerge` field is not valid because:", - SourceLine(_doc, "linkMerge", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -28348,28 +23851,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `linkMerge` field is not valid because:", - SourceLine(_doc, "linkMerge", str), + "the `secondaryFiles` field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), [e], - detailed_message=f"the `linkMerge` field with value `{val}` " + detailed_message=f"the `secondaryFiles` field with value `{val}` " "is not valid because:", ) ) - loopSource = None - if "loopSource" in _doc: + streamable = None + if 
"streamable" in _doc: try: - loopSource = load_field( - _doc.get("loopSource"), - uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None, + streamable = _load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, baseuri, loadingOptions, - lc=_doc.get("loopSource") + lc=_doc.get("streamable") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loopSource`": + if str(e) == "missing required field `streamable`": _errors__.append( ValidationException( str(e), @@ -28377,13 +23880,13 @@ def fromDoc( ) ) else: - val = _doc.get("loopSource") + val = _doc.get("streamable") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loopSource` field is not valid because:", - SourceLine(_doc, "loopSource", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -28395,28 +23898,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loopSource` field is not valid because:", - SourceLine(_doc, "loopSource", str), + "the `streamable` field is not valid because:", + SourceLine(_doc, "streamable", str), [e], - detailed_message=f"the `loopSource` field with value `{val}` " + detailed_message=f"the `streamable` field with value `{val}` " "is not valid because:", ) ) - pickValue = None - if "pickValue" in _doc: + doc = None + if "doc" in _doc: try: - pickValue = load_field( - _doc.get("pickValue"), - union_of_None_type_or_PickValueMethodLoader, + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, - lc=_doc.get("pickValue") + lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing 
required field `pickValue`": + if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), @@ -28424,13 +23927,13 @@ def fromDoc( ) ) else: - val = _doc.get("pickValue") + val = _doc.get("doc") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `pickValue` field is not valid because:", - SourceLine(_doc, "pickValue", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -28442,28 +23945,28 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `pickValue` field is not valid because:", - SourceLine(_doc, "pickValue", str), + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), [e], - detailed_message=f"the `pickValue` field with value `{val}` " + detailed_message=f"the `doc` field with value `{val}` " "is not valid because:", ) ) - valueFrom = None - if "valueFrom" in _doc: + format = None + if "format" in _doc: try: - valueFrom = load_field( - _doc.get("valueFrom"), - union_of_None_type_or_strtype_or_ExpressionLoader, + format = _load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True, baseuri, loadingOptions, - lc=_doc.get("valueFrom") + lc=_doc.get("format") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `valueFrom`": + if str(e) == "missing required field `format`": _errors__.append( ValidationException( str(e), @@ -28471,13 +23974,13 @@ def fromDoc( ) ) else: - val = _doc.get("valueFrom") + val = _doc.get("format") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", - SourceLine(_doc, "valueFrom", str), + "the `format` 
field is not valid because:", + SourceLine(_doc, "format", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -28489,14 +23992,62 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `valueFrom` field is not valid because:", - SourceLine(_doc, "valueFrom", str), + "the `format` field is not valid because:", + SourceLine(_doc, "format", str), [e], - detailed_message=f"the `valueFrom` field with value `{val}` " + detailed_message=f"the `format` field with value `{val}` " "is not valid because:", ) ) - extension_fields: dict[str, Any] = {} + try: + if _doc.get("type") is None: + raise ValidationException("missing required field `type`", None, []) + + type_ = _load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + lc=_doc.get("type") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `type`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("type") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, "type", str), + [e], + detailed_message=f"the `type` 
field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -28504,14 +24055,14 @@ def fromDoc( ValidationException("mapping with implicit null key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `default`, `id`, `linkMerge`, `loopSource`, `pickValue`, `valueFrom`".format( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( k ), SourceLine(_doc, k, str), @@ -28521,12 +24072,13 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - default=default, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, id=id, - linkMerge=linkMerge, - loopSource=loopSource, - pickValue=pickValue, - valueFrom=valueFrom, + format=format, + type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) @@ -28547,24 +24099,34 @@ def save( if self.id is not None: u = save_relative_uri(self.id, base_url, True, None, relative_uris) r["id"] = u - if self.default is not None: - r["default"] = save( - self.default, top=False, base_url=self.id, relative_uris=relative_uris + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris ) - if self.linkMerge is not None: - r["linkMerge"] = save( - self.linkMerge, top=False, base_url=self.id, relative_uris=relative_uris + if self.secondaryFiles is not None: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=self.id, + relative_uris=relative_uris, ) - if self.loopSource is not None: - u = save_relative_uri(self.loopSource, self.id, False, 1, relative_uris) - r["loopSource"] = u - if self.pickValue is not None: - 
r["pickValue"] = save( - self.pickValue, top=False, base_url=self.id, relative_uris=relative_uris + if self.streamable is not None: + r["streamable"] = save( + self.streamable, + top=False, + base_url=self.id, + relative_uris=relative_uris, ) - if self.valueFrom is not None: - r["valueFrom"] = save( - self.valueFrom, top=False, base_url=self.id, relative_uris=relative_uris + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.format is not None: + u = save_relative_uri(self.format, self.id, True, None, relative_uris) + r["format"] = u + if self.type_ is not None: + r["type"] = save( + self.type_, top=False, base_url=self.id, relative_uris=relative_uris ) # top refers to the directory level @@ -28575,35 +24137,32 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset( - ["default", "id", "linkMerge", "loopSource", "pickValue", "valueFrom"] + attrs: ClassVar[Collection[str]] = frozenset( + ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] ) -class Loop(ProcessRequirement): +class Operation(Process): """ - Prototype to enable workflow-level looping of a step. - - Valid only under `requirements` of a https://www.commonwl.org/v1.2/Workflow.html#WorkflowStep. - Unlike other CWL requirements, Loop requirement is not propagated to inner steps. - - `loopWhen` is an expansion of the CWL v1.2 `when` construct which controls - conditional execution. - - Using `loopWhen` and `when` for the same step will produce an error. - - `loopWhen` is not compatible with `scatter` at this time and combining the - two in the same step will produce an error. + This record describes an abstract operation. It is a potential step of a workflow that has not yet been bound to a concrete implementation. It specifies an input and output signature, but does not provide enough information to be executed. 
An implementation (or other tooling) may provide a means of binding an Operation to a concrete process (such as Workflow, CommandLineTool, or ExpressionTool) with a compatible signature. """ + id: str + def __init__( self, - loop: Any, - loopWhen: Any, - outputMethod: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, + inputs: Any, + outputs: Any, + id: Any | None = None, + label: Any | None = None, + doc: Any | None = None, + requirements: Any | None = None, + hints: Any | None = None, + cwlVersion: Any | None = None, + intent: Any | None = None, + extension_fields: MutableMapping[str, Any] | None = None, + loadingOptions: LoadingOptions | None = None, ) -> None: if extension_fields: self.extension_fields = extension_fields @@ -28613,23 +24172,48 @@ def __init__( self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() - self.class_ = "Loop" - self.loop = loop - self.loopWhen = loopWhen - self.outputMethod = outputMethod + self.id = id if id is not None else "_:" + str(_uuid__.uuid4()) + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_: Final[str] = "Operation" def __eq__(self, other: Any) -> bool: - if isinstance(other, Loop): + if isinstance(other, Operation): return bool( - self.class_ == other.class_ - and self.loop == other.loop - and self.loopWhen == other.loopWhen - and self.outputMethod == other.outputMethod + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ ) return False def __hash__(self) -> int: - return hash((self.class_, 
self.loop, self.loopWhen, self.outputMethod)) + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + ) + ) @classmethod def fromDoc( @@ -28637,46 +24221,197 @@ def fromDoc( doc: Any, baseuri: str, loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "Loop": + docRoot: str | None = None + ) -> Self: _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] + id = None + if "id" in _doc: + try: + id = _load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("id") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `id`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("id") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, "id", str), + [e], + detailed_message=f"the `id` field with value `{val}` " + "is not valid because:", + ) + ) + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = cast(str, id) try: if _doc.get("class") is None: raise ValidationException("missing required field `class`", None, []) - class_ = 
load_field( + class_ = _load_field( _doc.get("class"), - uri_strtype_False_True_None_None, + uri_Operation_classLoader_False_True_None_None, baseuri, loadingOptions, lc=_doc.get("class") ) - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") + vocab = _vocab | loadingOptions.vocab + if class_ not in (cls.__name__, vocab.get(cls.__name__)): + raise ValidationException(f"tried `{cls.__name__}` but") except ValidationException as e: - raise e + raise e + label = None + if "label" in _doc: + try: + label = _load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + lc=_doc.get("label") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `label`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("label") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, "label", str), + [e], + detailed_message=f"the `label` field with value `{val}` " + "is not valid because:", + ) + ) + doc = None + if "doc" in _doc: + try: + doc = _load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + lc=_doc.get("doc") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required 
field `doc`": + _errors__.append( + ValidationException( + str(e), + None + ) + ) + else: + val = _doc.get("doc") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + detailed_message=f"the `doc` field with value `{val}` " + "is not valid because:", + ) + ) try: - if _doc.get("loop") is None: - raise ValidationException("missing required field `loop`", None, []) + if _doc.get("inputs") is None: + raise ValidationException("missing required field `inputs`", None, []) - loop = load_field( - _doc.get("loop"), - idmap_loop_array_of_LoopInputLoader, + inputs = _load_field( + _doc.get("inputs"), + idmap_inputs_array_of_OperationInputParameterLoader, baseuri, loadingOptions, - lc=_doc.get("loop") + lc=_doc.get("inputs") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loop`": + if str(e) == "missing required field `inputs`": _errors__.append( ValidationException( str(e), @@ -28684,13 +24419,13 @@ def fromDoc( ) ) else: - val = _doc.get("loop") + val = _doc.get("inputs") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loop` field is not valid because:", - SourceLine(_doc, "loop", str), + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " 
f"{verb_tensage} {error_message}", @@ -28702,29 +24437,29 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loop` field is not valid because:", - SourceLine(_doc, "loop", str), + "the `inputs` field is not valid because:", + SourceLine(_doc, "inputs", str), [e], - detailed_message=f"the `loop` field with value `{val}` " + detailed_message=f"the `inputs` field with value `{val}` " "is not valid because:", ) ) try: - if _doc.get("loopWhen") is None: - raise ValidationException("missing required field `loopWhen`", None, []) + if _doc.get("outputs") is None: + raise ValidationException("missing required field `outputs`", None, []) - loopWhen = load_field( - _doc.get("loopWhen"), - ExpressionLoader, + outputs = _load_field( + _doc.get("outputs"), + idmap_outputs_array_of_OperationOutputParameterLoader, baseuri, loadingOptions, - lc=_doc.get("loopWhen") + lc=_doc.get("outputs") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `loopWhen`": + if str(e) == "missing required field `outputs`": _errors__.append( ValidationException( str(e), @@ -28732,13 +24467,13 @@ def fromDoc( ) ) else: - val = _doc.get("loopWhen") + val = _doc.get("outputs") if error_message != str(e): val_type = convert_typing(extract_type(type(val))) _errors__.append( ValidationException( - "the `loopWhen` field is not valid because:", - SourceLine(_doc, "loopWhen", str), + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}", @@ -28750,245 +24485,202 @@ def fromDoc( else: _errors__.append( ValidationException( - "the `loopWhen` field is not valid because:", - SourceLine(_doc, "loopWhen", str), + "the `outputs` field is not valid because:", + SourceLine(_doc, "outputs", str), [e], - detailed_message=f"the `loopWhen` field with value `{val}` " + 
detailed_message=f"the `outputs` field with value `{val}` " "is not valid because:", ) ) - try: - if _doc.get("outputMethod") is None: - raise ValidationException("missing required field `outputMethod`", None, []) - - outputMethod = load_field( - _doc.get("outputMethod"), - LoopOutputModesLoader, - baseuri, - loadingOptions, - lc=_doc.get("outputMethod") - ) + requirements = None + if "requirements" in _doc: + try: + requirements = _load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + lc=_doc.get("requirements") + ) - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) - if str(e) == "missing required field `outputMethod`": - _errors__.append( - ValidationException( - str(e), - None - ) - ) - else: - val = _doc.get("outputMethod") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + if str(e) == "missing required field `requirements`": _errors__.append( ValidationException( - "the `outputMethod` field is not valid because:", - SourceLine(_doc, "outputMethod", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - 
f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("requirements") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `requirements` field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + detailed_message=f"the `requirements` field with value `{val}` " + "is not valid because:", + ) + ) + hints = None + if "hints" in _doc: + try: + hints = _load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + lc=_doc.get("hints") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `hints`": _errors__.append( ValidationException( - "the `outputMethod` field is not valid because:", - SourceLine(_doc, "outputMethod", str), - [e], - detailed_message=f"the `outputMethod` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - 
extension_fields: dict[str, Any] = {} - for k in _doc.keys(): - if k not in cls.attrs: - if not k: - _errors__.append( - ValidationException("mapping with implicit null key") - ) - elif ":" in k: - ex = expand_url( - k, "", loadingOptions, scoped_id=False, vocab_term=False - ) - extension_fields[ex] = _doc[k] else: - _errors__.append( - ValidationException( - "invalid field `{}`, expected one of: `class`, `loop`, `loopWhen`, `outputMethod`".format( - k - ), - SourceLine(_doc, k, str), + val = _doc.get("hints") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) ) - ) - - if _errors__: - raise ValidationException("", None, _errors__, "*") - _constructed = cls( - loop=loop, - loopWhen=loopWhen, - outputMethod=outputMethod, - extension_fields=extension_fields, - loadingOptions=loadingOptions, - ) - return _constructed - - def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> dict[str, Any]: - r: dict[str, Any] = {} - - if relative_uris: - for ef in self.extension_fields: - r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] - else: - for ef in self.extension_fields: - r[ef] = self.extension_fields[ef] - if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): - uri = f"{p}:{self.class_}" - else: - uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) - r["class"] = u - if self.loop is not None: - r["loop"] = save( - self.loop, top=False, base_url=base_url, relative_uris=relative_uris 
- ) - if self.loopWhen is not None: - r["loopWhen"] = save( - self.loopWhen, top=False, base_url=base_url, relative_uris=relative_uris - ) - if self.outputMethod is not None: - r["outputMethod"] = save( - self.outputMethod, - top=False, - base_url=base_url, - relative_uris=relative_uris, - ) - - # top refers to the directory level - if top: - if self.loadingOptions.namespaces: - r["$namespaces"] = self.loadingOptions.namespaces - if self.loadingOptions.schemas: - r["$schemas"] = self.loadingOptions.schemas - return r - - attrs = frozenset(["class", "loop", "loopWhen", "outputMethod"]) - - -class ShmSize(ProcessRequirement): - def __init__( - self, - shmSize: Any, - extension_fields: Optional[dict[str, Any]] = None, - loadingOptions: Optional[LoadingOptions] = None, - ) -> None: - if extension_fields: - self.extension_fields = extension_fields - else: - self.extension_fields = CommentedMap() - if loadingOptions: - self.loadingOptions = loadingOptions - else: - self.loadingOptions = LoadingOptions() - self.class_ = "ShmSize" - self.shmSize = shmSize - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ShmSize): - return bool(self.class_ == other.class_ and self.shmSize == other.shmSize) - return False - - def __hash__(self) -> int: - return hash((self.class_, self.shmSize)) - - @classmethod - def fromDoc( - cls, - doc: Any, - baseuri: str, - loadingOptions: LoadingOptions, - docRoot: Optional[str] = None - ) -> "ShmSize": - _doc = copy.copy(doc) - - if hasattr(doc, "lc"): - _doc.lc.data = doc.lc.data - _doc.lc.filename = doc.lc.filename - _errors__ = [] - try: - if _doc.get("class") is None: - raise ValidationException("missing required field `class`", None, []) - - class_ = load_field( - _doc.get("class"), - uri_strtype_False_True_None_None, - baseuri, - loadingOptions, - lc=_doc.get("class") - ) - - if class_ not in (cls.__name__, loadingOptions.vocab.get(cls.__name__)): - raise ValidationException(f"tried `{cls.__name__}` but") - except 
ValidationException as e: - raise e - try: - if _doc.get("shmSize") is None: - raise ValidationException("missing required field `shmSize`", None, []) - - shmSize = load_field( - _doc.get("shmSize"), - strtype, - baseuri, - loadingOptions, - lc=_doc.get("shmSize") - ) - - except ValidationException as e: - error_message, to_print, verb_tensage = parse_errors(str(e)) - - if str(e) == "missing required field `shmSize`": - _errors__.append( - ValidationException( - str(e), - None - ) + else: + _errors__.append( + ValidationException( + "the `hints` field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + detailed_message=f"the `hints` field with value `{val}` " + "is not valid because:", + ) + ) + cwlVersion = None + if "cwlVersion" in _doc: + try: + cwlVersion = _load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None, + baseuri, + loadingOptions, + lc=_doc.get("cwlVersion") ) - else: - val = _doc.get("shmSize") - if error_message != str(e): - val_type = convert_typing(extract_type(type(val))) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `cwlVersion`": _errors__.append( ValidationException( - "the `shmSize` field is not valid because:", - SourceLine(_doc, "shmSize", str), - [ValidationException(f"Value is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}", - detailed_message=f"Value `{val}` is a {val_type}, " - f"but valid {to_print} for this field " - f"{verb_tensage} {error_message}")], + str(e), + None ) ) else: + val = _doc.get("cwlVersion") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} 
{error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `cwlVersion` field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + detailed_message=f"the `cwlVersion` field with value `{val}` " + "is not valid because:", + ) + ) + intent = None + if "intent" in _doc: + try: + intent = _load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None_None, + baseuri, + loadingOptions, + lc=_doc.get("intent") + ) + + except ValidationException as e: + error_message, to_print, verb_tensage = parse_errors(str(e)) + + if str(e) == "missing required field `intent`": _errors__.append( ValidationException( - "the `shmSize` field is not valid because:", - SourceLine(_doc, "shmSize", str), - [e], - detailed_message=f"the `shmSize` field with value `{val}` " - "is not valid because:", + str(e), + None ) ) - extension_fields: dict[str, Any] = {} + else: + val = _doc.get("intent") + if error_message != str(e): + val_type = convert_typing(extract_type(type(val))) + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [ValidationException(f"Value is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}", + detailed_message=f"Value `{val}` is a {val_type}, " + f"but valid {to_print} for this field " + f"{verb_tensage} {error_message}")], + ) + ) + else: + _errors__.append( + ValidationException( + "the `intent` field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + detailed_message=f"the `intent` field with value `{val}` " + "is not valid because:", + ) + ) + extension_fields: MutableMapping[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: @@ -28996,14 +24688,14 @@ def fromDoc( ValidationException("mapping with implicit null 
key") ) elif ":" in k: - ex = expand_url( + ex = _expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( - "invalid field `{}`, expected one of: `class`, `shmSize`".format( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`".format( k ), SourceLine(_doc, k, str), @@ -29013,10 +24705,19 @@ def fromDoc( if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( - shmSize=shmSize, + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, extension_fields=extension_fields, loadingOptions=loadingOptions, ) + loadingOptions.idx[cast(str, id)] = (_constructed, loadingOptions) return _constructed def save( @@ -29030,18 +24731,52 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] + if self.id is not None: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u if self.class_ is not None: - uri = self.loadingOptions.vocab[self.class_] - if p := self.loadingOptions.rvocab.get(uri[: -len(self.class_)]): + vocab = _vocab | self.loadingOptions.vocab + rvocab = _rvocab | self.loadingOptions.rvocab + uri = vocab[self.class_] + if p := rvocab.get(uri[: -len(self.class_)]): uri = f"{p}:{self.class_}" else: uri = self.class_ - u = save_relative_uri(uri, base_url, False, None, relative_uris) + u = save_relative_uri(uri, self.id, False, None, relative_uris) r["class"] = u - if self.shmSize is not None: - r["shmSize"] = save( - self.shmSize, top=False, base_url=base_url, relative_uris=relative_uris + if self.label is not None: + r["label"] = save( + self.label, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.doc is not None: + r["doc"] = save( + self.doc, top=False, base_url=self.id, 
relative_uris=relative_uris + ) + if self.inputs is not None: + r["inputs"] = save( + self.inputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.outputs is not None: + r["outputs"] = save( + self.outputs, top=False, base_url=self.id, relative_uris=relative_uris + ) + if self.requirements is not None: + r["requirements"] = save( + self.requirements, + top=False, + base_url=self.id, + relative_uris=relative_uris, + ) + if self.hints is not None: + r["hints"] = save( + self.hints, top=False, base_url=self.id, relative_uris=relative_uris ) + if self.cwlVersion is not None: + u = save_relative_uri(self.cwlVersion, self.id, False, None, relative_uris) + r["cwlVersion"] = u + if self.intent is not None: + u = save_relative_uri(self.intent, self.id, True, None, relative_uris) + r["intent"] = u # top refers to the directory level if top: @@ -29051,13 +24786,25 @@ def save( r["$schemas"] = self.loadingOptions.schemas return r - attrs = frozenset(["class", "shmSize"]) + attrs: ClassVar[Collection[str]] = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + ] + ) -_vocab = { +_vocab.update({ "Any": "https://w3id.org/cwl/salad#Any", "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", - "CUDARequirement": "http://commonwl.org/cwltool#CUDARequirement", "CWLArraySchema": "https://w3id.org/cwl/cwl#CWLArraySchema", "CWLInputFile": "https://w3id.org/cwl/cwl#CWLInputFile", "CWLObjectType": "https://w3id.org/cwl/cwl#CWLObjectType", @@ -29112,9 +24859,6 @@ def save( "LoadContents": "https://w3id.org/cwl/cwl#LoadContents", "LoadListingEnum": "https://w3id.org/cwl/cwl#LoadListingEnum", "LoadListingRequirement": "https://w3id.org/cwl/cwl#LoadListingRequirement", - "Loop": "http://commonwl.org/cwltool#Loop", - "LoopInput": "http://commonwl.org/cwltool#LoopInput", - "MPIRequirement": "http://commonwl.org/cwltool#MPIRequirement", "MapSchema": 
"https://w3id.org/cwl/salad#MapSchema", "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement", "NetworkAccess": "https://w3id.org/cwl/cwl#NetworkAccess", @@ -29132,7 +24876,6 @@ def save( "PickValueMethod": "https://w3id.org/cwl/cwl#PickValueMethod", "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", "Process": "https://w3id.org/cwl/cwl#Process", - "ProcessGenerator": "http://commonwl.org/cwltool#ProcessGenerator", "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement", "RecordField": "https://w3id.org/cwl/salad#RecordField", "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", @@ -29141,9 +24884,7 @@ def save( "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod", "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement", "SecondaryFileSchema": "https://w3id.org/cwl/cwl#SecondaryFileSchema", - "Secrets": "http://commonwl.org/cwltool#Secrets", "ShellCommandRequirement": "https://w3id.org/cwl/cwl#ShellCommandRequirement", - "ShmSize": "http://commonwl.org/cwltool#ShmSize", "Sink": "https://w3id.org/cwl/cwl#Sink", "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage", "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement", @@ -29158,29 +24899,17 @@ def save( "WorkflowStep": "https://w3id.org/cwl/cwl#WorkflowStep", "WorkflowStepInput": "https://w3id.org/cwl/cwl#WorkflowStepInput", "WorkflowStepOutput": "https://w3id.org/cwl/cwl#WorkflowStepOutput", - "all": "http://commonwl.org/cwltool#Loop/outputMethod/LoopOutputModes/all", "all_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/all_non_null", "array": "https://w3id.org/cwl/salad#array", "boolean": "http://www.w3.org/2001/XMLSchema#boolean", "deep_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing", "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct", "double": "http://www.w3.org/2001/XMLSchema#double", - "draft-2": "https://w3id.org/cwl/cwl#draft-2", - "draft-3": 
"https://w3id.org/cwl/cwl#draft-3", - "draft-3.dev1": "https://w3id.org/cwl/cwl#draft-3.dev1", - "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2", - "draft-3.dev3": "https://w3id.org/cwl/cwl#draft-3.dev3", - "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4", - "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5", - "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1", - "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2", - "draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3", "enum": "https://w3id.org/cwl/salad#enum", "first_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/first_non_null", "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct", "float": "http://www.w3.org/2001/XMLSchema#float", "int": "http://www.w3.org/2001/XMLSchema#int", - "last": "http://commonwl.org/cwltool#Loop/outputMethod/LoopOutputModes/last", "long": "http://www.w3.org/2001/XMLSchema#long", "map": "https://w3id.org/cwl/salad#map", "merge_flattened": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened", @@ -29196,21 +24925,11 @@ def save( "string": "http://www.w3.org/2001/XMLSchema#string", "the_only_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/the_only_non_null", "union": "https://w3id.org/cwl/salad#union", - "v1.0": "https://w3id.org/cwl/cwl#v1.0", - "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4", - "v1.1": "https://w3id.org/cwl/cwl#v1.1", - "v1.1.0-dev1": "https://w3id.org/cwl/cwl#v1.1.0-dev1", "v1.2": "https://w3id.org/cwl/cwl#v1.2", - "v1.2.0-dev1": "https://w3id.org/cwl/cwl#v1.2.0-dev1", - "v1.2.0-dev2": "https://w3id.org/cwl/cwl#v1.2.0-dev2", - "v1.2.0-dev3": "https://w3id.org/cwl/cwl#v1.2.0-dev3", - "v1.2.0-dev4": "https://w3id.org/cwl/cwl#v1.2.0-dev4", - "v1.2.0-dev5": "https://w3id.org/cwl/cwl#v1.2.0-dev5", -} -_rvocab = { +}) +_rvocab.update({ "https://w3id.org/cwl/salad#Any": "Any", "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", - "http://commonwl.org/cwltool#CUDARequirement": "CUDARequirement", 
"https://w3id.org/cwl/cwl#CWLArraySchema": "CWLArraySchema", "https://w3id.org/cwl/cwl#CWLInputFile": "CWLInputFile", "https://w3id.org/cwl/cwl#CWLObjectType": "CWLObjectType", @@ -29265,9 +24984,6 @@ def save( "https://w3id.org/cwl/cwl#LoadContents": "LoadContents", "https://w3id.org/cwl/cwl#LoadListingEnum": "LoadListingEnum", "https://w3id.org/cwl/cwl#LoadListingRequirement": "LoadListingRequirement", - "http://commonwl.org/cwltool#Loop": "Loop", - "http://commonwl.org/cwltool#LoopInput": "LoopInput", - "http://commonwl.org/cwltool#MPIRequirement": "MPIRequirement", "https://w3id.org/cwl/salad#MapSchema": "MapSchema", "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", "https://w3id.org/cwl/cwl#NetworkAccess": "NetworkAccess", @@ -29285,7 +25001,6 @@ def save( "https://w3id.org/cwl/cwl#PickValueMethod": "PickValueMethod", "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", "https://w3id.org/cwl/cwl#Process": "Process", - "http://commonwl.org/cwltool#ProcessGenerator": "ProcessGenerator", "https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", "https://w3id.org/cwl/salad#RecordField": "RecordField", "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", @@ -29294,9 +25009,7 @@ def save( "https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", "https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", "https://w3id.org/cwl/cwl#SecondaryFileSchema": "SecondaryFileSchema", - "http://commonwl.org/cwltool#Secrets": "Secrets", "https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", - "http://commonwl.org/cwltool#ShmSize": "ShmSize", "https://w3id.org/cwl/cwl#Sink": "Sink", "https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", "https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", @@ -29311,29 +25024,17 @@ def save( "https://w3id.org/cwl/cwl#WorkflowStep": "WorkflowStep", "https://w3id.org/cwl/cwl#WorkflowStepInput": "WorkflowStepInput", 
"https://w3id.org/cwl/cwl#WorkflowStepOutput": "WorkflowStepOutput", - "http://commonwl.org/cwltool#Loop/outputMethod/LoopOutputModes/all": "all", "https://w3id.org/cwl/cwl#PickValueMethod/all_non_null": "all_non_null", "https://w3id.org/cwl/salad#array": "array", "http://www.w3.org/2001/XMLSchema#boolean": "boolean", "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing": "deep_listing", "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", "http://www.w3.org/2001/XMLSchema#double": "double", - "https://w3id.org/cwl/cwl#draft-2": "draft-2", - "https://w3id.org/cwl/cwl#draft-3": "draft-3", - "https://w3id.org/cwl/cwl#draft-3.dev1": "draft-3.dev1", - "https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", - "https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", - "https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", - "https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", - "https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", - "https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", - "https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", "https://w3id.org/cwl/salad#enum": "enum", "https://w3id.org/cwl/cwl#PickValueMethod/first_non_null": "first_non_null", "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", "http://www.w3.org/2001/XMLSchema#float": "float", "http://www.w3.org/2001/XMLSchema#int": "int", - "http://commonwl.org/cwltool#Loop/outputMethod/LoopOutputModes/last": "last", "http://www.w3.org/2001/XMLSchema#long": "long", "https://w3id.org/cwl/salad#map": "map", "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened": "merge_flattened", @@ -29349,25 +25050,16 @@ def save( "http://www.w3.org/2001/XMLSchema#string": "string", "https://w3id.org/cwl/cwl#PickValueMethod/the_only_non_null": "the_only_non_null", "https://w3id.org/cwl/salad#union": "union", - "https://w3id.org/cwl/cwl#v1.0": "v1.0", - "https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", - "https://w3id.org/cwl/cwl#v1.1": "v1.1", - 
"https://w3id.org/cwl/cwl#v1.1.0-dev1": "v1.1.0-dev1", "https://w3id.org/cwl/cwl#v1.2": "v1.2", - "https://w3id.org/cwl/cwl#v1.2.0-dev1": "v1.2.0-dev1", - "https://w3id.org/cwl/cwl#v1.2.0-dev2": "v1.2.0-dev2", - "https://w3id.org/cwl/cwl#v1.2.0-dev3": "v1.2.0-dev3", - "https://w3id.org/cwl/cwl#v1.2.0-dev4": "v1.2.0-dev4", - "https://w3id.org/cwl/cwl#v1.2.0-dev5": "v1.2.0-dev5", -} - -strtype = _PrimitiveLoader(str) -inttype = _PrimitiveLoader(int) -floattype = _PrimitiveLoader(float) -booltype = _PrimitiveLoader(bool) -None_type = _PrimitiveLoader(type(None)) -Any_type = _AnyLoader() -PrimitiveTypeLoader = _EnumLoader( +}) + +strtype: Final = _PrimitiveLoader(str) +inttype: Final = _PrimitiveLoader(int) +floattype: Final = _PrimitiveLoader(float) +booltype: Final = _PrimitiveLoader(bool) +None_type: Final = _PrimitiveLoader(type(None)) +Any_type: Final = _AnyLoader() +PrimitiveTypeLoader: Final = _EnumLoader( ( "null", "boolean", @@ -29382,28 +25074,41 @@ def save( """ Names of salad data types (based on Avro schema declarations). -Refer to the [Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for -detailed information. +Refer to the `Avro schema declaration documentation `__ for detailed information. null: no value + boolean: a binary value + int: 32-bit signed integer + long: 64-bit signed integer + float: single precision (32-bit) IEEE 754 floating-point number + double: double precision (64-bit) IEEE 754 floating-point number + string: Unicode character sequence """ -AnyLoader = _EnumLoader(("Any",), "Any") +AnyLoader: Final = _EnumLoader(("Any",), "Any") """ The **Any** type validates for any non-null value. 
""" -RecordFieldLoader = _RecordLoader(RecordField, None, None) -RecordSchemaLoader = _RecordLoader(RecordSchema, None, None) -EnumSchemaLoader = _RecordLoader(EnumSchema, None, None) -ArraySchemaLoader = _RecordLoader(ArraySchema, None, None) -MapSchemaLoader = _RecordLoader(MapSchema, None, None) -UnionSchemaLoader = _RecordLoader(UnionSchema, None, None) -CWLTypeLoader = _EnumLoader( +RecordFieldLoader: Final = _RecordLoader( + schema_salad.metaschema.RecordField, None, None +) +RecordSchemaLoader: Final = _RecordLoader( + schema_salad.metaschema.RecordSchema, None, None +) +EnumSchemaLoader: Final = _RecordLoader(schema_salad.metaschema.EnumSchema, None, None) +ArraySchemaLoader: Final = _RecordLoader( + schema_salad.metaschema.ArraySchema, None, None +) +MapSchemaLoader: Final = _RecordLoader(schema_salad.metaschema.MapSchema, None, None) +UnionSchemaLoader: Final = _RecordLoader( + schema_salad.metaschema.UnionSchema, None, None +) +CWLTypeLoader: Final = _EnumLoader( ( "null", "boolean", @@ -29419,58 +25124,65 @@ def save( ) """ Extends primitive types with the concept of a file and directory as a builtin type. 
+ File: A File object + Directory: A Directory object """ -CWLArraySchemaLoader = _RecordLoader(CWLArraySchema, None, None) -CWLRecordFieldLoader = _RecordLoader(CWLRecordField, None, None) -CWLRecordSchemaLoader = _RecordLoader(CWLRecordSchema, None, None) -FileLoader = _RecordLoader(File, None, None) -DirectoryLoader = _RecordLoader(Directory, None, None) -CWLObjectTypeLoader = _UnionLoader((), "CWLObjectTypeLoader") -union_of_None_type_or_CWLObjectTypeLoader = _UnionLoader( +CWLArraySchemaLoader: Final = _RecordLoader(CWLArraySchema, None, None) +CWLRecordFieldLoader: Final = _RecordLoader(CWLRecordField, None, None) +CWLRecordSchemaLoader: Final = _RecordLoader(CWLRecordSchema, None, None) +FileLoader: Final = _RecordLoader(File, None, None) +DirectoryLoader: Final = _RecordLoader(Directory, None, None) +CWLObjectTypeLoader: Final = _UnionLoader((), "CWLObjectTypeLoader") +union_of_None_type_or_CWLObjectTypeLoader: Final = _UnionLoader( ( None_type, CWLObjectTypeLoader, ) ) -array_of_union_of_None_type_or_CWLObjectTypeLoader = _ArrayLoader( +array_of_union_of_None_type_or_CWLObjectTypeLoader: Final = _ArrayLoader( union_of_None_type_or_CWLObjectTypeLoader ) -map_of_union_of_None_type_or_CWLObjectTypeLoader = _MapLoader( +map_of_union_of_None_type_or_CWLObjectTypeLoader: Final = _MapLoader( union_of_None_type_or_CWLObjectTypeLoader, "None", None, None ) -InlineJavascriptRequirementLoader = _RecordLoader( +InlineJavascriptRequirementLoader: Final = _RecordLoader( InlineJavascriptRequirement, None, None ) -SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement, None, None) -LoadListingRequirementLoader = _RecordLoader(LoadListingRequirement, None, None) -DockerRequirementLoader = _RecordLoader(DockerRequirement, None, None) -SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement, None, None) -InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement, None, None) -EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement, None, None) 
-ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement, None, None) -ResourceRequirementLoader = _RecordLoader(ResourceRequirement, None, None) -WorkReuseLoader = _RecordLoader(WorkReuse, None, None) -NetworkAccessLoader = _RecordLoader(NetworkAccess, None, None) -InplaceUpdateRequirementLoader = _RecordLoader(InplaceUpdateRequirement, None, None) -ToolTimeLimitLoader = _RecordLoader(ToolTimeLimit, None, None) -SubworkflowFeatureRequirementLoader = _RecordLoader( +SchemaDefRequirementLoader: Final = _RecordLoader(SchemaDefRequirement, None, None) +LoadListingRequirementLoader: Final = _RecordLoader(LoadListingRequirement, None, None) +DockerRequirementLoader: Final = _RecordLoader(DockerRequirement, None, None) +SoftwareRequirementLoader: Final = _RecordLoader(SoftwareRequirement, None, None) +InitialWorkDirRequirementLoader: Final = _RecordLoader( + InitialWorkDirRequirement, None, None +) +EnvVarRequirementLoader: Final = _RecordLoader(EnvVarRequirement, None, None) +ShellCommandRequirementLoader: Final = _RecordLoader( + ShellCommandRequirement, None, None +) +ResourceRequirementLoader: Final = _RecordLoader(ResourceRequirement, None, None) +WorkReuseLoader: Final = _RecordLoader(WorkReuse, None, None) +NetworkAccessLoader: Final = _RecordLoader(NetworkAccess, None, None) +InplaceUpdateRequirementLoader: Final = _RecordLoader( + InplaceUpdateRequirement, None, None +) +ToolTimeLimitLoader: Final = _RecordLoader(ToolTimeLimit, None, None) +SubworkflowFeatureRequirementLoader: Final = _RecordLoader( SubworkflowFeatureRequirement, None, None ) -ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement, None, None) -MultipleInputFeatureRequirementLoader = _RecordLoader( +ScatterFeatureRequirementLoader: Final = _RecordLoader( + ScatterFeatureRequirement, None, None +) +MultipleInputFeatureRequirementLoader: Final = _RecordLoader( MultipleInputFeatureRequirement, None, None ) -StepInputExpressionRequirementLoader = _RecordLoader( 
+StepInputExpressionRequirementLoader: Final = _RecordLoader( StepInputExpressionRequirement, None, None ) -SecretsLoader = _RecordLoader(Secrets, None, None) -MPIRequirementLoader = _RecordLoader(MPIRequirement, None, None) -CUDARequirementLoader = _RecordLoader(CUDARequirement, None, None) -LoopLoader = _RecordLoader(Loop, None, None) -ShmSizeLoader = _RecordLoader(ShmSize, None, None) -union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader = _UnionLoader( +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader: ( + Final +) = _UnionLoader( ( InlineJavascriptRequirementLoader, SchemaDefRequirementLoader, @@ -29489,59 +25201,38 @@ def save( ScatterFeatureRequirementLoader, MultipleInputFeatureRequirementLoader, StepInputExpressionRequirementLoader, - SecretsLoader, - MPIRequirementLoader, - CUDARequirementLoader, - LoopLoader, - ShmSizeLoader, ) ) 
-array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader = _ArrayLoader( - union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader +array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader: ( + Final +) = _ArrayLoader( + 
union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader ) -union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_CWLObjectTypeLoader = _UnionLoader( +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_CWLObjectTypeLoader: ( + Final +) = _UnionLoader( ( None_type, - 
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, CWLObjectTypeLoader, ) ) -map_of_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_CWLObjectTypeLoader = _MapLoader( - 
union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_CWLObjectTypeLoader, +map_of_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_CWLObjectTypeLoader: ( + Final +) = _MapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_CWLObjectTypeLoader, "CWLInputFile", "@list", True, ) -CWLInputFileLoader = 
map_of_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_CWLObjectTypeLoader -CWLVersionLoader = _EnumLoader( - ( - "draft-2", - "draft-3.dev1", - "draft-3.dev2", - "draft-3.dev3", - "draft-3.dev4", - "draft-3.dev5", - "draft-3", - "draft-4.dev1", - "draft-4.dev2", - "draft-4.dev3", - "v1.0.dev4", - "v1.0", - "v1.1.0-dev1", - "v1.1", - "v1.2.0-dev1", - "v1.2.0-dev2", - "v1.2.0-dev3", - "v1.2.0-dev4", - "v1.2.0-dev5", - "v1.2", - ), - "CWLVersion", +CWLInputFileLoader: Final = ( + map_of_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_CWLObjectTypeLoader ) +CWLVersionLoader: Final = _EnumLoader(("v1.2",), "CWLVersion") """ -Version symbols for published CWL document versions. +Current version symbol for CWL documents. 
""" -LoadListingEnumLoader = _EnumLoader( +LoadListingEnumLoader: Final = _EnumLoader( ( "no_listing", "shallow_listing", @@ -29550,160 +25241,190 @@ def save( "LoadListingEnum", ) """ -Specify the desired behavior for loading the `listing` field of -a Directory object for use by expressions. +Specify the desired behavior for loading the ``listing`` field of a Directory object for use by expressions. no_listing: Do not load the directory listing. + shallow_listing: Only load the top level listing, do not recurse into subdirectories. + deep_listing: Load the directory listing and recursively load all subdirectories as well. """ -ExpressionLoader = _ExpressionLoader(str) -InputBindingLoader = _RecordLoader(InputBinding, None, None) -InputRecordFieldLoader = _RecordLoader(InputRecordField, None, None) -InputRecordSchemaLoader = _RecordLoader(InputRecordSchema, None, None) -InputEnumSchemaLoader = _RecordLoader(InputEnumSchema, None, None) -InputArraySchemaLoader = _RecordLoader(InputArraySchema, None, None) -OutputRecordFieldLoader = _RecordLoader(OutputRecordField, None, None) -OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema, None, None) -OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema, None, None) -OutputArraySchemaLoader = _RecordLoader(OutputArraySchema, None, None) -SecondaryFileSchemaLoader = _RecordLoader(SecondaryFileSchema, None, None) -EnvironmentDefLoader = _RecordLoader(EnvironmentDef, None, None) -CommandLineBindingLoader = _RecordLoader(CommandLineBinding, None, None) -CommandOutputBindingLoader = _RecordLoader(CommandOutputBinding, None, None) -CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField, None, None) -CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema, None, None) -CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema, None, None) -CommandInputArraySchemaLoader = _RecordLoader(CommandInputArraySchema, None, None) -CommandOutputRecordFieldLoader = 
_RecordLoader(CommandOutputRecordField, None, None) -CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema, None, None) -CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema, None, None) -CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema, None, None) -CommandInputParameterLoader = _RecordLoader(CommandInputParameter, None, None) -CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter, None, None) -stdinLoader = _EnumLoader(("stdin",), "stdin") +ExpressionLoader: Final = _ExpressionLoader(str) +InputBindingLoader: Final = _RecordLoader(InputBinding, None, None) +InputRecordFieldLoader: Final = _RecordLoader(InputRecordField, None, None) +InputRecordSchemaLoader: Final = _RecordLoader(InputRecordSchema, None, None) +InputEnumSchemaLoader: Final = _RecordLoader(InputEnumSchema, None, None) +InputArraySchemaLoader: Final = _RecordLoader(InputArraySchema, None, None) +OutputRecordFieldLoader: Final = _RecordLoader(OutputRecordField, None, None) +OutputRecordSchemaLoader: Final = _RecordLoader(OutputRecordSchema, None, None) +OutputEnumSchemaLoader: Final = _RecordLoader(OutputEnumSchema, None, None) +OutputArraySchemaLoader: Final = _RecordLoader(OutputArraySchema, None, None) +SecondaryFileSchemaLoader: Final = _RecordLoader(SecondaryFileSchema, None, None) +EnvironmentDefLoader: Final = _RecordLoader(EnvironmentDef, None, None) +CommandLineBindingLoader: Final = _RecordLoader(CommandLineBinding, None, None) +CommandOutputBindingLoader: Final = _RecordLoader(CommandOutputBinding, None, None) +CommandInputRecordFieldLoader: Final = _RecordLoader( + CommandInputRecordField, None, None +) +CommandInputRecordSchemaLoader: Final = _RecordLoader( + CommandInputRecordSchema, None, None +) +CommandInputEnumSchemaLoader: Final = _RecordLoader(CommandInputEnumSchema, None, None) +CommandInputArraySchemaLoader: Final = _RecordLoader( + CommandInputArraySchema, None, None +) +CommandOutputRecordFieldLoader: 
Final = _RecordLoader( + CommandOutputRecordField, None, None +) +CommandOutputRecordSchemaLoader: Final = _RecordLoader( + CommandOutputRecordSchema, None, None +) +CommandOutputEnumSchemaLoader: Final = _RecordLoader( + CommandOutputEnumSchema, None, None +) +CommandOutputArraySchemaLoader: Final = _RecordLoader( + CommandOutputArraySchema, None, None +) +CommandInputParameterLoader: Final = _RecordLoader(CommandInputParameter, None, None) +CommandOutputParameterLoader: Final = _RecordLoader(CommandOutputParameter, None, None) +stdinLoader: Final = _EnumLoader(("stdin",), "stdin") """ -Only valid as a `type` for a `CommandLineTool` input with no -`inputBinding` set. `stdin` must not be specified at the `CommandLineTool` -level. +Only valid as a ``type`` for a ``CommandLineTool`` input with no ``inputBinding`` set. ``stdin`` must not be specified at the ``CommandLineTool`` level. The following -``` -inputs: - an_input_name: - type: stdin -``` + +:: + + inputs: + an_input_name: + type: stdin + + is equivalent to -``` -inputs: - an_input_name: - type: File - streamable: true - -stdin: $(inputs.an_input_name.path) -``` + +:: + + inputs: + an_input_name: + type: File + streamable: true + + stdin: $(inputs.an_input_name.path) """ -stdoutLoader = _EnumLoader(("stdout",), "stdout") +stdoutLoader: Final = _EnumLoader(("stdout",), "stdout") """ -Only valid as a `type` for a `CommandLineTool` output with no -`outputBinding` set. +Only valid as a ``type`` for a ``CommandLineTool`` output with no ``outputBinding`` set. The following -``` -outputs: - an_output_name: - type: stdout -stdout: a_stdout_file -``` +:: + + outputs: + an_output_name: + type: stdout + + stdout: a_stdout_file + + is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: a_stdout_file - -stdout: a_stdout_file -``` - -If there is no `stdout` name provided, a random filename will be created. 
-For example, the following -``` -outputs: - an_output_name: - type: stdout -``` + +:: + + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stdout_file + + stdout: a_stdout_file + + +If there is no ``stdout`` name provided, a random filename will be created. For example, the following + +:: + + outputs: + an_output_name: + type: stdout + + is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: random_stdout_filenameABCDEFG - -stdout: random_stdout_filenameABCDEFG -``` - -If the `CommandLineTool` contains logically chained commands -(e.g. `echo a && echo b`) `stdout` must include the output of -every command. + +:: + + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stdout_filenameABCDEFG + + stdout: random_stdout_filenameABCDEFG + + +If the ``CommandLineTool`` contains logically chained commands (e.g. ``echo a && echo b``) ``stdout`` must include the output of every command. """ -stderrLoader = _EnumLoader(("stderr",), "stderr") +stderrLoader: Final = _EnumLoader(("stderr",), "stderr") """ -Only valid as a `type` for a `CommandLineTool` output with no -`outputBinding` set. +Only valid as a ``type`` for a ``CommandLineTool`` output with no ``outputBinding`` set. The following -``` -outputs: - an_output_name: - type: stderr -stderr: a_stderr_file -``` +:: + + outputs: + an_output_name: + type: stderr + + stderr: a_stderr_file + + is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: a_stderr_file - -stderr: a_stderr_file -``` - -If there is no `stderr` name provided, a random filename will be created. 
-For example, the following -``` -outputs: - an_output_name: - type: stderr -``` + +:: + + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stderr_file + + stderr: a_stderr_file + + +If there is no ``stderr`` name provided, a random filename will be created. For example, the following + +:: + + outputs: + an_output_name: + type: stderr + + is equivalent to -``` -outputs: - an_output_name: - type: File - streamable: true - outputBinding: - glob: random_stderr_filenameABCDEFG - -stderr: random_stderr_filenameABCDEFG -``` + +:: + + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stderr_filenameABCDEFG + + stderr: random_stderr_filenameABCDEFG """ -CommandLineToolLoader = _RecordLoader(CommandLineTool, None, None) -SoftwarePackageLoader = _RecordLoader(SoftwarePackage, None, None) -DirentLoader = _RecordLoader(Dirent, None, None) -ExpressionToolOutputParameterLoader = _RecordLoader( +CommandLineToolLoader: Final = _RecordLoader(CommandLineTool, None, None) +SoftwarePackageLoader: Final = _RecordLoader(SoftwarePackage, None, None) +DirentLoader: Final = _RecordLoader(Dirent, None, None) +ExpressionToolOutputParameterLoader: Final = _RecordLoader( ExpressionToolOutputParameter, None, None ) -WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter, None, None) -ExpressionToolLoader = _RecordLoader(ExpressionTool, None, None) -LinkMergeMethodLoader = _EnumLoader( +WorkflowInputParameterLoader: Final = _RecordLoader(WorkflowInputParameter, None, None) +ExpressionToolLoader: Final = _RecordLoader(ExpressionTool, None, None) +LinkMergeMethodLoader: Final = _EnumLoader( ( "merge_nested", "merge_flattened", @@ -29711,9 +25432,9 @@ def save( "LinkMergeMethod", ) """ -The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). +The input link merge method, described in `WorkflowStepInput <#WorkflowStepInput>`__. 
""" -PickValueMethodLoader = _EnumLoader( +PickValueMethodLoader: Final = _EnumLoader( ( "first_non_null", "the_only_non_null", @@ -29722,12 +25443,14 @@ def save( "PickValueMethod", ) """ -Picking non-null values among inbound data links, described in [WorkflowStepInput](#WorkflowStepInput). +Picking non-null values among inbound data links, described in `WorkflowStepInput <#WorkflowStepInput>`__. """ -WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter, None, None) -WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput, None, None) -WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput, None, None) -ScatterMethodLoader = _EnumLoader( +WorkflowOutputParameterLoader: Final = _RecordLoader( + WorkflowOutputParameter, None, None +) +WorkflowStepInputLoader: Final = _RecordLoader(WorkflowStepInput, None, None) +WorkflowStepOutputLoader: Final = _RecordLoader(WorkflowStepOutput, None, None) +ScatterMethodLoader: Final = _EnumLoader( ( "dotproduct", "nested_crossproduct", @@ -29736,25 +25459,29 @@ def save( "ScatterMethod", ) """ -The scatter method, as described in [workflow step scatter](#WorkflowStep). +The scatter method, as described in `workflow step scatter <#WorkflowStep>`__. 
""" -WorkflowStepLoader = _RecordLoader(WorkflowStep, None, None) -WorkflowLoader = _RecordLoader(Workflow, None, None) -OperationInputParameterLoader = _RecordLoader(OperationInputParameter, None, None) -OperationOutputParameterLoader = _RecordLoader(OperationOutputParameter, None, None) -OperationLoader = _RecordLoader(Operation, None, None) -ProcessGeneratorLoader = _RecordLoader(ProcessGenerator, None, None) -LoopInputLoader = _RecordLoader(LoopInput, None, None) -array_of_strtype = _ArrayLoader(strtype) -union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( +WorkflowStepLoader: Final = _RecordLoader(WorkflowStep, None, None) +WorkflowLoader: Final = _RecordLoader(Workflow, None, None) +OperationInputParameterLoader: Final = _RecordLoader( + OperationInputParameter, None, None +) +OperationOutputParameterLoader: Final = _RecordLoader( + OperationOutputParameter, None, None +) +OperationLoader: Final = _RecordLoader(Operation, None, None) +array_of_strtype: Final = _ArrayLoader(strtype) +union_of_None_type_or_strtype_or_array_of_strtype: Final = _UnionLoader( ( None_type, strtype, array_of_strtype, ) ) -uri_strtype_True_False_None_None = _URILoader(strtype, True, False, None, None) -union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _UnionLoader( +uri_strtype_True_False_None_None: Final = _URILoader(strtype, True, False, None, None) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( PrimitiveTypeLoader, RecordSchemaLoader, @@ -29765,10 +25492,14 @@ def save( strtype, ) ) -array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _ArrayLoader( 
+array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype ) -union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _UnionLoader( +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( PrimitiveTypeLoader, RecordSchemaLoader, @@ -29780,51 +25511,57 @@ def save( array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, ) ) -typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( 
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, 2, "v1.1", ) -array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) -union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader( +array_of_RecordFieldLoader: Final = _ArrayLoader(RecordFieldLoader) +union_of_None_type_or_array_of_RecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_RecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader( +idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader: Final = _IdMapLoader( union_of_None_type_or_array_of_RecordFieldLoader, "name", "type" ) -Record_nameLoader = _EnumLoader(("record",), "Record_name") -typedsl_Record_nameLoader_2 = _TypeDSLLoader(Record_nameLoader, 2, "v1.1") -union_of_None_type_or_strtype = _UnionLoader( +Record_nameLoader: Final = _EnumLoader(("record",), "Record_name") +typedsl_Record_nameLoader_2: Final = _TypeDSLLoader(Record_nameLoader, 2, "v1.1") +union_of_None_type_or_strtype: Final = _UnionLoader( ( None_type, strtype, ) ) -uri_union_of_None_type_or_strtype_True_False_None_None = _URILoader( +uri_union_of_None_type_or_strtype_True_False_None_None: Final = _URILoader( union_of_None_type_or_strtype, True, False, None, None ) -uri_array_of_strtype_True_False_None_None = _URILoader( +uri_array_of_strtype_True_False_None_None: Final = _URILoader( array_of_strtype, True, False, None, None ) -Enum_nameLoader = _EnumLoader(("enum",), "Enum_name") -typedsl_Enum_nameLoader_2 = _TypeDSLLoader(Enum_nameLoader, 2, "v1.1") 
-uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None = _URILoader( +Enum_nameLoader: Final = _EnumLoader(("enum",), "Enum_name") +typedsl_Enum_nameLoader_2: Final = _TypeDSLLoader(Enum_nameLoader, 2, "v1.1") +uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, False, True, 2, None, ) -Array_nameLoader = _EnumLoader(("array",), "Array_name") -typedsl_Array_nameLoader_2 = _TypeDSLLoader(Array_nameLoader, 2, "v1.1") -Map_nameLoader = _EnumLoader(("map",), "Map_name") -typedsl_Map_nameLoader_2 = _TypeDSLLoader(Map_nameLoader, 2, "v1.1") -Union_nameLoader = _EnumLoader(("union",), "Union_name") -typedsl_Union_nameLoader_2 = _TypeDSLLoader(Union_nameLoader, 2, "v1.1") -union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype = _UnionLoader( +Array_nameLoader: Final = _EnumLoader(("array",), "Array_name") +typedsl_Array_nameLoader_2: Final = _TypeDSLLoader(Array_nameLoader, 2, "v1.1") +Map_nameLoader: Final = _EnumLoader(("map",), "Map_name") +typedsl_Map_nameLoader_2: Final = _TypeDSLLoader(Map_nameLoader, 2, "v1.1") +Union_nameLoader: Final = _EnumLoader(("union",), 
"Union_name") +typedsl_Union_nameLoader_2: Final = _TypeDSLLoader(Union_nameLoader, 2, "v1.1") +union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( PrimitiveTypeLoader, CWLRecordSchemaLoader, @@ -29833,10 +25570,14 @@ def save( strtype, ) ) -array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype ) -union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype = _UnionLoader( +union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( PrimitiveTypeLoader, CWLRecordSchemaLoader, @@ -29846,57 +25587,65 @@ def save( array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype, ) ) -uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( +uri_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( 
union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype, False, True, 2, None, ) -typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_CWLRecordSchemaLoader_or_EnumSchemaLoader_or_CWLArraySchemaLoader_or_strtype, 2, "v1.1", ) -array_of_CWLRecordFieldLoader = _ArrayLoader(CWLRecordFieldLoader) -union_of_None_type_or_array_of_CWLRecordFieldLoader = _UnionLoader( +array_of_CWLRecordFieldLoader: Final = _ArrayLoader(CWLRecordFieldLoader) +union_of_None_type_or_array_of_CWLRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_CWLRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader = _IdMapLoader( +idmap_fields_union_of_None_type_or_array_of_CWLRecordFieldLoader: Final = _IdMapLoader( union_of_None_type_or_array_of_CWLRecordFieldLoader, "name", "type" ) -File_classLoader = _EnumLoader(("File",), "File_class") -uri_File_classLoader_False_True_None_None = _URILoader( +File_classLoader: Final = _EnumLoader(("File",), "File_class") +uri_File_classLoader_False_True_None_None: Final = _URILoader( File_classLoader, False, True, None, None ) -uri_union_of_None_type_or_strtype_False_False_None_None = _URILoader( 
+uri_union_of_None_type_or_strtype_False_False_None_None: Final = _URILoader( union_of_None_type_or_strtype, False, False, None, None ) -union_of_None_type_or_inttype = _UnionLoader( +union_of_None_type_or_inttype: Final = _UnionLoader( ( None_type, inttype, ) ) -union_of_FileLoader_or_DirectoryLoader = _UnionLoader( +union_of_FileLoader_or_DirectoryLoader: Final = _UnionLoader( ( FileLoader, DirectoryLoader, ) ) -array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( +array_of_union_of_FileLoader_or_DirectoryLoader: Final = _ArrayLoader( union_of_FileLoader_or_DirectoryLoader ) -union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( - ( - None_type, - array_of_union_of_FileLoader_or_DirectoryLoader, +union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader: Final = ( + _UnionLoader( + ( + None_type, + array_of_union_of_FileLoader_or_DirectoryLoader, + ) ) ) -secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( +secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader: ( + Final +) = _UnionLoader( ( _SecondaryDSLLoader( union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader @@ -29904,34 +25653,38 @@ def save( union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, ) ) -uri_union_of_None_type_or_strtype_True_False_None_True = _URILoader( +uri_union_of_None_type_or_strtype_True_False_None_True: Final = _URILoader( union_of_None_type_or_strtype, True, False, None, True ) -Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") -uri_Directory_classLoader_False_True_None_None = _URILoader( +Directory_classLoader: Final = _EnumLoader(("Directory",), "Directory_class") +uri_Directory_classLoader_False_True_None_None: Final = _URILoader( Directory_classLoader, False, True, None, None ) -union_of_None_type_or_booltype = _UnionLoader( +union_of_None_type_or_booltype: Final = _UnionLoader( ( 
None_type, booltype, ) ) -union_of_None_type_or_LoadListingEnumLoader = _UnionLoader( +union_of_None_type_or_LoadListingEnumLoader: Final = _UnionLoader( ( None_type, LoadListingEnumLoader, ) ) -array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader) -union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( +array_of_SecondaryFileSchemaLoader: Final = _ArrayLoader(SecondaryFileSchemaLoader) +union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader: ( + Final +) = _UnionLoader( ( None_type, SecondaryFileSchemaLoader, array_of_SecondaryFileSchemaLoader, ) ) -secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( +secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader: ( + Final +) = _UnionLoader( ( _SecondaryDSLLoader( union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader @@ -29939,32 +25692,40 @@ def save( union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, ) ) -union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader( - ( - None_type, - strtype, - array_of_strtype, - ExpressionLoader, +union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader: Final = ( + _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ExpressionLoader, + ) ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True = _URILoader( +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None_True: ( + Final +) = _URILoader( union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, True, False, None, True, ) -union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_strtype_or_ExpressionLoader: Final = _UnionLoader( ( None_type, strtype, ExpressionLoader, ) ) 
-uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True = _URILoader( - union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None, True +uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None_True: Final = ( + _URILoader( + union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None, True + ) ) -union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, InputRecordSchemaLoader, @@ -29973,10 +25734,14 @@ def save( strtype, ) ) -array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype ) -union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, InputRecordSchemaLoader, @@ -29986,29 +25751,35 @@ def save( array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, ) ) 
-typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, 2, "v1.1", ) -array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader) -union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader( +array_of_InputRecordFieldLoader: Final = _ArrayLoader(InputRecordFieldLoader) +union_of_None_type_or_array_of_InputRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_InputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader: Final = ( + _IdMapLoader(union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type") ) -uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( +uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2_None: 
( + Final +) = _URILoader( union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, False, True, 2, None, ) -union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, OutputRecordSchemaLoader, @@ -30017,10 +25788,14 @@ def save( strtype, ) ) -array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype ) -union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, OutputRecordSchemaLoader, @@ -30030,44 +25805,56 @@ def save( array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, ) ) 
-typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, 2, "v1.1", ) -array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader) -union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader( +array_of_OutputRecordFieldLoader: Final = _ArrayLoader(OutputRecordFieldLoader) +union_of_None_type_or_array_of_OutputRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_OutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader( - union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type" +idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader: Final = ( + _IdMapLoader(union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type") ) -uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( 
+uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, False, True, 2, None, ) -union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _UnionLoader( +union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader: ( + Final +) = _UnionLoader( ( CommandInputParameterLoader, WorkflowInputParameterLoader, OperationInputParameterLoader, ) ) -array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _ArrayLoader( +array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader: ( + Final +) = _ArrayLoader( union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader ) -idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _IdMapLoader( +idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader: ( + Final +) = _IdMapLoader( array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader, "id", "type", ) -union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _UnionLoader( 
+union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader: ( + Final +) = _UnionLoader( ( CommandOutputParameterLoader, ExpressionToolOutputParameterLoader, @@ -30075,26 +25862,36 @@ def save( OperationOutputParameterLoader, ) ) -array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _ArrayLoader( +array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader: ( + Final +) = _ArrayLoader( union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader ) -idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _IdMapLoader( +idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader: ( + Final +) = _IdMapLoader( array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader, "id", "type", ) 
-union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader = _UnionLoader( +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader: ( + Final +) = _UnionLoader( ( None_type, - array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader, + 
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, ) ) -idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader = _IdMapLoader( - union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader, 
+idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader: ( + Final +) = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, "class", "None", ) -union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_Any_type = _UnionLoader( 
+union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type: ( + Final +) = _UnionLoader( ( InlineJavascriptRequirementLoader, SchemaDefRequirementLoader, @@ -30113,115 +25910,124 @@ def save( ScatterFeatureRequirementLoader, MultipleInputFeatureRequirementLoader, StepInputExpressionRequirementLoader, - SecretsLoader, - MPIRequirementLoader, - CUDARequirementLoader, - LoopLoader, - ShmSizeLoader, Any_type, ) ) -array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_Any_type = _ArrayLoader( - 
union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_Any_type +array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type: ( + Final +) = _ArrayLoader( + union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type ) 
-union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_Any_type = _UnionLoader( +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type: ( + Final +) = _UnionLoader( ( None_type, - array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_Any_type, + 
array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, ) ) -idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_Any_type = _IdMapLoader( - union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_SecretsLoader_or_MPIRequirementLoader_or_CUDARequirementLoader_or_LoopLoader_or_ShmSizeLoader_or_Any_type, 
+idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type: ( + Final +) = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, "class", "None", ) -union_of_None_type_or_CWLVersionLoader = _UnionLoader( +union_of_None_type_or_CWLVersionLoader: Final = _UnionLoader( ( None_type, CWLVersionLoader, ) ) -uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None = _URILoader( +uri_union_of_None_type_or_CWLVersionLoader_False_True_None_None: Final = _URILoader( union_of_None_type_or_CWLVersionLoader, False, True, None, None ) -union_of_None_type_or_array_of_strtype = _UnionLoader( +union_of_None_type_or_array_of_strtype: Final = _UnionLoader( ( None_type, array_of_strtype, ) ) -uri_union_of_None_type_or_array_of_strtype_True_False_None_None = _URILoader( +uri_union_of_None_type_or_array_of_strtype_True_False_None_None: Final = _URILoader( union_of_None_type_or_array_of_strtype, True, False, None, None ) 
-InlineJavascriptRequirement_classLoader = _EnumLoader( +InlineJavascriptRequirement_classLoader: Final = _EnumLoader( ("InlineJavascriptRequirement",), "InlineJavascriptRequirement_class" ) -uri_InlineJavascriptRequirement_classLoader_False_True_None_None = _URILoader( +uri_InlineJavascriptRequirement_classLoader_False_True_None_None: Final = _URILoader( InlineJavascriptRequirement_classLoader, False, True, None, None ) -SchemaDefRequirement_classLoader = _EnumLoader( +SchemaDefRequirement_classLoader: Final = _EnumLoader( ("SchemaDefRequirement",), "SchemaDefRequirement_class" ) -uri_SchemaDefRequirement_classLoader_False_True_None_None = _URILoader( +uri_SchemaDefRequirement_classLoader_False_True_None_None: Final = _URILoader( SchemaDefRequirement_classLoader, False, True, None, None ) -union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _UnionLoader( +union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader: ( + Final +) = _UnionLoader( ( CommandInputRecordSchemaLoader, CommandInputEnumSchemaLoader, CommandInputArraySchemaLoader, ) ) -array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _ArrayLoader( +array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader: ( + Final +) = _ArrayLoader( union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader ) -union_of_strtype_or_ExpressionLoader = _UnionLoader( +union_of_strtype_or_ExpressionLoader: Final = _UnionLoader( ( strtype, ExpressionLoader, ) ) -union_of_None_type_or_booltype_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_booltype_or_ExpressionLoader: Final = _UnionLoader( ( None_type, booltype, ExpressionLoader, ) ) -LoadListingRequirement_classLoader = _EnumLoader( +LoadListingRequirement_classLoader: Final = _EnumLoader( 
("LoadListingRequirement",), "LoadListingRequirement_class" ) -uri_LoadListingRequirement_classLoader_False_True_None_None = _URILoader( +uri_LoadListingRequirement_classLoader_False_True_None_None: Final = _URILoader( LoadListingRequirement_classLoader, False, True, None, None ) -union_of_None_type_or_inttype_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_inttype_or_ExpressionLoader: Final = _UnionLoader( ( None_type, inttype, ExpressionLoader, ) ) -union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( - ( - None_type, - strtype, - ExpressionLoader, - array_of_strtype, +union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype: Final = ( + _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + array_of_strtype, + ) ) ) -union_of_None_type_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_ExpressionLoader: Final = _UnionLoader( ( None_type, ExpressionLoader, ) ) -union_of_None_type_or_CommandLineBindingLoader = _UnionLoader( +union_of_None_type_or_CommandLineBindingLoader: Final = _UnionLoader( ( None_type, CommandLineBindingLoader, ) ) -union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, CommandInputRecordSchemaLoader, @@ -30230,10 +26036,14 @@ def save( strtype, ) ) -array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( 
union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype ) -union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, CommandInputRecordSchemaLoader, @@ -30243,31 +26053,39 @@ def save( array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, ) ) -typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, 2, "v1.1", ) -array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader) 
-union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader( +array_of_CommandInputRecordFieldLoader: Final = _ArrayLoader( + CommandInputRecordFieldLoader +) +union_of_None_type_or_array_of_CommandInputRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_CommandInputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader: Final = ( _IdMapLoader( union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" ) ) -uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( +uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, False, True, 2, None, ) -union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, CommandOutputRecordSchemaLoader, @@ -30276,10 +26094,14 @@ def save( strtype, ) ) 
-array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader( +array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype: ( + Final +) = _ArrayLoader( union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype ) -union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, CommandOutputRecordSchemaLoader, @@ -30289,37 +26111,45 @@ def save( array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, ) ) -typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( 
union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, 2, "v1.1", ) -union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader( +union_of_None_type_or_CommandOutputBindingLoader: Final = _UnionLoader( ( None_type, CommandOutputBindingLoader, ) ) -array_of_CommandOutputRecordFieldLoader = _ArrayLoader(CommandOutputRecordFieldLoader) -union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader( +array_of_CommandOutputRecordFieldLoader: Final = _ArrayLoader( + CommandOutputRecordFieldLoader +) +union_of_None_type_or_array_of_CommandOutputRecordFieldLoader: Final = _UnionLoader( ( None_type, array_of_CommandOutputRecordFieldLoader, ) ) -idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader: Final = ( _IdMapLoader( union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" ) ) -uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None = _URILoader( +uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2_None: ( + Final +) = _URILoader( 
union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, False, True, 2, None, ) -union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, stdinLoader, @@ -30330,12 +26160,16 @@ def save( array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, ) ) -typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( 
union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, 2, "v1.1", ) -union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( +union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype: ( + Final +) = _UnionLoader( ( CWLTypeLoader, stdoutLoader, @@ -30347,72 +26181,82 @@ def save( array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, ) ) -typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( +typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2: ( + Final +) = _TypeDSLLoader( 
union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, 2, "v1.1", ) -CommandLineTool_classLoader = _EnumLoader(("CommandLineTool",), "CommandLineTool_class") -uri_CommandLineTool_classLoader_False_True_None_None = _URILoader( +CommandLineTool_classLoader: Final = _EnumLoader( + ("CommandLineTool",), "CommandLineTool_class" +) +uri_CommandLineTool_classLoader_False_True_None_None: Final = _URILoader( CommandLineTool_classLoader, False, True, None, None ) -array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader) -idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader( +array_of_CommandInputParameterLoader: Final = _ArrayLoader(CommandInputParameterLoader) +idmap_inputs_array_of_CommandInputParameterLoader: Final = _IdMapLoader( array_of_CommandInputParameterLoader, "id", "type" ) -array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader) -idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader( +array_of_CommandOutputParameterLoader: Final = _ArrayLoader( + CommandOutputParameterLoader +) +idmap_outputs_array_of_CommandOutputParameterLoader: Final = _IdMapLoader( array_of_CommandOutputParameterLoader, "id", "type" ) -union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( +union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader: Final = _UnionLoader( ( strtype, ExpressionLoader, CommandLineBindingLoader, ) ) -array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader: Final = ( _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) ) 
-union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader: ( + Final +) = _UnionLoader( ( None_type, array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, ) ) -array_of_inttype = _ArrayLoader(inttype) -union_of_None_type_or_array_of_inttype = _UnionLoader( +array_of_inttype: Final = _ArrayLoader(inttype) +union_of_None_type_or_array_of_inttype: Final = _UnionLoader( ( None_type, array_of_inttype, ) ) -DockerRequirement_classLoader = _EnumLoader( +DockerRequirement_classLoader: Final = _EnumLoader( ("DockerRequirement",), "DockerRequirement_class" ) -uri_DockerRequirement_classLoader_False_True_None_None = _URILoader( +uri_DockerRequirement_classLoader_False_True_None_None: Final = _URILoader( DockerRequirement_classLoader, False, True, None, None ) -SoftwareRequirement_classLoader = _EnumLoader( +SoftwareRequirement_classLoader: Final = _EnumLoader( ("SoftwareRequirement",), "SoftwareRequirement_class" ) -uri_SoftwareRequirement_classLoader_False_True_None_None = _URILoader( +uri_SoftwareRequirement_classLoader_False_True_None_None: Final = _URILoader( SoftwareRequirement_classLoader, False, True, None, None ) -array_of_SoftwarePackageLoader = _ArrayLoader(SoftwarePackageLoader) -idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader( +array_of_SoftwarePackageLoader: Final = _ArrayLoader(SoftwarePackageLoader) +idmap_packages_array_of_SoftwarePackageLoader: Final = _IdMapLoader( array_of_SoftwarePackageLoader, "package", "specs" ) -uri_union_of_None_type_or_array_of_strtype_False_False_None_True = _URILoader( +uri_union_of_None_type_or_array_of_strtype_False_False_None_True: Final = _URILoader( union_of_None_type_or_array_of_strtype, False, False, None, True ) -InitialWorkDirRequirement_classLoader = _EnumLoader( +InitialWorkDirRequirement_classLoader: Final = _EnumLoader( 
("InitialWorkDirRequirement",), "InitialWorkDirRequirement_class" ) -uri_InitialWorkDirRequirement_classLoader_False_True_None_None = _URILoader( +uri_InitialWorkDirRequirement_classLoader_False_True_None_None: Final = _URILoader( InitialWorkDirRequirement_classLoader, False, True, None, None ) -union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( +union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader: ( + Final +) = _UnionLoader( ( None_type, DirentLoader, @@ -30422,38 +26266,42 @@ def save( array_of_union_of_FileLoader_or_DirectoryLoader, ) ) -array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( +array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader: ( + Final +) = _ArrayLoader( union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader ) -union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( +union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader: ( + Final +) = _UnionLoader( ( ExpressionLoader, array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, ) ) -EnvVarRequirement_classLoader = _EnumLoader( +EnvVarRequirement_classLoader: Final = _EnumLoader( ("EnvVarRequirement",), "EnvVarRequirement_class" ) 
-uri_EnvVarRequirement_classLoader_False_True_None_None = _URILoader( +uri_EnvVarRequirement_classLoader_False_True_None_None: Final = _URILoader( EnvVarRequirement_classLoader, False, True, None, None ) -array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader) -idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader( +array_of_EnvironmentDefLoader: Final = _ArrayLoader(EnvironmentDefLoader) +idmap_envDef_array_of_EnvironmentDefLoader: Final = _IdMapLoader( array_of_EnvironmentDefLoader, "envName", "envValue" ) -ShellCommandRequirement_classLoader = _EnumLoader( +ShellCommandRequirement_classLoader: Final = _EnumLoader( ("ShellCommandRequirement",), "ShellCommandRequirement_class" ) -uri_ShellCommandRequirement_classLoader_False_True_None_None = _URILoader( +uri_ShellCommandRequirement_classLoader_False_True_None_None: Final = _URILoader( ShellCommandRequirement_classLoader, False, True, None, None ) -ResourceRequirement_classLoader = _EnumLoader( +ResourceRequirement_classLoader: Final = _EnumLoader( ("ResourceRequirement",), "ResourceRequirement_class" ) -uri_ResourceRequirement_classLoader_False_True_None_None = _URILoader( +uri_ResourceRequirement_classLoader_False_True_None_None: Final = _URILoader( ResourceRequirement_classLoader, False, True, None, None ) -union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader = _UnionLoader( +union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader: Final = _UnionLoader( ( None_type, inttype, @@ -30461,232 +26309,230 @@ def save( ExpressionLoader, ) ) -WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") -uri_WorkReuse_classLoader_False_True_None_None = _URILoader( +WorkReuse_classLoader: Final = _EnumLoader(("WorkReuse",), "WorkReuse_class") +uri_WorkReuse_classLoader_False_True_None_None: Final = _URILoader( WorkReuse_classLoader, False, True, None, None ) -union_of_booltype_or_ExpressionLoader = _UnionLoader( +union_of_booltype_or_ExpressionLoader: Final = _UnionLoader( ( 
booltype, ExpressionLoader, ) ) -NetworkAccess_classLoader = _EnumLoader(("NetworkAccess",), "NetworkAccess_class") -uri_NetworkAccess_classLoader_False_True_None_None = _URILoader( +NetworkAccess_classLoader: Final = _EnumLoader( + ("NetworkAccess",), "NetworkAccess_class" +) +uri_NetworkAccess_classLoader_False_True_None_None: Final = _URILoader( NetworkAccess_classLoader, False, True, None, None ) -InplaceUpdateRequirement_classLoader = _EnumLoader( +InplaceUpdateRequirement_classLoader: Final = _EnumLoader( ("InplaceUpdateRequirement",), "InplaceUpdateRequirement_class" ) -uri_InplaceUpdateRequirement_classLoader_False_True_None_None = _URILoader( +uri_InplaceUpdateRequirement_classLoader_False_True_None_None: Final = _URILoader( InplaceUpdateRequirement_classLoader, False, True, None, None ) -ToolTimeLimit_classLoader = _EnumLoader(("ToolTimeLimit",), "ToolTimeLimit_class") -uri_ToolTimeLimit_classLoader_False_True_None_None = _URILoader( +ToolTimeLimit_classLoader: Final = _EnumLoader( + ("ToolTimeLimit",), "ToolTimeLimit_class" +) +uri_ToolTimeLimit_classLoader_False_True_None_None: Final = _URILoader( ToolTimeLimit_classLoader, False, True, None, None ) -union_of_inttype_or_ExpressionLoader = _UnionLoader( +union_of_inttype_or_ExpressionLoader: Final = _UnionLoader( ( inttype, ExpressionLoader, ) ) -union_of_None_type_or_InputBindingLoader = _UnionLoader( +union_of_None_type_or_InputBindingLoader: Final = _UnionLoader( ( None_type, InputBindingLoader, ) ) -ExpressionTool_classLoader = _EnumLoader(("ExpressionTool",), "ExpressionTool_class") -uri_ExpressionTool_classLoader_False_True_None_None = _URILoader( +ExpressionTool_classLoader: Final = _EnumLoader( + ("ExpressionTool",), "ExpressionTool_class" +) +uri_ExpressionTool_classLoader_False_True_None_None: Final = _URILoader( ExpressionTool_classLoader, False, True, None, None ) -array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader) 
-idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( +array_of_WorkflowInputParameterLoader: Final = _ArrayLoader( + WorkflowInputParameterLoader +) +idmap_inputs_array_of_WorkflowInputParameterLoader: Final = _IdMapLoader( array_of_WorkflowInputParameterLoader, "id", "type" ) -array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( +array_of_ExpressionToolOutputParameterLoader: Final = _ArrayLoader( ExpressionToolOutputParameterLoader ) -idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( +idmap_outputs_array_of_ExpressionToolOutputParameterLoader: Final = _IdMapLoader( array_of_ExpressionToolOutputParameterLoader, "id", "type" ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1, None +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None: Final = ( + _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1, None) ) -union_of_None_type_or_LinkMergeMethodLoader = _UnionLoader( +union_of_None_type_or_LinkMergeMethodLoader: Final = _UnionLoader( ( None_type, LinkMergeMethodLoader, ) ) -union_of_None_type_or_PickValueMethodLoader = _UnionLoader( +union_of_None_type_or_PickValueMethodLoader: Final = _UnionLoader( ( None_type, PickValueMethodLoader, ) ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2, None +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2_None: Final = ( + _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2, None) ) -array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader) -idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader( +array_of_WorkflowStepInputLoader: Final = _ArrayLoader(WorkflowStepInputLoader) +idmap_in__array_of_WorkflowStepInputLoader: Final = _IdMapLoader( 
array_of_WorkflowStepInputLoader, "id", "source" ) -union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( +union_of_strtype_or_WorkflowStepOutputLoader: Final = _UnionLoader( ( strtype, WorkflowStepOutputLoader, ) ) -array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader( +array_of_union_of_strtype_or_WorkflowStepOutputLoader: Final = _ArrayLoader( union_of_strtype_or_WorkflowStepOutputLoader ) -union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( +union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader: Final = _UnionLoader( (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) ) -uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None = _URILoader( +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None_None: ( + Final +) = _URILoader( union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, True, False, None, None, ) -array_of_Any_type = _ArrayLoader(Any_type) -union_of_None_type_or_array_of_Any_type = _UnionLoader( +array_of_Any_type: Final = _ArrayLoader(Any_type) +union_of_None_type_or_array_of_Any_type: Final = _UnionLoader( ( None_type, array_of_Any_type, ) ) -idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( +idmap_hints_union_of_None_type_or_array_of_Any_type: Final = _IdMapLoader( union_of_None_type_or_array_of_Any_type, "class", "None" ) -union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader = _UnionLoader( +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader: ( + Final +) = _UnionLoader( ( strtype, CommandLineToolLoader, ExpressionToolLoader, WorkflowLoader, OperationLoader, - ProcessGeneratorLoader, ) ) -uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader_False_False_None_None = _URILoader( - 
union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader, +uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None_None: ( + Final +) = _URILoader( + union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, False, False, None, None, ) -uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None = _URILoader( - union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0, None +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0_None: Final = ( + _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0, None) ) -union_of_None_type_or_ScatterMethodLoader = _UnionLoader( +union_of_None_type_or_ScatterMethodLoader: Final = _UnionLoader( ( None_type, ScatterMethodLoader, ) ) -uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None = _URILoader( +uri_union_of_None_type_or_ScatterMethodLoader_False_True_None_None: Final = _URILoader( union_of_None_type_or_ScatterMethodLoader, False, True, None, None ) -Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") -uri_Workflow_classLoader_False_True_None_None = _URILoader( +Workflow_classLoader: Final = _EnumLoader(("Workflow",), "Workflow_class") +uri_Workflow_classLoader_False_True_None_None: Final = _URILoader( Workflow_classLoader, False, True, None, None ) -array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) -idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( +array_of_WorkflowOutputParameterLoader: Final = _ArrayLoader( + WorkflowOutputParameterLoader +) +idmap_outputs_array_of_WorkflowOutputParameterLoader: Final = _IdMapLoader( array_of_WorkflowOutputParameterLoader, "id", "type" ) -array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader) -union_of_array_of_WorkflowStepLoader = 
_UnionLoader((array_of_WorkflowStepLoader,)) -idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader( +array_of_WorkflowStepLoader: Final = _ArrayLoader(WorkflowStepLoader) +union_of_array_of_WorkflowStepLoader: Final = _UnionLoader( + (array_of_WorkflowStepLoader,) +) +idmap_steps_union_of_array_of_WorkflowStepLoader: Final = _IdMapLoader( union_of_array_of_WorkflowStepLoader, "id", "None" ) -SubworkflowFeatureRequirement_classLoader = _EnumLoader( +SubworkflowFeatureRequirement_classLoader: Final = _EnumLoader( ("SubworkflowFeatureRequirement",), "SubworkflowFeatureRequirement_class" ) -uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None = _URILoader( +uri_SubworkflowFeatureRequirement_classLoader_False_True_None_None: Final = _URILoader( SubworkflowFeatureRequirement_classLoader, False, True, None, None ) -ScatterFeatureRequirement_classLoader = _EnumLoader( +ScatterFeatureRequirement_classLoader: Final = _EnumLoader( ("ScatterFeatureRequirement",), "ScatterFeatureRequirement_class" ) -uri_ScatterFeatureRequirement_classLoader_False_True_None_None = _URILoader( +uri_ScatterFeatureRequirement_classLoader_False_True_None_None: Final = _URILoader( ScatterFeatureRequirement_classLoader, False, True, None, None ) -MultipleInputFeatureRequirement_classLoader = _EnumLoader( +MultipleInputFeatureRequirement_classLoader: Final = _EnumLoader( ("MultipleInputFeatureRequirement",), "MultipleInputFeatureRequirement_class" ) -uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None = _URILoader( - MultipleInputFeatureRequirement_classLoader, False, True, None, None +uri_MultipleInputFeatureRequirement_classLoader_False_True_None_None: Final = ( + _URILoader(MultipleInputFeatureRequirement_classLoader, False, True, None, None) ) -StepInputExpressionRequirement_classLoader = _EnumLoader( +StepInputExpressionRequirement_classLoader: Final = _EnumLoader( ("StepInputExpressionRequirement",), "StepInputExpressionRequirement_class" ) 
-uri_StepInputExpressionRequirement_classLoader_False_True_None_None = _URILoader( +uri_StepInputExpressionRequirement_classLoader_False_True_None_None: Final = _URILoader( StepInputExpressionRequirement_classLoader, False, True, None, None ) -Operation_classLoader = _EnumLoader(("Operation",), "Operation_class") -uri_Operation_classLoader_False_True_None_None = _URILoader( +Operation_classLoader: Final = _EnumLoader(("Operation",), "Operation_class") +uri_Operation_classLoader_False_True_None_None: Final = _URILoader( Operation_classLoader, False, True, None, None ) -array_of_OperationInputParameterLoader = _ArrayLoader(OperationInputParameterLoader) -idmap_inputs_array_of_OperationInputParameterLoader = _IdMapLoader( - array_of_OperationInputParameterLoader, "id", "type" -) -array_of_OperationOutputParameterLoader = _ArrayLoader(OperationOutputParameterLoader) -idmap_outputs_array_of_OperationOutputParameterLoader = _IdMapLoader( - array_of_OperationOutputParameterLoader, "id", "type" -) -uri_strtype_False_True_None_None = _URILoader(strtype, False, True, None, None) -uri_array_of_strtype_False_False_0_None = _URILoader( - array_of_strtype, False, False, 0, None -) -union_of_strtype_or_array_of_strtype = _UnionLoader( - ( - strtype, - array_of_strtype, - ) +array_of_OperationInputParameterLoader: Final = _ArrayLoader( + OperationInputParameterLoader ) -union_of_None_type_or_Any_type = _UnionLoader( - ( - None_type, - Any_type, - ) +idmap_inputs_array_of_OperationInputParameterLoader: Final = _IdMapLoader( + array_of_OperationInputParameterLoader, "id", "type" ) -array_of_LoopInputLoader = _ArrayLoader(LoopInputLoader) -idmap_loop_array_of_LoopInputLoader = _IdMapLoader( - array_of_LoopInputLoader, "id", "loopSource" +array_of_OperationOutputParameterLoader: Final = _ArrayLoader( + OperationOutputParameterLoader ) -LoopOutputModesLoader = _EnumLoader( - ( - "last", - "all", - ), - "LoopOutputModes", +idmap_outputs_array_of_OperationOutputParameterLoader: Final = 
_IdMapLoader( + array_of_OperationOutputParameterLoader, "id", "type" ) -union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader = _UnionLoader( +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader: ( + Final +) = _UnionLoader( ( CommandLineToolLoader, ExpressionToolLoader, WorkflowLoader, OperationLoader, - ProcessGeneratorLoader, ) ) -array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader = _ArrayLoader( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader: ( + Final +) = _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader ) -union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader = _UnionLoader( +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader: ( + Final +) = _UnionLoader( ( CommandLineToolLoader, ExpressionToolLoader, WorkflowLoader, OperationLoader, - ProcessGeneratorLoader, - array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader, + array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, ) ) @@ -30706,15 +26552,15 @@ def save( def load_document( doc: Any, - baseuri: Optional[str] = None, - loadingOptions: Optional[LoadingOptions] = None, + baseuri: str | None = None, + loadingOptions: LoadingOptions | None = None, ) -> 
Any: if baseuri is None: baseuri = file_uri(os.getcwd()) + "/" if loadingOptions is None: loadingOptions = LoadingOptions() result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader, + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, doc, baseuri, loadingOptions, @@ -30724,16 +26570,16 @@ def load_document( def load_document_with_metadata( doc: Any, - baseuri: Optional[str] = None, - loadingOptions: Optional[LoadingOptions] = None, - addl_metadata_fields: Optional[MutableSequence[str]] = None, + baseuri: str | None = None, + loadingOptions: LoadingOptions | None = None, + addl_metadata_fields: MutableSequence[str] | None = None, ) -> Any: if baseuri is None: baseuri = file_uri(os.getcwd()) + "/" if loadingOptions is None: loadingOptions = LoadingOptions(fileuri=baseuri) return _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader, + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, doc, baseuri, loadingOptions, @@ -30744,7 +26590,7 @@ def load_document_with_metadata( def load_document_by_string( string: Any, uri: str, - loadingOptions: Optional[LoadingOptions] = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: yaml = yaml_no_ts() result = yaml.load(string) @@ -30754,7 +26600,7 @@ def load_document_by_string( loadingOptions = 
LoadingOptions(fileuri=uri) result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader, + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, result, uri, loadingOptions, @@ -30765,7 +26611,7 @@ def load_document_by_string( def load_document_by_yaml( yaml: Any, uri: str, - loadingOptions: Optional[LoadingOptions] = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: """ Shortcut to load via a YAML object. @@ -30777,7 +26623,7 @@ def load_document_by_yaml( loadingOptions = LoadingOptions(fileuri=uri) result, metadata = _document_load( - union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_ProcessGeneratorLoader, + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, yaml, uri, loadingOptions, diff --git a/src/cwl_utils/parser/cwl_v1_2_utils.py b/src/cwl_utils/parser/cwl_v1_2_utils.py index c0e5bf50..f2be05e5 100644 --- a/src/cwl_utils/parser/cwl_v1_2_utils.py +++ b/src/cwl_utils/parser/cwl_v1_2_utils.py @@ -9,6 +9,8 @@ from urllib.parse import urldefrag from schema_salad.exceptions import ValidationException +from schema_salad.metaschema import RecordSchema, ArraySchema +from schema_salad.runtime import shortname, LoadingOptions, save, file_uri from schema_salad.sourceline import SourceLine, add_lc_filename from schema_salad.utils import aslist, json_dumps, yaml_no_ts @@ -26,7 
+28,7 @@ def _compare_records( - src: cwl.RecordSchema, sink: cwl.RecordSchema, strict: bool = False + src: RecordSchema, sink: RecordSchema, strict: bool = False ) -> bool: """ Compare two records, ensuring they have compatible fields. @@ -34,10 +36,8 @@ def _compare_records( This handles normalizing record names, which will be relative to workflow step, so that they can be compared. """ - srcfields = {cwl.shortname(field.name): field.type_ for field in (src.fields or {})} - sinkfields = { - cwl.shortname(field.name): field.type_ for field in (sink.fields or {}) - } + srcfields = {shortname(field.name): field.type_ for field in (src.fields or {})} + sinkfields = {shortname(field.name): field.type_ for field in (sink.fields or {})} for key in sinkfields.keys(): if ( not can_assign_src_to_sink( @@ -60,14 +60,14 @@ def _compare_records( def _compare_type(type1: Any, type2: Any) -> bool: match (type1, type1): - case cwl.ArraySchema() as t1, cwl.ArraySchema() as t2: + case ArraySchema() as t1, ArraySchema() as t2: return _compare_type(t1.items, t2.items) - case cwl.RecordSchema(), cwl.RecordSchema(): + case RecordSchema(), RecordSchema(): fields1 = { - cwl.shortname(field.name): field.type_ for field in (type1.fields or {}) + shortname(field.name): field.type_ for field in (type1.fields or {}) } fields2 = { - cwl.shortname(field.name): field.type_ for field in (type2.fields or {}) + shortname(field.name): field.type_ for field in (type2.fields or {}) } if fields1.keys() != fields2.keys(): return False @@ -82,16 +82,6 @@ def _compare_type(type1: Any, type2: Any) -> bool: return bool(type1 == type2) -def _is_all_output_method_loop_step( - param_to_step: dict[str, cwl.WorkflowStep], parm_id: str -) -> bool: - if (source_step := param_to_step.get(parm_id)) is not None: - for requirement in source_step.requirements or []: - if isinstance(requirement, cwl.Loop) and requirement.outputMethod == "all": - return True - return False - - def _is_conditional_step( param_to_step: 
dict[str, cwl.WorkflowStep], parm_id: str ) -> bool: @@ -104,9 +94,9 @@ def _is_conditional_step( def _inputfile_load( doc: str | MutableMapping[str, Any] | MutableSequence[Any], baseuri: str, - loadingOptions: cwl.LoadingOptions, + loadingOptions: LoadingOptions, addl_metadata_fields: MutableSequence[str] | None = None, -) -> tuple[Any, cwl.LoadingOptions]: +) -> tuple[Any, LoadingOptions]: loader = cwl.CWLInputFileLoader match doc: case str(): @@ -120,9 +110,7 @@ def _inputfile_load( yaml = yaml_no_ts() result = yaml.load(textIO) add_lc_filename(result, doc_url) - loadingOptions = cwl.LoadingOptions( - copyfrom=loadingOptions, fileuri=doc_url - ) + loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url) _inputfile_load( result, doc_url, @@ -136,7 +124,7 @@ def _inputfile_load( if mf in doc: addl_metadata[mf] = doc[mf] - loadingOptions = cwl.LoadingOptions( + loadingOptions = LoadingOptions( copyfrom=loadingOptions, baseuri=baseuri, addl_metadata=addl_metadata, @@ -176,9 +164,9 @@ def can_assign_src_to_sink(src: Any, sink: Any, strict: bool = False) -> bool: """ if "Any" in (src, sink): return True - if isinstance(src, cwl.ArraySchema) and isinstance(sink, cwl.ArraySchema): + if isinstance(src, ArraySchema) and isinstance(sink, ArraySchema): return can_assign_src_to_sink(src.items, sink.items, strict) - if isinstance(src, cwl.RecordSchema) and isinstance(sink, cwl.RecordSchema): + if isinstance(src, RecordSchema) and isinstance(sink, RecordSchema): return _compare_records(src, sink, strict) if isinstance(src, MutableSequence): if strict: @@ -250,11 +238,6 @@ def check_all_types( ) ) type_dict[src_dict[parm_id].id] = src_typ - if _is_all_output_method_loop_step(param_to_step, parm_id): - src_typ = type_dict[src_dict[parm_id].id] - type_dict[src_dict[parm_id].id] = cwl.ArraySchema( - items=src_typ, type_="array" - ) else: if isinstance(sourceField, MutableSequence): parm_id = cast(str, sourceField[0]) @@ -293,11 +276,6 @@ def check_all_types( ) ) 
type_dict[src_dict[parm_id].id] = src_typ - if _is_all_output_method_loop_step(param_to_step, parm_id): - src_typ = type_dict[src_dict[parm_id].id] - type_dict[src_dict[parm_id].id] = cwl.ArraySchema( - items=src_typ, type_="array" - ) for src in srcs_of_sink: check_result = check_types( type_dict[cast(str, src.id)], @@ -334,7 +312,7 @@ def check_types( return "exception" case "merge_nested": return check_types( - cwl.ArraySchema(items=srctype, type_="array"), + ArraySchema(items=srctype, type_="array"), sinktype, None, None, @@ -414,13 +392,13 @@ def convert_stdstreams_to_files(clt: cwl.CommandLineTool) -> None: def load_inputfile( doc: Any, baseuri: str | None = None, - loadingOptions: cwl.LoadingOptions | None = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: """Load a CWL v1.2 input file from a serialized YAML string or a YAML object.""" if baseuri is None: - baseuri = cwl.file_uri(str(Path.cwd())) + "/" + baseuri = file_uri(str(Path.cwd())) + "/" if loadingOptions is None: - loadingOptions = cwl.LoadingOptions() + loadingOptions = LoadingOptions() result, metadata = _inputfile_load( doc, @@ -433,14 +411,14 @@ def load_inputfile( def load_inputfile_by_string( string: Any, uri: str, - loadingOptions: cwl.LoadingOptions | None = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: """Load a CWL v1.2 input file from a serialized YAML string.""" result = yaml_no_ts().load(string) add_lc_filename(result, uri) if loadingOptions is None: - loadingOptions = cwl.LoadingOptions(fileuri=uri) + loadingOptions = LoadingOptions(fileuri=uri) result, metadata = _inputfile_load( result, @@ -453,13 +431,13 @@ def load_inputfile_by_string( def load_inputfile_by_yaml( yaml: Any, uri: str, - loadingOptions: cwl.LoadingOptions | None = None, + loadingOptions: LoadingOptions | None = None, ) -> Any: """Load a CWL v1.2 input file from a YAML object.""" add_lc_filename(yaml, uri) if loadingOptions is None: - loadingOptions = cwl.LoadingOptions(fileuri=uri) + 
loadingOptions = LoadingOptions(fileuri=uri) result, metadata = _inputfile_load( yaml, @@ -473,9 +451,9 @@ def merge_flatten_type(src: Any) -> Any: """Return the merge flattened type of the source type.""" if isinstance(src, MutableSequence): return [merge_flatten_type(t) for t in src] - if isinstance(src, cwl.ArraySchema): + if isinstance(src, ArraySchema): return src - return cwl.ArraySchema(type_="array", items=src) + return ArraySchema(type_="array", items=src) def type_for_step_input( @@ -492,7 +470,7 @@ def type_for_step_input( if cast(str, step_input.id).split("#")[-1] == in_.id.split("#")[-1]: input_type = step_input.type_ if step.scatter is not None and in_.id in aslist(step.scatter): - input_type = cwl.ArraySchema(items=input_type, type_="array") + input_type = ArraySchema(items=input_type, type_="array") return input_type return "Any" @@ -514,16 +492,14 @@ def type_for_step_output( if step.scatter is not None: if step.scatterMethod == "nested_crossproduct": for _ in range(len(aslist(step.scatter))): - output_type = cwl.ArraySchema( - items=output_type, type_="array" - ) + output_type = ArraySchema(items=output_type, type_="array") else: - output_type = cwl.ArraySchema(items=output_type, type_="array") + output_type = ArraySchema(items=output_type, type_="array") return output_type raise ValidationException( "param {} not found in {}.".format( sourcename, - yaml_dumps(cwl.save(step)), + yaml_dumps(save(step)), ) ) @@ -543,15 +519,15 @@ def type_for_source( if scatter_context[0] is not None: if scatter_context[0][1] == "nested_crossproduct": for _ in range(scatter_context[0][0]): - new_type = cwl.ArraySchema(items=new_type, type_="array") + new_type = ArraySchema(items=new_type, type_="array") else: - new_type = cwl.ArraySchema(items=new_type, type_="array") + new_type = ArraySchema(items=new_type, type_="array") if linkMerge == "merge_nested": - new_type = cwl.ArraySchema(items=new_type, type_="array") + new_type = ArraySchema(items=new_type, 
type_="array") elif linkMerge == "merge_flattened": new_type = merge_flatten_type(new_type) if pickValue is not None: - if isinstance(new_type, cwl.ArraySchema): + if isinstance(new_type, ArraySchema): if pickValue in ("first_non_null", "the_only_non_null"): new_type = new_type.items return new_type @@ -569,20 +545,20 @@ def type_for_source( if sc is not None: if sc[1] == "nested_crossproduct": for _ in range(sc[0]): - cur_type = cwl.ArraySchema(items=cur_type, type_="array") + cur_type = ArraySchema(items=cur_type, type_="array") else: - cur_type = cwl.ArraySchema(items=cur_type, type_="array") + cur_type = ArraySchema(items=cur_type, type_="array") new_type.append(cur_type) if len(new_type) == 1: new_type = new_type[0] if linkMerge == "merge_nested": - new_type = cwl.ArraySchema(items=new_type, type_="array") + new_type = ArraySchema(items=new_type, type_="array") elif linkMerge == "merge_flattened": new_type = merge_flatten_type(new_type) elif isinstance(sourcenames, list) and len(sourcenames) > 1: - new_type = cwl.ArraySchema(items=new_type, type_="array") + new_type = ArraySchema(items=new_type, type_="array") if pickValue is not None: - if isinstance(new_type, cwl.ArraySchema): + if isinstance(new_type, ArraySchema): if pickValue in ("first_non_null", "the_only_non_null"): new_type = new_type.items return new_type @@ -678,7 +654,7 @@ def param_for_source_id( raise WorkflowException( "param {} not found in {}\n{}.".format( sourcename, - yaml_dumps(cwl.save(process)), - (f" or\n {yaml_dumps(cwl.save(parent))}" if parent is not None else ""), + yaml_dumps(save(process)), + (f" or\n {yaml_dumps(save(parent))}" if parent is not None else ""), ) ) diff --git a/src/cwl_utils/parser/utils.py b/src/cwl_utils/parser/utils.py index 967beb12..5be110a0 100644 --- a/src/cwl_utils/parser/utils.py +++ b/src/cwl_utils/parser/utils.py @@ -4,11 +4,11 @@ import logging from collections.abc import MutableSequence from pathlib import Path -from types import ModuleType -from typing 
import Any, Optional, cast +from typing import Any, cast from urllib.parse import unquote_plus, urlparse from schema_salad.exceptions import ValidationException +from schema_salad.runtime import file_uri, shortname, save from schema_salad.sourceline import SourceLine, strip_dup_lineno from schema_salad.utils import json_dumps, yaml_no_ts @@ -64,17 +64,7 @@ def load_inputfile_by_uri( baseuri: str = real_path if loadingOptions is None: - match version: - case "v1.0": - loadingOptions = cwl_v1_0.LoadingOptions(fileuri=baseuri) - case "v1.1": - loadingOptions = cwl_v1_1.LoadingOptions(fileuri=baseuri) - case "v1.2": - loadingOptions = cwl_v1_2.LoadingOptions(fileuri=baseuri) - case _: - raise ValidationException( - f"Version error. Did not recognise {version} as a CWL version" - ) + loadingOptions = LoadingOptions(fileuri=baseuri) doc = loadingOptions.fetcher.fetch_text(real_path) return load_inputfile_by_string(version, doc, baseuri, loadingOptions) @@ -88,7 +78,7 @@ def load_inputfile( ) -> Any: """Load a CWL input file from a serialized YAML string or a YAML object.""" if baseuri is None: - baseuri = cwl_v1_0.file_uri(str(Path.cwd())) + "/" + baseuri = file_uri(str(Path.cwd())) + "/" if isinstance(doc, str): return load_inputfile_by_string(version, doc, baseuri, loadingOptions) return load_inputfile_by_yaml(version, doc, baseuri, loadingOptions) @@ -114,17 +104,11 @@ def load_inputfile_by_yaml( """Load a CWL input file from a YAML object.""" match version: case "v1.0": - return cwl_v1_0_utils.load_inputfile_by_yaml( - yaml, uri, cast(Optional[cwl_v1_0.LoadingOptions], loadingOptions) - ) + return cwl_v1_0_utils.load_inputfile_by_yaml(yaml, uri, loadingOptions) case "v1.1": - return cwl_v1_1_utils.load_inputfile_by_yaml( - yaml, uri, cast(Optional[cwl_v1_1.LoadingOptions], loadingOptions) - ) + return cwl_v1_1_utils.load_inputfile_by_yaml(yaml, uri, loadingOptions) case "v1.2": - return cwl_v1_2_utils.load_inputfile_by_yaml( - yaml, uri, 
cast(Optional[cwl_v1_2.LoadingOptions], loadingOptions) - ) + return cwl_v1_2_utils.load_inputfile_by_yaml(yaml, uri, loadingOptions) case None: raise ValidationException("could not get the cwlVersion") case _: @@ -204,12 +188,10 @@ def static_checker(workflow: cwl_utils.parser.Workflow) -> None: **{param.id: param.type_ for param in workflow.outputs}, } - parser: ModuleType step_inputs_val: dict[str, Any] workflow_outputs_val: dict[str, Any] match workflow.cwlVersion: case "v1.0": - parser = cwl_v1_0 step_inputs_val = cwl_v1_0_utils.check_all_types( src_dict, step_inputs, type_dict ) @@ -217,7 +199,6 @@ def static_checker(workflow: cwl_utils.parser.Workflow) -> None: src_dict, workflow.outputs, type_dict ) case "v1.1": - parser = cwl_v1_1 step_inputs_val = cwl_v1_1_utils.check_all_types( src_dict, step_inputs, type_dict ) @@ -225,7 +206,6 @@ def static_checker(workflow: cwl_utils.parser.Workflow) -> None: src_dict, workflow.outputs, type_dict ) case "v1.2": - parser = cwl_v1_2 step_inputs_val = cwl_v1_2_utils.check_all_types( src_dict, step_inputs, param_to_step, type_dict ) @@ -248,16 +228,16 @@ def static_checker(workflow: cwl_utils.parser.Workflow) -> None: SourceLine(src, "type").makeError( "Source '%s' of type %s may be incompatible" % ( - parser.shortname(src.id), - json_dumps(parser.save(type_dict[src.id])), + shortname(src.id), + json_dumps(save(type_dict[src.id])), ) ) + "\n" + SourceLine(sink, "type").makeError( " with sink '%s' of type %s" % ( - parser.shortname(sink.id), - json_dumps(parser.save(type_dict[sink.id])), + shortname(sink.id), + json_dumps(save(type_dict[sink.id])), ) ) ) @@ -281,14 +261,14 @@ def static_checker(workflow: cwl_utils.parser.Workflow) -> None: msg = ( SourceLine(src, "type").makeError( "Source '%s' of type %s is incompatible" - % (parser.shortname(src.id), json_dumps(parser.save(type_dict[src.id]))) + % (shortname(src.id), json_dumps(save(type_dict[src.id]))) ) + "\n" + SourceLine(sink, "type").makeError( " with sink '%s' of 
type %s" % ( - parser.shortname(sink.id), - json_dumps(parser.save(type_dict[sink.id])), + shortname(sink.id), + json_dumps(save(type_dict[sink.id])), ) ) ) @@ -314,7 +294,7 @@ def static_checker(workflow: cwl_utils.parser.Workflow) -> None: ): msg = SourceLine(sink).makeError( "Required parameter '%s' does not have source, default, or valueFrom expression" - % parser.shortname(sink.id) + % shortname(sink.id) ) exception_msgs.append(msg) diff --git a/src/cwl_utils/testdata/extensions/all-output-loop_v1_2.cwl b/src/cwl_utils/testdata/extensions/all-output-loop_v1_2.cwl deleted file mode 100755 index 096be519..00000000 --- a/src/cwl_utils/testdata/extensions/all-output-loop_v1_2.cwl +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env cwl-runner -cwlVersion: v1.2 -class: Workflow -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -requirements: - InlineJavascriptRequirement: {} -inputs: - i1: int -outputs: - o1: - type: int[] - outputSource: subworkflow/o1 -steps: - subworkflow: - run: - class: ExpressionTool - inputs: - i1: int - outputs: - o1: int - expression: > - ${return {'o1': inputs.i1 + 1};} - in: - i1: i1 - out: [o1] - requirements: - cwltool:Loop: - loopWhen: $(inputs.i1 < 10) - loop: - i1: o1 - outputMethod: all diff --git a/src/cwl_utils/testdata/extensions/cuda-requirement_v1_0.cwl b/src/cwl_utils/testdata/extensions/cuda-requirement_v1_0.cwl deleted file mode 100755 index 21985e6c..00000000 --- a/src/cwl_utils/testdata/extensions/cuda-requirement_v1_0.cwl +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env cwl-runner -cwlVersion: v1.0 -class: CommandLineTool -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -requirements: - cwltool:CUDARequirement: - cudaVersionMin: "1.0" - cudaComputeCapability: "1.0" - cudaDeviceCountMin: $(inputs.gpus) -inputs: - gpus: - type: int - default: 1 -outputs: [] -baseCommand: "nvidia-smi" \ No newline at end of file diff --git a/src/cwl_utils/testdata/extensions/cuda-requirement_v1_1.cwl 
b/src/cwl_utils/testdata/extensions/cuda-requirement_v1_1.cwl deleted file mode 100755 index 9c24f0fe..00000000 --- a/src/cwl_utils/testdata/extensions/cuda-requirement_v1_1.cwl +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env cwl-runner -cwlVersion: v1.1 -class: CommandLineTool -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -requirements: - cwltool:CUDARequirement: - cudaVersionMin: "1.0" - cudaComputeCapability: "1.0" - cudaDeviceCountMin: $(inputs.gpus) -inputs: - gpus: - type: int - default: 1 -outputs: [] -baseCommand: "nvidia-smi" \ No newline at end of file diff --git a/src/cwl_utils/testdata/extensions/cuda-requirement_v1_2.cwl b/src/cwl_utils/testdata/extensions/cuda-requirement_v1_2.cwl deleted file mode 100755 index 4f1ecbd8..00000000 --- a/src/cwl_utils/testdata/extensions/cuda-requirement_v1_2.cwl +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env cwl-runner -cwlVersion: v1.2 -class: CommandLineTool -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -requirements: - cwltool:CUDARequirement: - cudaVersionMin: "1.0" - cudaComputeCapability: "1.0" - cudaDeviceCountMin: $(inputs.gpus) -inputs: - gpus: - type: int - default: 1 -outputs: [] -baseCommand: "nvidia-smi" \ No newline at end of file diff --git a/src/cwl_utils/testdata/extensions/inplace-update-requirement_v1_0.cwl b/src/cwl_utils/testdata/extensions/inplace-update-requirement_v1_0.cwl deleted file mode 100755 index 14f72054..00000000 --- a/src/cwl_utils/testdata/extensions/inplace-update-requirement_v1_0.cwl +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env cwl-runner -class: CommandLineTool -cwlVersion: v1.0 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -requirements: - cwltool:InplaceUpdateRequirement: - inplaceUpdate: true -inputs: - r: File - script: - type: File - default: - class: File - location: updateval.py -outputs: - out: - type: File - outputBinding: - glob: $(inputs.r.basename) -arguments: [python, $(inputs.script), $(inputs.r.basename)] \ No newline at end of file 
diff --git a/src/cwl_utils/testdata/extensions/load-listing-requirement_v1_0.cwl b/src/cwl_utils/testdata/extensions/load-listing-requirement_v1_0.cwl deleted file mode 100755 index 36d67fb4..00000000 --- a/src/cwl_utils/testdata/extensions/load-listing-requirement_v1_0.cwl +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env cwl-runner -class: CommandLineTool -cwlVersion: v1.0 -$namespaces: - cwltool: http://commonwl.org/cwltool# -requirements: - cwltool:LoadListingRequirement: - loadListing: shallow_listing -inputs: - d: Directory -outputs: [] -arguments: - [echo, "$(inputs.d.listing[0].listing[0])"] diff --git a/src/cwl_utils/testdata/extensions/mpi-requirement_v1_0.cwl b/src/cwl_utils/testdata/extensions/mpi-requirement_v1_0.cwl deleted file mode 100755 index 72dc680b..00000000 --- a/src/cwl_utils/testdata/extensions/mpi-requirement_v1_0.cwl +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env cwl-runner -class: CommandLineTool -cwlVersion: v1.0 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" - -baseCommand: env -requirements: - cwltool:MPIRequirement: - processes: 1 -inputs: {} -outputs: - environment: - type: stdout \ No newline at end of file diff --git a/src/cwl_utils/testdata/extensions/mpi-requirement_v1_1.cwl b/src/cwl_utils/testdata/extensions/mpi-requirement_v1_1.cwl deleted file mode 100755 index a4ad0fe1..00000000 --- a/src/cwl_utils/testdata/extensions/mpi-requirement_v1_1.cwl +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env cwl-runner -class: CommandLineTool -cwlVersion: v1.1 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -baseCommand: env -requirements: - cwltool:MPIRequirement: - processes: 1 -inputs: {} -outputs: - environment: - type: stdout \ No newline at end of file diff --git a/src/cwl_utils/testdata/extensions/mpi-requirement_v1_2.cwl b/src/cwl_utils/testdata/extensions/mpi-requirement_v1_2.cwl deleted file mode 100755 index 5790f414..00000000 --- a/src/cwl_utils/testdata/extensions/mpi-requirement_v1_2.cwl +++ /dev/null @@ -1,13 +0,0 
@@ -#!/usr/bin/env cwl-runner -class: CommandLineTool -cwlVersion: v1.2 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -baseCommand: env -requirements: - cwltool:MPIRequirement: - processes: 1 -inputs: {} -outputs: - environment: - type: stdout \ No newline at end of file diff --git a/src/cwl_utils/testdata/extensions/network-access_v1_0.cwl b/src/cwl_utils/testdata/extensions/network-access_v1_0.cwl deleted file mode 100755 index 4152a116..00000000 --- a/src/cwl_utils/testdata/extensions/network-access_v1_0.cwl +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env cwl-runner -class: CommandLineTool -cwlVersion: v1.0 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -requirements: - cwltool:NetworkAccess: - networkAccess: true -inputs: [] -outputs: [] -baseCommand: python -arguments: - - "-c" - - valueFrom: | - import urllib.request - assert(urllib.request.urlopen("http://commonwl.org").code == 200) \ No newline at end of file diff --git a/src/cwl_utils/testdata/extensions/process-generator_v1_0.cwl b/src/cwl_utils/testdata/extensions/process-generator_v1_0.cwl deleted file mode 100755 index 01df6367..00000000 --- a/src/cwl_utils/testdata/extensions/process-generator_v1_0.cwl +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env cwl-runner -cwlVersion: v1.0 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -class: cwltool:ProcessGenerator -inputs: - script: string - dir: Directory -outputs: {} -run: - class: CommandLineTool - inputs: - script: string - dir: Directory - outputs: - runProcess: - type: File - outputBinding: - glob: main.cwl - requirements: - InlineJavascriptRequirement: {} - LoadListingRequirement: - loadListing: shallow_listing - InitialWorkDirRequirement: - listing: | - ${ - var v = inputs.dir.listing; - v.push({entryname: "inp.py", entry: inputs.script}); - return v; - } - arguments: [python3, inp.py] - stdout: main.cwl \ No newline at end of file diff --git a/src/cwl_utils/testdata/extensions/process-generator_v1_1.cwl 
b/src/cwl_utils/testdata/extensions/process-generator_v1_1.cwl deleted file mode 100755 index b4ccc99c..00000000 --- a/src/cwl_utils/testdata/extensions/process-generator_v1_1.cwl +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env cwl-runner -cwlVersion: v1.1 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -class: cwltool:ProcessGenerator -inputs: - script: string - dir: Directory -outputs: {} -run: - class: CommandLineTool - inputs: - script: string - dir: Directory - outputs: - runProcess: - type: File - outputBinding: - glob: main.cwl - requirements: - InlineJavascriptRequirement: {} - LoadListingRequirement: - loadListing: shallow_listing - InitialWorkDirRequirement: - listing: | - ${ - var v = inputs.dir.listing; - v.push({entryname: "inp.py", entry: inputs.script}); - return v; - } - arguments: [python3, inp.py] - stdout: main.cwl \ No newline at end of file diff --git a/src/cwl_utils/testdata/extensions/process-generator_v1_2.cwl b/src/cwl_utils/testdata/extensions/process-generator_v1_2.cwl deleted file mode 100755 index 62960a2f..00000000 --- a/src/cwl_utils/testdata/extensions/process-generator_v1_2.cwl +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env cwl-runner -cwlVersion: v1.2 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -class: cwltool:ProcessGenerator -inputs: - script: string - dir: Directory -outputs: {} -run: - class: CommandLineTool - inputs: - script: string - dir: Directory - outputs: - runProcess: - type: File - outputBinding: - glob: main.cwl - requirements: - InlineJavascriptRequirement: {} - LoadListingRequirement: - loadListing: shallow_listing - InitialWorkDirRequirement: - listing: | - ${ - var v = inputs.dir.listing; - v.push({entryname: "inp.py", entry: inputs.script}); - return v; - } - arguments: [python3, inp.py] - stdout: main.cwl \ No newline at end of file diff --git a/src/cwl_utils/testdata/extensions/secrets_v1_0.cwl b/src/cwl_utils/testdata/extensions/secrets_v1_0.cwl deleted file mode 100755 index 
63446512..00000000 --- a/src/cwl_utils/testdata/extensions/secrets_v1_0.cwl +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env cwl-runner -cwlVersion: v1.0 -class: CommandLineTool -$namespaces: - cwltool: http://commonwl.org/cwltool# -requirements: - cwltool:Secrets: - secrets: [pw] -inputs: - pw: string -outputs: - out: stdout -arguments: [cat, example.conf] diff --git a/src/cwl_utils/testdata/extensions/secrets_v1_1.cwl b/src/cwl_utils/testdata/extensions/secrets_v1_1.cwl deleted file mode 100755 index 09d476f6..00000000 --- a/src/cwl_utils/testdata/extensions/secrets_v1_1.cwl +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env cwl-runner -cwlVersion: v1.1 -class: CommandLineTool -$namespaces: - cwltool: http://commonwl.org/cwltool# -requirements: - cwltool:Secrets: - secrets: [pw] -inputs: - pw: string -outputs: - out: stdout -arguments: [cat, example.conf] diff --git a/src/cwl_utils/testdata/extensions/secrets_v1_2.cwl b/src/cwl_utils/testdata/extensions/secrets_v1_2.cwl deleted file mode 100755 index 02d7dff6..00000000 --- a/src/cwl_utils/testdata/extensions/secrets_v1_2.cwl +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env cwl-runner -cwlVersion: v1.2 -class: CommandLineTool -$namespaces: - cwltool: http://commonwl.org/cwltool# -requirements: - cwltool:Secrets: - secrets: [pw] -inputs: - pw: string -outputs: - out: stdout -arguments: [cat, example.conf] diff --git a/src/cwl_utils/testdata/extensions/shm-size_v1_0.cwl b/src/cwl_utils/testdata/extensions/shm-size_v1_0.cwl deleted file mode 100755 index 01f90a44..00000000 --- a/src/cwl_utils/testdata/extensions/shm-size_v1_0.cwl +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env cwl-runner -class: CommandLineTool -cwlVersion: v1.0 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -requirements: - cwltool:ShmSize: - shmSize: 128m -inputs: [] -outputs: - output: - type: stdout -baseCommand: echo -stdout: shm-size.txt -arguments: [ $(runtime) ] diff --git a/src/cwl_utils/testdata/extensions/shm-size_v1_1.cwl 
b/src/cwl_utils/testdata/extensions/shm-size_v1_1.cwl deleted file mode 100755 index 3bff20df..00000000 --- a/src/cwl_utils/testdata/extensions/shm-size_v1_1.cwl +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env cwl-runner -class: CommandLineTool -cwlVersion: v1.1 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -requirements: - cwltool:ShmSize: - shmSize: 128m -inputs: [] -outputs: - output: - type: stdout -baseCommand: echo -stdout: shm-size.txt -arguments: [ $(runtime) ] diff --git a/src/cwl_utils/testdata/extensions/shm-size_v1_2.cwl b/src/cwl_utils/testdata/extensions/shm-size_v1_2.cwl deleted file mode 100755 index b6491432..00000000 --- a/src/cwl_utils/testdata/extensions/shm-size_v1_2.cwl +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env cwl-runner -class: CommandLineTool -cwlVersion: v1.2 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -requirements: - cwltool:ShmSize: - shmSize: 128m -inputs: [] -outputs: - output: - type: stdout -baseCommand: echo -stdout: shm-size.txt -arguments: [ $(runtime) ] diff --git a/src/cwl_utils/testdata/extensions/single-var-loop_v1_2.cwl b/src/cwl_utils/testdata/extensions/single-var-loop_v1_2.cwl deleted file mode 100755 index c5f76563..00000000 --- a/src/cwl_utils/testdata/extensions/single-var-loop_v1_2.cwl +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env cwl-runner -cwlVersion: v1.2 -class: Workflow -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -requirements: - InlineJavascriptRequirement: {} -inputs: - i1: int -outputs: - o1: - type: int - outputSource: subworkflow/o1 -steps: - subworkflow: - run: - class: ExpressionTool - inputs: - i1: int - outputs: - o1: int - expression: > - ${return {'o1': inputs.i1 + 1};} - in: - i1: i1 - out: [o1] - requirements: - cwltool:Loop: - loopWhen: $(inputs.i1 < 10) - loop: - i1: o1 - outputMethod: last diff --git a/src/cwl_utils/testdata/extensions/time-limit_v1_0.cwl b/src/cwl_utils/testdata/extensions/time-limit_v1_0.cwl deleted file mode 100755 index 
7106a934..00000000 --- a/src/cwl_utils/testdata/extensions/time-limit_v1_0.cwl +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env cwl-runner -class: CommandLineTool -cwlVersion: v1.0 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -inputs: - sleep_time: - type: int - default: 3 - inputBinding: {} -outputs: [] -requirements: - cwltool:TimeLimit: - timelimit: 20 -baseCommand: sleep \ No newline at end of file diff --git a/src/cwl_utils/testdata/extensions/work-reuse_v1_0.cwl b/src/cwl_utils/testdata/extensions/work-reuse_v1_0.cwl deleted file mode 100755 index 7278cb2b..00000000 --- a/src/cwl_utils/testdata/extensions/work-reuse_v1_0.cwl +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env cwl-runner -class: CommandLineTool -cwlVersion: v1.0 -$namespaces: - cwltool: "http://commonwl.org/cwltool#" -requirements: - cwltool:WorkReuse: - enableReuse: false -inputs: [] -outputs: - page: stdout -stdout: time.txt -baseCommand: python -arguments: - - "-c" - - valueFrom: | - import time - print(time.time()) diff --git a/src/cwl_utils/tests/test_extensions.py b/src/cwl_utils/tests/test_extensions.py deleted file mode 100644 index fa9044dd..00000000 --- a/src/cwl_utils/tests/test_extensions.py +++ /dev/null @@ -1,197 +0,0 @@ -from cwl_utils.parser import cwl_v1_0, cwl_v1_1, cwl_v1_2, load_document_by_uri - -from .util import get_path - - -def test_cuda_requirement_v1_0() -> None: - """Test that CUDARequirement objects are correctly loaded for CWL v1.0.""" - uri = get_path("testdata/extensions/cuda-requirement_v1_0.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_0.CUDARequirement) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:CUDARequirement" - - -def test_cuda_requirement_v1_1() -> None: - """Test that CUDARequirement objects are correctly loaded for CWL v1.1.""" - uri = get_path("testdata/extensions/cuda-requirement_v1_1.cwl").as_uri() - cwl_obj = 
load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_1.CUDARequirement) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:CUDARequirement" - - -def test_cuda_requirement_v1_2() -> None: - """Test that CUDARequirement objects are correctly loaded for CWL v1.2.""" - uri = get_path("testdata/extensions/cuda-requirement_v1_2.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_2.CUDARequirement) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:CUDARequirement" - - -def test_load_listing_requirement_v1_0() -> None: - """Test that LoadListingRequirement objects are correctly loaded for CWL v1.0.""" - uri = get_path("testdata/extensions/load-listing-requirement_v1_0.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_0.LoadListingRequirement) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:LoadListingRequirement" - - -def test_loop_v1_2() -> None: - """Test that Loop and LoopInput objects are correctly loaded for CWL v1.2.""" - uri = get_path("testdata/extensions/single-var-loop_v1_2.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - cwl_step = next(iter(cwl_obj.steps)) - loop_req = next(iter(cwl_step.requirements)) - assert isinstance(loop_req, cwl_v1_2.Loop) - assert isinstance(next(iter(loop_req.loop)), cwl_v1_2.LoopInput) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["steps"][0]["requirements"][0]["class"] == "cwltool:Loop" - - -def test_inplace_update_requirement_v1_0() -> None: - """Test that InplaceUpdateRequirement objects are correctly loaded for CWL v1.0.""" - uri = get_path("testdata/extensions/inplace-update-requirement_v1_0.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance( - next(iter(cwl_obj.requirements)), 
cwl_v1_0.InplaceUpdateRequirement - ) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:InplaceUpdateRequirement" - - -def test_mpi_requirement_v1_0() -> None: - """Test that MPIRequirement objects are correctly loaded for CWL v1.0.""" - uri = get_path("testdata/extensions/mpi-requirement_v1_0.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_0.MPIRequirement) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:MPIRequirement" - - -def test_mpi_requirement_v1_1() -> None: - """Test that MPIRequirement objects are correctly loaded for CWL v1.1.""" - uri = get_path("testdata/extensions/mpi-requirement_v1_1.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_1.MPIRequirement) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:MPIRequirement" - - -def test_mpi_requirement_v1_2() -> None: - """Test that MPIRequirement objects are correctly loaded for CWL v1.2.""" - uri = get_path("testdata/extensions/mpi-requirement_v1_2.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_2.MPIRequirement) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:MPIRequirement" - - -def test_network_access_v1_0() -> None: - """Test that NetworkAccess objects are correctly loaded for CWL v1.0.""" - uri = get_path("testdata/extensions/network-access_v1_0.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_0.NetworkAccess) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:NetworkAccess" - - -def test_process_generator_v1_0() -> None: - """Test that ProcessGenerator objects are correctly loaded for CWL v1.0.""" - uri = 
get_path("testdata/extensions/process-generator_v1_0.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(cwl_obj, cwl_v1_0.ProcessGenerator) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["class"] == "cwltool:ProcessGenerator" - - -def test_process_generator_v1_1() -> None: - """Test that ProcessGenerator objects are correctly loaded for CWL v1.1.""" - uri = get_path("testdata/extensions/process-generator_v1_1.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(cwl_obj, cwl_v1_1.ProcessGenerator) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["class"] == "cwltool:ProcessGenerator" - - -def test_process_generator_v1_2() -> None: - """Test that ProcessGenerator objects are correctly loaded for CWL v1.2.""" - uri = get_path("testdata/extensions/process-generator_v1_2.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(cwl_obj, cwl_v1_2.ProcessGenerator) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["class"] == "cwltool:ProcessGenerator" - - -def test_secrets_v1_0() -> None: - """Test that Secrets objects are correctly loaded for CWL v1.0.""" - uri = get_path("testdata/extensions/secrets_v1_0.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_0.Secrets) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:Secrets" - - -def test_secrets_v1_1() -> None: - """Test that Secrets objects are correctly loaded for CWL v1.1.""" - uri = get_path("testdata/extensions/secrets_v1_1.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_1.Secrets) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:Secrets" - - -def test_secrets_v1_2() -> None: - """Test that Secrets objects are correctly loaded for CWL v1.2.""" - uri = get_path("testdata/extensions/secrets_v1_2.cwl").as_uri() - cwl_obj = 
load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_2.Secrets) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:Secrets" - - -def test_shm_size_v1_0() -> None: - """Test that ShmSize objects are correctly loaded for CWL v1.0.""" - uri = get_path("testdata/extensions/shm-size_v1_0.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_0.ShmSize) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:ShmSize" - - -def test_shm_size_v1_1() -> None: - """Test that ShmSize objects are correctly loaded for CWL v1.1.""" - uri = get_path("testdata/extensions/shm-size_v1_1.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_1.ShmSize) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:ShmSize" - - -def test_shm_size_v1_2() -> None: - """Test that ShmSize objects are correctly loaded for CWL v1.2.""" - uri = get_path("testdata/extensions/shm-size_v1_2.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_2.ShmSize) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:ShmSize" - - -def test_time_limit_v1_0() -> None: - """Test that TimeLimit objects are correctly loaded for CWL v1.0.""" - uri = get_path("testdata/extensions/time-limit_v1_0.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert isinstance(next(iter(cwl_obj.requirements)), cwl_v1_0.TimeLimit) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:TimeLimit" - - -def test_work_reuse_v1_0() -> None: - """Test that WorkReuse objects are correctly loaded for CWL v1.0.""" - uri = get_path("testdata/extensions/work-reuse_v1_0.cwl").as_uri() - cwl_obj = load_document_by_uri(uri) - assert 
isinstance(next(iter(cwl_obj.requirements)), cwl_v1_0.WorkReuse) - cwl_dict = cwl_obj.save(top=True) - assert cwl_dict["requirements"][0]["class"] == "cwltool:WorkReuse" diff --git a/src/cwl_utils/tests/test_parser.py b/src/cwl_utils/tests/test_parser.py index fc33596a..c33da9af 100644 --- a/src/cwl_utils/tests/test_parser.py +++ b/src/cwl_utils/tests/test_parser.py @@ -3,11 +3,11 @@ from pytest import raises from ruamel.yaml.main import YAML +from schema_salad.runtime import shortname import cwl_utils.parser.latest as latest from cwl_utils.errors import GraphTargetMissingException from cwl_utils.parser import ( - cwl_v1_2, cwl_version, load_document, load_document_by_uri, @@ -86,12 +86,12 @@ def test_latest_parser() -> None: def test_shortname() -> None: - assert cwl_v1_2.shortname("http://example.com/foo") == "foo" - assert cwl_v1_2.shortname("http://example.com/#bar") == "bar" - assert cwl_v1_2.shortname("http://example.com/foo/bar") == "bar" - assert cwl_v1_2.shortname("http://example.com/foo#bar") == "bar" - assert cwl_v1_2.shortname("http://example.com/#foo/bar") == "bar" - assert cwl_v1_2.shortname("http://example.com/foo#bar/baz") == "baz" + assert shortname("http://example.com/foo") == "foo" + assert shortname("http://example.com/#bar") == "bar" + assert shortname("http://example.com/foo/bar") == "bar" + assert shortname("http://example.com/foo#bar") == "bar" + assert shortname("http://example.com/#foo/bar") == "bar" + assert shortname("http://example.com/foo#bar/baz") == "baz" def test_get_id_from_graph() -> None: diff --git a/src/cwl_utils/tests/test_parser_utils.py b/src/cwl_utils/tests/test_parser_utils.py index d977ec75..d3fc5c94 100644 --- a/src/cwl_utils/tests/test_parser_utils.py +++ b/src/cwl_utils/tests/test_parser_utils.py @@ -8,6 +8,7 @@ from typing import cast import pytest +import schema_salad.metaschema from pytest import LogCaptureFixture, raises from schema_salad.exceptions import ValidationException @@ -128,8 +129,6 @@ def 
test_static_checker_success(cwlVersion: str) -> None: "testdata/cond-wf-004.1.cwl", "testdata/cond-wf-005.1.cwl", "testdata/cond-single-source-wf-005.1.cwl", - "testdata/extensions/all-output-loop_v1_2.cwl", - "testdata/extensions/single-var-loop_v1_2.cwl", "testdata/wf2.cwl", ] ) @@ -337,9 +336,9 @@ def test_v1_0_type_output_source_record() -> None: process=cwl_obj, sourcenames=cwl_obj.outputs[0].outputSource, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_0.RecordSchema) + assert isinstance(source_type, schema_salad.metaschema.RecordSchema) fields = cast( - MutableSequence[cwl_utils.parser.cwl_v1_0.RecordField], source_type.fields + MutableSequence[schema_salad.metaschema.RecordField], source_type.fields ) assert len(fields) == 2 assert fields[0].type_ == "File" @@ -354,7 +353,7 @@ def test_v1_0_type_for_output_source_with_single_scatter_step() -> None: process=cwl_obj, sourcenames=cwl_obj.outputs[0].outputSource, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_0.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "string" @@ -366,8 +365,8 @@ def test_v1_0_type_for_output_source_with_nested_crossproduct_scatter_step() -> process=cwl_obj, sourcenames=cwl_obj.outputs[0].outputSource, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_0.ArraySchema) - assert isinstance(source_type.items, cwl_utils.parser.cwl_v1_0.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) + assert isinstance(source_type.items, schema_salad.metaschema.ArraySchema) assert source_type.items.items == "string" @@ -378,7 +377,7 @@ def test_v1_0_type_for_output_source_with_flat_crossproduct_scatter_step() -> No source_type = cwl_utils.parser.utils.type_for_source( process=cwl_obj, sourcenames=cwl_obj.outputs[0].outputSource ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_0.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert 
source_type.items == "string" @@ -391,8 +390,8 @@ def test_v1_0_type_for_source_with_multiple_entries_merge_nested() -> None: sourcenames=cwl_obj.steps[0].in_[0].source, linkMerge=cwl_obj.steps[0].in_[0].linkMerge, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_0.ArraySchema) - assert isinstance(source_type.items, cwl_utils.parser.cwl_v1_0.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) + assert isinstance(source_type.items, schema_salad.metaschema.ArraySchema) assert source_type.items.items == "File" @@ -405,7 +404,7 @@ def test_v1_0_type_for_source_with_multiple_entries_merge_flattened() -> None: sourcenames=cwl_obj.steps[0].in_[0].source, linkMerge=cwl_obj.steps[0].in_[0].linkMerge, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_0.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "File" @@ -418,8 +417,8 @@ def test_v1_0_type_for_source_with_single_entry_merge_nested() -> None: sourcenames=cwl_obj.steps[0].in_[0].source, linkMerge=cwl_obj.steps[0].in_[0].linkMerge, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_0.ArraySchema) - assert isinstance(source_type.items, cwl_utils.parser.cwl_v1_0.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) + assert isinstance(source_type.items, schema_salad.metaschema.ArraySchema) assert source_type.items.items == "File" @@ -432,7 +431,7 @@ def test_v1_0_type_for_source_with_single_entry_merge_flattened() -> None: sourcenames=cwl_obj.steps[0].in_[0].source, linkMerge=cwl_obj.steps[0].in_[0].linkMerge, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_0.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "File" @@ -686,9 +685,9 @@ def test_v1_1_type_output_source_record() -> None: process=cwl_obj, sourcenames=cwl_obj.outputs[0].outputSource, ) - assert isinstance(source_type, 
cwl_utils.parser.cwl_v1_1.RecordSchema) + assert isinstance(source_type, schema_salad.metaschema.RecordSchema) fields = cast( - MutableSequence[cwl_utils.parser.cwl_v1_1.RecordField], source_type.fields + MutableSequence[schema_salad.metaschema.RecordField], source_type.fields ) assert len(fields) == 2 assert fields[0].type_ == "File" @@ -703,7 +702,7 @@ def test_v1_1_type_for_output_source_with_single_scatter_step() -> None: process=cwl_obj, sourcenames=cwl_obj.outputs[0].outputSource, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_1.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "string" @@ -715,8 +714,8 @@ def test_v1_1_type_for_output_source_with_nested_crossproduct_scatter_step() -> process=cwl_obj, sourcenames=cwl_obj.outputs[0].outputSource, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_1.ArraySchema) - assert isinstance(source_type.items, cwl_utils.parser.cwl_v1_1.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) + assert isinstance(source_type.items, schema_salad.metaschema.ArraySchema) assert source_type.items.items == "string" @@ -728,7 +727,7 @@ def test_v1_1_type_for_output_source_with_flat_crossproduct_scatter_step() -> No process=cwl_obj, sourcenames=cwl_obj.outputs[0].outputSource, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_1.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "string" @@ -741,8 +740,8 @@ def test_v1_1_type_for_source_with_multiple_entries_merge_nested() -> None: sourcenames=cwl_obj.steps[0].in_[0].source, linkMerge=cwl_obj.steps[0].in_[0].linkMerge, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_1.ArraySchema) - assert isinstance(source_type.items, cwl_utils.parser.cwl_v1_1.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) + assert isinstance(source_type.items, 
schema_salad.metaschema.ArraySchema) assert source_type.items.items == "File" @@ -755,7 +754,7 @@ def test_v1_1_type_for_source_with_multiple_entries_merge_flattened() -> None: sourcenames=cwl_obj.steps[0].in_[0].source, linkMerge=cwl_obj.steps[0].in_[0].linkMerge, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_1.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "File" @@ -768,8 +767,8 @@ def test_v1_1_type_for_source_with_single_entry_merge_nested() -> None: sourcenames=cwl_obj.steps[0].in_[0].source, linkMerge=cwl_obj.steps[0].in_[0].linkMerge, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_1.ArraySchema) - assert isinstance(source_type.items, cwl_utils.parser.cwl_v1_1.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) + assert isinstance(source_type.items, schema_salad.metaschema.ArraySchema) assert source_type.items.items == "File" @@ -782,7 +781,7 @@ def test_v1_1_type_for_source_with_single_entry_merge_flattened() -> None: sourcenames=cwl_obj.steps[0].in_[0].source, linkMerge=cwl_obj.steps[0].in_[0].linkMerge, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_1.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "File" @@ -1036,9 +1035,9 @@ def test_v1_2_type_output_source_record() -> None: process=cwl_obj, sourcenames=cwl_obj.outputs[0].outputSource, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_2.RecordSchema) + assert isinstance(source_type, schema_salad.metaschema.RecordSchema) fields = cast( - MutableSequence[cwl_utils.parser.cwl_v1_2.RecordField], source_type.fields + MutableSequence[schema_salad.metaschema.RecordField], source_type.fields ) assert len(fields) == 2 assert fields[0].type_ == "File" @@ -1053,7 +1052,7 @@ def test_v1_2_type_for_output_source_with_single_scatter_step() -> None: process=cwl_obj, sourcenames=cwl_obj.outputs[0].outputSource, ) - 
assert isinstance(source_type, cwl_utils.parser.cwl_v1_2.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "string" @@ -1065,8 +1064,8 @@ def test_v1_2_type_for_output_source_with_nested_crossproduct_scatter_step() -> process=cwl_obj, sourcenames=cwl_obj.outputs[0].outputSource, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_2.ArraySchema) - assert isinstance(source_type.items, cwl_utils.parser.cwl_v1_2.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) + assert isinstance(source_type.items, schema_salad.metaschema.ArraySchema) assert source_type.items.items == "string" @@ -1078,7 +1077,7 @@ def test_v1_2_type_for_output_source_with_flat_crossproduct_scatter_step() -> No process=cwl_obj, sourcenames=cwl_obj.outputs[0].outputSource, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_2.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "string" @@ -1091,8 +1090,8 @@ def test_v1_2_type_for_source_with_multiple_entries_merge_nested() -> None: sourcenames=cwl_obj.steps[0].in_[0].source, linkMerge=cwl_obj.steps[0].in_[0].linkMerge, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_2.ArraySchema) - assert isinstance(source_type.items, cwl_utils.parser.cwl_v1_2.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) + assert isinstance(source_type.items, schema_salad.metaschema.ArraySchema) assert source_type.items.items == "File" @@ -1105,7 +1104,7 @@ def test_v1_2_type_for_source_with_multiple_entries_merge_flattened() -> None: sourcenames=cwl_obj.steps[0].in_[0].source, linkMerge=cwl_obj.steps[0].in_[0].linkMerge, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_2.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "File" @@ -1118,8 +1117,8 @@ def 
test_v1_2_type_for_source_with_single_entry_merge_nested() -> None: sourcenames=cwl_obj.steps[0].in_[0].source, linkMerge=cwl_obj.steps[0].in_[0].linkMerge, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_2.ArraySchema) - assert isinstance(source_type.items, cwl_utils.parser.cwl_v1_2.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) + assert isinstance(source_type.items, schema_salad.metaschema.ArraySchema) assert source_type.items.items == "File" @@ -1132,7 +1131,7 @@ def test_v1_2_type_for_source_with_single_entry_merge_flattened() -> None: sourcenames=cwl_obj.steps[0].in_[0].source, linkMerge=cwl_obj.steps[0].in_[0].linkMerge, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_2.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "File" @@ -1169,7 +1168,7 @@ def test_v1_2_type_for_source_with_multiple_entries_all_non_null() -> None: sourcenames=cwl_obj.outputs[0].outputSource, pickValue=cwl_obj.outputs[0].pickValue, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_2.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "string" @@ -1206,5 +1205,5 @@ def test_v1_2_type_for_source_with_single_entry_all_non_null() -> None: sourcenames=cwl_obj.outputs[0].outputSource, pickValue=cwl_obj.outputs[0].pickValue, ) - assert isinstance(source_type, cwl_utils.parser.cwl_v1_2.ArraySchema) + assert isinstance(source_type, schema_salad.metaschema.ArraySchema) assert source_type.items == "string"