diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index 3a582aeaee..376db31b71 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -260,7 +260,7 @@ jobs: - name: Install run: | sudo apt-get update - sudo apt-get install g++ libopenmpi-dev libhdf5-openmpi-dev python3 python3-numpy python3-mpi4py python3-pandas python3-h5py-mpi + sudo apt-get install g++ libopenmpi-dev libhdf5-openmpi-dev python3 python3-numpy python3-mpi4py python3-pandas python3-h5py-mpi python3-pip # TODO ADIOS2 - name: Build env: {CXXFLAGS: -Werror, PKG_CONFIG_PATH: /usr/lib/x86_64-linux-gnu/pkgconfig} @@ -275,6 +275,22 @@ jobs: cmake --build build --parallel 4 ctest --test-dir build --output-on-failure + python3 -m pip install jsonschema==4.* referencing + cd share/openPMD/json_schema + PATH="../../../build/bin:$PATH" make -j 2 + # We need to exclude the thetaMode example since that has a different + # meshesPath and the JSON schema needs to hardcode that. + find ../../../build/samples/ \ + ! -path '*thetaMode*' \ + ! -path '/*many_iterations/*' \ + ! -name 'profiling.json' \ + ! 
-name '*config.json' \ + -iname '*.json' \ + | while read i; do + echo "Checking $i" + ./check.py "$i" + done + musllinux_py10: runs-on: ubuntu-22.04 if: github.event.pull_request.draft == false diff --git a/CMakeLists.txt b/CMakeLists.txt index 32007025a6..213ffb3171 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -685,11 +685,12 @@ set(openPMD_TEST_NAMES # command line tools set(openPMD_CLI_TOOL_NAMES ls + convert-toml-json ) set(openPMD_PYTHON_CLI_TOOL_NAMES pipe ) -set(openPMD_PYTHON_CLI_MODULE_NAMES ${openPMD_CLI_TOOL_NAMES}) +set(openPMD_PYTHON_CLI_MODULE_NAMES ls) # examples set(openPMD_EXAMPLE_NAMES 1_structure @@ -894,6 +895,9 @@ if(openPMD_BUILD_CLI_TOOLS) endif() target_link_libraries(openpmd-${toolname} PRIVATE openPMD) + target_include_directories(openpmd-${toolname} SYSTEM PRIVATE + $ + $) endforeach() endif() diff --git a/include/openPMD/auxiliary/JSON_internal.hpp b/include/openPMD/auxiliary/JSON_internal.hpp index c608bd7f9f..8096009f9e 100644 --- a/include/openPMD/auxiliary/JSON_internal.hpp +++ b/include/openPMD/auxiliary/JSON_internal.hpp @@ -219,16 +219,25 @@ namespace json * @param options as a parsed JSON object. * @param considerFiles If yes, check if `options` refers to a file and read * from there. + * @param convertLowercase If yes, lowercase conversion is applied + * recursively to keys and values, except for some hardcoded places + * that should be left untouched. */ - ParsedConfig parseOptions(std::string const &options, bool considerFiles); + ParsedConfig parseOptions( + std::string const &options, + bool considerFiles, + bool convertLowercase = true); #if openPMD_HAVE_MPI /** * Parallel version of parseOptions(). MPI-collective. 
*/ - ParsedConfig - parseOptions(std::string const &options, MPI_Comm comm, bool considerFiles); + ParsedConfig parseOptions( + std::string const &options, + MPI_Comm comm, + bool considerFiles, + bool convertLowercase = true); #endif diff --git a/share/openPMD/json_schema/Makefile b/share/openPMD/json_schema/Makefile new file mode 100644 index 0000000000..0680c45f23 --- /dev/null +++ b/share/openPMD/json_schema/Makefile @@ -0,0 +1,15 @@ +convert := openpmd-convert-toml-json + +json_files = attribute_defs.json attributes.json dataset_defs.json iteration.json mesh.json mesh_record_component.json particle_patches.json particle_species.json patch_record.json record.json record_component.json series.json + +.PHONY: all +all: $(json_files) + +# The target file should only be created if the conversion succeeded +$(json_files): %.json: %.toml + $(convert) @$^ > $@.tmp + mv $@.tmp $@ + +.PHONY: clean +clean: + for file in $(json_files); do rm -f "$$file" "$$file.tmp"; done diff --git a/share/openPMD/json_schema/README.md b/share/openPMD/json_schema/README.md new file mode 100644 index 0000000000..e8ad343660 --- /dev/null +++ b/share/openPMD/json_schema/README.md @@ -0,0 +1,47 @@ +# JSON Validation + +This folder contains a JSON schema for validation of openPMD files written as `.json` files. + +## Usage + +### Generating the JSON schema + +For improved readability, maintainability and documentation purposes, the JSON schema is written in `.toml` format and needs to be "compiled" to `.json` files first before usage. +To do this, the openPMD-api installs a tool named `openpmd-convert-toml-json` which can be used to convert between JSON and TOML files in both directions, e.g.: + +```bash +openpmd-convert-toml-json @series.toml > series.json +``` + +A `Makefile` is provided in this folder to automate generating the needed JSON files from the TOML files. + +### Verifying a file against the JSON schema + +In theory, the JSON schema should be applicable by any JSON validator. 
This JSON schema is written in terms of multiple files however, and most validators require special care to properly set up the links between the single files. A Python script `check.py` is provided in this folder which sets up the [Python jsonschema](https://python-jsonschema.readthedocs.io) library and verifies a file against it, e.g.: + +```bash +./check.py path/to/my/dataset.json +``` + +For further usage notes check the documentation of the script itself `./check.py --help`. + +## Caveats + +The openPMD standard is not entirely expressible in terms of a JSON schema: + +* Many semantic dependencies, e.g., that the `position/x` and `position/y` vectors of a particle species need to be of the same size, or that the `axisLabels` have the same dimensionality as the dataset itself, will go unchecked. +* The `meshesPath` is assumed to be `meshes/` and the `particlesPath` is assumed to be `particles/`. This dependency cannot be expressed. + +While a large part of the openPMD standard can indeed be verified by checking against a static JSON schema, the standard is generally large enough to make this approach come to its limits. Verification of a JSON schema is similar to the use of a naive recursive-descent parser. Error messages may become unexpectedly verbose and not very informative, especially when parsing disjunctive statements such as "A Record is either a scalar Record Component or a vector of non-scalar Record Components". We have taken care to decide disjunctive statements early on, e.g. with json-schema's support for `if` statements, but error messages may in general become unwieldy even due to tiny mistakes far down in the parse tree. + +The layout of attributes is assumed to be that which is created by the JSON backend of the openPMD-api. 
Both the longhand and shorthand forms are recognized: + +```json +"meshesPath": { + "datatype": "STRING", + "value": "meshes/" +}, +"particlesPath": "particles/" +``` + +For a custom-written verification of openPMD datasets, also consider using the [openPMD-validator](https://github.com/openPMD/openPMD-validator). diff --git a/share/openPMD/json_schema/attribute_defs.toml b/share/openPMD/json_schema/attribute_defs.toml new file mode 100644 index 0000000000..dfd86bfdad --- /dev/null +++ b/share/openPMD/json_schema/attribute_defs.toml @@ -0,0 +1,253 @@ + +["$defs"] + +###################### +# Vectors of strings # +###################### + +["$defs".vec_string_attribute.if] +type = "object" + +["$defs".vec_string_attribute.then] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".vec_string_attribute.then.properties] + +value.anyOf = [ + { type = "string" }, + { type = "array", items = { "type" = "string" } }, +] + +datatype.enum = [ + "STRING", + "CHAR", + "SCHAR", + "UCHAR", + "VEC_STRING", + "VEC_CHAR", + "VEC_SCHAR", + "VEC_UCHAR", +] + +["$defs".vec_string_attribute.else] +title = "Shorthand notation" +anyOf = [{ type = "string" }, { type = "array", items = { "type" = "string" } }] + + +################## +# Vectors of int # +################## + +["$defs".vec_int_attribute.if] +type = "object" + +["$defs".vec_int_attribute.then] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".vec_int_attribute.then.properties] + +value.anyOf = [ + { type = "integer" }, + { type = "array", items = { "type" = "integer" } }, +] + +datatype.enum = [ + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", + "VEC_SHORT", + "VEC_INT", + "VEC_LONG", + "VEC_LONGLONG", + "VEC_USHORT", + "VEC_UINT", + "VEC_ULONG", + "VEC_ULONGLONG", +] + +["$defs".vec_int_attribute.else] +title = "Shorthand notation" +anyOf = [ + { type = "integer" }, + { type = "array", items = { "type" = 
"integer" } }, +] + +#################### +# Vectors of float # +#################### + +["$defs".vec_float_attribute.if] +type = "object" + +["$defs".vec_float_attribute.then] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".vec_float_attribute.then.properties] + +value.anyOf = [ + { type = "number" }, + { type = "array", items = { "type" = "number" } }, +] + +datatype.enum = [ + "CHAR", + "UCHAR", + "SCHAR", + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", + "FLOAT", + "DOUBLE", + "LONG_DOUBLE", + "CFLOAT", + "CDOUBLE", + "CLONG_DOUBLE", + "VEC_SHORT", + "VEC_INT", + "VEC_LONG", + "VEC_LONGLONG", + "VEC_USHORT", + "VEC_UINT", + "VEC_ULONG", + "VEC_ULONGLONG", + "VEC_FLOAT", + "VEC_DOUBLE", + "VEC_LONG_DOUBLE", + "VEC_CFLOAT", + "VEC_CDOUBLE", + "VEC_CLONG_DOUBLE", +] + +["$defs".vec_float_attribute.else] +title = "Shorthand notation" +anyOf = [{ type = "number" }, { type = "array", items = { "type" = "number" } }] + +########################### +# Special case: # +# unitDimension attribute # +########################### + +["$defs".unitDimension.if] +type = "object" + +["$defs".unitDimension.then] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".unitDimension.then.properties] + +value = { type = "array", items = { type = "number" } } +datatype.const = "ARR_DBL_7" + +["$defs".unitDimension.else] +title = "Shorthand notation" +type = "array" +items.type = "number" + +##################### +# string attributes # +##################### + +["$defs".string_attribute.if] +type = "object" + +["$defs".string_attribute.then] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".string_attribute.then.properties] + +value.type = "string" +datatype.enum = ["STRING", "CHAR", "SCHAR", "UCHAR"] + +["$defs".string_attribute.else] +title = "Shorthand notation" +type = "string" + +################## +# int attributes # 
+################## + +["$defs".int_attribute.if] +type = "object" + +["$defs".int_attribute.then] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".int_attribute.then.properties] + +value.type = "integer" +datatype.enum = [ + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", +] + +["$defs".int_attribute.else] +title = "Shorthand notation" +type = "integer" + +#################### +# float attributes # +#################### + +["$defs".float_attribute.if] +type = "object" + +["$defs".float_attribute.then] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".float_attribute.then.properties] + +value.type = "number" +datatype.enum = [ + "CHAR", + "UCHAR", + "SCHAR", + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", + "FLOAT", + "DOUBLE", + "LONG_DOUBLE", + "CFLOAT", + "CDOUBLE", + "CLONG_DOUBLE", +] + + +["$defs".float_attribute.else] +title = "Shorthand notation" +type = "number" diff --git a/share/openPMD/json_schema/attributes.toml b/share/openPMD/json_schema/attributes.toml new file mode 100644 index 0000000000..1ee28acb2a --- /dev/null +++ b/share/openPMD/json_schema/attributes.toml @@ -0,0 +1,92 @@ +title = "Attribute layout" + +[[oneOf]] +type = "null" +title = "No attributes" + +[[oneOf]] +type = "object" +title = "Dictionary of attributes" +description = "Generic layout of an attributes object." 
+ +[oneOf.patternProperties.".*".if] +type = "object" + +[oneOf.patternProperties.".*".then] +title = "A generic attribute - long form" +type = "object" +required = ["value", "datatype"] +maxProperties = 2 + +properties.value.anyOf = [ + # Any primitive value + { not = { anyOf = [ + { type = "object", title = "An object" }, + { type = "array", title = "An array" }, + ] }, title = "No complex type" }, + # Or an array of any primitive value + { type = "array", items = { not = { anyOf = [ + { type = "object", title = "An object" }, + { type = "array", title = "An array" }, + ] } }, title = "An array of non-complex types" }, +] + +properties.datatype.type = "string" +properties.datatype.enum = [ + "CHAR", + "UCHAR", + "SCHAR", + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", + "FLOAT", + "DOUBLE", + "LONG_DOUBLE", + "CFLOAT", + "CDOUBLE", + "CLONG_DOUBLE", + "STRING", + "VEC_CHAR", + "VEC_SHORT", + "VEC_INT", + "VEC_LONG", + "VEC_LONGLONG", + "VEC_UCHAR", + "VEC_USHORT", + "VEC_UINT", + "VEC_ULONG", + "VEC_ULONGLONG", + "VEC_FLOAT", + "VEC_DOUBLE", + "VEC_LONG_DOUBLE", + "VEC_CFLOAT", + "VEC_CDOUBLE", + "VEC_CLONG_DOUBLE", + "VEC_SCHAR", + "VEC_STRING", + "ARR_DBL_7", + "BOOL", +] + +[oneOf.patternProperties.".*".else] +title = "A generic attribute - short form" +anyOf = [ + # Any primitive value + { not = { anyOf = [ + { type = "object", title = "An object" }, + { type = "array", title = "An array" }, + ] }, title = "No complex type" }, + # Or an array of any primitive value + { type = "array", items = { not = { anyOf = [ + { type = "object", title = "An object" }, + { type = "array", title = "An array" }, + ] } }, title = "An array of non-complex types" }, +] + +[oneOf.propertyNames] +pattern = "^\\w*$" diff --git a/share/openPMD/json_schema/check.py b/share/openPMD/json_schema/check.py new file mode 100755 index 0000000000..155743007d --- /dev/null +++ b/share/openPMD/json_schema/check.py @@ -0,0 +1,80 @@ +#!/usr/bin/env 
python + +import argparse +import json +import os +from pathlib import Path +import sys + +import jsonschema.validators +from referencing import Registry, Resource +from referencing.jsonschema import DRAFT202012 + + +def parse_args(program_name): + script_path = Path(os.path.dirname(os.path.realpath(sys.argv[0]))) + parser = argparse.ArgumentParser( + # we need this for line breaks + formatter_class=argparse.RawDescriptionHelpFormatter, + description=""" +Check JSON files against the openPMD JSON schema. + +This tool validates an openPMD-formatted JSON file against the openPMD JSON +schema, using the jsonschema Python library as a backend. +Please use this script instead of the jsonschema directly since the openPMD +schema consists of several JSON files and this script ensures that +cross-referencing is set up correctly. + +Note that the JSON schema is shipped in form of .toml files for ease +of reading, maintenance and documentation. +In order to perform a check, the .toml files need to be converted to .json +first. +The openPMD-api installs a tool openpmd-convert-toml-json for this purpose. +Additionally, there is a Makefile shipped in the same folder as this Python +script which can be directly applied to generate the JSON schema. + + +Examples: + {0} --help + {0} --schema_root={1} +""".format(os.path.basename(program_name), script_path / "series.json")) + + parser.add_argument( + '--schema_root', + default=script_path, + help="""\ +Directory where to resolve JSON schema files to validate against.
+""" + ) + parser.add_argument('openpmd_file', + metavar='file', + nargs=1, + help="The file which to validate.") + + return parser.parse_args() + + +args = parse_args(sys.argv[0]) + +path = Path(os.path.dirname(os.path.realpath(args.schema_root))) + + +def retrieve_from_filesystem(uri): + filepath = args.schema_root / uri + with open(filepath, "r") as referred: + loaded_json = json.load(referred) + return Resource.from_contents( + loaded_json, default_specification=DRAFT202012) + + +registry = Registry(retrieve=retrieve_from_filesystem) + +with open(args.openpmd_file[0], "r") as instance: + loaded_instance = json.load(instance) + jsonschema.validate( + instance=loaded_instance, + schema={"$ref": "./series.json"}, + registry=registry, + ) + print("File {} was validated successfully against schema {}.".format( + instance.name, args.schema_root)) diff --git a/share/openPMD/json_schema/dataset_defs.toml b/share/openPMD/json_schema/dataset_defs.toml new file mode 100644 index 0000000000..030b94a745 --- /dev/null +++ b/share/openPMD/json_schema/dataset_defs.toml @@ -0,0 +1,144 @@ +["$defs"] + +###################################### +# n-dimensional datasets of any type # +###################################### + +[["$defs".any_type_recursive_array.anyOf]] +title = "A numeric type" +type = "array" +items.anyOf = [{ "type" = "number" }, { "type" = "null" }] + +[["$defs".any_type_recursive_array.anyOf]] +title = "A recursive array of numeric types" +type = "array" +items."$ref" = "#/$defs/any_type_recursive_array" + + +["$defs".any_type_dataset_properties.properties] +datatype.enum = [ + "CHAR", + "UCHAR", + "SCHAR", + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", + "FLOAT", + "DOUBLE", + "LONG_DOUBLE", + "CFLOAT", + "CDOUBLE", + "CLONG_DOUBLE", + "BOOL", +] +data."$ref" = "#/$defs/any_type_recursive_array" +extent = { type = "array", items = { type = "integer" } } + + +################################################### +# 
Either `datatype` and `data` must be defined... # +################################################### + +[["$defs".any_type_dataset.anyOf]] +title = "A dataset of any numeric type" +allOf = [ + { required = [ + "datatype", + "data", + ], title = "Required properties" }, + { "$ref" = "#/$defs/any_type_dataset_properties", title = "Property definitions" }, +] + +######################################################### +# ...or `datatype` and `extent` must be (template form) # +######################################################### + +[["$defs".any_type_dataset.anyOf]] +title = "A template dataset of any numeric type" +allOf = [ + { required = [ + "datatype", + "extent", + ], title = "Required properties" }, + { "$ref" = "#/$defs/any_type_dataset_properties", title = "Property definitions" }, +] + +##################################################### +# ...or `datatype` is UNDEFINED, no extent required # +##################################################### + +[["$defs".any_type_dataset.anyOf]] +title = "An undefined template dataset" +allOf = [ + { required = [ + "datatype", + ], title = "Required properties" }, + { datatype.value = "UNDEFINED", extent = { type = "array", items.type = "integer" } }, +] + +###################################### +# n-dimensional datasets of int type # +###################################### + +[["$defs".int_type_recursive_array.anyOf]] +title = "An integer type" +type = "array" +items.anyOf = [{ "type" = "integer" }, { "type" = "null" }] + +[["$defs".int_type_recursive_array.anyOf]] +title = "A recursive array of integer types" +type = "array" +items."$ref" = "#/$defs/int_type_recursive_array" + + +["$defs".int_type_dataset_properties.properties] +datatype.enum = [ + "CHAR", + "UCHAR", + "SCHAR", + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", + "BOOL", +] +data."$ref" = "#/$defs/int_type_recursive_array" +extent = { type = "array", items = { type = "integer" } } + + 
+################################################### +# Either `datatype` and `data` must be defined... # +################################################### + +[["$defs".int_type_dataset.anyOf]] +title = "A dataset of integer type" +allOf = [ + { required = [ + "datatype", + "data", + ], title = "Required properties" }, + { "$ref" = "#/$defs/int_type_dataset_properties", title = "Property definitions" }, +] + +######################################################### +# ...or `datatype` and `extent` must be (template form) # +######################################################### + +[["$defs".int_type_dataset.anyOf]] +title = "A dataset template of integer type" +allOf = [ + { required = [ + "datatype", + "extent", + ], title = "Required properties" }, + { "$ref" = "#/$defs/int_type_dataset_properties", title = "Property definitions" }, +] diff --git a/share/openPMD/json_schema/iteration.toml b/share/openPMD/json_schema/iteration.toml new file mode 100644 index 0000000000..31a6c74417 --- /dev/null +++ b/share/openPMD/json_schema/iteration.toml @@ -0,0 +1,77 @@ +type = "object" +required = ["attributes"] +title = "Iteration" +description = "One iteration/snapshot." + +[properties] + +################# +# Particle data # +################# + +[properties.particles] +type = "object" +title = "Particles" +description = "Dict of particle species types." + +[properties.particles.properties.attributes] +title = "Attribute layout" +description = "Custom attributes allowed, no required attributes defined." +"$ref" = "attributes.json" + +##################################### +# Particle data -> Particle Species # +##################################### + +[properties.particles.patternProperties."^(?!attributes).*"] +title = "Particle Species" +"$ref" = "particle_species.json" + +############# +# Mesh data # +############# + +[properties.meshes] +type = "object" +title = "Meshes" +description = "Dict of meshes." 
+ +[properties.meshes.properties.attributes] +title = "Attribute layout" +description = "Custom attributes allowed, no required attributes defined." +"$ref" = "attributes.json" + + +########################### +# Mesh data -> Mesh types # +########################### + +[properties.meshes.patternProperties."^(?!attributes).*"] +title = "Mesh" +"$ref" = "mesh.json" + +######################## +# Iteration attributes # +######################## + + +[properties.attributes] +title = "Attributes" + +# First requirement: standard-defined attributes + +[[properties.attributes.allOf]] +required = ["dt", "time", "timeUnitSI"] +title = "Iteration attributes" +description = "Standard-defined attributes at the Iteration level." + +[properties.attributes.allOf.properties] +timeUnitSI."$ref" = "attribute_defs.json#/$defs/float_attribute" +time."$ref" = "attribute_defs.json#/$defs/float_attribute" +dt."$ref" = "attribute_defs.json#/$defs/float_attribute" + +# Second condition: General layout of attributes + +[[properties.attributes.allOf]] +title = "Attribute layout" +"$ref" = "attributes.json" diff --git a/share/openPMD/json_schema/mesh.toml b/share/openPMD/json_schema/mesh.toml new file mode 100644 index 0000000000..0911130ba8 --- /dev/null +++ b/share/openPMD/json_schema/mesh.toml @@ -0,0 +1,101 @@ +######################################################### +# Requirement 1: Mesh-specific structure and attributes # +######################################################### + +[[allOf]] + +type = "object" +required = ["attributes"] +title = "Mesh" +description = "A mesh/grid of cells." + +[allOf.properties.attributes] +title = "Attributes" + +#################################### +# Requirement 1.1: Mesh attributes # +#################################### + +[[allOf.properties.attributes.allOf]] + +title = "Mesh attributes" +description = "Standard-defined attributes at the Mesh level." 
+required = [ + "axisLabels", + "geometry", + "gridGlobalOffset", + "gridSpacing", + "gridUnitSI", + "timeOffset", + "unitDimension", +] + +[allOf.properties.attributes.allOf.properties] + +# in openPMD 1.0: float_attribute, openPMD 2.0: vec_float_attribute +# TODO: decide how to better deal with the 1.0/2.0 dichotomy +gridUnitSI."$ref" = "attribute_defs.json#/$defs/vec_float_attribute" +gridSpacing."$ref" = "attribute_defs.json#/$defs/vec_float_attribute" +gridGlobalOffset."$ref" = "attribute_defs.json#/$defs/vec_float_attribute" +timeOffset."$ref" = "attribute_defs.json#/$defs/float_attribute" +geometryParameters."$ref" = "attribute_defs.json#/$defs/string_attribute" +unitDimension."$ref" = "attribute_defs.json#/$defs/unitDimension" +geometry."$ref" = "attribute_defs.json#/$defs/string_attribute" +dataOrder."$ref" = "attribute_defs.json#/$defs/string_attribute" +axisLabels."$ref" = "attribute_defs.json#/$defs/vec_string_attribute" + +################################################ +# Requirement 1.2: Generic attribute structure # +################################################ + +[[allOf.properties.attributes.allOf]] +title = "Attribute layout" +"$ref" = "attributes.json" + +###################################### +# Requirement 2: Contains components # +###################################### + +[[allOf]] + +title = "Contains components" + +#################################################### +# Requirement 2.1: Either this is a scalar mesh... # +#################################################### + +[allOf.if] +anyOf = [ + # vector mesh + { required = [ + "datatype", + ] }, + # constant mesh + { required = [ + "attributes", + ], properties.attributes.required = [ + "shape", + "value", + ] }, +] + + +[allOf.then] +title = "Scalar mesh component" +"$ref" = "mesh_record_component.json" + +############################################### +# Requirement 2.2: ... or it's a vector mesh. 
# +############################################### + +[allOf.else] +title = "Vector component" +description = "Additionally to the attributes, at least one component must be contained" +# The attributes are contained in this dict, and at least one further +# non-scalar component. Hence, we require at least two entries. +minProperties = 2 +propertyNames.pattern = "^\\w*$" + +[allOf.else.patternProperties."^(?!attributes).*"] +title = "Vector mesh component" +"$ref" = "mesh_record_component.json" diff --git a/share/openPMD/json_schema/mesh_record_component.toml b/share/openPMD/json_schema/mesh_record_component.toml new file mode 100644 index 0000000000..6622ea2335 --- /dev/null +++ b/share/openPMD/json_schema/mesh_record_component.toml @@ -0,0 +1,27 @@ +title = "Mesh Record Component" + +############################################# +# Requirement 1: This is a record component # +############################################# + +[[allOf]] +title = "Record Component" +"$ref" = "record_component.json" + +################################################## +# Requirement 2: Mesh Record Component Specifics # +################################################## + +[[allOf]] +title = "Mesh Record Component" +description = "Single component in a mesh record." +type = "object" +required = ["attributes"] + +[allOf.properties.attributes] +required = ["position"] +title = "Record Component attributes" +description = "Standard-defined attributes at the Record Component level." + +[allOf.properties.attributes.properties] +position."$ref" = "attribute_defs.json#/$defs/vec_float_attribute" diff --git a/share/openPMD/json_schema/particle_patches.toml b/share/openPMD/json_schema/particle_patches.toml new file mode 100644 index 0000000000..0144acb5bd --- /dev/null +++ b/share/openPMD/json_schema/particle_patches.toml @@ -0,0 +1,48 @@ +type = "object" +title = "Particle Patches" +description = "Recommended group for post-processing. 
It logically orders the 1D arrays of attributes into local patches of particles that can be read and processed in parallel." + +required = ["numParticles", "numParticlesOffset", "offset", "extent"] + +################################ +# General layout of attributes # +################################ + +[properties.attributes] +title = "Attribute layout" +description = "Custom attributes allowed, no required attributes defined." +"$ref" = "attributes.json" + +####################################################### +# Definition of particle patches via # +# numParticles, numParticlesOffset, offset and extent # +####################################################### + +[properties.numParticles] +title = "numParticles" +description = "number of particles in this patch" +allOf = [ + { "$ref" = "record_component.json" }, + { "$ref" = "dataset_defs.json#/$defs/int_type_dataset" }, +] + + +[properties.numParticlesOffset] +title = "numParticlesOffset" +description = "offset within the one-dimensional records of the particle species where the first particle in this patch is stored" +allOf = [ + { "$ref" = "record_component.json" }, + { "$ref" = "dataset_defs.json#/$defs/int_type_dataset" }, +] + + +[properties.offset] +title = "Offset" +description = "absolute position (position + positionOffset as defined above) where the particle patch begins: defines the (inclusive) lower bound with positions that are associated with the patch; the same requirements as for regular record components apply" +"$ref" = "patch_record.json" + + +[properties.extent] +title = "Extent" +description = "extent of the particle patch; the offset + extent must be larger than the maximum absolute position of particles in the patch as the exact upper bound of position offset + extent is excluded from the patch; the same requirements as for regular record components apply" +"$ref" = "patch_record.json" diff --git a/share/openPMD/json_schema/particle_species.toml
b/share/openPMD/json_schema/particle_species.toml new file mode 100644 index 0000000000..43dc33444f --- /dev/null +++ b/share/openPMD/json_schema/particle_species.toml @@ -0,0 +1,40 @@ +type = "object" +title = "Particle Species" +description = "Dict of particle quantities." +propertyNames.pattern = "^\\w*$" + +# A particle species requires at least a "position" record +required = ["position"] + +[properties.attributes] + +################################################## +# First requirement: standard-defined attributes # +################################################## + +[[properties.attributes.allOf]] +title = "Particle Species attributes" +description = "Standard-defined attributes at the Particle Species level." +# No required attributes +required = [] + +[properties.attributes.allOf.properties] +id."$ref" = "attribute_defs.json#/$defs/int_attribute" + +#################################################### +# Second requirement: General layout of attributes # +#################################################### + +[[properties.attributes.allOf]] +title = "Attribute layout" +description = "Custom attributes allowed, no required attributes defined." +"$ref" = "attributes.json" + +[properties.particlePatches] +title = "Particle Patches" +"$ref" = "particle_patches.json" + + +[patternProperties."^(?!(attributes|particlePatches)).*"] +title = "Record" +"$ref" = "record.json" diff --git a/share/openPMD/json_schema/patch_record.toml b/share/openPMD/json_schema/patch_record.toml new file mode 100644 index 0000000000..1494410c57 --- /dev/null +++ b/share/openPMD/json_schema/patch_record.toml @@ -0,0 +1,37 @@ +######################################################### +# Requirement 1: Mesh-specific structure and attributes # +######################################################### + +[[allOf]] + +type = "object" +required = ["attributes"] +title = "Record" +description = "A list of particle quantities." 
+ +############################################## +# Requirement 1: Generic attribute structure # +############################################## + + +[allOf.properties.attributes] +title = "Attribute layout" +"$ref" = "attributes.json" + +####################################################### +# Requirement 2: Contains components of a vector mesh # +####################################################### + +[[allOf]] +title = "Contains vector components" +description = "Additionally to the attributes, at least one component must be contained" +# The attributes are contained in this dict, and at least one further +# non-scalar component. Hence, we require at least two entries. +minProperties = 2 +propertyNames.pattern = "^\\w*$" + +[allOf.patternProperties] + +[allOf.patternProperties."^(?!attributes).*"] +title = "Scalar component" +"$ref" = "record_component.json" diff --git a/share/openPMD/json_schema/record.toml b/share/openPMD/json_schema/record.toml new file mode 100644 index 0000000000..eb331a248d --- /dev/null +++ b/share/openPMD/json_schema/record.toml @@ -0,0 +1,83 @@ +######################################################### +# Requirement 1: Mesh-specific structure and attributes # +######################################################### + +[[allOf]] + +type = "object" +required = ["attributes"] +title = "Record" +description = "A list of particle quantities." + +[allOf.properties.attributes] +title = "Attributes" + +#################################### +# Requirement 1.1: Mesh attributes # +#################################### + +[[allOf.properties.attributes.allOf]] + +title = "Particle Record attributes" +description = "Standard-defined attributes at the Particle Record level." 
+required = ["timeOffset", "unitDimension"] + +[allOf.properties.attributes.allOf.properties] + +timeOffset."$ref" = "attribute_defs.json#/$defs/float_attribute" +unitDimension."$ref" = "attribute_defs.json#/$defs/unitDimension" +dataOrder."$ref" = "attribute_defs.json#/$defs/string_attribute" + +################################################ +# Requirement 1.2: Generic attribute structure # +################################################ + +[[allOf.properties.attributes.allOf]] +title = "Attribute layout" +"$ref" = "attributes.json" + +###################################### +# Requirement 2: Contains components # +###################################### + +[[allOf]] +title = "Contains components" + +###################################################### +# Requirement 2.1: Either this is a scalar record... # +###################################################### + +[allOf.if] +anyOf = [ + # scalar record, stored directly as a dataset + { required = [ + "datatype", + ] }, + # constant scalar record + { required = [ + "attributes", + ], properties.attributes.required = [ + "shape", + "value", + ] }, +] + +[allOf.then] +title = "Scalar component" +"$ref" = "record_component.json" + +################################################# +# Requirement 2.2: ... or it's a vector record. # +################################################# + +[allOf.else] +title = "Vector component" +description = "Additionally to the attributes, at least one component must be contained" +# The attributes are contained in this dict, and at least one further +# non-scalar component. Hence, we require at least two entries. 
+minProperties = 2 +propertyNames.pattern = "^\\w*$" + +[allOf.else.patternProperties."^(?!attributes).*"] +title = "Scalar component" +"$ref" = "record_component.json" diff --git a/share/openPMD/json_schema/record_component.toml b/share/openPMD/json_schema/record_component.toml new file mode 100644 index 0000000000..9a4741a9f2 --- /dev/null +++ b/share/openPMD/json_schema/record_component.toml @@ -0,0 +1,65 @@ +############################################## +# Requirement 1: Record Component attributes # +############################################## + +[[allOf]] +title = "Record Component" +description = "Single component in a record." +type = "object" +required = ["attributes"] + +[allOf.properties.attributes] +title = "Attributes" + +################################################ +# Requirement 1.1: Standard-defined attributes # +################################################ + +[[allOf.properties.attributes.allOf]] +title = "Record Component attributes" +description = "Standard-defined attributes at the Record Component level." +required = ["unitSI"] + +[allOf.properties.attributes.allOf.properties] +unitSI."$ref" = "attribute_defs.json#/$defs/float_attribute" + +############################################# +# Requirement 1.2: Generic attribute layout # +############################################# + +[[allOf.properties.attributes.allOf]] +title = "Attribute layout" +"$ref" = "attributes.json" + +########################################### +# Requirement 2: Either array or constant # +########################################### + +[[allOf]] +title = "Either array or constant" + +######################## +# Option 2.1: Constant # +######################## + +[allOf.if] +required = ["attributes"] +properties.attributes.required = ["shape", "value"] + +[allOf.then] +title = "Constant dataset" +description = "A dataset represented by two attributes: The constant value and its shape." 
+ +[allOf.properties.attributes.properties] +value."$ref" = "attribute_defs.json#/$defs/float_attribute" +shape."$ref" = "attribute_defs.json#/$defs/vec_int_attribute" + +##################### +# Option 2.2: Array # +##################### + +[allOf.else] +description = "An n-dimensional dataset containing the payload." +title = "Array dataset" + +"$ref" = "dataset_defs.json#/$defs/any_type_dataset" diff --git a/share/openPMD/json_schema/series.toml b/share/openPMD/json_schema/series.toml new file mode 100644 index 0000000000..f013a7e07d --- /dev/null +++ b/share/openPMD/json_schema/series.toml @@ -0,0 +1,137 @@ +type = "object" +required = ["attributes", "data"] +title = "Series" +description = "The root group in the hierarchical openPMD standard." + +######################################################### +# Requirement 1: Basic layout of the root path (Series) # +######################################################### + +[[allOf]] +title = "Basic Series layout" + +##################### +# Series attributes # +##################### + +[allOf.properties.attributes] +title = "Attributes" + +################################################ +# Requirement 1.1: standard-defined attributes # +################################################ + +[[allOf.properties.attributes.allOf]] +required = [ + "openPMD", + "openPMDextension", + "basePath", + "iterationEncoding", + "iterationFormat", +] +title = "Series attributes" +description = "Standard-defined attributes at the Series level." 
+ +[allOf.properties.attributes.allOf.properties] + +author."$ref" = "attribute_defs.json#/$defs/string_attribute" +comment."$ref" = "attribute_defs.json#/$defs/string_attribute" +date."$ref" = "attribute_defs.json#/$defs/string_attribute" +openPMD."$ref" = "attribute_defs.json#/$defs/string_attribute" +iterationEncoding."$ref" = "attribute_defs.json#/$defs/string_attribute" +softwareVersion."$ref" = "attribute_defs.json#/$defs/string_attribute" +basePath."$ref" = "attribute_defs.json#/$defs/string_attribute" +iterationFormat."$ref" = "attribute_defs.json#/$defs/string_attribute" +openPMDextension."$ref" = "attribute_defs.json#/$defs/int_attribute" +software."$ref" = "attribute_defs.json#/$defs/string_attribute" +machine."$ref" = "attribute_defs.json#/$defs/string_attribute" +softwareDependencies."$ref" = "attribute_defs.json#/$defs/string_attribute" + +meshesPath.description = "Note that the meshesPath is hardcoded as its semantics are impossible to model in a JSON schema." +meshesPath.oneOf = [ + { const = { value = "meshes/", datatype = "STRING" } }, + { const = "meshes/" }, +] + +particlesPath.description = "Note that the particlesPath is hardcoded as its semantics are impossible to model in a JSON schema." 
+particlesPath.oneOf = [ + { const = { value = "particles/", datatype = "STRING" } }, + { const = "particles/" }, +] + +################################################# +# Requirement 1.2: General layout of attributes # +################################################# + +[[allOf.properties.attributes.allOf]] +title = "Attribute layout" +"$ref" = "attributes.json" + +################################################# +# Requirement 2: The Series contains iterations # +################################################# + +[[allOf]] +title = "Contains iterations" + +######################################################################### +# Requirement 2.1: Either a single iteration in variable-based encoding # +######################################################################### + +[allOf.if] +properties.attributes.properties.iterationEncoding.oneOf = [ + { const = { value = "variableBased", datatype = "STRING" } }, + { const = "variableBased" }, +] + +[allOf.then] +title = "Variable-based encoding" +properties.attributes.properties.iterationEncoding.properties.value = { const = "variableBased" } + +[allOf.then.properties.data] +type = "object" +title = "An iteration" +description = "A single iteration." 
+ +# Only require iteration data if snapshot attribute is defined +[allOf.then.properties.data.if] +required = ["attributes"] +properties.attributes.required = ["snapshot"] + +[allOf.then.properties.data.then] +"$ref" = "iteration.json" + +############################################################################ +# Requirement 2.2: Or multiple iterations in group- or file-based encoding # +############################################################################ + +[allOf.else] +title = "Group-based (or file-based) encoding" +properties.attributes.properties.iterationEncoding = { oneOf = [ + { const = "groupBased" }, + { const = "fileBased" }, + { const = { value = "groupBased", datatype = "STRING" } }, + { const = { value = "fileBased", datatype = "STRING" } }, +] } + +# Base Path + +[allOf.else.properties.data] +type = "object" +title = "Base path" +description = "A map of all iterations/snapshots in the Series." + +propertyNames.pattern = "^(-?[0-9]+|attributes)$" + +[allOf.else.properties.data.properties] + +[allOf.else.properties.data.properties.attributes] +title = "Attribute layout" +description = "Custom attributes allowed, no required attributes defined." +"$ref" = "attributes.json" + +# Base Path -> Iterations + +[allOf.else.properties.data.patternProperties."^-?[0-9]+$"] +title = "Iteration" +"$ref" = "iteration.json" diff --git a/src/Series.cpp index 142dfe4bf0..d3acdf4a50 100644 --- a/src/Series.cpp +++ b/src/Series.cpp @@ -1299,14 +1299,20 @@ void Series::flushFileBased( bool flushIOHandler) { auto &series = get(); - /* - * Iterations might have been present, but have been closed and deleted from - * internal structures. In this case, previous flushes were successful and - * the Series is now in written() state. - */ - if (end == begin && !written()) + if (end == begin && + /* + * At parsing time, this might happen since iterations might contain + * errors and be deleted. 
+ */ + IOHandler()->m_seriesStatus != internal::SeriesStatus::Parsing && + /* + * Iterations might have been present, but have been closed and deleted + * from internal structures. In this case, previous flushes were + * successful and the Series is now in written() state. + */ + !written()) { - throw std::runtime_error( + throw error::WrongAPIUsage( "fileBased output can not be written with no iterations."); } @@ -1423,7 +1429,6 @@ void Series::flushGorVBased( bool flushIOHandler) { auto &series = get(); - if (access::readOnly(IOHandler()->m_frontendAccess)) { for (auto it = begin; it != end; ++it) diff --git a/src/auxiliary/JSON.cpp b/src/auxiliary/JSON.cpp index 84e7006cbb..e5c885a9f1 100644 --- a/src/auxiliary/JSON.cpp +++ b/src/auxiliary/JSON.cpp @@ -386,7 +386,8 @@ toml::value jsonToToml(nlohmann::json const &val) namespace { - ParsedConfig parseInlineOptions(std::string const &options) + ParsedConfig + parseInlineOptions(std::string const &options, bool convertLowercase) { // speed up default options ParsedConfig res; @@ -422,12 +423,16 @@ namespace res.config = json::tomlToJson(tomlVal); res.originallySpecifiedAs = SupportedLanguages::TOML; } - lowerCase(res.config); + if (convertLowercase) + { + lowerCase(res.config); + } return res; } } // namespace -ParsedConfig parseOptions(std::string const &options, bool considerFiles) +ParsedConfig parseOptions( + std::string const &options, bool considerFiles, bool convertLowercase) { if (considerFiles) { @@ -437,6 +442,12 @@ ParsedConfig parseOptions(std::string const &options, bool considerFiles) std::fstream handle; handle.open( filename.value(), std::ios_base::binary | std::ios_base::in); + if (!handle.good()) + { + throw std::runtime_error( + "Failed opening '" + filename.value() + + "': " + strerror(errno)); + } ParsedConfig res; if (auxiliary::ends_with(filename.value(), ".toml")) { @@ -456,16 +467,22 @@ ParsedConfig parseOptions(std::string const &options, bool considerFiles) "Failed reading JSON config from 
file " + filename.value() + "."); } - lowerCase(res.config); + if (convertLowercase) + { + lowerCase(res.config); + } return res; } } - return parseInlineOptions(options); + return parseInlineOptions(options, convertLowercase); } #if openPMD_HAVE_MPI -ParsedConfig -parseOptions(std::string const &options, MPI_Comm comm, bool considerFiles) +ParsedConfig parseOptions( + std::string const &options, + MPI_Comm comm, + bool considerFiles, + bool convertLowercase) { if (considerFiles) { @@ -489,11 +506,14 @@ parseOptions(std::string const &options, MPI_Comm comm, bool considerFiles) res.config = nlohmann::json::parse(fileContent); res.originallySpecifiedAs = SupportedLanguages::JSON; } - lowerCase(res.config); + if (convertLowercase) + { + lowerCase(res.config); + } return res; } } - return parseInlineOptions(options); + return parseInlineOptions(options, convertLowercase); } #endif diff --git a/src/cli/convert-toml-json.cpp b/src/cli/convert-toml-json.cpp new file mode 100644 index 0000000000..f13317947d --- /dev/null +++ b/src/cli/convert-toml-json.cpp @@ -0,0 +1,72 @@ +#include +#include +#include + +#include +#include +#include + +namespace json = openPMD::json; + +void parsed_main(std::string jsonOrToml) +{ + auto [config, originallySpecifiedAs] = json::parseOptions( + jsonOrToml, /* considerFiles = */ true, /* convertLowercase = */ false); + { + [[maybe_unused]] auto _ = std::move(jsonOrToml); + } + switch (originallySpecifiedAs) + { + using SL = json::SupportedLanguages; + case SL::JSON: { + auto asToml = json::jsonToToml(config); + std::cout << json::format_toml(asToml); + } + break; + case SL::TOML: + std::cout << config << '\n'; + break; + } +} + +int main(int argc, char const **argv) +{ + std::string jsonOrToml; + switch (argc) + { + case 0: + case 1: + // Just read the whole stream into memory + // Not very elegant, but we'll hold the entire JSON/TOML dataset + // in memory at some point anyway, so it doesn't really matter + { + std::stringbuf 
readEverything; + std::cin >> &readEverything; + jsonOrToml = readEverything.str(); + } + break; + case 2: + if (strcmp(argv[1], "--help") == 0 || strcmp(argv[1], "-h") == 0) + { + std::cout << "Usage: " << std::string(argv[0]) << R"( [json_or_toml] +'json_or_toml' can be a JSON or TOML dataset specified inline or a reference +to a file prepended by an '@'. +Inline datasets will be interpreted as JSON if they start with an '{', as TOML +otherwise. Datasets from a file will be interpreted as JSON or TOML depending +on the file ending '.json' or '.toml' respectively. +Inline dataset specifications can be replaced by input read from stdin. + +If the input is JSON, then it will be converted to TOML and written to stdout, +equivalently from TOML to JSON. +)"; + exit(0); + } + jsonOrToml = argv[1]; + break; + default: + throw std::runtime_error( + std::string("Usage: ") + argv[0] + + " [file location or inline JSON/TOML]"); + } + parsed_main(std::move(jsonOrToml)); +} diff --git a/test/CoreTest.cpp b/test/CoreTest.cpp index f87be576bb..d3de988491 100644 --- a/test/CoreTest.cpp +++ b/test/CoreTest.cpp @@ -222,7 +222,8 @@ TEST_CASE("myPath", "[core]") REQUIRE( pathOf(scalarMesh) == vec_t{"data", "1234", "meshes", "e_chargeDensity"}); - auto scalarMeshComponent = scalarMesh[RecordComponent::SCALAR]; + auto scalarMeshComponent = scalarMesh[RecordComponent::SCALAR].resetDataset( + {Datatype::FLOAT, {10}}); REQUIRE( pathOf(scalarMeshComponent) == vec_t{"data", "1234", "meshes", "e_chargeDensity"}); @@ -230,7 +231,8 @@ TEST_CASE("myPath", "[core]") auto vectorMesh = iteration.meshes["E"]; REQUIRE(pathOf(vectorMesh) == vec_t{"data", "1234", "meshes", "E"}); - auto vectorMeshComponent = vectorMesh["x"]; + auto vectorMeshComponent = + vectorMesh["x"].resetDataset({Datatype::FLOAT, {10}}); REQUIRE( pathOf(vectorMeshComponent) == vec_t{"data", "1234", "meshes", "E", "x"}); @@ -245,7 +247,8 @@ TEST_CASE("myPath", "[core]") pathOf(speciesPosition) == vec_t{"data", "1234", "particles", 
"e", "position"}); - auto speciesPositionX = speciesPosition["x"]; + auto speciesPositionX = + speciesPosition["x"].resetDataset({Datatype::FLOAT, {10}}); REQUIRE( pathOf(speciesPositionX) == vec_t{"data", "1234", "particles", "e", "position", "x"}); @@ -256,7 +259,9 @@ TEST_CASE("myPath", "[core]") pathOf(speciesWeighting) == vec_t{"data", "1234", "particles", "e", "weighting"}); - auto speciesWeightingX = speciesWeighting[RecordComponent::SCALAR]; + auto speciesWeightingX = + speciesWeighting[RecordComponent::SCALAR].resetDataset( + {Datatype::FLOAT, {10}}); REQUIRE( pathOf(speciesWeightingX) == vec_t{"data", "1234", "particles", "e", "weighting"}); @@ -271,7 +276,7 @@ TEST_CASE("myPath", "[core]") pathOf(patchExtent) == vec_t{"data", "1234", "particles", "e", "particlePatches", "extent"}); - auto patchExtentX = patchExtent["x"]; + auto patchExtentX = patchExtent["x"].resetDataset({Datatype::INT, {10}}); REQUIRE( pathOf(patchExtentX) == vec_t{ @@ -295,7 +300,8 @@ TEST_CASE("myPath", "[core]") "numParticles"}); auto patchNumParticlesComponent = - patchNumParticles[RecordComponent::SCALAR]; + patchNumParticles[RecordComponent::SCALAR].resetDataset( + {Datatype::INT, {10}}); REQUIRE( pathOf(patchNumParticlesComponent) == vec_t{ @@ -305,6 +311,10 @@ TEST_CASE("myPath", "[core]") "e", "particlePatches", "numParticles"}); + + speciesE.particlePatches["offset"]["x"].resetDataset({Datatype::INT, {10}}); + speciesE.particlePatches["numParticlesOffset"][RecordComponent::SCALAR] + .resetDataset({Datatype::INT, {10}}); #endif } @@ -1119,6 +1129,7 @@ TEST_CASE("backend_via_json", "[core]") { Series series( "../samples/optionsViaJson", Access::CREATE, encodingVariableBased); + series.iterations[0]; // v-based encoding requires at least 1 iteration REQUIRE(series.backend() == "JSON"); REQUIRE(series.iterationEncoding() == IterationEncoding::variableBased); } @@ -1132,6 +1143,7 @@ TEST_CASE("backend_via_json", "[core]") "../samples/optionsViaJson.bp", Access::CREATE, 
encodingVariableBased); + series.iterations[0]; // v-based encoding requires at least 1 iteration REQUIRE(series.backend() == "JSON"); REQUIRE(series.iterationEncoding() == IterationEncoding::variableBased); } diff --git a/test/SerialIOTest.cpp b/test/SerialIOTest.cpp index e4b3dead4f..881aced538 100644 --- a/test/SerialIOTest.cpp +++ b/test/SerialIOTest.cpp @@ -2437,8 +2437,8 @@ inline void bool_test(const std::string &backend) Access::CREATE, R"({"iteration_encoding": "variable_based"})"); - o.setAttribute("Bool attribute true", true); - o.setAttribute("Bool attribute false", false); + o.setAttribute("Bool_attribute_true", true); + o.setAttribute("Bool_attribute_false", false); } { Series o = @@ -2446,12 +2446,12 @@ inline void bool_test(const std::string &backend) auto attrs = o.attributes(); REQUIRE( - std::count(attrs.begin(), attrs.end(), "Bool attribute true") == 1); + std::count(attrs.begin(), attrs.end(), "Bool_attribute_true") == 1); REQUIRE( - std::count(attrs.begin(), attrs.end(), "Bool attribute false") == + std::count(attrs.begin(), attrs.end(), "Bool_attribute_false") == 1); - REQUIRE(o.getAttribute("Bool attribute true").get() == true); - REQUIRE(o.getAttribute("Bool attribute false").get() == false); + REQUIRE(o.getAttribute("Bool_attribute_true").get() == true); + REQUIRE(o.getAttribute("Bool_attribute_false").get() == false); } { Series list{"../samples/serial_bool." + backend, Access::READ_ONLY}; diff --git a/test/python/unittest/API/APITest.py b/test/python/unittest/API/APITest.py index 6337807f33..4f4d7d59e0 100644 --- a/test/python/unittest/API/APITest.py +++ b/test/python/unittest/API/APITest.py @@ -2239,6 +2239,8 @@ def testSeriesConstructors(self): s = io.Series(f, io.Access.create, c) s.close() + os.remove(cfg_as_file) + def testScalarHdf5Fields(self): if "hdf5" not in io.variants: return