forked from openPMD/openPMD-api
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy path13_write_dynamic_configuration.cpp
More file actions
225 lines (200 loc) · 6.34 KB
/
13_write_dynamic_configuration.cpp
File metadata and controls
225 lines (200 loc) · 6.34 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
#include <openPMD/openPMD.hpp>

#include <algorithm>
#include <iostream>
#include <memory>
#include <numeric> // std::iota
#include <string>
#include <vector>
using std::cout;
using namespace openPMD;
/**
 * Example: pass a JSON/TOML "dynamic configuration" string to a Series to set
 * backend, iteration encoding, and per-dataset defaults, then override those
 * defaults for one specific dataset.
 *
 * @return 0 on success (also 0 when the HDF5 backend is unavailable, since
 *         the example cannot run without it).
 */
int main()
{
    if (!getVariants()["hdf5"])
    {
        // The example configuration below selects the HDF5 backend, so skip
        // gracefully when this openPMD build has no HDF5 support.
        return 0;
    }

    using position_t = double;

    // see https://github.com/ToruNiina/toml11/issues/205
#if !defined(__NVCOMPILER_MAJOR__) || __NVCOMPILER_MAJOR__ >= 23
    /*
     * This example demonstrates how to use JSON/TOML-based dynamic
     * configuration for openPMD.
     * The following configuration is passed to the constructor of the Series
     * class and specifies the defaults to be used for that Series.
     * This configuration can later be overridden as needed on a per-dataset
     * level.
     */
    std::string const defaults = R"END(
# This configuration is TOML-based
# JSON can be used alternatively, the openPMD-api will automatically detect
# the language being used
#
# Alternatively, the location of a JSON/TOML-file on the filesystem can
# be passed by adding an at-sign `@` in front of the path
# The format will then be recognized by filename extension, i.e. .json or .toml
backend = "hdf5"
iteration_encoding = "group_based"
# The following is only relevant in read mode
defer_iteration_parsing = true
[adios2.engine]
type = "bp4"
# ADIOS2 allows adding several operators
# Lists are given in TOML by using double brackets
# For specifying a single operator only, the list may be skipped.
[[adios2.dataset.operators]]
type = "zlib"
parameters.clevel = 5
# Alternatively:
# [adios2.dataset.operators.parameters]
# clevel = 9
# For adding a further parameter:
# [[adios2.dataset.operators]]
# type = "some other parameter"
# # ...
# Sometimes, dataset configurations should not affect all datasets, but only
# specific ones, e.g. only particle data.
# Dataset configurations can be given as a list, here at the example of HDF5.
# In such lists, each entry is an object with two keys:
#
# 1. 'cfg': Mandatory key, this is the actual dataset configuration.
# 2. 'select': A Regex or a list of Regexes to match against the dataset name.
#
# This makes it possible to give dataset-specific configurations.
# The dataset name is the same as returned
# by `Attributable::myPath().openPMDPath()`.
# The regex must match against either the full path (e.g. "/data/1/meshes/E/x")
# or against the path within the iteration (e.g. "meshes/E/x").
# Example:
# Let HDF5 datasets be automatically chunked by default
[[hdf5.dataset]]
cfg.chunks = "auto"
# For particles, we can specify the chunking explicitly
[[hdf5.dataset]]
# Multiple selection regexes can be given as a list.
# They will be fused into a single regex '($^)|(regex1)|(regex2)|(regex3)|...'.
select = ["/data/1/particles/e/.*", "/data/2/particles/e/.*"]
cfg.chunks = [5]
# Selecting a match works top-down, the order of list entries is important.
[[hdf5.dataset]]
# Specifying only a single regex.
# The regex can match against the full dataset path
# or against the path within the Iteration.
# Capitalization is irrelevant.
select = "particles/e/.*"
CFG.CHUNKS = [10]
)END";
#else
    /*
     * This is the same configuration in JSON. We need this in deprecated
     * NVHPC-compilers due to problems that those compilers have with the
     * toruniina::toml11 library.
     */
    std::string const defaults = R"(
{
"backend": "hdf5",
"defer_iteration_parsing": true,
"iteration_encoding": "group_based",
"adios2": {
"engine": {
"type": "bp4"
},
"dataset": {
"operators": [
{
"parameters": {
"clevel": 5
},
"type": "zlib"
}
]
}
},
"hdf5": {
"dataset": [
{
"cfg": {
"chunks": "auto"
}
},
{
"select": [
"/data/1/particles/e/.*",
"/data/2/particles/e/.*"
],
"cfg": {
"chunks": [
5
]
}
},
{
"select": "particles/e/.*",
"CFG": {
"CHUNKS": [
10
]
}
}
]
}
}
)";
#endif

    // open file for writing; the configuration string supplies the defaults
    Series series =
        Series("../samples/dynamicConfig.h5", Access::CREATE_LINEAR, defaults);

    Datatype datatype = determineDatatype<position_t>();
    constexpr unsigned long length = 10ul;
    Extent global_extent = {length};
    Dataset dataset = Dataset(datatype, global_extent);

    // Reusable write buffer; the shared_ptr deleter matches the array new[].
    std::shared_ptr<position_t> local_data(
        new position_t[length], [](position_t const *ptr) { delete[] ptr; });

    Snapshots iterations = series.snapshots();
    for (size_t i = 0; i < 100; ++i)
    {
        Iteration iteration = iterations[i];
        Record electronPositions = iteration.particles["e"]["position"];

        // Fill the buffer with i*length, i*length+1, ...
        std::iota(local_data.get(), local_data.get() + length, i * length);
        for (auto const &dim : {"x", "y", "z"})
        {
            RecordComponent pos = electronPositions[dim];
            pos.resetDataset(dataset);
            pos.storeChunk(local_data, Offset{0}, global_extent);
        }

        /*
         * We want different compression settings for this dataset, so we pass
         * a dataset-specific configuration. This will override any definition
         * specified above.
         * Also showcase how to define a resizable dataset.
         * This time in JSON.
         */
        std::string const differentCompressionSettings = R"END(
{
"resizable": true,
"adios2": {
"dataset": {
"operators": {
"type": "zlib",
"parameters": {
"clevel": 9
}
}
}
}
})END";
        Dataset differentlyCompressedDataset{Datatype::INT, {10}};
        differentlyCompressedDataset.options = differentCompressionSettings;

        auto someMesh = iteration.meshes["differentCompressionSettings"];
        someMesh.resetDataset(differentlyCompressedDataset);

        // Explicit cast avoids an implicit size_t -> int narrowing conversion.
        std::vector<int> dataVec(10, static_cast<int>(i));
        someMesh.storeChunk(dataVec, {0}, {10});

        iteration.close();
    }

    /* The files in 'series' are still open until the object is destroyed, on
     * which it cleanly flushes and closes all open file handles.
     * When running out of scope on return, the 'Series' destructor is called.
     * Alternatively, one can call `series.close()` to the same effect as
     * calling the destructor, including the release of file handles.
     */
    series.close();
    return 0;
}