Skip to content

Commit e87bdbd

Browse files
committed
[quality] quality improvement, add more unit tests, restructure __init__, add pyproject.toml
1 parent a6021ac commit e87bdbd

22 files changed

+1722
-777
lines changed

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
- Copyright: (C) Qianqian Fang (2019-2026) <q.fang at neu.edu>
66
- License: Apache License, Version 2.0
7-
- Version: 0.9.3
7+
- Version: 0.9.4
88
- URL: https://github.com/NeuroJSON/pyjdata
99
- Acknowledgement: This project is supported by US National Institute of Health (NIH)
1010
grant [U24-NS124027](https://reporter.nih.gov/project-details/10308329)

jdata/__init__.py

Lines changed: 51 additions & 218 deletions
Original file line numberDiff line numberDiff line change
@@ -35,224 +35,57 @@
3535
Copyright (c) 2019-2026 Qianqian Fang <q.fang at neu.edu>
3636
"""
3737

38-
from .jfile import (
39-
loadjson,
40-
savejson,
41-
loadbj,
42-
savebj,
43-
loadjd,
44-
savejd,
45-
load,
46-
save,
47-
loadurl,
48-
show,
49-
dumpb,
50-
loadt,
51-
savet,
52-
loadts,
53-
loadbs,
54-
loadb,
55-
saveb,
56-
jsoncache,
57-
jdlink,
58-
jext,
59-
loadjsnirf,
60-
loadsnirf,
61-
savejsnirf,
62-
savesnirf,
63-
loadmsgpack,
64-
savemsgpack,
65-
loadubjson,
66-
saveubjson,
38+
from . import jdata
39+
from . import jfile
40+
from . import jdictionary
41+
from . import csvtsv
42+
from . import h5
43+
from . import jnifti
44+
from . import jgifti
45+
from . import jpath
46+
from . import jschema
47+
from . import njprep
48+
from . import neurojson
49+
50+
# Re-export all public functions from submodules
51+
from .jdata import *
52+
from .jfile import *
53+
from .jdictionary import *
54+
from .csvtsv import *
55+
from .h5 import *
56+
from .jnifti import *
57+
from .jgifti import *
58+
from .jpath import *
59+
from .jschema import *
60+
from .njprep import *
61+
from .neurojson import *
62+
63+
__version__ = "0.9.4"
64+
__all__ = (
65+
jdata.__all__
66+
+ jfile.__all__
67+
+ jdictionary.__all__
68+
+ csvtsv.__all__
69+
+ h5.__all__
70+
+ jnifti.__all__
71+
+ jgifti.__all__
72+
+ jpath.__all__
73+
+ jschema.__all__
74+
+ njprep.__all__
75+
+ neurojson.__all__
76+
+ [
77+
"jdata",
78+
"jfile",
79+
"jdictionary",
80+
"csvtsv",
81+
"h5",
82+
"jnifti",
83+
"jgifti",
84+
"jpath",
85+
"jschema",
86+
"neurojson",
87+
"njprep",
88+
]
6789
)
68-
from .jdata import (
69-
jdataencode,
70-
jdatadecode,
71-
encode,
72-
decode,
73-
jdtype,
74-
jsonfilter,
75-
zlibencode,
76-
zlibdecode,
77-
gzipencode,
78-
gzipdecode,
79-
lzmaencode,
80-
lzmadecode,
81-
lz4encode,
82-
lz4decode,
83-
base64encode,
84-
base64decode,
85-
)
86-
from .jpath import jsonpath
87-
88-
from .jgifti import (
89-
JGifti,
90-
gii2jgii,
91-
jgii2gii,
92-
loadgifti,
93-
loadjgifti,
94-
savegifti,
95-
savejgifti,
96-
jgifticreate,
97-
giicodemap,
98-
get_node,
99-
get_face,
100-
get_property,
101-
get_properties,
102-
get_labels,
103-
get_metadata,
104-
get_coord_system,
105-
get_surfaces,
106-
)
107-
108-
from .jnifti import (
109-
nii2jnii,
110-
jnii2nii,
111-
loadnifti,
112-
loadjnifti,
113-
savenifti,
114-
savejnifti,
115-
nifticreate,
116-
jnifticreate,
117-
memmapstream,
118-
niiheader2jnii,
119-
niicodemap,
120-
niiformat,
121-
savejnii,
122-
savebnii,
123-
)
124-
125-
from .h5 import (
126-
loadh5,
127-
saveh5,
128-
regrouph5,
129-
aos2soa,
130-
soa2aos,
131-
jsnirfcreate,
132-
snirfcreate,
133-
snirfdecode,
134-
)
135-
136-
from .csv import (
137-
load_csv_tsv,
138-
loadcsv,
139-
loadtsv,
140-
save_csv_tsv,
141-
encode_enum_column,
142-
decode_enum_column,
143-
is_enum_encoded,
144-
tsv2json,
145-
json2tsv,
146-
save_csv_tsv_with_enum,
147-
)
148-
149-
from .jdict import jdict
150-
from .jschema import jsonschema
151-
from .neurojson import neuroj, neurojgui
152-
from .njprep import dataset2json, NJPREP_DEFAULT
153-
154-
__version__ = "0.9.3"
155-
__all__ = [
156-
"loadjson",
157-
"savejson",
158-
"loadbj",
159-
"savebj",
160-
"loadjd",
161-
"savejd",
162-
"jdataencode",
163-
"jdatadecode",
164-
"load",
165-
"save",
166-
"loadurl",
167-
"show",
168-
"dumpb",
169-
"loadt",
170-
"savet",
171-
"loadts",
172-
"loadbs",
173-
"loadb",
174-
"saveb",
175-
"encode",
176-
"decode",
177-
"jsoncache",
178-
"jdlink",
179-
"jdtype",
180-
"jsonfilter",
181-
"jext",
182-
"jsonpath",
183-
"nii2jnii",
184-
"jnii2nii",
185-
"loadnifti",
186-
"loadjnifti",
187-
"savenifti",
188-
"savejnifti",
189-
"nifticreate",
190-
"jnifticreate",
191-
"memmapstream",
192-
"niiheader2jnii",
193-
"niicodemap",
194-
"niiformat",
195-
"savebnii",
196-
"savejnii",
197-
"loadh5",
198-
"saveh5",
199-
"regrouph5",
200-
"aos2soa",
201-
"soa2aos",
202-
"jsnirfcreate",
203-
"snirfcreate",
204-
"loadjsnirf",
205-
"loadsnirf",
206-
"savejsnirf",
207-
"savesnirf",
208-
"snirfdecode",
209-
"zlibencode",
210-
"zlibdecode",
211-
"gzipencode",
212-
"gzipdecode",
213-
"lzmaencode",
214-
"lzmadecode",
215-
"lz4encode",
216-
"lz4decode",
217-
"base64encode",
218-
"base64decode",
219-
"neuroj",
220-
"neurojgui",
221-
"load_csv_tsv",
222-
"loadcsv",
223-
"loadtsv",
224-
"save_csv_tsv",
225-
"loadmsgpack",
226-
"savemsgpack",
227-
"loadubjson",
228-
"saveubjson",
229-
"jdict",
230-
"jsonschema",
231-
"dataset2json",
232-
"NJPREP_DEFAULT",
233-
"encode_enum_column",
234-
"decode_enum_column",
235-
"is_enum_encoded",
236-
"tsv2json",
237-
"json2tsv",
238-
"save_csv_tsv_with_enum",
239-
"JGifti",
240-
"gii2jgii",
241-
"jgii2gii",
242-
"loadgifti",
243-
"loadjgifti",
244-
"savegifti",
245-
"savejgifti",
246-
"jgifticreate",
247-
"giicodemap",
248-
"get_node",
249-
"get_face",
250-
"get_property",
251-
"get_properties",
252-
"get_labels",
253-
"get_metadata",
254-
"get_coord_system",
255-
"get_surfaces",
256-
]
25790

25891
__license__ = """Apache license 2.0, Copyright (c) 2019-2026 Qianqian Fang"""

jdata/csv.py renamed to jdata/csvtsv.py

Lines changed: 5 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -211,9 +211,7 @@ def group_column(column: List[str]) -> List[Any]:
211211
converted = [tonumbers(value) for value in column]
212212

213213
# Check if all non-NaN values are numeric
214-
numeric_count = sum(
215-
1 for x in converted if isinstance(x, (int, float)) and not np.isnan(x)
216-
)
214+
numeric_count = sum(1 for x in converted if isinstance(x, (int, float)) and not np.isnan(x))
217215
nan_count = sum(1 for x in converted if isinstance(x, float) and np.isnan(x))
218216
total_numeric = numeric_count + nan_count
219217

@@ -417,9 +415,7 @@ def encode_enum_column(values: List[Any], compress: bool = True) -> Union[List,
417415
]
418416
)
419417

420-
return OrderedDict(
421-
[("_EnumKey_", list(enum_map.keys())), ("_EnumValue_", enum_value)]
422-
)
418+
return OrderedDict([("_EnumKey_", list(enum_map.keys())), ("_EnumValue_", enum_value)])
423419

424420

425421
def decode_enum_column(data: Dict) -> List[Any]:
@@ -474,9 +470,7 @@ def decode_enum_column(data: Dict) -> List[Any]:
474470
raise ValueError("_EnumValue_ must contain _ArrayZipData_ or _ArrayData_")
475471

476472
# Map indices back to values (indices are 1-based)
477-
result = [
478-
enum_keys[idx - 1] if 0 < idx <= len(enum_keys) else None for idx in indices
479-
]
473+
result = [enum_keys[idx - 1] if 0 < idx <= len(enum_keys) else None for idx in indices]
480474

481475
return result
482476

@@ -530,9 +524,7 @@ def tsv2json(
530524
for col_name, col_values in data.items():
531525
# Skip encoding for certain columns
532526
should_skip = col_name in skip_columns
533-
if is_participants and any(
534-
x in col_name.lower() for x in ["age", "sex", "gender"]
535-
):
527+
if is_participants and any(x in col_name.lower() for x in ["age", "sex", "gender"]):
536528
should_skip = True
537529

538530
if should_skip:
@@ -669,9 +661,7 @@ def save_csv_tsv_with_enum(
669661

670662
for col_name, col_values in data.items():
671663
should_skip = col_name in skip_columns
672-
if is_participants and any(
673-
x in col_name.lower() for x in ["age", "sex", "gender"]
674-
):
664+
if is_participants and any(x in col_name.lower() for x in ["age", "sex", "gender"]):
675665
should_skip = True
676666

677667
if should_skip or not compress_enum:

0 commit comments

Comments (0)