environments/environment-Linux.yml (3 additions, 3 deletions)

@@ -15,14 +15,14 @@ dependencies:
 - flask-cors == 4.0.0
 - flask_restx == 1.1.0
 - werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask.
-- neuroconv[dandi,compressors] == 0.8.0
+- neuroconv[dandi,compressors] == 0.8.1
 - dandi < 0.74.0 # 0.74.0 renamed dandi-staging to dandi-sandbox, breaking neuroconv 0.6.6
 - spikeinterface >= 0.101.0 # Previously included via neuroconv[ecephys]; needed for tutorial data generation
 - pandas < 3.0 # pandas 3.0 uses Arrow backend by default, returning read-only arrays that break spikeinterface Phy extractor
 - scikit-learn == 1.4.0 # Tutorial data generation
 - tqdm_publisher >= 0.0.1 # Progress bars
 - tzlocal >= 5.2 # Frontend timezone handling
-- ndx-pose == 0.1.1
-- nwbinspector == 0.6.2
+- ndx-pose >= 0.1.1
+- nwbinspector >= 0.6.2
 - tables
 - numcodecs < 0.16.0 # numcodecs 0.16.0 is not compatible with zarr 2.18.5
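Note on the `pandas < 3.0` pin above: the comment cites read-only (Arrow-backed) arrays breaking the spikeinterface Phy extractor. A minimal sketch of that failure mode, using plain numpy rather than the actual spikeinterface code:

import numpy as np

values = np.arange(3)
values.setflags(write=False)  # simulate a read-only array, as an Arrow-backed pandas column can return
try:
    values[0] = 99  # the kind of in-place write the extractor performs
except ValueError as err:
    print(err)  # "assignment destination is read-only"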
environments/environment-MAC-apple-silicon.yml (3 additions, 3 deletions)

@@ -23,13 +23,13 @@ dependencies:
 - werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask.
 # NOTE: the NeuroConv wheel on PyPI includes sonpy which is not compatible with arm64, so build and install
 # NeuroConv from GitHub, which will remove the sonpy dependency when building from Mac arm64
-- neuroconv[dandi,compressors] == 0.8.0
+- neuroconv[dandi,compressors] == 0.8.1
 - dandi < 0.74.0 # 0.74.0 renamed dandi-staging to dandi-sandbox, breaking neuroconv 0.6.6
 - spikeinterface >= 0.101.0 # Previously included via neuroconv[ecephys]; needed for tutorial data generation
 - pandas < 3.0 # pandas 3.0 uses Arrow backend by default, returning read-only arrays that break spikeinterface Phy extractor
 - scikit-learn == 1.4.0 # Tutorial data generation
 - tqdm_publisher >= 0.0.1 # Progress bars
 - tzlocal >= 5.2 # Frontend timezone handling
-- ndx-pose == 0.1.1
-- nwbinspector == 0.6.2
+- ndx-pose >= 0.1.1
+- nwbinspector >= 0.6.2
 - numcodecs < 0.16.0 # numcodecs 0.16.0 is not compatible with zarr 2.18.5
environments/environment-MAC-intel.yml (3 additions, 3 deletions)

@@ -18,15 +18,15 @@ dependencies:
 - flask-cors == 4.0.0
 - flask_restx == 1.1.0
 - werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask.
-- neuroconv[dandi,compressors] == 0.8.0
+- neuroconv[dandi,compressors] == 0.8.1
 - dandi < 0.74.0 # 0.74.0 renamed dandi-staging to dandi-sandbox, breaking neuroconv 0.6.6
 - spikeinterface >= 0.101.0 # Previously included via neuroconv[ecephys]; needed for tutorial data generation
 - pandas < 3.0 # pandas 3.0 uses Arrow backend by default, returning read-only arrays that break spikeinterface Phy extractor
 - scikit-learn == 1.4.0 # Tutorial data generation
 - tqdm_publisher >= 0.0.1 # Progress bars
 - tzlocal >= 5.2 # Frontend timezone handling
-- ndx-pose == 0.1.1
-- nwbinspector == 0.6.2
+- ndx-pose >= 0.1.1
+- nwbinspector >= 0.6.2
 - numcodecs < 0.16.0 # numcodecs 0.16.0 is not compatible with zarr 2.18.5
 - h5py == 3.12.1 # 3.13.0 uses features in hdf5 1.14.4 that is not available in earlier hdf5 libs packaged
 # with tables==3.9.1 (latest that can be used by neuroconv 0.6.0).
environments/environment-Windows.yml (3 additions, 3 deletions)

@@ -18,14 +18,14 @@ dependencies:
 - flask-cors === 3.0.10
 - flask_restx == 1.1.0
 - werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask.
-- neuroconv[dandi,compressors] == 0.8.0
+- neuroconv[dandi,compressors] == 0.8.1
 - dandi < 0.74.0 # 0.74.0 renamed dandi-staging to dandi-sandbox, breaking neuroconv 0.6.6
 - spikeinterface >= 0.101.0 # Previously included via neuroconv[ecephys]; needed for tutorial data generation
 - pandas < 3.0 # pandas 3.0 uses Arrow backend by default, returning read-only arrays that break spikeinterface Phy extractor
 - scikit-learn == 1.4.0 # Tutorial data generation
 - tqdm_publisher >= 0.0.1 # Progress bars
 - tzlocal >= 5.2 # Frontend timezone handling
-- ndx-pose == 0.1.1
-- nwbinspector == 0.6.2
+- ndx-pose >= 0.1.1
+- nwbinspector >= 0.6.2
 - tables
 - numcodecs < 0.16.0 # numcodecs 0.16.0 is not compatible with zarr 2.18.5
src/pyflask/manageNeuroconv/manage_neuroconv.py (15 additions, 2 deletions)

@@ -610,6 +610,19 @@ def on_recording_interface(name, recording_interface):
         "additionalProperties": True,  # Allow for new columns
     }
 
+    # Ensure ElectrodeColumns includes entries for all Electrode schema properties
+    # (needed for frontend linked-table validation in neuroconv >= 0.7.5)
+    existing_electrode_columns = ecephys_metadata.get("ElectrodeColumns", [])
+    existing_ecol_names = {col["name"] for col in existing_electrode_columns}
+    for prop_name, prop_info in new_electrodes_properties.items():
+        if prop_name not in existing_ecol_names:
+            existing_electrode_columns.append(
+                {
+                    "name": prop_name,
+                    "description": prop_info.get("description", "No description."),
+                }
+            )
+
     if has_units:
 
         unitprops_def = defs["UnitProperties"]
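The added backfill is self-contained dict/list manipulation. A standalone sketch of its effect, with hypothetical metadata (the real values come from the NeuroConv ecephys schema):

ecephys_metadata = {
    "ElectrodeColumns": [{"name": "group_name", "description": "The group name."}]
}
new_electrodes_properties = {
    "group_name": {"description": "The group name."},
    "location": {},  # present in the Electrode schema but missing a column entry
}

existing = ecephys_metadata.get("ElectrodeColumns", [])
known = {col["name"] for col in existing}
for name, info in new_electrodes_properties.items():
    if name not in known:
        existing.append({"name": name, "description": info.get("description", "No description.")})

print(existing)
# [{'name': 'group_name', 'description': 'The group name.'},
#  {'name': 'location', 'description': 'No description.'}]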
@@ -1380,7 +1393,7 @@ def upload_folder_to_dandi(
     return automatic_dandi_upload(
         dandiset_id=dandiset_id,
         nwb_folder_path=Path(nwb_folder_path),
-        staging=sandbox,  # Map sandbox parameter to staging for external API
+        sandbox=sandbox,
         cleanup=cleanup,
         number_of_jobs=number_of_jobs or 1,
         number_of_threads=number_of_threads or 1,
@@ -1414,7 +1427,7 @@ def upload_project_to_dandi(
     return automatic_dandi_upload(
         dandiset_id=dandiset_id,
         nwb_folder_path=CONVERSION_SAVE_FOLDER_PATH / project,  # Scope valid DANDI upload paths to GUIDE projects
-        staging=sandbox,  # Map sandbox parameter to staging for external API
+        sandbox=sandbox,
         cleanup=cleanup,
         number_of_jobs=number_of_jobs,
         number_of_threads=number_of_threads,
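Both hunks track the same change: with the bump to neuroconv 0.8.1, automatic_dandi_upload accepts the sandbox flag directly, so the GUIDE no longer maps it onto a staging parameter. A minimal sketch of the updated call with placeholder values (the import path is an assumption; the GUIDE code above calls the function directly):

from pathlib import Path

from neuroconv.tools.data_transfers import automatic_dandi_upload  # import path assumed

automatic_dandi_upload(
    dandiset_id="000000",           # placeholder dandiset ID
    nwb_folder_path=Path("./nwb"),  # placeholder folder of converted NWB files
    sandbox=True,                   # upload to the DANDI sandbox; formerly `staging=True`
    cleanup=False,
    number_of_jobs=1,
    number_of_threads=1,
)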