diff --git a/environments/environment-Linux.yml b/environments/environment-Linux.yml index f3481191a..77518a3d0 100644 --- a/environments/environment-Linux.yml +++ b/environments/environment-Linux.yml @@ -15,13 +15,11 @@ dependencies: - flask-cors == 4.0.0 - flask_restx == 1.1.0 - werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask. - # For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility - - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text] - # For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation - - spikeinterface == 0.100.5 + - neuroconv[dandi,compressors,ecephys,ophys,behavior,text] == 0.6.0 - scikit-learn == 1.4.0 # Tutorial data generation - tqdm_publisher >= 0.0.1 # Progress bars - tzlocal >= 5.2 # Frontend timezone handling - ndx-pose == 0.1.1 - - nwbinspector==0.6.2 + - nwbinspector == 0.6.2 - tables + - numcodecs < 0.16.0 # numcodecs 0.16.0 is not compatible with zarr 2.18.5 diff --git a/environments/environment-MAC-apple-silicon.yml b/environments/environment-MAC-apple-silicon.yml index 8e296da6f..1fd769e13 100644 --- a/environments/environment-MAC-apple-silicon.yml +++ b/environments/environment-MAC-apple-silicon.yml @@ -23,12 +23,10 @@ dependencies: - werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask. 
# NOTE: the NeuroConv wheel on PyPI includes sonpy which is not compatible with arm64, so build and install # NeuroConv from GitHub, which will remove the sonpy dependency when building from Mac arm64 - # For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility - - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text] - # For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation - - spikeinterface == 0.100.5 + - neuroconv[dandi,compressors,ecephys,ophys,behavior,text] == 0.6.0 - scikit-learn == 1.4.0 # Tutorial data generation - tqdm_publisher >= 0.0.1 # Progress bars - tzlocal >= 5.2 # Frontend timezone handling - ndx-pose == 0.1.1 - - nwbinspector==0.6.2 + - nwbinspector == 0.6.2 + - numcodecs < 0.16.0 # numcodecs 0.16.0 is not compatible with zarr 2.18.5 diff --git a/environments/environment-MAC-intel.yml b/environments/environment-MAC-intel.yml index 98d4defab..287137657 100644 --- a/environments/environment-MAC-intel.yml +++ b/environments/environment-MAC-intel.yml @@ -7,7 +7,6 @@ dependencies: - nodejs = 18.16.1 # install these from conda-forge so that dependent packages get included in the distributable - jsonschema = 4.18.0 # installs jsonschema-specifications - - pytables = 3.10.2 # Install from conda-forge because PyPI version results in hdf5 conflicts and missing libs - pip - pip: - setuptools==70.0.0 @@ -19,12 +18,13 @@ dependencies: - flask-cors == 4.0.0 - flask_restx == 1.1.0 - werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask. 
- # For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility - - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text] - # For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation - - spikeinterface == 0.100.5 + - neuroconv[dandi,compressors,ecephys,ophys,behavior,text] == 0.6.0 - scikit-learn == 1.4.0 # Tutorial data generation - tqdm_publisher >= 0.0.1 # Progress bars - tzlocal >= 5.2 # Frontend timezone handling - ndx-pose == 0.1.1 - - nwbinspector==0.6.2 + - nwbinspector == 0.6.2 + - numcodecs < 0.16.0 # numcodecs 0.16.0 is not compatible with zarr 2.18.5 + - h5py == 3.12.1 # 3.13.0 uses features in hdf5 1.14.4 that are not available in earlier hdf5 libs packaged + # with tables==3.9.1 (latest that can be used by neuroconv 0.6.0). + # h5py and tables need to be consistent for electron build for unknown reason diff --git a/environments/environment-Windows.yml b/environments/environment-Windows.yml index 3282cd3ae..69f1a1bde 100644 --- a/environments/environment-Windows.yml +++ b/environments/environment-Windows.yml @@ -18,13 +18,11 @@ dependencies: - flask-cors === 3.0.10 - flask_restx == 1.1.0 - werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask. 
- # For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility - - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text] - # For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation - - spikeinterface == 0.100.5 + - neuroconv[dandi,compressors,ecephys,ophys,behavior,text] == 0.6.0 - scikit-learn == 1.4.0 # Tutorial data generation - tqdm_publisher >= 0.0.1 # Progress bars - tzlocal >= 5.2 # Frontend timezone handling - ndx-pose == 0.1.1 - - nwbinspector==0.6.2 + - nwbinspector == 0.6.2 - tables + - numcodecs < 0.16.0 # numcodecs 0.16.0 is not compatible with zarr 2.18.5 diff --git a/src/pyflask/manageNeuroconv/manage_neuroconv.py b/src/pyflask/manageNeuroconv/manage_neuroconv.py index 391f5b462..b3463ac66 100644 --- a/src/pyflask/manageNeuroconv/manage_neuroconv.py +++ b/src/pyflask/manageNeuroconv/manage_neuroconv.py @@ -1668,11 +1668,13 @@ def generate_test_data(output_path: str): """ Autogenerate the data formats needed for the tutorial pipeline. - Consists of a single-probe single-segment SpikeGLX recording (both AP and LF bands) as well as Phy spiking data. + Consists of a single-probe single-segment SpikeGLX recording (both AP and LF bands) as well as Phy sorting data. 
""" import spikeinterface - from spikeinterface.exporters import export_to_phy - from spikeinterface.preprocessing import bandpass_filter, resample, scale + import spikeinterface.exporters + import spikeinterface.preprocessing + + spikeinterface.set_global_job_kwargs(n_jobs=-1) base_path = Path(output_path) spikeglx_output_folder = base_path / "spikeglx" @@ -1687,8 +1689,8 @@ def generate_test_data(output_path: str): lf_sampling_frequency = 2_500.0 downsample_factor = int(ap_sampling_frequency / lf_sampling_frequency) - # Generate synthetic spiking and voltage traces with waveforms around them - artificial_ap_band_in_uV, spiking = spikeinterface.generate_ground_truth_recording( + # Generate synthetic sorting and voltage traces with waveforms around them + artificial_ap_band_in_uV, sorting = spikeinterface.generate_ground_truth_recording( durations=[duration_in_s], sampling_frequency=ap_sampling_frequency, num_channels=number_of_channels, @@ -1697,12 +1699,18 @@ def generate_test_data(output_path: str): seed=0, # Fixed seed for reproducibility ) - unscaled_artificial_ap_band = scale(recording=artificial_ap_band_in_uV, gain=1 / conversion_factor_to_uV) + unscaled_artificial_ap_band = spikeinterface.preprocessing.scale( + recording=artificial_ap_band_in_uV, gain=1 / conversion_factor_to_uV + ) int16_artificial_ap_band = unscaled_artificial_ap_band.astype(dtype="int16") int16_artificial_ap_band.set_channel_gains(conversion_factor_to_uV) - unscaled_artificial_lf_filter = bandpass_filter(recording=unscaled_artificial_ap_band, freq_min=0.5, freq_max=1_000) - unscaled_artificial_lf_band = resample(recording=unscaled_artificial_lf_filter, resample_rate=2_500) + unscaled_artificial_lf_filter = spikeinterface.preprocessing.bandpass_filter( + recording=unscaled_artificial_ap_band, freq_min=0.5, freq_max=1_000 + ) + unscaled_artificial_lf_band = spikeinterface.preprocessing.decimate( + recording=unscaled_artificial_lf_filter, decimation_factor=downsample_factor + ) 
int16_artificial_lf_band = unscaled_artificial_lf_band.astype(dtype="int16") int16_artificial_lf_band.set_channel_gains(conversion_factor_to_uV) @@ -1725,13 +1733,16 @@ def generate_test_data(output_path: str): with open(file=lf_meta_file_path, mode="w") as io: io.write(lf_meta_content) - # Make Phy folder - waveform_extractor = spikeinterface.extract_waveforms( - recording=artificial_ap_band_in_uV, sorting=spiking, mode="memory" + # Make Phy folder - see https://spikeinterface.readthedocs.io/en/latest/modules/exporters.html + sorting_analyzer = spikeinterface.create_sorting_analyzer( + sorting=sorting, recording=artificial_ap_band_in_uV, mode="memory", sparse=False ) + sorting_analyzer.compute(["random_spikes", "waveforms", "templates", "noise_levels"]) + sorting_analyzer.compute("spike_amplitudes") + sorting_analyzer.compute("principal_components", n_components=5, mode="by_channel_local") - export_to_phy( - waveform_extractor=waveform_extractor, output_folder=phy_output_folder, remove_if_exists=True, copy_binary=False + spikeinterface.exporters.export_to_phy( + sorting_analyzer=sorting_analyzer, output_folder=phy_output_folder, remove_if_exists=True, copy_binary=False )