Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions doc/api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -336,6 +336,7 @@ spikeinterface.exporters
.. automodule:: spikeinterface.exporters

.. autofunction:: export_to_phy
.. autofunction:: export_to_ibl_gui
.. autofunction:: export_report


Expand Down
38 changes: 35 additions & 3 deletions doc/modules/exporters.rst
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ The input of the :py:func:`~spikeinterface.exporters.export_to_phy` is a :code:`
.. code-block:: python

import spikeinterface as si # core module only
from spikeinterface.postprocessing import compute_spike_amplitudes, compute_principal_components
from spikeinterface.exporters import export_to_phy

# the waveforms are sparse so it is faster to export to phy
Expand All @@ -40,6 +39,41 @@ The input of the :py:func:`~spikeinterface.exporters.export_to_phy` is a :code:`
export_to_phy(sorting_analyzer=sorting_analyzer, output_folder='path/to/phy_folder')


Export to IBL GUI
-----------------

The :py:func:`~spikeinterface.exporters.export_to_ibl_gui` function allows you to use the
`IBL GUI <https://github.com/int-brain-lab/iblapps/wiki>`_ for probe alignment.

The IBL GUI can also be installed as a standalone app using `this fork <https://github.com/AllenNeuralDynamics/ibl-ephys-alignment-gui>`_ from the Allen Institute.

The input of the :py:func:`~spikeinterface.exporters.export_to_ibl_gui` is a :code:`SortingAnalyzer` object.

.. code-block:: python

import spikeinterface as si # core module only
import spikeinterface.preprocessing as spre
from spikeinterface.exporters import export_to_ibl_gui

sorting_analyzer = si.create_sorting_analyzer(sorting=sorting, recording=recording)

# we need to compute some required extensions
sorting_analyzer.compute(['random_spikes', 'templates', 'spike_amplitudes', 'spike_locations', 'quality_metrics'])
    # note that spike_locations is optional, but recommended for computing accurate spike depths

# optionally, we can pass an LFP recording to compute RMS/PSD in the LFP band
recording_lfp = spre.bandpass_filter(recording, freq_min=1, freq_max=300)
# we can also decimate the LFP to speed up the process
recording_lfp = spre.decimate(recording_lfp, 10)

# the export process is fast because everything is pre-computed
export_to_ibl_gui(
sorting_analyzer=sorting_analyzer,
output_folder='path/to/ibl_folder',
        lfp_recording=recording_lfp,
n_jobs=-1
)


Export a spike sorting report
-----------------------------
Expand Down Expand Up @@ -68,8 +102,6 @@ with many units!
.. code-block:: python

import spikeinterface as si # core module only
from spikeinterface.postprocessing import compute_spike_amplitudes, compute_correlograms
from spikeinterface.qualitymetrics import compute_quality_metrics
from spikeinterface.exporters import export_report


Expand Down
1 change: 1 addition & 0 deletions src/spikeinterface/exporters/__init__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
from .to_phy import export_to_phy
from .report import export_report
from .to_ibl import export_to_ibl_gui
5 changes: 4 additions & 1 deletion src/spikeinterface/exporters/tests/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,10 @@ def make_sorting_analyzer(sparse=True, with_group=False):
sorting_analyzer.compute("noise_levels")
sorting_analyzer.compute("principal_components")
sorting_analyzer.compute("template_similarity")
sorting_analyzer.compute("quality_metrics", metric_names=["snr"])
sorting_analyzer.compute(
"quality_metrics", metric_names=["snr", "amplitude_median", "isi_violation", "amplitude_cutoff"]
)
sorting_analyzer.compute(["spike_amplitudes", "spike_locations"])

return sorting_analyzer

Expand Down
115 changes: 115 additions & 0 deletions src/spikeinterface/exporters/tests/test_export_to_ibl.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
import pytest

from spikeinterface.preprocessing import bandpass_filter, decimate
from spikeinterface.exporters import export_to_ibl_gui

from spikeinterface.exporters.tests.common import (
make_sorting_analyzer,
sorting_analyzer_sparse_for_export,
)

# Files that export_to_ibl_gui must always write, regardless of which
# recordings (AP / LFP) are available on the analyzer.
required_output_files = [
    "spikes.times.npy",
    "spikes.clusters.npy",
    "spikes.depths.npy",
    "spikes.amps.npy",
    "clusters.waveforms.npy",
    "clusters.peakToTrough.npy",
    "clusters.channels.npy",
    "clusters.metrics.csv",
    "channels.localCoordinates.npy",
    "channels.rawInd.npy",
]
# QC files written only when an AP-band recording is attached to the analyzer.
ap_output_files = ["_iblqc_ephysTimeRmsAP.rms.npy", "_iblqc_ephysTimeRmsAP.timestamps.npy"]
# QC files written only when an LFP recording is passed explicitly.
lfp_output_files = [
    "_iblqc_ephysTimeRmsLF.rms.npy",
    "_iblqc_ephysTimeRmsLF.timestamps.npy",
    "_iblqc_ephysSpectralDensityLF.power.npy",
    "_iblqc_ephysSpectralDensityLF.freqs.npy",
]

# Unit-selection query over quality metrics; "amplitude_median" is computed
# by the shared fixture in tests/common.py, so this query is satisfiable.
good_units_query = "amplitude_median < -30"


def test_export_ap_to_ibl(sorting_analyzer_sparse_for_export, create_cache_folder):
    """Export with only the attached AP recording.

    Required files and AP QC files must be written; LFP QC files must not,
    since no LFP recording is passed.
    """
    cache_folder = create_cache_folder
    output_folder = cache_folder / "ibl_ap_output"

    sorting_analyzer = sorting_analyzer_sparse_for_export

    # AP recording is attached to the analyzer; no LFP recording passed
    export_to_ibl_gui(
        sorting_analyzer,
        output_folder,
        verbose=True,
        n_jobs=-1,
    )

    for f in required_output_files:
        assert (output_folder / f).exists(), f"Missing file: {f}"
    for f in ap_output_files:
        assert (output_folder / f).exists(), f"Missing file: {f}"
    for f in lfp_output_files:
        assert not (output_folder / f).exists(), f"Unexpected file: {f}"


def test_export_recordingless_to_ibl(sorting_analyzer_sparse_for_export, create_cache_folder):
    """Export with no recording attached at all.

    Only the required spike/cluster/channel files are written; neither AP nor
    LFP QC files can be computed without a recording.
    """
    cache_folder = create_cache_folder
    output_folder = cache_folder / "ibl_recordingless_output"

    sorting_analyzer = sorting_analyzer_sparse_for_export
    recording = sorting_analyzer.recording
    sorting_analyzer._recording = None

    try:
        # no AP and no LFP recording available
        export_to_ibl_gui(sorting_analyzer, output_folder, good_units_query=good_units_query, n_jobs=-1)

        for f in required_output_files:
            assert (output_folder / f).exists(), f"Missing file: {f}"
        for f in ap_output_files:
            # fixed message: these files must NOT exist in recordingless mode
            assert not (output_folder / f).exists(), f"Unexpected file: {f}"
        for f in lfp_output_files:
            assert not (output_folder / f).exists(), f"Unexpected file: {f}"
    finally:
        # always restore the recording so the shared fixture is intact for
        # other tests, even when an assertion above fails
        sorting_analyzer._recording = recording


def test_export_lfp_to_ibl(sorting_analyzer_sparse_for_export, create_cache_folder):
    """Export with the attached AP recording plus an explicit LFP recording.

    All output groups (required, AP QC, LFP QC) must be written.
    """
    cache_folder = create_cache_folder
    output_folder = cache_folder / "ibl_lfp_output"

    sorting_analyzer = sorting_analyzer_sparse_for_export
    recording = sorting_analyzer.recording

    # build a decimated LFP-band copy of the recording to pass alongside the AP data
    recording_lfp = bandpass_filter(recording, freq_min=0.5, freq_max=300)
    recording_lfp = decimate(recording_lfp, 10)

    # AP recording is still attached, so both AP and LFP QC outputs are expected
    export_to_ibl_gui(
        sorting_analyzer, output_folder, lfp_recording=recording_lfp, good_units_query=good_units_query, n_jobs=-1
    )

    for f in required_output_files:
        assert (output_folder / f).exists(), f"Missing file: {f}"
    for f in ap_output_files:
        # fixed message: these files ARE expected here
        assert (output_folder / f).exists(), f"Missing file: {f}"
    for f in lfp_output_files:
        assert (output_folder / f).exists(), f"Missing file: {f}"


def test_missing_info(sorting_analyzer_sparse_for_export, create_cache_folder):
    """The exporter must raise informative errors when required quality
    metrics or required analyzer extensions are missing."""
    output_folder = create_cache_folder / "ibl_missing_info_output"
    sorting_analyzer = sorting_analyzer_sparse_for_export

    # this query references a quality metric ("rp_violations") that the
    # shared fixture never computes, so the export must refuse to run
    query_with_missing_metric = "rp_violations < 0.2"
    with pytest.raises(ValueError, match="Missing required quality metrics"):
        export_to_ibl_gui(sorting_analyzer, output_folder, good_units_query=query_with_missing_metric, n_jobs=-1)

    # NOTE(review): this mutates the shared fixture and does not restore the
    # deleted extension afterwards — confirm no later test relies on it
    sorting_analyzer.delete_extension("spike_amplitudes")
    with pytest.raises(ValueError, match="Missing required extension"):
        export_to_ibl_gui(sorting_analyzer, output_folder, n_jobs=-1)


if __name__ == "__main__":
    # Manual entry point: pytest normally injects both fixture arguments.
    # The original call passed only the analyzer, which raises TypeError
    # because the test also requires a cache folder.
    import tempfile
    from pathlib import Path

    sorting_analyzer = make_sorting_analyzer(sparse=True)
    cache_folder = Path(tempfile.mkdtemp())
    test_export_ap_to_ibl(sorting_analyzer, cache_folder)
Loading