Skip to content

Commit 4df8b0b

Browse files
authored
Merge branch 'main' into pre-commit-ci-update-config
2 parents 9c5f0fa + dc9f6bb commit 4df8b0b

3 files changed

Lines changed: 36 additions & 12 deletions

File tree

.github/workflows/example_data_cache.yml

Lines changed: 16 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,9 @@ jobs:
1717
# Requirement settings identified as 'ExampleDataCache / {os}'
1818
name: ${{ matrix.os }}
1919
runs-on: ${{ matrix.os }}
20+
defaults:
21+
run:
22+
shell: bash
2023
strategy:
2124
fail-fast: false
2225
matrix:
@@ -27,7 +30,10 @@ jobs:
2730

2831
- name: Get ephy_testing_data current head hash
2932
id: ephys
30-
run: echo "::set-output name=HASH_EPHY_DATASET::$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)"
33+
run: |
34+
HASH=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)
35+
if [ -z "$HASH" ]; then echo "Failed to fetch ephy_testing_data hash" && exit 1; fi
36+
echo "HASH_EPHY_DATASET=$HASH" >> $GITHUB_OUTPUT
3137
- name: Get cached ephys example data - ${{ steps.ephys.outputs.HASH_EPHY_DATASET }}
3238
uses: actions/cache@v4
3339
id: cache-ephys-datasets
@@ -36,7 +42,10 @@ jobs:
3642
key: ephys-datasets-v2-${{ matrix.os }}-${{ steps.ephys.outputs.HASH_EPHY_DATASET }}
3743
- name: Get ophys_testing_data current head hash
3844
id: ophys
39-
run: echo "::set-output name=HASH_OPHYS_DATASET::$(git ls-remote https://gin.g-node.org/CatalystNeuro/ophys_testing_data.git HEAD | cut -f1)"
45+
run: |
46+
HASH=$(git ls-remote https://gin.g-node.org/CatalystNeuro/ophys_testing_data.git HEAD | cut -f1)
47+
if [ -z "$HASH" ]; then echo "Failed to fetch ophys_testing_data hash" && exit 1; fi
48+
echo "HASH_OPHYS_DATASET=$HASH" >> $GITHUB_OUTPUT
4049
- name: Get cached ophys example data - ${{ steps.ophys.outputs.HASH_OPHYS_DATASET }}
4150
uses: actions/cache@v4
4251
id: cache-ophys-datasets
@@ -45,13 +54,16 @@ jobs:
4554
key: ophys-datasets-${{ matrix.os }}-${{ steps.ophys.outputs.HASH_OPHYS_DATASET }}
4655
- name: Get behavior_testing_data current head hash
4756
id: behavior
48-
run: echo "::set-output name=HASH_BEHAVIOR_DATASET::$(git ls-remote https://gin.g-node.org/CatalystNeuro/behavior_testing_data.git HEAD | cut -f1)"
57+
run: |
58+
HASH=$(git ls-remote https://gin.g-node.org/CatalystNeuro/behavior_testing_data.git HEAD | cut -f1)
59+
if [ -z "$HASH" ]; then echo "Failed to fetch behavior_testing_data hash" && exit 1; fi
60+
echo "HASH_BEHAVIOR_DATASET=$HASH" >> $GITHUB_OUTPUT
4961
- name: Get cached behavior example data - ${{ steps.behavior.outputs.HASH_BEHAVIOR_DATASET }}
5062
uses: actions/cache@v4
5163
id: cache-behavior-datasets
5264
with:
5365
path: ./behavior_testing_data
54-
key: behavior-datasets-${{ matrix.os }}-${{ steps.behavior.outputs.HASH_behavior_DATASET }}
66+
key: behavior-datasets-${{ matrix.os }}-${{ steps.behavior.outputs.HASH_BEHAVIOR_DATASET }}
5567

5668
- if: steps.cache-ephys-datasets.outputs.cache-hit != 'true' || steps.cache-ophys-datasets.outputs.cache-hit != 'true' || steps.cache-behavior-datasets.outputs.cache-hit != 'true'
5769
name: Install and configure AWS CLI

.github/workflows/testing_pipelines.yml

Lines changed: 13 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,10 @@ jobs:
7373
# Load example data caches
7474
- name: Get ephy_testing_data current head hash
7575
id: ephys
76-
run: echo "::set-output name=HASH_EPHY_DATASET::$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)"
76+
run: |
77+
HASH=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)
78+
if [ -z "$HASH" ]; then echo "Failed to fetch ephy_testing_data hash" && exit 1; fi
79+
echo "HASH_EPHY_DATASET=$HASH" >> $GITHUB_OUTPUT
7780
7881
- name: Cache ephys dataset - ${{ steps.ephys.outputs.HASH_EPHY_DATASET }}
7982
uses: actions/cache@v4
@@ -84,7 +87,10 @@ jobs:
8487

8588
- name: Get ophys_testing_data current head hash
8689
id: ophys
87-
run: echo "::set-output name=HASH_OPHYS_DATASET::$(git ls-remote https://gin.g-node.org/CatalystNeuro/ophys_testing_data.git HEAD | cut -f1)"
90+
run: |
91+
HASH=$(git ls-remote https://gin.g-node.org/CatalystNeuro/ophys_testing_data.git HEAD | cut -f1)
92+
if [ -z "$HASH" ]; then echo "Failed to fetch ophys_testing_data hash" && exit 1; fi
93+
echo "HASH_OPHYS_DATASET=$HASH" >> $GITHUB_OUTPUT
8894
8995
- name: Cache ophys dataset - ${{ steps.ophys.outputs.HASH_OPHYS_DATASET }}
9096
uses: actions/cache@v4
@@ -95,14 +101,17 @@ jobs:
95101

96102
- name: Get behavior_testing_data current head hash
97103
id: behavior
98-
run: echo "::set-output name=HASH_BEHAVIOR_DATASET::$(git ls-remote https://gin.g-node.org/CatalystNeuro/behavior_testing_data.git HEAD | cut -f1)"
104+
run: |
105+
HASH=$(git ls-remote https://gin.g-node.org/CatalystNeuro/behavior_testing_data.git HEAD | cut -f1)
106+
if [ -z "$HASH" ]; then echo "Failed to fetch behavior_testing_data hash" && exit 1; fi
107+
echo "HASH_BEHAVIOR_DATASET=$HASH" >> $GITHUB_OUTPUT
99108
100109
- name: Cache behavior dataset - ${{ steps.behavior.outputs.HASH_BEHAVIOR_DATASET }}
101110
uses: actions/cache@v4
102111
id: cache-behavior-datasets
103112
with:
104113
path: ./behavior_testing_data
105-
key: behavior-datasets-${{ matrix.os }}-${{ steps.behavior.outputs.HASH_behavior_DATASET }}
114+
key: behavior-datasets-${{ matrix.os }}-${{ steps.behavior.outputs.HASH_BEHAVIOR_DATASET }}
106115

107116
# Run pipeline tests
108117
- if: matrix.os != 'ubuntu-latest'

src/pyflask/manageNeuroconv/manage_neuroconv.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1813,12 +1813,15 @@ def generate_test_data(output_path: str):
18131813
sorting_analyzer = spikeinterface.create_sorting_analyzer(
18141814
sorting=sorting, recording=artificial_ap_band_in_uV, mode="memory", sparse=False
18151815
)
1816-
sorting_analyzer.compute(["random_spikes", "waveforms", "templates", "noise_levels"])
1817-
sorting_analyzer.compute("spike_amplitudes")
1818-
sorting_analyzer.compute("principal_components", n_components=5, mode="by_channel_local")
1816+
sorting_analyzer.compute(["random_spikes", "waveforms", "templates"])
18191817

18201818
spikeinterface.exporters.export_to_phy(
1821-
sorting_analyzer=sorting_analyzer, output_folder=phy_output_folder, remove_if_exists=True, copy_binary=False
1819+
sorting_analyzer=sorting_analyzer,
1820+
output_folder=phy_output_folder,
1821+
remove_if_exists=True,
1822+
copy_binary=False,
1823+
compute_pc_features=False,
1824+
compute_amplitudes=False,
18221825
)
18231826

18241827

0 commit comments

Comments (0)