Skip to content

Commit

Permalink
more warnings
Browse files Browse the repository at this point in the history
  • Loading branch information
h-mayorquin committed Dec 16, 2024
1 parent e012abd commit 90ccdc1
Show file tree
Hide file tree
Showing 7 changed files with 10 additions and 29 deletions.
2 changes: 1 addition & 1 deletion docs/conversion_examples_gallery/sorting/blackrock.rst
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ Convert Blackrock sorting data to NWB using
>>>
>>> file_path = f"{ECEPHY_DATA_PATH}/blackrock/FileSpec2.3001.nev"
>>> # Change the file_path to the location of the file in your system
>>> interface = BlackrockSortingInterface(file_path=file_path, verbose=False)
>>> interface = BlackrockSortingInterface(file_path=file_path, sampling_frequency=30000.0, verbose=False)
>>>
>>> # Extract what metadata we can from the source files
>>> metadata = interface.get_metadata()
Expand Down
3 changes: 2 additions & 1 deletion docs/conversion_examples_gallery/sorting/neuralynx.rst
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,8 @@ Convert Neuralynx data to NWB using
>>>
>>> folder_path = f"{ECEPHY_DATA_PATH}/neuralynx/Cheetah_v5.5.1/original_data"
>>> # Change the folder_path to the location of the data in your system
>>> interface = NeuralynxSortingInterface(folder_path=folder_path, verbose=False)
>>> # The stream_id is optional; it is used to determine the sampling frequency of the data
>>> interface = NeuralynxSortingInterface(folder_path=folder_path, verbose=False, stream_id="0")
>>>
>>> metadata = interface.get_metadata()
>>> session_start_time = datetime(2020, 1, 1, 12, 30, 0, tzinfo=ZoneInfo("US/Pacific")).isoformat()
Expand Down
2 changes: 1 addition & 1 deletion src/neuroconv/tools/testing/data_interface_mixins.py
Original file line number Diff line number Diff line change
Expand Up @@ -639,7 +639,7 @@ def check_read_nwb(self, nwbfile_path: str):
else:
renamed_unit_ids = np.arange(len(sorting.unit_ids))

sorting_renamed = sorting.rename_units(new_unit_ids=sorting.unit_ids)
sorting_renamed = sorting.rename_units(new_unit_ids=renamed_unit_ids)
check_sortings_equal(SX1=sorting_renamed, SX2=nwb_sorting)

def check_interface_set_aligned_segment_timestamps(self):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def generate_nwbfile_with_existing_time_series() -> NWBFile:

@pytest.fixture(scope="session")
def hdf5_nwbfile_path(tmpdir_factory):
nwbfile_path = tmpdir_factory.mktemp("data").join("test_default_dataset_configurations_hdf5_nwbfile_.nwb.h5")
nwbfile_path = tmpdir_factory.mktemp("data").join("test_default_dataset_configurations_hdf5_nwbfile_.nwb")
if not Path(nwbfile_path).exists():
nwbfile = generate_nwbfile_with_existing_time_series()
with NWBHDF5IO(path=str(nwbfile_path), mode="w") as io:
Expand Down
4 changes: 1 addition & 3 deletions tests/test_on_data/ecephys/test_raw_recordings.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,9 +97,7 @@ class TestConverter(NWBConverter):
renamed_channel_ids = recording.get_property("channel_name")
else:
renamed_channel_ids = recording.get_channel_ids().astype("str")
recording = recording.channel_slice(
channel_ids=recording.get_channel_ids(), renamed_channel_ids=renamed_channel_ids
)
recording = recording.rename_channels(new_channel_ids=renamed_channel_ids)

# Edge case that only occurs in testing, but should eventually be fixed nonetheless
# The NwbRecordingExtractor on spikeinterface experiences an issue when duplicated channel_ids
Expand Down
4 changes: 2 additions & 2 deletions tests/test_on_data/ecephys/test_spikeglx_converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,15 +212,15 @@ def test_electrode_table_writing(tmp_path):
# Test round trip with spikeinterface
recording_extractor_ap = NwbRecordingExtractor(
file_path=nwbfile_path,
electrical_series_name="ElectricalSeriesAP",
electrical_series_path="acquisition/ElectricalSeriesAP",
)

channel_ids = recording_extractor_ap.get_channel_ids()
np.testing.assert_array_equal(channel_ids, expected_channel_names_ap)

recording_extractor_lf = NwbRecordingExtractor(
file_path=nwbfile_path,
electrical_series_name="ElectricalSeriesLF",
electrical_series_path="acquisition/ElectricalSeriesLF",
)

channel_ids = recording_extractor_lf.get_channel_ids()
Expand Down
22 changes: 2 additions & 20 deletions tests/test_on_data/ecephys/test_spikeglx_metadata.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import datetime

import probeinterface as pi
import pytest
from numpy.testing import assert_array_equal
from spikeinterface.extractors import SpikeGLXRecordingExtractor

Expand Down Expand Up @@ -38,7 +37,8 @@ def test_spikelgx_recording_property_addition():
expected_contact_ids = probe.contact_ids

# Initialize the interface and get the added properties
interface = SpikeGLXRecordingInterface(file_path=ap_file_path)
folder_path = ap_file_path.parent
interface = SpikeGLXRecordingInterface(folder_path=folder_path, stream_id="imec0.ap")
group_name = interface.recording_extractor.get_property("group_name")
contact_shapes = interface.recording_extractor.get_property("contact_shapes")
shank_ids = interface.recording_extractor.get_property("shank_ids")
Expand All @@ -48,21 +48,3 @@ def test_spikelgx_recording_property_addition():
assert_array_equal(contact_shapes, expected_contact_shapes)
assert_array_equal(shank_ids, expected_shank_ids)
assert_array_equal(contact_ids, expected_contact_ids)


@pytest.mark.skip(reason="Legacy spikeextractors cannot read new GIN file.")
def test_matching_recording_property_addition_between_backends():
"""Test that the extracted properties match with both backends"""
folder_path = SPIKEGLX_PATH / "Noise4Sam_g0" / "Noise4Sam_g0_imec0"
ap_file_path = folder_path / "Noise4Sam_g0_t0.imec0.ap.bin"

interface_new = SpikeGLXRecordingInterface(file_path=ap_file_path)
shank_electrode_number_new = interface_new.recording_extractor.get_property("shank_electrode_number")
group_name_new = interface_new.recording_extractor.get_property("group_name")

interface_old = SpikeGLXRecordingInterface(file_path=ap_file_path, spikeextractors_backend=True)
shank_electrode_number_old = interface_old.recording_extractor.get_property("shank_electrode_number")
group_name_old = interface_old.recording_extractor.get_property("group_name")

assert_array_equal(shank_electrode_number_new, shank_electrode_number_old)
assert_array_equal(group_name_new, group_name_old)

0 comments on commit 90ccdc1

Please sign in to comment.