From 3f875d341b349a1f3eebd44a7cf87da9d24a25dc Mon Sep 17 00:00:00 2001 From: "Zahra M. Aghajan" Date: Tue, 10 Aug 2021 22:45:19 -0700 Subject: [PATCH 01/31] adding the new snirf data types to the constants --- mne/io/constants.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/mne/io/constants.py b/mne/io/constants.py index 4c4f3b57158..7875ce42637 100644 --- a/mne/io/constants.py +++ b/mne/io/constants.py @@ -916,6 +916,9 @@ FIFF.FIFFV_COIL_FNIRS_FD_AC_AMPLITUDE = 304 # fNIRS frequency domain AC amplitude FIFF.FIFFV_COIL_FNIRS_FD_PHASE = 305 # fNIRS frequency domain phase FIFF.FIFFV_COIL_FNIRS_RAW = FIFF.FIFFV_COIL_FNIRS_CW_AMPLITUDE # old alias +FIFF.FIFFV_COIL_FNIRS_TD_GATED_AMPLITUDE = 306 # fNIRS time-domain gated amplitude +FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE = 307 # fNIRS time-domain moments amplitude +FIFF.FIFFV_COIL_FNIRS_PROCESSED_AMPLITUDE = 308 # fNIRS processed amplitude FIFF.FIFFV_COIL_MCG_42 = 1000 # For testing the MCG software @@ -1002,7 +1005,9 @@ FIFF.FIFFV_COIL_DIPOLE, FIFF.FIFFV_COIL_FNIRS_HBO, FIFF.FIFFV_COIL_FNIRS_HBR, FIFF.FIFFV_COIL_FNIRS_RAW, FIFF.FIFFV_COIL_FNIRS_OD, FIFF.FIFFV_COIL_FNIRS_FD_AC_AMPLITUDE, - FIFF.FIFFV_COIL_FNIRS_FD_PHASE, FIFF.FIFFV_COIL_MCG_42, + FIFF.FIFFV_COIL_FNIRS_FD_PHASE, FIFF.FIFFV_COIL_FNIRS_TD_GATED_AMPLITUDE, + FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE, FIFF.FIFFV_COIL_FNIRS_PROCESSED_AMPLITUDE, + FIFF.FIFFV_COIL_MCG_42, FIFF.FIFFV_COIL_POINT_MAGNETOMETER, FIFF.FIFFV_COIL_AXIAL_GRAD_5CM, FIFF.FIFFV_COIL_VV_PLANAR_W, FIFF.FIFFV_COIL_VV_PLANAR_T1, FIFF.FIFFV_COIL_VV_PLANAR_T2, FIFF.FIFFV_COIL_VV_PLANAR_T3, From cc92adc70334069adb72287293e39b35e0ecbf54 Mon Sep 17 00:00:00 2001 From: "Zahra M. Aghajan" Date: Wed, 11 Aug 2021 09:28:36 -0700 Subject: [PATCH 02/31] more places to add the data types and constants --- mne/io/constants.py | 4 ++-- mne/io/pick.py | 22 +++++++++++++++++++++- 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/mne/io/constants.py b/mne/io/constants.py index 7875ce42637..a0bd522725e 100644 --- a/mne/io/constants.py +++ b/mne/io/constants.py @@ -918,7 +918,7 @@ FIFF.FIFFV_COIL_FNIRS_RAW = FIFF.FIFFV_COIL_FNIRS_CW_AMPLITUDE # old alias FIFF.FIFFV_COIL_FNIRS_TD_GATED_AMPLITUDE = 306 # fNIRS time-domain gated amplitude FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE = 307 # fNIRS time-domain moments amplitude -FIFF.FIFFV_COIL_FNIRS_PROCESSED_AMPLITUDE = 308 # fNIRS processed amplitude +FIFF.FIFFV_COIL_FNIRS_PROCESSED = 308 # fNIRS processed data FIFF.FIFFV_COIL_MCG_42 = 1000 # For testing the MCG software @@ -1006,7 +1006,7 @@ FIFF.FIFFV_COIL_FNIRS_HBR, FIFF.FIFFV_COIL_FNIRS_RAW, FIFF.FIFFV_COIL_FNIRS_OD, FIFF.FIFFV_COIL_FNIRS_FD_AC_AMPLITUDE, FIFF.FIFFV_COIL_FNIRS_FD_PHASE, FIFF.FIFFV_COIL_FNIRS_TD_GATED_AMPLITUDE, - FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE, FIFF.FIFFV_COIL_FNIRS_PROCESSED_AMPLITUDE, + FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE, FIFF.FIFFV_COIL_FNIRS_PROCESSED, FIFF.FIFFV_COIL_MCG_42, FIFF.FIFFV_COIL_POINT_MAGNETOMETER, FIFF.FIFFV_COIL_AXIAL_GRAD_5CM, FIFF.FIFFV_COIL_VV_PLANAR_W, FIFF.FIFFV_COIL_VV_PLANAR_T1, diff --git a/mne/io/pick.py b/mne/io/pick.py index d84b0a1a4f1..6cb842a6053 100644 --- a/mne/io/pick.py +++ b/mne/io/pick.py @@ -76,6 +76,18 @@ def get_channel_type_constants(include_defaults=False): kind=FIFF.FIFFV_FNIRS_CH, unit=FIFF.FIFF_UNIT_V, coil_type=FIFF.FIFFV_COIL_FNIRS_CW_AMPLITUDE), + fnirs_td_gated_amplitude=dict( + kind=FIFF.FIFFV_FNIRS_CH, + unit=FIFF.FIFF_UNIT_V, + coil_type=FIFF.FIFFV_COIL_FNIRS_TD_GATED_AMPLITUDE), + 
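# Illustrative sketch (not part of the patch): a quick check of how the new
# coil-type constants surface through the existing helpers, assuming an MNE
# checkout that already contains this change.
from mne.io.constants import FIFF
from mne.io.pick import get_channel_type_constants

print(int(FIFF.FIFFV_COIL_FNIRS_TD_GATED_AMPLITUDE))    # 306
print(int(FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE))  # 307
ch_defs = get_channel_type_constants(include_defaults=True)
print(ch_defs['fnirs_td_gated_amplitude']['coil_type'])  # 306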
fnirs_td_moments_amplitude=dict( + kind=FIFF.FIFFV_FNIRS_CH, + unit=FIFF.FIFF_UNIT_V, + coil_type=FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE), + fnirs_processed=dict( + kind=FIFF.FIFFV_FNIRS_CH, + unit=FIFF.FIFF_UNIT_V, + coil_type=FIFF.FIFFV_COIL_FNIRS_PROCESSED), fnirs_fd_ac_amplitude=dict( kind=FIFF.FIFFV_FNIRS_CH, unit=FIFF.FIFF_UNIT_V, @@ -160,6 +172,12 @@ def get_channel_type_constants(include_defaults=False): FIFF.FIFFV_COIL_FNIRS_FD_PHASE: 'fnirs_fd_phase', FIFF.FIFFV_COIL_FNIRS_OD: 'fnirs_od', + FIFF.FIFFV_COIL_FNIRS_TD_GATED_AMPLITUDE: + 'fnirs_td_gated_amplitude', + FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE: + 'fnirs_td_moments_amplitude', + FIFF.FIFFV_COIL_FNIRS_PROCESSED: + 'fnirs_processed', }), 'eeg': ('coil_type', {FIFF.FIFFV_COIL_EEG: 'eeg', FIFF.FIFFV_COIL_EEG_BIPOLAR: 'eeg', @@ -967,7 +985,9 @@ def _check_excludes_includes(chs, info=None, allow_bads=False): _PICK_TYPES_KEYS = tuple(list(_PICK_TYPES_DATA_DICT) + ['ref_meg']) _MEG_CH_TYPES_SPLIT = ('mag', 'grad', 'planar1', 'planar2') _FNIRS_CH_TYPES_SPLIT = ('hbo', 'hbr', 'fnirs_cw_amplitude', - 'fnirs_fd_ac_amplitude', 'fnirs_fd_phase', 'fnirs_od') + 'fnirs_fd_ac_amplitude', 'fnirs_fd_phase', 'fnirs_od', + 'fnirs_td_gated_amplitude', 'fnirs_td_moments_amplitude', + 'fnirs_processed') _DATA_CH_TYPES_ORDER_DEFAULT = ( 'mag', 'grad', 'eeg', 'csd', 'eog', 'ecg', 'resp', 'emg', 'ref_meg', 'misc', 'stim', 'chpi', 'exci', 'ias', 'syst', 'seeg', 'bio', 'ecog', From 0d2fe87b1207541dabbfb757198c1981c354338c Mon Sep 17 00:00:00 2001 From: "Zahra M. Aghajan" Date: Wed, 11 Aug 2021 11:20:20 -0700 Subject: [PATCH 03/31] adding constants for the allowed dataTypeLabels for the processed data type as outlined in the SNIRF specification document --- mne/io/constants.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/mne/io/constants.py b/mne/io/constants.py index a0bd522725e..682cf091a12 100644 --- a/mne/io/constants.py +++ b/mne/io/constants.py @@ -1064,3 +1064,22 @@ # add a comment here (with doi of a published source) above any new # aliases, as they are added } + +# SNIRF: Supported measurementList(k).dataTypeLabel values in dataTimeSeries +FNIRS_SNIRF_DATATYPELABELS = { + # These types are specified `here `_ + "HbO": 1, # Oxygenated hemoglobin (oxyhemoglobin) concentration + "HbR": 2, # Deoxygenated hemoglobin (deoxyhemoglobin) concentration + "HbT": 3, # Total hemoglobin concentration + "dOD": 4, # Change in optical density + "mua": 5, # Absorption coefficient + "musp": 6, # Scattering coefficient + "H2O": 7, # Water content + "Lipid": 8, # Lipid concentration + "BFi": 9, # Blood flow index + "HRF dOD": 10, # Hemodynamic response function for change in optical density + "HRF HbO": 11, # Hemodynamic response function for oxyhemoglobin concentration + "HRF HbR": 12, # Hemodynamic response function for deoxyhemoglobin concentration + "HRF HbT": 13, # Hemodynamic response function for total hemoglobin concentration + "HRF BFi": 14, # Hemodynamic response function for blood flow index +} \ No newline at end of file From 21ecb908b3d8873f8eb1ca643e2b9264654e488c Mon Sep 17 00:00:00 2001 From: "Zahra M. 
Aghajan" Date: Wed, 11 Aug 2021 15:58:28 -0700 Subject: [PATCH 04/31] updating the snirf reader to accept other data types: gated histograms, moments and processed (hbo/hbr) --- mne/io/snirf/_snirf.py | 209 ++++++++++++++++++++++++++++------------- 1 file changed, 145 insertions(+), 64 deletions(-) diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index de1070df8ef..1364a05db92 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -12,12 +12,14 @@ from ...annotations import Annotations from ...utils import logger, verbose, fill_doc, warn, _check_fname from ...utils.check import _require_version -from ..constants import FIFF +from ..constants import FIFF, FNIRS_SNIRF_DATATYPELABELS from .._digitization import _make_dig_points from ...transforms import _frame_to_str, apply_trans from ..nirx.nirx import _convert_fnirs_to_head from ..._freesurfer import get_mni_fiducials +AVAILABLE_DATA_TYPES = [1, 201, 301, 99999] + @fill_doc def read_raw_snirf(fname, optode_frame="unknown", preload=False, verbose=None): @@ -98,14 +100,14 @@ def __init__(self, fname, optode_frame="unknown", "MNE does not support this feature. " "Only the first dataset will be processed.") - if np.array(dat.get('nirs/data1/measurementList1/dataType')) != 1: - raise RuntimeError('File does not contain continuous wave ' - 'data. MNE only supports reading continuous' - ' wave amplitude SNIRF files. Expected type' - ' code 1 but received type code %d' % - (np.array(dat.get( - 'nirs/data1/measurementList1/dataType' - )))) + snirf_data_type = np.array( + dat.get('nirs/data1/measurementList1/dataType')) + if snirf_data_type not in AVAILABLE_DATA_TYPES: + raise RuntimeError( + "File does not contain the supported data types. \ + MNE only supports reading the following data types {}, \ + but received type code {}. Processing is only available \ + for data type 1 (CW data).".format(AVAILABLE_DATA_TYPES, snirf_data_type)) last_samps = dat.get('/nirs/data1/dataTimeSeries').shape[0] - 1 @@ -130,6 +132,17 @@ def __init__(self, fname, optode_frame="unknown", fnirs_wavelengths = np.array(dat.get('nirs/probe/wavelengths')) fnirs_wavelengths = [int(w) for w in fnirs_wavelengths] + # Get data type specific probe information + if snirf_data_type == 201: + fnirs_time_delays = np.array( + dat.get('nirs/probe/timeDelays')).tolist() + fnirs_time_delay_widths = np.array( + dat.get('nirs/probe/timeDelayWidths')).tolist() + elif snirf_data_type == 301: + fnirs_moment_orders = np.array( + dat.get('nirs/probe/momentOrders')) + fnirs_moment_orders = [int(m) for m in fnirs_moment_orders] + # Extract channels def atoi(text): return int(text) if text.isdigit() else text @@ -199,26 +212,74 @@ def natural_keys(text): assert len(sources) == srcPos3D.shape[0] assert len(detectors) == detPos3D.shape[0] + # Helper function for when the numpy array has shape (), i.e. just one element. 
+ def _correct_shape(arr): + if arr.shape == (): + arr = arr[np.newaxis] + return arr + chnames = [] for chan in channels: - src_idx = int(np.array(dat.get('nirs/data1/' + - chan + '/sourceIndex'))[0]) - det_idx = int(np.array(dat.get('nirs/data1/' + - chan + '/detectorIndex'))[0]) - wve_idx = int(np.array(dat.get('nirs/data1/' + - chan + '/wavelengthIndex'))[0]) - ch_name = sources[src_idx - 1] + '_' +\ - detectors[det_idx - 1] + ' ' +\ - str(fnirs_wavelengths[wve_idx - 1]) - chnames.append(ch_name) + src_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/sourceIndex')))[0]) + det_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/detectorIndex')))[0]) + if snirf_data_type == 1: + wve_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/wavelengthIndex')))[0]) + ch_name = sources[src_idx - 1] + '_' +\ + detectors[det_idx - 1] + ' ' +\ + str(fnirs_wavelengths[wve_idx - 1]) + chnames.append(ch_name) + elif snirf_data_type == 201: + wve_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/wavelengthIndex')))[0]) + bin_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/dataTypeIndex')))[0]) + ch_name = sources[src_idx - 1] + '_' +\ + detectors[det_idx - 1] + ' ' +\ + str(fnirs_wavelengths[wve_idx - 1]) + ' ' +\ + 'bin' + str(fnirs_time_delays[bin_idx - 1]) + chnames.append(ch_name) + elif snirf_data_type == 301: + wve_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/wavelengthIndex')))[0]) + moment_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/dataTypeIndex')))[0]) + ch_name = sources[src_idx - 1] + '_' +\ + detectors[det_idx - 1] + ' ' +\ + str(fnirs_wavelengths[wve_idx - 1]) + ' ' +\ + 'moment' + str(fnirs_moment_orders[moment_idx - 1]) + chnames.append(ch_name) + elif snirf_data_type == 99999: + hb_id = _correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/dataTypeLabel')))[0].decode('UTF-8') + ch_name = sources[src_idx - 1] + '_' +\ + detectors[det_idx - 1] + ' ' +\ + hb_id + chnames.append(ch_name) # Create mne structure - info = create_info(chnames, - sampling_rate, - ch_types='fnirs_cw_amplitude') + if snirf_data_type == 1: + info = create_info(chnames, + sampling_rate, + ch_types='fnirs_cw_amplitude') + elif snirf_data_type == 201: + info = create_info(chnames, + sampling_rate, + ch_types='fnirs_td_gated_amplitude') + elif snirf_data_type == 301: + info = create_info(chnames, + sampling_rate, + ch_types='fnirs_td_moments_amplitude') + elif snirf_data_type == 99999: + info = create_info(chnames, + sampling_rate, + ch_types='fnirs_processed') subject_info = {} - names = np.array(dat.get('nirs/metaDataTags/SubjectID')) + names = _correct_shape( + np.array(dat.get('nirs/metaDataTags/SubjectID'))) subject_info['first_name'] = names[0].decode('UTF-8') # Read non standard (but allowed) custom metadata tags if 'lastName' in dat.get('nirs/metaDataTags/'): @@ -239,7 +300,8 @@ def natural_keys(text): # Update info info.update(subject_info=subject_info) - LengthUnit = np.array(dat.get('/nirs/metaDataTags/LengthUnit')) + LengthUnit = _correct_shape( + np.array(dat.get('/nirs/metaDataTags/LengthUnit'))) LengthUnit = LengthUnit[0].decode('UTF-8') scal = 1 if "cm" in LengthUnit: @@ -268,21 +330,38 @@ def natural_keys(text): coord_frame = FIFF.FIFFV_COORD_UNKNOWN for idx, chan in enumerate(channels): - src_idx = int(np.array(dat.get('nirs/data1/' + - chan + '/sourceIndex'))[0]) - det_idx = int(np.array(dat.get('nirs/data1/' + - chan + '/detectorIndex'))[0]) - wve_idx = 
int(np.array(dat.get('nirs/data1/' + - chan + '/wavelengthIndex'))[0]) + src_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/sourceIndex')))[0]) + det_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/detectorIndex')))[0]) info['chs'][idx]['loc'][3:6] = srcPos3D[src_idx - 1, :] info['chs'][idx]['loc'][6:9] = detPos3D[det_idx - 1, :] # Store channel as mid point midpoint = (info['chs'][idx]['loc'][3:6] + info['chs'][idx]['loc'][6:9]) / 2 info['chs'][idx]['loc'][0:3] = midpoint - info['chs'][idx]['loc'][9] = fnirs_wavelengths[wve_idx - 1] info['chs'][idx]['coord_frame'] = coord_frame + # get data type specific info: + if snirf_data_type in [1, 201, 301]: + wve_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/wavelengthIndex')))[0]) + info['chs'][idx]['loc'][9] = fnirs_wavelengths[wve_idx - 1] + elif snirf_data_type == 99999: + hb_id = _correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/dataTypeLabel')))[0].decode('UTF-8') + info['chs'][idx]['loc'][9] = FNIRS_SNIRF_DATATYPELABELS[hb_id] + + if snirf_data_type == 201: + bin_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/dataTypeIndex')))[0]) + info['chs'][idx]['loc'][10] = fnirs_time_delays[bin_idx - + 1] * fnirs_time_delay_widths + elif snirf_data_type == 301: + moment_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + + chan + '/dataTypeIndex')))[0]) + info['chs'][idx]['loc'][10] = fnirs_moment_orders[moment_idx - 1] + if 'landmarkPos3D' in dat.get('nirs/probe/'): diglocs = np.array(dat.get('/nirs/probe/landmarkPos3D')) digname = np.array(dat.get('/nirs/probe/landmarkLabels')) @@ -320,39 +399,40 @@ def natural_keys(text): info['dig'] = _format_dig_points(dig) del head_t - str_date = np.array((dat.get( - '/nirs/metaDataTags/MeasurementDate')))[0].decode('UTF-8') - str_time = np.array((dat.get( - '/nirs/metaDataTags/MeasurementTime')))[0].decode('UTF-8') - str_datetime = str_date + str_time - - # Several formats have been observed so we try each in turn - for dt_code in ['%Y-%m-%d%H:%M:%SZ', - '%Y-%m-%d%H:%M:%S']: - try: - meas_date = datetime.datetime.strptime( - str_datetime, dt_code) - except ValueError: - pass + str_date = _correct_shape(np.array((dat.get( + '/nirs/metaDataTags/MeasurementDate'))))[0].decode('UTF-8') + str_time = _correct_shape(np.array((dat.get( + '/nirs/metaDataTags/MeasurementTime'))))[0].decode('UTF-8') + str_datetime = str_date + str_time + + # Several formats have been observed so we try each in turn + for dt_code in ['%Y-%m-%d%H:%M:%SZ', + '%Y-%m-%d%H:%M:%S']: + try: + meas_date = datetime.datetime.strptime( + str_datetime, dt_code) + except ValueError: + pass + else: + break else: - break - else: - warn("Extraction of measurement date from SNIRF file failed. " - "The date is being set to January 1st, 2000, " - f"instead of {str_datetime}") - meas_date = datetime.datetime(2000, 1, 1, 0, 0, 0) - meas_date = meas_date.replace(tzinfo=datetime.timezone.utc) - info['meas_date'] = meas_date - - if 'DateOfBirth' in dat.get('nirs/metaDataTags/'): - str_birth = np.array((dat.get('/nirs/metaDataTags/' - 'DateOfBirth')))[0].decode() - birth_matched = re.fullmatch(r'(\d+)-(\d+)-(\d+)', str_birth) - if birth_matched is not None: - info["subject_info"]['birthday'] = ( - int(birth_matched.groups()[0]), - int(birth_matched.groups()[1]), - int(birth_matched.groups()[2])) + warn("Extraction of measurement date from SNIRF file failed. 
" + "The date is being set to January 1st, 2000, " + f"instead of {str_datetime}") + meas_date = datetime.datetime(2000, 1, 1, 0, 0, 0) + meas_date = meas_date.replace(tzinfo=datetime.timezone.utc) + info['meas_date'] = meas_date + + if 'DateOfBirth' in dat.get('nirs/metaDataTags/'): + str_birth = _correct_shape(np.array((dat.get('/nirs/metaDataTags/' + 'DateOfBirth'))))[0].decode() + birth_matched = re.fullmatch( + r'(\d+)-(\d+)-(\d+)', str_birth) + if birth_matched is not None: + info["subject_info"]['birthday'] = ( + int(birth_matched.groups()[0]), + int(birth_matched.groups()[1]), + int(birth_matched.groups()[2])) super(RawSNIRF, self).__init__(info, preload, filenames=[fname], last_samps=[last_samps], @@ -365,7 +445,8 @@ def natural_keys(text): data = np.atleast_2d(np.array( dat.get('/nirs/' + key + '/data'))) if data.size > 0: - desc = dat.get('/nirs/' + key + '/name')[0] + desc = _correct_shape( + np.array(dat.get('/nirs/' + key + '/name')))[0] annot.append(data[:, 0], 1.0, desc.decode('UTF-8')) self.set_annotations(annot) From 4491064623424ad9c7e9561c3eed52874e255a63 Mon Sep 17 00:00:00 2001 From: "Zahra M. Aghajan" Date: Wed, 11 Aug 2021 16:55:23 -0700 Subject: [PATCH 05/31] setting the formatter line length --- mne/io/pick.py | 8 +-- mne/io/snirf/_snirf.py | 113 +++++++++++++++++++++++++++++------------ 2 files changed, 84 insertions(+), 37 deletions(-) diff --git a/mne/io/pick.py b/mne/io/pick.py index 6cb842a6053..36c8587358f 100644 --- a/mne/io/pick.py +++ b/mne/io/pick.py @@ -984,10 +984,10 @@ def _check_excludes_includes(chs, info=None, allow_bads=False): dbs=True) _PICK_TYPES_KEYS = tuple(list(_PICK_TYPES_DATA_DICT) + ['ref_meg']) _MEG_CH_TYPES_SPLIT = ('mag', 'grad', 'planar1', 'planar2') -_FNIRS_CH_TYPES_SPLIT = ('hbo', 'hbr', 'fnirs_cw_amplitude', - 'fnirs_fd_ac_amplitude', 'fnirs_fd_phase', 'fnirs_od', - 'fnirs_td_gated_amplitude', 'fnirs_td_moments_amplitude', - 'fnirs_processed') +_FNIRS_CH_TYPES_SPLIT = ( + 'hbo', 'hbr', 'fnirs_cw_amplitude', 'fnirs_fd_ac_amplitude', + 'fnirs_fd_phase', 'fnirs_od', 'fnirs_td_gated_amplitude', + 'fnirs_td_moments_amplitude', 'fnirs_processed') _DATA_CH_TYPES_ORDER_DEFAULT = ( 'mag', 'grad', 'eeg', 'csd', 'eog', 'ecg', 'resp', 'emg', 'ref_meg', 'misc', 'stim', 'chpi', 'exci', 'ias', 'syst', 'seeg', 'bio', 'ecog', diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index 1364a05db92..b7fe4ef6657 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -220,40 +220,67 @@ def _correct_shape(arr): chnames = [] for chan in channels: - src_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/sourceIndex')))[0]) - det_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/detectorIndex')))[0]) + src_idx = int( + _correct_shape( + np.array( + dat.get( + 'nirs/data1/' + chan + '/sourceIndex'))) + [0]) + det_idx = int( + _correct_shape( + np.array( + dat.get( + 'nirs/data1/' + chan + '/detectorIndex')))[0]) if snirf_data_type == 1: - wve_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/wavelengthIndex')))[0]) + wve_idx = int( + _correct_shape( + np.array( + dat.get( + 'nirs/data1/' + chan + + '/wavelengthIndex')))[0]) ch_name = sources[src_idx - 1] + '_' +\ detectors[det_idx - 1] + ' ' +\ str(fnirs_wavelengths[wve_idx - 1]) chnames.append(ch_name) elif snirf_data_type == 201: - wve_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/wavelengthIndex')))[0]) - bin_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/dataTypeIndex')))[0]) + 
wve_idx = int( + _correct_shape( + np.array( + dat.get( + 'nirs/data1/' + chan + + '/wavelengthIndex')))[0]) + bin_idx = int( + _correct_shape( + np.array( + dat.get( + 'nirs/data1/' + chan + + '/dataTypeIndex')))[0]) ch_name = sources[src_idx - 1] + '_' +\ detectors[det_idx - 1] + ' ' +\ str(fnirs_wavelengths[wve_idx - 1]) + ' ' +\ 'bin' + str(fnirs_time_delays[bin_idx - 1]) chnames.append(ch_name) elif snirf_data_type == 301: - wve_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/wavelengthIndex')))[0]) - moment_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/dataTypeIndex')))[0]) + wve_idx = int( + _correct_shape( + np.array( + dat.get( + 'nirs/data1/' + chan + + '/wavelengthIndex')))[0]) + moment_idx = int( + _correct_shape( + np.array( + dat.get( + 'nirs/data1/' + chan + + '/dataTypeIndex')))[0]) ch_name = sources[src_idx - 1] + '_' +\ detectors[det_idx - 1] + ' ' +\ str(fnirs_wavelengths[wve_idx - 1]) + ' ' +\ 'moment' + str(fnirs_moment_orders[moment_idx - 1]) chnames.append(ch_name) elif snirf_data_type == 99999: - hb_id = _correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/dataTypeLabel')))[0].decode('UTF-8') + hb_id = _correct_shape( + np.array(dat.get('nirs/data1/' + chan + '/dataTypeLabel')))[0].decode('UTF-8') ch_name = sources[src_idx - 1] + '_' +\ detectors[det_idx - 1] + ' ' +\ hb_id @@ -330,10 +357,17 @@ def _correct_shape(arr): coord_frame = FIFF.FIFFV_COORD_UNKNOWN for idx, chan in enumerate(channels): - src_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/sourceIndex')))[0]) - det_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/detectorIndex')))[0]) + src_idx = int( + _correct_shape( + np.array( + dat.get( + 'nirs/data1/' + chan + '/sourceIndex'))) + [0]) + det_idx = int( + _correct_shape( + np.array( + dat.get( + 'nirs/data1/' + chan + '/detectorIndex')))[0]) info['chs'][idx]['loc'][3:6] = srcPos3D[src_idx - 1, :] info['chs'][idx]['loc'][6:9] = detPos3D[det_idx - 1, :] # Store channel as mid point @@ -344,22 +378,34 @@ def _correct_shape(arr): # get data type specific info: if snirf_data_type in [1, 201, 301]: - wve_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/wavelengthIndex')))[0]) + wve_idx = int( + _correct_shape( + np.array( + dat.get( + 'nirs/data1/' + chan + + '/wavelengthIndex')))[0]) info['chs'][idx]['loc'][9] = fnirs_wavelengths[wve_idx - 1] elif snirf_data_type == 99999: - hb_id = _correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/dataTypeLabel')))[0].decode('UTF-8') + hb_id = _correct_shape( + np.array(dat.get('nirs/data1/' + chan + '/dataTypeLabel')))[0].decode('UTF-8') info['chs'][idx]['loc'][9] = FNIRS_SNIRF_DATATYPELABELS[hb_id] if snirf_data_type == 201: - bin_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/dataTypeIndex')))[0]) + bin_idx = int( + _correct_shape( + np.array( + dat.get( + 'nirs/data1/' + chan + + '/dataTypeIndex')))[0]) info['chs'][idx]['loc'][10] = fnirs_time_delays[bin_idx - 1] * fnirs_time_delay_widths elif snirf_data_type == 301: - moment_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/dataTypeIndex')))[0]) + moment_idx = int( + _correct_shape( + np.array( + dat.get( + 'nirs/data1/' + chan + + '/dataTypeIndex')))[0]) info['chs'][idx]['loc'][10] = fnirs_moment_orders[moment_idx - 1] if 'landmarkPos3D' in dat.get('nirs/probe/'): @@ -416,16 +462,17 @@ def _correct_shape(arr): else: break else: - warn("Extraction of measurement date from SNIRF file failed. 
" - "The date is being set to January 1st, 2000, " - f"instead of {str_datetime}") + warn( + "Extraction of measurement date from SNIRF file failed. " + "The date is being set to January 1st, 2000, " + f"instead of {str_datetime}") meas_date = datetime.datetime(2000, 1, 1, 0, 0, 0) meas_date = meas_date.replace(tzinfo=datetime.timezone.utc) info['meas_date'] = meas_date if 'DateOfBirth' in dat.get('nirs/metaDataTags/'): - str_birth = _correct_shape(np.array((dat.get('/nirs/metaDataTags/' - 'DateOfBirth'))))[0].decode() + str_birth = _correct_shape( + np.array((dat.get('/nirs/metaDataTags/' 'DateOfBirth'))))[0].decode() birth_matched = re.fullmatch( r'(\d+)-(\d+)-(\d+)', str_birth) if birth_matched is not None: From dbdcebce5d279253ca6906bfa0ef375ccb904687 Mon Sep 17 00:00:00 2001 From: "Zahra M. Aghajan" Date: Wed, 11 Aug 2021 17:15:17 -0700 Subject: [PATCH 06/31] the reordering of the channels does not make much sense, need to inquire --- mne/io/snirf/_snirf.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index b7fe4ef6657..82a07f29e45 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -498,12 +498,13 @@ def _correct_shape(arr): self.set_annotations(annot) # Reorder channels to match expected ordering in MNE - num_chans = len(self.ch_names) - chans = [] - for idx in range(num_chans // 2): - chans.append(idx) - chans.append(idx + num_chans // 2) - self.pick(picks=chans) + if snirf_data_type in [1, 99999]: + num_chans = len(self.ch_names) + chans = [] + for idx in range(num_chans // 2): + chans.append(idx) + chans.append(idx + num_chans // 2) + self.pick(picks=chans) # Validate that the fNIRS info is correctly formatted _validate_nirs_info(self.info) From a35822a84e57a20160462581722ef80d36968cd0 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Sun, 21 Aug 2022 20:14:13 +0100 Subject: [PATCH 07/31] MAINT: Simpler with .item --- mne/io/constants.py | 19 -- mne/io/snirf/_snirf.py | 444 +++++++++++++---------------------------- mne/utils/__init__.py | 2 +- 3 files changed, 140 insertions(+), 325 deletions(-) diff --git a/mne/io/constants.py b/mne/io/constants.py index 8db52d2e45f..268b0e7d28c 100644 --- a/mne/io/constants.py +++ b/mne/io/constants.py @@ -1069,22 +1069,3 @@ # add a comment here (with doi of a published source) above any new # aliases, as they are added } - -# SNIRF: Supported measurementList(k).dataTypeLabel values in dataTimeSeries -FNIRS_SNIRF_DATATYPELABELS = { - # These types are specified `here `_ - "HbO": 1, # Oxygenated hemoglobin (oxyhemoglobin) concentration - "HbR": 2, # Deoxygenated hemoglobin (deoxyhemoglobin) concentration - "HbT": 3, # Total hemoglobin concentration - "dOD": 4, # Change in optical density - "mua": 5, # Absorption coefficient - "musp": 6, # Scattering coefficient - "H2O": 7, # Water content - "Lipid": 8, # Lipid concentration - "BFi": 9, # Blood flow index - "HRF dOD": 10, # Hemodynamic response function for change in optical density - "HRF HbO": 11, # Hemodynamic response function for oxyhemoglobin concentration - "HRF HbR": 12, # Hemodynamic response function for deoxyhemoglobin concentration - "HRF HbT": 13, # Hemodynamic response function for total hemoglobin concentration - "HRF BFi": 14, # Hemodynamic response function for blood flow index -} \ No newline at end of file diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index 81add54897a..2758aa577ab 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -10,21 +10,44 @@ 
from ..meas_info import create_info, _format_dig_points from ..utils import _mult_cal_one from ...annotations import Annotations -<<<<<<< HEAD -from ...utils import logger, verbose, fill_doc, warn, _check_fname -from ...utils.check import _require_version -from ..constants import FIFF, FNIRS_SNIRF_DATATYPELABELS -======= from ...utils import (logger, verbose, fill_doc, warn, _check_fname, - _import_h5py) + _import_h5py, NamedInt) from ..constants import FIFF ->>>>>>> upstream/main from .._digitization import _make_dig_points from ...transforms import _frame_to_str, apply_trans from ..nirx.nirx import _convert_fnirs_to_head from ..._freesurfer import get_mni_fiducials -AVAILABLE_DATA_TYPES = [1, 201, 301, 99999] +SNIRF_CW_AMPLITUDE = NamedInt('SNIRF_CW_AMPLITUDE', 1) +SNIRF_TD_GATED_AMPLITUDE = NamedInt('SNIRF_TD_GATED_AMPLITUDE', 201) +SNIRF_TD_MOMENTS_AMPLITUDE = NamedInt('SNIRF_TD_MOMENTS_AMPLITUDE', 301) +SNIRF_FNIRS_PROCESSED = NamedInt('SNIRF_FNIRS_PROCESSED', 99999) +_AVAILABLE_SNIRF_DATA_TYPES = ( + SNIRF_CW_AMPLITUDE, + SNIRF_TD_GATED_AMPLITUDE, + SNIRF_TD_MOMENTS_AMPLITUDE, + SNIRF_FNIRS_PROCESSED) + + +# SNIRF: Supported measurementList(k).dataTypeLabel values in dataTimeSeries +FNIRS_SNIRF_DATATYPELABELS = { + # These types are specified here: + # https://github.com/fNIRS/snirf/blob/master/snirf_specification.md#supported-measurementlistkdatatypelabel-values-in-datatimeseries # noqa: E501 + "HbO": 1, # Oxygenated hemoglobin (oxyhemoglobin) concentration + "HbR": 2, # Deoxygenated hemoglobin (deoxyhemoglobin) concentration + "HbT": 3, # Total hemoglobin concentration + "dOD": 4, # Change in optical density + "mua": 5, # Absorption coefficient + "musp": 6, # Scattering coefficient + "H2O": 7, # Water content + "Lipid": 8, # Lipid concentration + "BFi": 9, # Blood flow index + "HRF dOD": 10, # HRF for change in optical density + "HRF HbO": 11, # HRF for oxyhemoglobin concentration + "HRF HbR": 12, # HRF for deoxyhemoglobin concentration + "HRF HbT": 13, # HRF for total hemoglobin concentration + "HRF BFi": 14, # HRF for blood flow index +} @fill_doc @@ -104,31 +127,18 @@ def __init__(self, fname, optode_frame="unknown", "MNE does not support this feature. " "Only the first dataset will be processed.") -<<<<<<< HEAD - snirf_data_type = np.array( - dat.get('nirs/data1/measurementList1/dataType')) - if snirf_data_type not in AVAILABLE_DATA_TYPES: - raise RuntimeError( - "File does not contain the supported data types. \ - MNE only supports reading the following data types {}, \ - but received type code {}. Processing is only available \ - for data type 1 (CW data).".format(AVAILABLE_DATA_TYPES, snirf_data_type)) -======= manafacturer = _get_metadata_str(dat, "ManufacturerName") if (optode_frame == "unknown") & (manafacturer == "Gowerlabs"): optode_frame = "head" snirf_data_type = np.array(dat.get('nirs/data1/measurementList1' '/dataType')).item() - if snirf_data_type not in [1, 99999]: - # 1 = Continuous Wave - # 99999 = Processed - raise RuntimeError('MNE only supports reading continuous' - ' wave amplitude and processed haemoglobin' - ' SNIRF files. Expected type' - ' code 1 or 99999 but received type ' - f'code {snirf_data_type}') ->>>>>>> upstream/main + if snirf_data_type not in _AVAILABLE_SNIRF_DATA_TYPES: + raise RuntimeError( + "File does not contain a supported data type. 
" + "MNE only supports reading the following data types " + f"{_AVAILABLE_SNIRF_DATA_TYPES}, but received type " + f"{snirf_data_type}.") last_samps = dat.get('/nirs/data1/dataTimeSeries').shape[0] - 1 @@ -149,12 +159,12 @@ def __init__(self, fname, optode_frame="unknown", 'with two wavelengths.') # Get data type specific probe information - if snirf_data_type == 201: + if snirf_data_type == SNIRF_TD_GATED_AMPLITUDE: fnirs_time_delays = np.array( dat.get('nirs/probe/timeDelays')).tolist() fnirs_time_delay_widths = np.array( dat.get('nirs/probe/timeDelayWidths')).tolist() - elif snirf_data_type == 301: + elif snirf_data_type == SNIRF_TD_MOMENTS_AMPLITUDE: fnirs_moment_orders = np.array( dat.get('nirs/probe/momentOrders')) fnirs_moment_orders = [int(m) for m in fnirs_moment_orders] @@ -180,8 +190,8 @@ def natural_keys(text): sources = np.array(dat.get('nirs/probe/sourceLabels')) sources = [s.decode('UTF-8') for s in sources] else: - sources = np.unique([_correct_shape(np.array(dat.get( - 'nirs/data1/' + c + '/sourceIndex')))[0] + sources = np.unique([np.array(dat.get( + f'nirs/data1/{c}/sourceIndex')).item() for c in channels]) sources = [f"S{int(s)}" for s in sources] @@ -192,8 +202,8 @@ def natural_keys(text): detectors = np.array(dat.get('nirs/probe/detectorLabels')) detectors = [d.decode('UTF-8') for d in detectors] else: - detectors = np.unique([_correct_shape(np.array(dat.get( - 'nirs/data1/' + c + '/detectorIndex')))[0] + detectors = np.unique([np.array(dat.get( + f'nirs/data1/{c}/detectorIndex')).item() for c in channels]) detectors = [f"D{int(d)}" for d in detectors] @@ -233,139 +243,58 @@ def natural_keys(text): assert len(sources) == srcPos3D.shape[0] assert len(detectors) == detPos3D.shape[0] - # Helper function for when the numpy array has shape (), i.e. just one element. 
- def _correct_shape(arr): - if arr.shape == (): - arr = arr[np.newaxis] - return arr - chnames = [] ch_types = [] for chan in channels: -<<<<<<< HEAD - src_idx = int( - _correct_shape( - np.array( - dat.get( - 'nirs/data1/' + chan + '/sourceIndex'))) - [0]) - det_idx = int( - _correct_shape( - np.array( - dat.get( - 'nirs/data1/' + chan + '/detectorIndex')))[0]) - if snirf_data_type == 1: - wve_idx = int( - _correct_shape( - np.array( - dat.get( - 'nirs/data1/' + chan + - '/wavelengthIndex')))[0]) -======= - src_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/sourceIndex')))[0]) - det_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/detectorIndex')))[0]) - - if snirf_data_type == 1: - wve_idx = int(_correct_shape(np.array( - dat.get('nirs/data1/' + chan + - '/wavelengthIndex')))[0]) ->>>>>>> upstream/main - ch_name = sources[src_idx - 1] + '_' +\ - detectors[det_idx - 1] + ' ' +\ - str(fnirs_wavelengths[wve_idx - 1]) + ch_root = f'nirs/data1/{chan}' + src_idx = int(np.array( + dat.get(f'{ch_root}/sourceIndex')).item()) - 1 + det_idx = int(np.array( + dat.get(f'{ch_root}/detectorIndex')).item()) - 1 + if snirf_data_type == SNIRF_CW_AMPLITUDE: + wve_idx = int(np.array( + dat.get(f'{ch_root}/wavelengthIndex')).item()) + ch_name = ( + f'{sources[src_idx]}_{detectors[det_idx]} ' + f'{fnirs_wavelengths[wve_idx - 1]}') chnames.append(ch_name) -<<<<<<< HEAD - elif snirf_data_type == 201: - wve_idx = int( - _correct_shape( - np.array( - dat.get( - 'nirs/data1/' + chan + - '/wavelengthIndex')))[0]) - bin_idx = int( - _correct_shape( - np.array( - dat.get( - 'nirs/data1/' + chan + - '/dataTypeIndex')))[0]) - ch_name = sources[src_idx - 1] + '_' +\ - detectors[det_idx - 1] + ' ' +\ - str(fnirs_wavelengths[wve_idx - 1]) + ' ' +\ - 'bin' + str(fnirs_time_delays[bin_idx - 1]) - chnames.append(ch_name) - elif snirf_data_type == 301: - wve_idx = int( - _correct_shape( - np.array( - dat.get( - 'nirs/data1/' + chan + - '/wavelengthIndex')))[0]) - moment_idx = int( - _correct_shape( - np.array( - dat.get( - 'nirs/data1/' + chan + - '/dataTypeIndex')))[0]) - ch_name = sources[src_idx - 1] + '_' +\ - detectors[det_idx - 1] + ' ' +\ - str(fnirs_wavelengths[wve_idx - 1]) + ' ' +\ - 'moment' + str(fnirs_moment_orders[moment_idx - 1]) + ch_types.append('fnirs_cw_amplitude') + elif snirf_data_type == SNIRF_TD_GATED_AMPLITUDE: + wve_idx = np.array( + dat.get(f'{ch_root}/wavelengthIndex'), int).item() + bin_idx = np.array( + dat.get(f'{ch_root}/dataTypeIndex'), int).item() + ch_name = ( + f'{sources[src_idx]}_{detectors[det_idx]} ' + f'{fnirs_wavelengths[wve_idx - 1]} bin' + f'{fnirs_time_delays[bin_idx - 1]}') chnames.append(ch_name) - elif snirf_data_type == 99999: - hb_id = _correct_shape( - np.array(dat.get('nirs/data1/' + chan + '/dataTypeLabel')))[0].decode('UTF-8') - ch_name = sources[src_idx - 1] + '_' +\ - detectors[det_idx - 1] + ' ' +\ - hb_id + ch_types.append('fnirs_td_gated_amplitude') + elif snirf_data_type == SNIRF_TD_MOMENTS_AMPLITUDE: + wve_idx = np.array( + dat.get(f'{ch_root}/wavelengthIndex', int)).item() - 1 + moment_idx = np.array( + dat.get(f'{ch_root}/dataTypeIndex'), int).item() - 1 + ch_name = ( + f'{sources[src_idx]}_{detectors[det_idx]} ' + f'{fnirs_wavelengths[wve_idx]} ' + f'moment{fnirs_moment_orders[moment_idx]}') chnames.append(ch_name) - - # Create mne structure - if snirf_data_type == 1: - info = create_info(chnames, - sampling_rate, - ch_types='fnirs_cw_amplitude') - elif snirf_data_type == 201: - info = create_info(chnames, - 
sampling_rate, - ch_types='fnirs_td_gated_amplitude') - elif snirf_data_type == 301: - info = create_info(chnames, - sampling_rate, - ch_types='fnirs_td_moments_amplitude') - elif snirf_data_type == 99999: - info = create_info(chnames, - sampling_rate, - ch_types='fnirs_processed') - - subject_info = {} - names = _correct_shape( - np.array(dat.get('nirs/metaDataTags/SubjectID'))) - subject_info['first_name'] = names[0].decode('UTF-8') -======= - ch_types.append('fnirs_cw_amplitude') - - elif snirf_data_type == 99999: - dt_id = _correct_shape( - np.array(dat.get('nirs/data1/' + chan + - '/dataTypeLabel')))[0].decode('UTF-8') - + ch_types.append('fnirs_td_moments_amplitude') + elif snirf_data_type == SNIRF_FNIRS_PROCESSED: + dt_id = np.array(dat.get( + f'{ch_root}/dataTypeLabel')).item().decode('UTF-8') # Convert between SNIRF processed names and MNE type names dt_id = dt_id.lower().replace("dod", "fnirs_od") - - ch_name = sources[src_idx - 1] + '_' + \ - detectors[det_idx - 1] - + ch_name = f'{sources[src_idx]}_{detectors[det_idx]}' if dt_id == "fnirs_od": - wve_idx = int(_correct_shape(np.array( - dat.get('nirs/data1/' + chan + - '/wavelengthIndex')))[0]) - suffix = ' ' + str(fnirs_wavelengths[wve_idx - 1]) + wve_idx = ( + np.array(dat.get( + f'{ch_root}/wavelengthIndex'), int).item() - 1) + suffix = f' {fnirs_wavelengths[wve_idx]}' else: - suffix = ' ' + dt_id.lower() + suffix = f' {dt_id.lower()}' ch_name = ch_name + suffix - chnames.append(ch_name) ch_types.append(dt_id) @@ -375,10 +304,8 @@ def _correct_shape(arr): ch_types=ch_types) subject_info = {} - names = np.array(dat.get('nirs/metaDataTags/SubjectID')) - subject_info['first_name'] = \ - _correct_shape(names)[0].decode('UTF-8') ->>>>>>> upstream/main + name = np.array(dat.get('nirs/metaDataTags/SubjectID')).item() + subject_info['first_name'] = name.decode('UTF-8') # Read non standard (but allowed) custom metadata tags if 'lastName' in dat.get('nirs/metaDataTags/'): ln = dat.get('/nirs/metaDataTags/lastName')[0].decode('UTF-8') @@ -398,19 +325,8 @@ def _correct_shape(arr): # Update info info.update(subject_info=subject_info) -<<<<<<< HEAD - LengthUnit = _correct_shape( - np.array(dat.get('/nirs/metaDataTags/LengthUnit'))) - LengthUnit = LengthUnit[0].decode('UTF-8') - scal = 1 - if "cm" in LengthUnit: - scal = 100 - elif "mm" in LengthUnit: - scal = 1000 -======= length_unit = _get_metadata_str(dat, "LengthUnit") length_scaling = _get_lengthunit_scaling(length_unit) ->>>>>>> upstream/main srcPos3D /= length_scaling detPos3D /= length_scaling @@ -433,73 +349,47 @@ def _correct_shape(arr): coord_frame = FIFF.FIFFV_COORD_UNKNOWN for idx, chan in enumerate(channels): -<<<<<<< HEAD - src_idx = int( - _correct_shape( - np.array( - dat.get( - 'nirs/data1/' + chan + '/sourceIndex'))) - [0]) - det_idx = int( - _correct_shape( - np.array( - dat.get( - 'nirs/data1/' + chan + '/detectorIndex')))[0]) -======= - src_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/sourceIndex')))[0]) - det_idx = int(_correct_shape(np.array(dat.get('nirs/data1/' + - chan + '/detectorIndex')))[0]) - ->>>>>>> upstream/main - info['chs'][idx]['loc'][3:6] = srcPos3D[src_idx - 1, :] - info['chs'][idx]['loc'][6:9] = detPos3D[det_idx - 1, :] + src_idx = np.array( + dat.get(f'nirs/data1/{chan}/sourceIndex'), + int).item() - 1 + det_idx = np.array( + dat.get(f'nirs/data1/{chan}/detectorIndex'), + int).item() - 1 + info['chs'][idx]['loc'][3:6] = srcPos3D[src_idx, :] + info['chs'][idx]['loc'][6:9] = detPos3D[det_idx, :] # Store channel as mid point 
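# Illustrative aside (not part of the patch): how the 12-element ``loc`` vector
# is laid out by this loop; positions below are made-up values in metres.
import numpy as np

src_pos = np.array([0.08, 0.02, 0.06])
det_pos = np.array([0.06, 0.02, 0.07])
loc = np.zeros(12)
loc[3:6] = src_pos                  # source position
loc[6:9] = det_pos                  # detector position
loc[0:3] = (src_pos + det_pos) / 2  # channel stored at the midpoint
loc[9] = 760                        # wavelength, or dataTypeLabel code for 99999
loc[10] = 1                         # gate delay (201) or moment order (301)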
midpoint = (info['chs'][idx]['loc'][3:6] + info['chs'][idx]['loc'][6:9]) / 2 info['chs'][idx]['loc'][0:3] = midpoint info['chs'][idx]['coord_frame'] = coord_frame -<<<<<<< HEAD # get data type specific info: - if snirf_data_type in [1, 201, 301]: - wve_idx = int( - _correct_shape( - np.array( - dat.get( - 'nirs/data1/' + chan + - '/wavelengthIndex')))[0]) - info['chs'][idx]['loc'][9] = fnirs_wavelengths[wve_idx - 1] - elif snirf_data_type == 99999: - hb_id = _correct_shape( - np.array(dat.get('nirs/data1/' + chan + '/dataTypeLabel')))[0].decode('UTF-8') - info['chs'][idx]['loc'][9] = FNIRS_SNIRF_DATATYPELABELS[hb_id] - - if snirf_data_type == 201: - bin_idx = int( - _correct_shape( - np.array( - dat.get( - 'nirs/data1/' + chan + - '/dataTypeIndex')))[0]) - info['chs'][idx]['loc'][10] = fnirs_time_delays[bin_idx - - 1] * fnirs_time_delay_widths - elif snirf_data_type == 301: - moment_idx = int( - _correct_shape( - np.array( - dat.get( - 'nirs/data1/' + chan + - '/dataTypeIndex')))[0]) - info['chs'][idx]['loc'][10] = fnirs_moment_orders[moment_idx - 1] -======= - if (snirf_data_type in [1]) or \ - ((snirf_data_type == 99999) and - (ch_types[idx] == "fnirs_od")): - wve_idx = int(_correct_shape(np.array(dat.get( - 'nirs/data1/' + chan + '/wavelengthIndex')))[0]) - info['chs'][idx]['loc'][9] = fnirs_wavelengths[wve_idx - 1] ->>>>>>> upstream/main + if snirf_data_type == SNIRF_CW_AMPLITUDE or \ + (snirf_data_type == SNIRF_FNIRS_PROCESSED and + ch_types[idx] == "fnirs_od"): + wve_idx = np.array( + dat.get(f'{ch_root}/wavelengthIndex'), int).item() - 1 + info['chs'][idx]['loc'][9] = fnirs_wavelengths[wve_idx] + elif snirf_data_type in (SNIRF_TD_GATED_AMPLITUDE, + SNIRF_TD_MOMENTS_AMPLITUDE): + wve_idx = np.array( + dat.get(f'{ch_root}/wavelengthIndex'), int).item() - 1 + info['chs'][idx]['loc'][9] = fnirs_wavelengths[wve_idx] + elif snirf_data_type == SNIRF_FNIRS_PROCESSED: + hb_id = np.array(dat.get( + f'{ch_root}/dataTypeLabel')).item().decode('UTF-8') + info['chs'][idx]['loc'][9] = \ + FNIRS_SNIRF_DATATYPELABELS[hb_id] + if snirf_data_type == SNIRF_TD_GATED_AMPLITUDE: + bin_idx = np.array(dat.get( + f'{ch_root}/dataTypeIndex'), int).item() - 1 + info['chs'][idx]['loc'][10] = \ + fnirs_time_delays[bin_idx] * fnirs_time_delay_widths + elif snirf_data_type == SNIRF_TD_MOMENTS_AMPLITUDE: + moment_idx = np.array( + dat.get(f'{ch_root}/dataTypeIndex'), int).item() - 1 + info['chs'][idx]['loc'][10] = \ + fnirs_moment_orders[moment_idx] if 'landmarkPos3D' in dat.get('nirs/probe/'): diglocs = np.array(dat.get('/nirs/probe/landmarkPos3D')) @@ -542,51 +432,21 @@ def _correct_shape(arr): with info._unlock(): info['dig'] = dig -<<<<<<< HEAD - str_date = _correct_shape(np.array((dat.get( - '/nirs/metaDataTags/MeasurementDate'))))[0].decode('UTF-8') - str_time = _correct_shape(np.array((dat.get( - '/nirs/metaDataTags/MeasurementTime'))))[0].decode('UTF-8') - str_datetime = str_date + str_time -======= - str_date = _correct_shape(np.array((dat.get( - '/nirs/metaDataTags/MeasurementDate'))))[0].decode('UTF-8') - str_time = _correct_shape(np.array((dat.get( - '/nirs/metaDataTags/MeasurementTime'))))[0].decode('UTF-8') + str_date = np.array(dat.get( + '/nirs/metaDataTags/MeasurementDate')).item().decode('UTF-8') + str_time = np.array(dat.get( + '/nirs/metaDataTags/MeasurementTime')).item().decode('UTF-8') str_datetime = str_date + str_time ->>>>>>> upstream/main - - # Several formats have been observed so we try each in turn - for dt_code in ['%Y-%m-%d%H:%M:%SZ', - '%Y-%m-%d%H:%M:%S']: - try: - meas_date = 
datetime.datetime.strptime( - str_datetime, dt_code) - except ValueError: - pass - else: - break - else: -<<<<<<< HEAD - warn( - "Extraction of measurement date from SNIRF file failed. " - "The date is being set to January 1st, 2000, " - f"instead of {str_datetime}") - meas_date = datetime.datetime(2000, 1, 1, 0, 0, 0) - meas_date = meas_date.replace(tzinfo=datetime.timezone.utc) - info['meas_date'] = meas_date - if 'DateOfBirth' in dat.get('nirs/metaDataTags/'): - str_birth = _correct_shape( - np.array((dat.get('/nirs/metaDataTags/' 'DateOfBirth'))))[0].decode() - birth_matched = re.fullmatch( - r'(\d+)-(\d+)-(\d+)', str_birth) - if birth_matched is not None: - info["subject_info"]['birthday'] = ( - int(birth_matched.groups()[0]), - int(birth_matched.groups()[1]), - int(birth_matched.groups()[2])) -======= + # Several formats have been observed so we try each in turn + for dt_code in ['%Y-%m-%d%H:%M:%SZ', + '%Y-%m-%d%H:%M:%S']: + try: + meas_date = datetime.datetime.strptime( + str_datetime, dt_code) + except ValueError: + pass + else: break else: warn("Extraction of measurement date from SNIRF file failed. " @@ -607,7 +467,6 @@ def _correct_shape(arr): int(birth_matched.groups()[2])) with info._unlock(): info["subject_info"]['birthday'] = birthday ->>>>>>> upstream/main super(RawSNIRF, self).__init__(info, preload, filenames=[fname], last_samps=[last_samps], @@ -618,28 +477,12 @@ def _correct_shape(arr): for key in dat['nirs']: if 'stim' in key: data = np.atleast_2d(np.array( - dat.get('/nirs/' + key + '/data'))) + dat.get(f'/nirs/{key}/data'))) if data.size > 0: -<<<<<<< HEAD - desc = _correct_shape( - np.array(dat.get('/nirs/' + key + '/name')))[0] - annot.append(data[:, 0], 1.0, desc.decode('UTF-8')) - self.set_annotations(annot) - - # Reorder channels to match expected ordering in MNE - if snirf_data_type in [1, 99999]: - num_chans = len(self.ch_names) - chans = [] - for idx in range(num_chans // 2): - chans.append(idx) - chans.append(idx + num_chans // 2) - self.pick(picks=chans) -======= - desc = _correct_shape(np.array(dat.get( - '/nirs/' + key + '/name')))[0] - annot.append(data[:, 0], 1.0, desc.decode('UTF-8')) + desc = np.array(dat.get( + f'/nirs/{key}/name')).item().decode('utf-8') + annot.append(data[:, 0], 1.0, desc) self.set_annotations(annot, emit_warning=False) ->>>>>>> upstream/main # Validate that the fNIRS info is correctly formatted _validate_nirs_info(self.info) @@ -654,13 +497,6 @@ def _read_segment_file(self, data, idx, fi, start, stop, cals, mult): _mult_cal_one(data, one, idx, cals, mult) -# Helper function for when the numpy array has shape (), i.e. just one element. 
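# Illustrative aside (not part of the patch): the date handling above combines
# MeasurementDate and MeasurementTime and tries each known format in turn; the
# strings below are made-up examples (a later patch in this series switches to
# a space-separated form).
import datetime

str_datetime = '2020-10-13' + '09:53:20Z'
for dt_code in ['%Y-%m-%d%H:%M:%SZ', '%Y-%m-%d%H:%M:%S']:
    try:
        meas_date = datetime.datetime.strptime(str_datetime, dt_code)
    except ValueError:
        continue
    break
print(meas_date.replace(tzinfo=datetime.timezone.utc))  # 2020-10-13 09:53:20+00:00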
-def _correct_shape(arr): - if arr.shape == (): - arr = arr[np.newaxis] - return arr - - def _get_timeunit_scaling(time_unit): """MNE expects time in seconds, return required scaling.""" scalings = {'ms': 1000, 's': 1, 'unknown': 1} @@ -710,7 +546,5 @@ def _extract_sampling_rate(dat): def _get_metadata_str(dat, field): if field not in np.array(dat.get('nirs/metaDataTags')): return None - data = dat.get(f'/nirs/metaDataTags/{field}') - data = _correct_shape(np.array(data)) - data = str(data[0], 'utf-8') - return data + data = np.array(dat.get(f'/nirs/metaDataTags/{field}')).item() + return str(data, 'utf-8') diff --git a/mne/utils/__init__.py b/mne/utils/__init__.py index dcdb4d1b0cc..8364e26cd16 100644 --- a/mne/utils/__init__.py +++ b/mne/utils/__init__.py @@ -5,7 +5,7 @@ '__add__', '__sub__', '__mul__', '__div__', '__neg__') -from ._bunch import Bunch, BunchConst, BunchConstNamed +from ._bunch import Bunch, BunchConst, BunchConstNamed, NamedInt from .check import (check_fname, check_version, check_random_state, _check_fname, _check_subject, _check_pandas_installed, _check_pandas_index_arguments, From c2c217ce92287ae175828c651fa0a77b470b869a Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Sun, 21 Aug 2022 20:25:26 +0100 Subject: [PATCH 08/31] WIP: Constants --- mne/io/constants.py | 3 +- mne/io/pick.py | 13 ++--- mne/io/snirf/_snirf.py | 89 ++++++++++++++++------------------ mne/io/tests/test_constants.py | 7 ++- 4 files changed, 53 insertions(+), 59 deletions(-) diff --git a/mne/io/constants.py b/mne/io/constants.py index 268b0e7d28c..cea6e77034c 100644 --- a/mne/io/constants.py +++ b/mne/io/constants.py @@ -918,7 +918,6 @@ FIFF.FIFFV_COIL_FNIRS_RAW = FIFF.FIFFV_COIL_FNIRS_CW_AMPLITUDE # old alias FIFF.FIFFV_COIL_FNIRS_TD_GATED_AMPLITUDE = 306 # fNIRS time-domain gated amplitude FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE = 307 # fNIRS time-domain moments amplitude -FIFF.FIFFV_COIL_FNIRS_PROCESSED = 308 # fNIRS processed data FIFF.FIFFV_COIL_MCG_42 = 1000 # For testing the MCG software @@ -1006,7 +1005,7 @@ FIFF.FIFFV_COIL_FNIRS_HBR, FIFF.FIFFV_COIL_FNIRS_RAW, FIFF.FIFFV_COIL_FNIRS_OD, FIFF.FIFFV_COIL_FNIRS_FD_AC_AMPLITUDE, FIFF.FIFFV_COIL_FNIRS_FD_PHASE, FIFF.FIFFV_COIL_FNIRS_TD_GATED_AMPLITUDE, - FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE, FIFF.FIFFV_COIL_FNIRS_PROCESSED, + FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE, FIFF.FIFFV_COIL_MCG_42, FIFF.FIFFV_COIL_POINT_MAGNETOMETER, FIFF.FIFFV_COIL_AXIAL_GRAD_5CM, FIFF.FIFFV_COIL_VV_PLANAR_W, FIFF.FIFFV_COIL_VV_PLANAR_T1, diff --git a/mne/io/pick.py b/mne/io/pick.py index 6adfdf7c6c1..42e6f87b884 100644 --- a/mne/io/pick.py +++ b/mne/io/pick.py @@ -84,10 +84,6 @@ def get_channel_type_constants(include_defaults=False): kind=FIFF.FIFFV_FNIRS_CH, unit=FIFF.FIFF_UNIT_V, coil_type=FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE), - fnirs_processed=dict( - kind=FIFF.FIFFV_FNIRS_CH, - unit=FIFF.FIFF_UNIT_V, - coil_type=FIFF.FIFFV_COIL_FNIRS_PROCESSED), fnirs_fd_ac_amplitude=dict( kind=FIFF.FIFFV_FNIRS_CH, unit=FIFF.FIFF_UNIT_V, @@ -176,8 +172,6 @@ def get_channel_type_constants(include_defaults=False): 'fnirs_td_gated_amplitude', FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE: 'fnirs_td_moments_amplitude', - FIFF.FIFFV_COIL_FNIRS_PROCESSED: - 'fnirs_processed', }), 'eeg': ('coil_type', {FIFF.FIFFV_COIL_EEG: 'eeg', FIFF.FIFFV_COIL_EEG_BIPOLAR: 'eeg', @@ -933,9 +927,10 @@ def _check_excludes_includes(chs, info=None, allow_bads=False): _PICK_TYPES_KEYS = tuple(list(_PICK_TYPES_DATA_DICT) + ['ref_meg']) _MEG_CH_TYPES_SPLIT = ('mag', 'grad', 'planar1', 'planar2') 
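# Illustrative sketch (not part of the patch): with the new entries in the
# fNIRS split list defined just below, the added types take part in channel
# picking like any other fNIRS subtype. The file path is hypothetical and the
# behaviour assumes a build containing this series.
import mne

raw = mne.io.read_raw_snirf('td_moments_recording.snirf')
picks = mne.pick_types(raw.info, meg=False, fnirs='fnirs_td_moments_amplitude')
print(raw.get_channel_types(picks=picks))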
_FNIRS_CH_TYPES_SPLIT = ( - 'hbo', 'hbr', 'fnirs_cw_amplitude', 'fnirs_fd_ac_amplitude', - 'fnirs_fd_phase', 'fnirs_od', 'fnirs_td_gated_amplitude', - 'fnirs_td_moments_amplitude', 'fnirs_processed') + 'hbo', 'hbr', 'fnirs_cw_amplitude', + 'fnirs_fd_ac_amplitude', 'fnirs_fd_phase', 'fnirs_od', + 'fnirs_td_gated_amplitude', 'fnirs_td_moments_amplitude', +) _DATA_CH_TYPES_ORDER_DEFAULT = ( 'mag', 'grad', 'eeg', 'csd', 'eog', 'ecg', 'resp', 'emg', 'ref_meg', 'misc', 'stim', 'chpi', 'exci', 'ias', 'syst', 'seeg', 'bio', 'ecog', diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index 2758aa577ab..d46e0aa01ef 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -21,12 +21,12 @@ SNIRF_CW_AMPLITUDE = NamedInt('SNIRF_CW_AMPLITUDE', 1) SNIRF_TD_GATED_AMPLITUDE = NamedInt('SNIRF_TD_GATED_AMPLITUDE', 201) SNIRF_TD_MOMENTS_AMPLITUDE = NamedInt('SNIRF_TD_MOMENTS_AMPLITUDE', 301) -SNIRF_FNIRS_PROCESSED = NamedInt('SNIRF_FNIRS_PROCESSED', 99999) +SNIRF_PROCESSED = NamedInt('SNIRF_PROCESSED', 99999) _AVAILABLE_SNIRF_DATA_TYPES = ( SNIRF_CW_AMPLITUDE, SNIRF_TD_GATED_AMPLITUDE, SNIRF_TD_MOMENTS_AMPLITUDE, - SNIRF_FNIRS_PROCESSED) + SNIRF_PROCESSED) # SNIRF: Supported measurementList(k).dataTypeLabel values in dataTimeSeries @@ -148,8 +148,8 @@ def __init__(self, fname, optode_frame="unknown", warn("Unable to extract sample rate from SNIRF file.") # Extract wavelengths - fnirs_wavelengths = np.array(dat.get('nirs/probe/wavelengths')) - fnirs_wavelengths = [int(w) for w in fnirs_wavelengths] + fnirs_wavelengths = np.array( + dat.get('nirs/probe/wavelengths'), int) if len(fnirs_wavelengths) != 2: raise RuntimeError(f'The data contains ' f'{len(fnirs_wavelengths)}' @@ -161,13 +161,12 @@ def __init__(self, fname, optode_frame="unknown", # Get data type specific probe information if snirf_data_type == SNIRF_TD_GATED_AMPLITUDE: fnirs_time_delays = np.array( - dat.get('nirs/probe/timeDelays')).tolist() + dat.get('nirs/probe/timeDelays'), float) fnirs_time_delay_widths = np.array( - dat.get('nirs/probe/timeDelayWidths')).tolist() + dat.get('nirs/probe/timeDelayWidths'), float) elif snirf_data_type == SNIRF_TD_MOMENTS_AMPLITUDE: fnirs_moment_orders = np.array( - dat.get('nirs/probe/momentOrders')) - fnirs_moment_orders = [int(m) for m in fnirs_moment_orders] + dat.get('nirs/probe/momentOrders'), int) # Extract channels def atoi(text): @@ -247,27 +246,26 @@ def natural_keys(text): ch_types = [] for chan in channels: ch_root = f'nirs/data1/{chan}' - src_idx = int(np.array( - dat.get(f'{ch_root}/sourceIndex')).item()) - 1 - det_idx = int(np.array( - dat.get(f'{ch_root}/detectorIndex')).item()) - 1 + src_idx = np.array( + dat.get(f'{ch_root}/sourceIndex'), int).item() - 1 + det_idx = np.array( + dat.get(f'{ch_root}/detectorIndex'), int).item() - 1 + src = sources[src_idx] + det = detectors[det_idx] if snirf_data_type == SNIRF_CW_AMPLITUDE: - wve_idx = int(np.array( - dat.get(f'{ch_root}/wavelengthIndex')).item()) - ch_name = ( - f'{sources[src_idx]}_{detectors[det_idx]} ' - f'{fnirs_wavelengths[wve_idx - 1]}') + wve_idx = np.array( + dat.get(f'{ch_root}/wavelengthIndex'), int).item() - 1 + ch_name = f'{src}_{det} {fnirs_wavelengths[wve_idx]}' chnames.append(ch_name) ch_types.append('fnirs_cw_amplitude') elif snirf_data_type == SNIRF_TD_GATED_AMPLITUDE: wve_idx = np.array( - dat.get(f'{ch_root}/wavelengthIndex'), int).item() + dat.get(f'{ch_root}/wavelengthIndex'), int).item() - 1 bin_idx = np.array( - dat.get(f'{ch_root}/dataTypeIndex'), int).item() + dat.get(f'{ch_root}/dataTypeIndex'), 
int).item() - 1 ch_name = ( - f'{sources[src_idx]}_{detectors[det_idx]} ' - f'{fnirs_wavelengths[wve_idx - 1]} bin' - f'{fnirs_time_delays[bin_idx - 1]}') + f'{src}_{det} {fnirs_wavelengths[wve_idx]} ' + f'bin{fnirs_time_delays[bin_idx]}') chnames.append(ch_name) ch_types.append('fnirs_td_gated_amplitude') elif snirf_data_type == SNIRF_TD_MOMENTS_AMPLITUDE: @@ -276,21 +274,19 @@ def natural_keys(text): moment_idx = np.array( dat.get(f'{ch_root}/dataTypeIndex'), int).item() - 1 ch_name = ( - f'{sources[src_idx]}_{detectors[det_idx]} ' - f'{fnirs_wavelengths[wve_idx]} ' + f'{src}_{det} {fnirs_wavelengths[wve_idx]} ' f'moment{fnirs_moment_orders[moment_idx]}') chnames.append(ch_name) ch_types.append('fnirs_td_moments_amplitude') - elif snirf_data_type == SNIRF_FNIRS_PROCESSED: + elif snirf_data_type == SNIRF_PROCESSED: dt_id = np.array(dat.get( f'{ch_root}/dataTypeLabel')).item().decode('UTF-8') # Convert between SNIRF processed names and MNE type names dt_id = dt_id.lower().replace("dod", "fnirs_od") ch_name = f'{sources[src_idx]}_{detectors[det_idx]}' if dt_id == "fnirs_od": - wve_idx = ( - np.array(dat.get( - f'{ch_root}/wavelengthIndex'), int).item() - 1) + wve_idx = np.array(dat.get( + f'{ch_root}/wavelengthIndex'), int).item() - 1 suffix = f' {fnirs_wavelengths[wve_idx]}' else: suffix = f' {dt_id.lower()}' @@ -343,18 +339,17 @@ def natural_keys(text): # Then the transformation to head was performed above coord_frame = FIFF.FIFFV_COORD_HEAD elif 'MNE_coordFrame' in dat.get('nirs/metaDataTags/'): - coord_frame = int(dat.get('/nirs/metaDataTags/MNE_coordFrame') - [0]) + coord_frame = np.array( + dat.get('/nirs/metaDataTags/MNE_coordFrame'), int).item() else: coord_frame = FIFF.FIFFV_COORD_UNKNOWN for idx, chan in enumerate(channels): + ch_root = f'nirs/data1/{chan}' src_idx = np.array( - dat.get(f'nirs/data1/{chan}/sourceIndex'), - int).item() - 1 + dat.get(f'{ch_root}/sourceIndex'), int).item() - 1 det_idx = np.array( - dat.get(f'nirs/data1/{chan}/detectorIndex'), - int).item() - 1 + dat.get(f'{ch_root}/detectorIndex'), int).item() - 1 info['chs'][idx]['loc'][3:6] = srcPos3D[src_idx, :] info['chs'][idx]['loc'][6:9] = detPos3D[det_idx, :] # Store channel as mid point @@ -365,7 +360,7 @@ def natural_keys(text): # get data type specific info: if snirf_data_type == SNIRF_CW_AMPLITUDE or \ - (snirf_data_type == SNIRF_FNIRS_PROCESSED and + (snirf_data_type == SNIRF_PROCESSED and ch_types[idx] == "fnirs_od"): wve_idx = np.array( dat.get(f'{ch_root}/wavelengthIndex'), int).item() - 1 @@ -375,21 +370,23 @@ def natural_keys(text): wve_idx = np.array( dat.get(f'{ch_root}/wavelengthIndex'), int).item() - 1 info['chs'][idx]['loc'][9] = fnirs_wavelengths[wve_idx] - elif snirf_data_type == SNIRF_FNIRS_PROCESSED: + if snirf_data_type == SNIRF_TD_GATED_AMPLITUDE: + bin_idx = np.array(dat.get( + f'{ch_root}/dataTypeIndex'), int).item() - 1 + val = (fnirs_time_delays[bin_idx] * + fnirs_time_delay_widths) + info['chs'][idx]['loc'][10] = val + else: + assert snirf_data_type == SNIRF_TD_MOMENTS_AMPLITUDE + moment_idx = np.array( + dat.get(f'{ch_root}/dataTypeIndex'), int).item() - 1 + info['chs'][idx]['loc'][10] = \ + fnirs_moment_orders[moment_idx] + elif snirf_data_type == SNIRF_PROCESSED: hb_id = np.array(dat.get( f'{ch_root}/dataTypeLabel')).item().decode('UTF-8') info['chs'][idx]['loc'][9] = \ FNIRS_SNIRF_DATATYPELABELS[hb_id] - if snirf_data_type == SNIRF_TD_GATED_AMPLITUDE: - bin_idx = np.array(dat.get( - f'{ch_root}/dataTypeIndex'), int).item() - 1 - info['chs'][idx]['loc'][10] = \ - 
fnirs_time_delays[bin_idx] * fnirs_time_delay_widths - elif snirf_data_type == SNIRF_TD_MOMENTS_AMPLITUDE: - moment_idx = np.array( - dat.get(f'{ch_root}/dataTypeIndex'), int).item() - 1 - info['chs'][idx]['loc'][10] = \ - fnirs_moment_orders[moment_idx] if 'landmarkPos3D' in dat.get('nirs/probe/'): diglocs = np.array(dat.get('/nirs/probe/landmarkPos3D')) diff --git a/mne/io/tests/test_constants.py b/mne/io/tests/test_constants.py index b74c4ec3894..53d761ab69d 100644 --- a/mne/io/tests/test_constants.py +++ b/mne/io/tests/test_constants.py @@ -20,8 +20,9 @@ # https://github.com/mne-tools/fiff-constants/commits/master -REPO = 'mne-tools' -COMMIT = 'aa49e20cff5791fbaf01d77ad4ec2e0ecb69840d' +# TODO: Set back to mne-tools before merge! +REPO = 'larsoner' +COMMIT = 'd9835a6fd544926da18a6c07403808368c8f2931' # These are oddities that we won't address: iod_dups = (355, 359) # these are in both MEGIN and MNE files @@ -55,6 +56,8 @@ 303, # fNIRS optical density 304, # fNIRS frequency domain AC amplitude 305, # fNIRS frequency domain phase + 306, # fNIRS time domain gated amplitude + 307, # fNIRS time domain moments amplitude 1000, # For testing the MCG software 2001, # Generic axial gradiometer 3011, # VV prototype wirewound planar sensor From ac52bc0bda66c34767e135255a33fcf852c1ba5b Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Sun, 21 Aug 2022 20:54:47 +0100 Subject: [PATCH 09/31] FIX: Flake --- mne/io/snirf/_snirf.py | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index d46e0aa01ef..869168b0a44 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -33,20 +33,20 @@ FNIRS_SNIRF_DATATYPELABELS = { # These types are specified here: # https://github.com/fNIRS/snirf/blob/master/snirf_specification.md#supported-measurementlistkdatatypelabel-values-in-datatimeseries # noqa: E501 - "HbO": 1, # Oxygenated hemoglobin (oxyhemoglobin) concentration - "HbR": 2, # Deoxygenated hemoglobin (deoxyhemoglobin) concentration - "HbT": 3, # Total hemoglobin concentration - "dOD": 4, # Change in optical density - "mua": 5, # Absorption coefficient - "musp": 6, # Scattering coefficient - "H2O": 7, # Water content - "Lipid": 8, # Lipid concentration - "BFi": 9, # Blood flow index - "HRF dOD": 10, # HRF for change in optical density - "HRF HbO": 11, # HRF for oxyhemoglobin concentration - "HRF HbR": 12, # HRF for deoxyhemoglobin concentration - "HRF HbT": 13, # HRF for total hemoglobin concentration - "HRF BFi": 14, # HRF for blood flow index + "HbO": 1, # Oxygenated hemoglobin (oxyhemoglobin) concentration + "HbR": 2, # Deoxygenated hemoglobin (deoxyhemoglobin) concentration + "HbT": 3, # Total hemoglobin concentration + "dOD": 4, # Change in optical density + "mua": 5, # Absorption coefficient + "musp": 6, # Scattering coefficient + "H2O": 7, # Water content + "Lipid": 8, # Lipid concentration + "BFi": 9, # Blood flow index + "HRF dOD": 10, # HRF for change in optical density + "HRF HbO": 11, # HRF for oxyhemoglobin concentration + "HRF HbR": 12, # HRF for deoxyhemoglobin concentration + "HRF HbT": 13, # HRF for total hemoglobin concentration + "HRF BFi": 14, # HRF for blood flow index } @@ -379,7 +379,8 @@ def natural_keys(text): else: assert snirf_data_type == SNIRF_TD_MOMENTS_AMPLITUDE moment_idx = np.array( - dat.get(f'{ch_root}/dataTypeIndex'), int).item() - 1 + dat.get(f'{ch_root}/dataTypeIndex'), + int).item() - 1 info['chs'][idx]['loc'][10] = \ fnirs_moment_orders[moment_idx] elif 
snirf_data_type == SNIRF_PROCESSED: From da275f18cd1aab716f184fc8e28cbfe47edbbc44 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Sun, 21 Aug 2022 21:05:39 +0100 Subject: [PATCH 10/31] FIX: Consistent --- mne/defaults.py | 31 +++++++++++++++++++++++-------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/mne/defaults.py b/mne/defaults.py index 27bfe96b26c..3dab66ffc26 100644 --- a/mne/defaults.py +++ b/mne/defaults.py @@ -12,25 +12,36 @@ exci='k', ias='k', syst='k', seeg='saddlebrown', dbs='seagreen', dipole='k', gof='k', bio='k', ecog='k', hbo='#AA3377', hbr='b', fnirs_cw_amplitude='k', fnirs_fd_ac_amplitude='k', - fnirs_fd_phase='k', fnirs_od='k', csd='k', whitened='k'), + fnirs_fd_phase='k', fnirs_od='k', + fnirs_td_gated_amplitude='k', + fnirs_td_gated_moment='k', + csd='k', whitened='k'), si_units=dict(mag='T', grad='T/m', eeg='V', eog='V', ecg='V', emg='V', misc='AU', seeg='V', dbs='V', dipole='Am', gof='GOF', bio='V', ecog='V', hbo='M', hbr='M', ref_meg='T', fnirs_cw_amplitude='V', fnirs_fd_ac_amplitude='V', - fnirs_fd_phase='rad', fnirs_od='V', csd='V/m²', - whitened='Z'), + fnirs_fd_phase='rad', fnirs_od='V', + # TODO: These units and scalings are wrong + fnirs_td_gated_amplitude='Au', + fnirs_td_gated_moment='Au', + csd='V/m²', whitened='Z'), units=dict(mag='fT', grad='fT/cm', eeg='µV', eog='µV', ecg='µV', emg='µV', misc='AU', seeg='mV', dbs='µV', dipole='nAm', gof='GOF', bio='µV', ecog='µV', hbo='µM', hbr='µM', ref_meg='fT', fnirs_cw_amplitude='V', fnirs_fd_ac_amplitude='V', - fnirs_fd_phase='rad', fnirs_od='V', csd='mV/m²', - whitened='Z'), + fnirs_fd_phase='rad', fnirs_od='V', + fnirs_td_gated_amplitude='Au', + fnirs_td_gated_moment='Au', + csd='mV/m²', whitened='Z'), # scalings for the units scalings=dict(mag=1e15, grad=1e13, eeg=1e6, eog=1e6, emg=1e6, ecg=1e6, misc=1.0, seeg=1e3, dbs=1e6, ecog=1e6, dipole=1e9, gof=1.0, bio=1e6, hbo=1e6, hbr=1e6, ref_meg=1e15, fnirs_cw_amplitude=1.0, fnirs_fd_ac_amplitude=1.0, - fnirs_fd_phase=1., fnirs_od=1.0, csd=1e3, whitened=1.), + fnirs_fd_phase=1., fnirs_od=1.0, + fnirs_td_gated_amplitude=1., + fnirs_td_gated_moment=1., + csd=1e3, whitened=1.), # rough guess for a good plot scalings_plot_raw=dict(mag=1e-12, grad=4e-11, eeg=20e-6, eog=150e-6, ecg=5e-4, emg=1e-3, ref_meg=1e-12, misc='auto', @@ -38,8 +49,10 @@ seeg=1e-4, dbs=1e-4, bio=1e-6, ecog=1e-4, hbo=10e-6, hbr=10e-6, whitened=10., fnirs_cw_amplitude=2e-2, fnirs_fd_ac_amplitude=2e-2, fnirs_fd_phase=2e-1, - fnirs_od=2e-2, csd=200e-4, - dipole=1e-7, gof=1e2), + fnirs_od=2e-2, + fnirs_td_gated_amplitude=1., + fnirs_td_gated_moment=1., + csd=200e-4, dipole=1e-7, gof=1e2), scalings_cov_rank=dict(mag=1e12, grad=1e11, eeg=1e5, # ~100x scalings seeg=1e1, dbs=1e4, ecog=1e4, hbo=1e4, hbr=1e4), ylim=dict(mag=(-600., 600.), grad=(-200., 200.), eeg=(-200., 200.), @@ -54,6 +67,8 @@ fnirs_fd_ac_amplitude='fNIRS (FD AC amplitude)', fnirs_fd_phase='fNIRS (FD phase)', fnirs_od='fNIRS (OD)', hbr='Deoxyhemoglobin', + fnirs_td_gated_amplitude='fNIRS (TD amplitude)', + fnirs_td_gated_moment='fNIRS (TD moment)', gof='Goodness of fit', csd='Current source density', stim='Stimulus', ), From 3986b962c5d497b31157504c5255ded3b7cf55c6 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Sun, 21 Aug 2022 21:18:04 +0100 Subject: [PATCH 11/31] FIX: Missed --- mne/defaults.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/mne/defaults.py b/mne/defaults.py index 3dab66ffc26..759e343ef49 100644 --- a/mne/defaults.py +++ b/mne/defaults.py @@ -14,7 +14,7 @@ 
fnirs_cw_amplitude='k', fnirs_fd_ac_amplitude='k', fnirs_fd_phase='k', fnirs_od='k', fnirs_td_gated_amplitude='k', - fnirs_td_gated_moment='k', + fnirs_td_moments_amplitude='k', csd='k', whitened='k'), si_units=dict(mag='T', grad='T/m', eeg='V', eog='V', ecg='V', emg='V', misc='AU', seeg='V', dbs='V', dipole='Am', gof='GOF', @@ -23,7 +23,7 @@ fnirs_fd_phase='rad', fnirs_od='V', # TODO: These units and scalings are wrong fnirs_td_gated_amplitude='Au', - fnirs_td_gated_moment='Au', + fnirs_td_moments_amplitude='Au', csd='V/m²', whitened='Z'), units=dict(mag='fT', grad='fT/cm', eeg='µV', eog='µV', ecg='µV', emg='µV', misc='AU', seeg='mV', dbs='µV', dipole='nAm', gof='GOF', @@ -31,7 +31,7 @@ fnirs_cw_amplitude='V', fnirs_fd_ac_amplitude='V', fnirs_fd_phase='rad', fnirs_od='V', fnirs_td_gated_amplitude='Au', - fnirs_td_gated_moment='Au', + fnirs_td_moments_amplitude='Au', csd='mV/m²', whitened='Z'), # scalings for the units scalings=dict(mag=1e15, grad=1e13, eeg=1e6, eog=1e6, emg=1e6, ecg=1e6, @@ -40,7 +40,7 @@ fnirs_cw_amplitude=1.0, fnirs_fd_ac_amplitude=1.0, fnirs_fd_phase=1., fnirs_od=1.0, fnirs_td_gated_amplitude=1., - fnirs_td_gated_moment=1., + fnirs_td_moments_amplitude=1., csd=1e3, whitened=1.), # rough guess for a good plot scalings_plot_raw=dict(mag=1e-12, grad=4e-11, eeg=20e-6, eog=150e-6, @@ -51,7 +51,7 @@ fnirs_fd_ac_amplitude=2e-2, fnirs_fd_phase=2e-1, fnirs_od=2e-2, fnirs_td_gated_amplitude=1., - fnirs_td_gated_moment=1., + fnirs_td_moments_amplitude=1., csd=200e-4, dipole=1e-7, gof=1e2), scalings_cov_rank=dict(mag=1e12, grad=1e11, eeg=1e5, # ~100x scalings seeg=1e1, dbs=1e4, ecog=1e4, hbo=1e4, hbr=1e4), @@ -68,7 +68,7 @@ fnirs_fd_phase='fNIRS (FD phase)', fnirs_od='fNIRS (OD)', hbr='Deoxyhemoglobin', fnirs_td_gated_amplitude='fNIRS (TD amplitude)', - fnirs_td_gated_moment='fNIRS (TD moment)', + fnirs_td_moments_amplitude='fNIRS (TD moment)', gof='Goodness of fit', csd='Current source density', stim='Stimulus', ), From ce8b3af1ae474eb71c30edbe0058d047ce8fff5f Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Sun, 21 Aug 2022 21:40:48 +0100 Subject: [PATCH 12/31] FIX: Better code --- mne/io/snirf/_snirf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index 869168b0a44..8532ef51d76 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -434,11 +434,11 @@ def natural_keys(text): '/nirs/metaDataTags/MeasurementDate')).item().decode('UTF-8') str_time = np.array(dat.get( '/nirs/metaDataTags/MeasurementTime')).item().decode('UTF-8') - str_datetime = str_date + str_time + str_datetime = f'{str_date} {str_time}' # Several formats have been observed so we try each in turn - for dt_code in ['%Y-%m-%d%H:%M:%SZ', - '%Y-%m-%d%H:%M:%S']: + for dt_code in ['%Y-%m-%d %H:%M:%SZ', + '%Y-%m-%d %H:%M:%S']: try: meas_date = datetime.datetime.strptime( str_datetime, dt_code) From 1244addc2cb953c7ab7a702d89eb08de8e9e56d5 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Sun, 21 Aug 2022 22:40:34 +0100 Subject: [PATCH 13/31] FIX: Scale --- mne/io/snirf/_snirf.py | 35 ++++++++++++++++++++++++++++++++--- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index 8532ef51d76..5fff407e4e0 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -244,6 +244,7 @@ def natural_keys(text): chnames = [] ch_types = [] + need_data_scale = False for chan in channels: ch_root = f'nirs/data1/{chan}' src_idx = np.array( @@ -290,10 +291,23 @@ def natural_keys(text): 
suffix = f' {fnirs_wavelengths[wve_idx]}' else: suffix = f' {dt_id.lower()}' + if dt_id not in ('hbo', 'hbr'): + raise RuntimeError( + 'read_raw_snirf can only handle processed ' + 'data in the form of optical density or ' + f'HbO/HbR, but got type f{dt_id}') + need_data_scale = True ch_name = ch_name + suffix chnames.append(ch_name) ch_types.append(dt_id) + data_scale = None + if need_data_scale: + snirf_data_unit = np.array( + dat.get('nirs/data1/measurementList1/dataUnit', b'M') + ).item().decode('utf-8') + data_scale = _get_dataunit_scaling(snirf_data_unit) + # Create mne structure info = create_info(chnames, sampling_rate, @@ -466,9 +480,10 @@ def natural_keys(text): with info._unlock(): info["subject_info"]['birthday'] = birthday - super(RawSNIRF, self).__init__(info, preload, filenames=[fname], - last_samps=[last_samps], - verbose=verbose) + raw_extras = dict(data_scale=data_scale) + super(RawSNIRF, self).__init__( + info, preload, filenames=[fname], last_samps=[last_samps], + raw_extras=[raw_extras], verbose=verbose) # Extract annotations annot = Annotations([], [], []) @@ -494,6 +509,10 @@ def _read_segment_file(self, data, idx, fi, start, stop, cals, mult): _mult_cal_one(data, one, idx, cals, mult) + data_scale = self._raw_extras[fi]['data_scale'] + if data_scale is not None: + one *= data_scale + def _get_timeunit_scaling(time_unit): """MNE expects time in seconds, return required scaling.""" @@ -517,6 +536,16 @@ def _get_lengthunit_scaling(length_unit): 'issue to inform the developers.') +def _get_dataunit_scaling(hbx_unit): + """MNE expects hbo/hbr in M, return required scaling.""" + scalings = {'M': None, 'uM': 1e-6} + try: + return scalings[hbx_unit] + except KeyError: + raise RuntimeError(f'The Hb unit {hbx_unit} is not supported ' + 'by MNE. Please report this error as a GitHub ' + 'issue to inform the developers.') + def _extract_sampling_rate(dat): """Extract the sample rate from the time field.""" time_data = np.array(dat.get('nirs/data1/time')) From 3944254fa4bcf4604747dbd817b1e1d442225b65 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Sun, 21 Aug 2022 23:02:44 +0100 Subject: [PATCH 14/31] FIX: Missing --- mne/cov.py | 11 ++++++++--- mne/io/pick.py | 5 ++--- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/mne/cov.py b/mne/cov.py index 55378703310..5a5e68fb8f6 100644 --- a/mne/cov.py +++ b/mne/cov.py @@ -1237,9 +1237,11 @@ def _auto_low_rank_model(data, mode, n_jobs, method_params, cv, class _RegCovariance(BaseEstimator): """Aux class.""" - def __init__(self, info, grad=0.1, mag=0.1, eeg=0.1, seeg=0.1, + def __init__(self, info, grad=0.1, mag=0.1, eeg=0.1, *, seeg=0.1, ecog=0.1, hbo=0.1, hbr=0.1, fnirs_cw_amplitude=0.1, fnirs_fd_ac_amplitude=0.1, fnirs_fd_phase=0.1, fnirs_od=0.1, + fnirs_td_gated_amplitude=0.1, + fnirs_td_moments_amplitude=0.1, csd=0.1, dbs=0.1, store_precision=False, assume_centered=False): self.info = info @@ -1540,9 +1542,12 @@ def _smart_eigh(C, info, rank, scalings=None, projs=None, @verbose def regularize(cov, info, mag=0.1, grad=0.1, eeg=0.1, exclude='bads', - proj=True, seeg=0.1, ecog=0.1, hbo=0.1, hbr=0.1, + proj=True, *, seeg=0.1, ecog=0.1, hbo=0.1, hbr=0.1, fnirs_cw_amplitude=0.1, fnirs_fd_ac_amplitude=0.1, - fnirs_fd_phase=0.1, fnirs_od=0.1, csd=0.1, dbs=0.1, + fnirs_fd_phase=0.1, fnirs_od=0.1, + fnirs_td_gated_amplitude=0.1, + fnirs_td_moments_amplitude=0.1, + csd=0.1, dbs=0.1, rank=None, scalings=None, verbose=None): """Regularize noise covariance matrix. 
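
The bare "*" inserted into the regularize() signature above makes every regularization factor after proj keyword-only, including the two new time-domain factors. A minimal sketch of a call that remains valid after this change (illustrative only; it assumes an Epochs object named epochs with fNIRS channels already exists):

    import mne

    # noise covariance from the assumed epochs object
    cov = mne.compute_covariance(epochs)
    cov_reg = mne.cov.regularize(
        cov, epochs.info, mag=0.1, grad=0.1, eeg=0.1,
        # everything from seeg onward must now be passed by keyword
        fnirs_td_gated_amplitude=0.1,
        fnirs_td_moments_amplitude=0.1,
    )
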
diff --git a/mne/io/pick.py b/mne/io/pick.py index 42e6f87b884..c44a861ea1a 100644 --- a/mne/io/pick.py +++ b/mne/io/pick.py @@ -729,9 +729,8 @@ def channel_indices_by_type(info, picks=None): """ idx_by_type = {key: list() for key in _PICK_TYPES_KEYS if key not in ('meg', 'fnirs')} - idx_by_type.update(mag=list(), grad=list(), hbo=list(), hbr=list(), - fnirs_cw_amplitude=list(), fnirs_fd_ac_amplitude=list(), - fnirs_fd_phase=list(), fnirs_od=list()) + idx_by_type.update(mag=list(), grad=list()) + idx_by_type.update((key, list()) for key in _FNIRS_CH_TYPES_SPLIT) picks = _picks_to_idx(info, picks, none='all', exclude=(), allow_empty=True) for k in picks: From 967ddddc79903cfdb0b7181da8237fc1e6ece011 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Sun, 21 Aug 2022 23:37:37 +0100 Subject: [PATCH 15/31] FIX: Flake --- mne/cov.py | 9 ++++++++- mne/io/snirf/_snirf.py | 3 ++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/mne/cov.py b/mne/cov.py index 5a5e68fb8f6..995188c3d3f 100644 --- a/mne/cov.py +++ b/mne/cov.py @@ -1595,6 +1595,10 @@ def regularize(cov, info, mag=0.1, grad=0.1, eeg=0.1, exclude='bads', Regularization factor for fNIRS raw phase signals. fnirs_od : float (default 0.1) Regularization factor for fNIRS optical density signals. + fnirs_td_gated_amplitude : float (default 0.1) + Regularization factor for fNIRS time domain gated amplitude signals. + fnirs_td_moments_amplitude : float (default 0.1) + Regularization factor for fNIRS time domain moments amplitude signals. csd : float (default 0.1) Regularization factor for EEG-CSD signals. dbs : float (default 0.1) @@ -1628,7 +1632,10 @@ def regularize(cov, info, mag=0.1, grad=0.1, eeg=0.1, exclude='bads', regs = dict(eeg=eeg, seeg=seeg, dbs=dbs, ecog=ecog, hbo=hbo, hbr=hbr, fnirs_cw_amplitude=fnirs_cw_amplitude, fnirs_fd_ac_amplitude=fnirs_fd_ac_amplitude, - fnirs_fd_phase=fnirs_fd_phase, fnirs_od=fnirs_od, csd=csd) + fnirs_fd_phase=fnirs_fd_phase, fnirs_od=fnirs_od, + fnirs_td_gated_amplitude=fnirs_td_gated_amplitude, + fnirs_td_moments_amplitude=fnirs_td_moments_amplitude, + csd=csd) if exclude is None: raise ValueError('exclude must be a list of strings or "bads"') diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index 5fff407e4e0..7d711fa4df4 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -305,7 +305,7 @@ def natural_keys(text): if need_data_scale: snirf_data_unit = np.array( dat.get('nirs/data1/measurementList1/dataUnit', b'M') - ).item().decode('utf-8') + snirf_data_unit = snirf_data_unit.item().decode('utf-8') data_scale = _get_dataunit_scaling(snirf_data_unit) # Create mne structure @@ -546,6 +546,7 @@ def _get_dataunit_scaling(hbx_unit): 'by MNE. 
Please report this error as a GitHub ' 'issue to inform the developers.') + def _extract_sampling_rate(dat): """Extract the sample rate from the time field.""" time_data = np.array(dat.get('nirs/data1/time')) From e97367db1c19c2a4044adde7c8633398b3649574 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Sun, 21 Aug 2022 23:39:32 +0100 Subject: [PATCH 16/31] FIX: Syntax --- mne/io/snirf/_snirf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index 7d711fa4df4..f0f9c02e135 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -304,7 +304,7 @@ def natural_keys(text): data_scale = None if need_data_scale: snirf_data_unit = np.array( - dat.get('nirs/data1/measurementList1/dataUnit', b'M') + dat.get('nirs/data1/measurementList1/dataUnit', b'M')) snirf_data_unit = snirf_data_unit.item().decode('utf-8') data_scale = _get_dataunit_scaling(snirf_data_unit) From d26ff8a3190e217c6ea35ad5f082bcc84e5405f8 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Wed, 1 Mar 2023 13:00:52 -0500 Subject: [PATCH 17/31] FIX: shape --- mne/io/snirf/_snirf.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index 7a2ea89b5e0..64f315ea7ec 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -499,8 +499,7 @@ def natural_keys(text): data = np.atleast_2d(np.array( dat.get(f'/nirs/{key}/data'))) if data.size > 0: - desc = _correct_shape(np.array(dat.get( - '/nirs/' + key + '/name')))[0] + desc = np.array(dat.get(f'/nirs{key}/name')).item() annot.append(data[:, 0], data[:, 1], desc.decode('UTF-8')) From f84fe278ad5309b9b54fc4f16bb7d72c51245c3a Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Wed, 1 Mar 2023 13:04:48 -0500 Subject: [PATCH 18/31] FIX: Slash --- mne/io/snirf/_snirf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index 64f315ea7ec..b07160d014a 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -499,7 +499,7 @@ def natural_keys(text): data = np.atleast_2d(np.array( dat.get(f'/nirs/{key}/data'))) if data.size > 0: - desc = np.array(dat.get(f'/nirs{key}/name')).item() + desc = np.array(dat.get(f'/nirs/{key}/name')).item() annot.append(data[:, 0], data[:, 1], desc.decode('UTF-8')) From 50f3064602291ffacffe3f008651a98a0f79466f Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Wed, 1 Mar 2023 13:23:40 -0500 Subject: [PATCH 19/31] FIX: Bad merge --- mne/defaults.py | 2 +- mne/tests/test_defaults.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mne/defaults.py b/mne/defaults.py index 94122bb2a7b..2f9f5006ab0 100644 --- a/mne/defaults.py +++ b/mne/defaults.py @@ -45,7 +45,7 @@ fnirs_td_gated_amplitude=1., fnirs_td_moments_amplitude=1., csd=1e3, whitened=1., - gsr=1., temperature=0.1), + gsr=1., temperature=1.), # rough guess for a good plot scalings_plot_raw=dict(mag=1e-12, grad=4e-11, eeg=20e-6, eog=150e-6, ecg=5e-4, emg=1e-3, ref_meg=1e-12, misc='auto', diff --git a/mne/tests/test_defaults.py b/mne/tests/test_defaults.py index 5e8a34ef66f..7c01533231f 100644 --- a/mne/tests/test_defaults.py +++ b/mne/tests/test_defaults.py @@ -39,7 +39,7 @@ def test_si_units(): want_scale = _get_scaling(key, units[key]) else: want_scale = _get_scaling(key, units[key]) - assert_allclose(scale, want_scale, rtol=1e-12) + assert_allclose(scale, want_scale, rtol=1e-12, err_msg=key) @pytest.mark.parametrize('key', ('si_units', 'color', 'scalings', From 
698d2a69595474582984b59099a5dafe4fd30a6f Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Thu, 9 Mar 2023 13:39:04 -0500 Subject: [PATCH 20/31] FIX: Add tests --- mne/datasets/config.py | 4 ++-- mne/io/pick.py | 9 +++++++- mne/io/snirf/tests/test_snirf.py | 39 +++++++++++++++++++++----------- mne/preprocessing/nirs/nirs.py | 29 ++++++++++++++++++++---- 4 files changed, 60 insertions(+), 21 deletions(-) diff --git a/mne/datasets/config.py b/mne/datasets/config.py index c9431a9837e..7ebd5c5b3f8 100644 --- a/mne/datasets/config.py +++ b/mne/datasets/config.py @@ -87,7 +87,7 @@ # respective repos, and make a new release of the dataset on GitHub. Then # update the checksum in the MNE_DATASETS dict below, and change version # here: ↓↓↓↓↓ ↓↓↓ -RELEASES = dict(testing='0.142', misc='0.24') +RELEASES = dict(testing='0.143', misc='0.24') TESTING_VERSIONED = f'mne-testing-data-{RELEASES["testing"]}' MISC_VERSIONED = f'mne-misc-data-{RELEASES["misc"]}' @@ -111,7 +111,7 @@ # Testing and misc are at the top as they're updated most often MNE_DATASETS['testing'] = dict( archive_name=f'{TESTING_VERSIONED}.tar.gz', - hash='md5:44b857ddb34aefd752e4f5b19d625dee', + hash='md5:6a3b6f69a65cd2547d2c3884ae7ca778', url=('https://codeload.github.com/mne-tools/mne-testing-data/' f'tar.gz/{RELEASES["testing"]}'), # In case we ever have to resort to osf.io again... diff --git a/mne/io/pick.py b/mne/io/pick.py index de2e218c131..6996d851da9 100644 --- a/mne/io/pick.py +++ b/mne/io/pick.py @@ -359,6 +359,12 @@ def _triage_fnirs_pick(ch, fnirs, warned): return True elif ch['coil_type'] == FIFF.FIFFV_COIL_FNIRS_OD and 'fnirs_od' in fnirs: return True + elif ch['coil_type'] == FIFF.FIFFV_COIL_FNIRS_TD_MOMENTS_AMPLITUDE and \ + 'fnirs_td_moments_amplitude' in fnirs: + return True + elif ch['coil_type'] == FIFF.FIFFV_COIL_FNIRS_TD_GATED_AMPLITUDE and \ + 'fnirs_td_gated_amplitude' in fnirs: + return True return False @@ -451,7 +457,7 @@ def pick_types(info, meg=False, eeg=False, stim=False, eog=False, ecg=False, pick[k] = _triage_meg_pick(info['chs'][k], meg) elif ch_type == 'ref_meg': pick[k] = _triage_meg_pick(info['chs'][k], ref_meg) - else: # ch_type in ('hbo', 'hbr') + else: # ch_type in ('hbo', 'hbr', ...) pick[k] = _triage_fnirs_pick(info['chs'][k], fnirs, warned) # restrict channels to selection if provided @@ -939,6 +945,7 @@ def _check_excludes_includes(chs, info=None, allow_bads=False): dbs=True, temperature=False, gsr=False) _PICK_TYPES_KEYS = tuple(list(_PICK_TYPES_DATA_DICT) + ['ref_meg']) _MEG_CH_TYPES_SPLIT = ('mag', 'grad', 'planar1', 'planar2') +# Entries here need to be added to _triage_fnirs_pick as well! 
_FNIRS_CH_TYPES_SPLIT = ( 'hbo', 'hbr', 'fnirs_cw_amplitude', 'fnirs_fd_ac_amplitude', 'fnirs_fd_phase', 'fnirs_od', diff --git a/mne/io/snirf/tests/test_snirf.py b/mne/io/snirf/tests/test_snirf.py index fc4572f3e3f..64b79af608e 100644 --- a/mne/io/snirf/tests/test_snirf.py +++ b/mne/io/snirf/tests/test_snirf.py @@ -67,14 +67,15 @@ nirx_nirsport2_20219 = testing_path / "NIRx" / "nirsport_v2" / "aurora_2021_9" # Kernel -kernel_hb = ( +kernel_path = ( testing_path / "SNIRF" / "Kernel" / "Flow50" / "Portal_2021_11" - / "hb.snirf" ) +kernel_hb = kernel_path / "hb.snirf" +kernel_td = kernel_path / "td_moments.snirf" h5py = pytest.importorskip("h5py") # module-level @@ -99,6 +100,7 @@ def _get_loc(raw, ch_name): nirx_nirsport2_103_2, nirx_nirsport2_103_2, kernel_hb, + kernel_td, lumo110 ])) def test_basic_reading_and_min_process(fname): @@ -109,8 +111,11 @@ def test_basic_reading_and_min_process(fname): raw = optical_density(raw) if 'fnirs_od' in raw: raw = beer_lambert_law(raw, ppf=6) - assert 'hbo' in raw - assert 'hbr' in raw + if 'fnirs_td_moments_amplitude' in raw: + pass + else: + assert 'hbo' in raw + assert 'hbr' in raw @requires_testing_data @@ -378,19 +383,27 @@ def test_snirf_fieldtrip_od(): @requires_testing_data -def test_snirf_kernel_hb(): - """Test reading Kernel SNIRF files with haemoglobin data.""" - raw = read_raw_snirf(kernel_hb, preload=True) - - # Test data import - assert raw._data.shape == (180 * 2, 14) - assert raw.copy().pick('hbo')._data.shape == (180, 14) - assert raw.copy().pick('hbr')._data.shape == (180, 14) +@pytest.mark.parametrize('kind', ('hb', 'td')) +def test_snirf_kernel(kind): + """Test reading Kernel SNIRF files with haemoglobin or TD data.""" + fname = dict(hb=kernel_hb, td=kernel_td)[kind] + raw = read_raw_snirf(fname, preload=True) + if kind == 'hb': + # Test data import + assert raw._data.shape == (180 * 2, 14) + assert raw.copy().pick('hbo')._data.shape == (180, 14) + assert raw.copy().pick('hbr')._data.shape == (180, 14) + n_nan = 20 + else: + assert raw._data.shape == (1080, 14) + assert raw.copy().pick('fnirs_td_moments_amplitude')._data.shape == \ + raw._data.shape + n_nan = 60 assert_allclose(raw.info['sfreq'], 8.257638) bad_nans = np.isnan(raw.get_data()).any(axis=1) - assert np.sum(bad_nans) == 20 + assert np.sum(bad_nans) == n_nan assert len(raw.annotations.description) == 2 assert raw.annotations.onset[0] == 0.036939 diff --git a/mne/preprocessing/nirs/nirs.py b/mne/preprocessing/nirs/nirs.py index 59a638f0f35..012b56140ac 100644 --- a/mne/preprocessing/nirs/nirs.py +++ b/mne/preprocessing/nirs/nirs.py @@ -102,21 +102,40 @@ def _check_channels_ordered(info, pair_vals, *, throw_errors=True, picks_chroma = _picks_to_idx(info, ['hbo', 'hbr'], exclude=[], allow_empty=True) - if (len(picks_wave) > 0) & (len(picks_chroma) > 0): + # All TD moments + picks_moments = _picks_to_idx(info, ['fnirs_td_moments_amplitude'], + exclude=[], allow_empty=True) + + # All TD gated + picks_gated = _picks_to_idx(info, ['fnirs_td_gated_amplitude'], + exclude=[], allow_empty=True) + + n_found = sum( + len(x) > 0 for x in ( + picks_wave, + picks_chroma, + picks_moments, + picks_gated, + ) + ) + if n_found != 1: picks = _throw_or_return_empty( - 'MNE does not support a combination of amplitude, optical ' - 'density, and haemoglobin data in the same raw structure.', - throw_errors) + 'MNE supports exactly one of amplitude, optical density, ' + 'TD moments, TD gated, and haemoglobin data in a given raw ' + f'structure, found {n_found}', throw_errors) # All continuous wave 
fNIRS data if len(picks_wave): error_word = "frequencies" use_RE = _S_D_F_RE picks = picks_wave - else: + elif len(picks_chroma): error_word = "chromophore" use_RE = _S_D_H_RE picks = picks_chroma + else: + assert len(picks_moments) or len(picks_gated) + return # nothing to check pair_vals = np.array(pair_vals) if pair_vals.shape != (2,): From d3b75ab1e93b4c00b0d8a6abcb741ee5cbaf7aba Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Thu, 9 Mar 2023 14:03:14 -0500 Subject: [PATCH 21/31] FIX: Reg --- mne/preprocessing/nirs/tests/test_nirs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mne/preprocessing/nirs/tests/test_nirs.py b/mne/preprocessing/nirs/tests/test_nirs.py index 24cd75a9627..774d38f0cde 100644 --- a/mne/preprocessing/nirs/tests/test_nirs.py +++ b/mne/preprocessing/nirs/tests/test_nirs.py @@ -337,7 +337,7 @@ def test_fnirs_channel_naming_and_order_custom_optical_density(): info = create_info(ch_names=ch_names, ch_types=ch_types, sfreq=1.0) raw2 = RawArray(data, info, verbose=True) raw.add_channels([raw2]) - with pytest.raises(ValueError, match='does not support a combination'): + with pytest.raises(ValueError, match='exactly one of'): _check_channels_ordered(raw.info, [760, 850]) From 0c4f8868ba874d0cddb1b182e8cada8201aed98a Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 28 Oct 2024 12:16:36 -0400 Subject: [PATCH 22/31] MAINT: Revert --- mne/datasets/config.py | 416 ++++++++++++++++++++++------------------- 1 file changed, 225 insertions(+), 191 deletions(-) diff --git a/mne/datasets/config.py b/mne/datasets/config.py index 7ebd5c5b3f8..ccd4babacd9 100644 --- a/mne/datasets/config.py +++ b/mne/datasets/config.py @@ -1,7 +1,6 @@ -# Authors: Adam Li -# Daniel McCloy -# -# License: BSD Style. +# Authors: The MNE-Python contributors. +# License: BSD-3-Clause +# Copyright the MNE-Python contributors. _bst_license_text = """ @@ -86,8 +85,13 @@ # To update the `testing` or `misc` datasets, push or merge commits to their # respective repos, and make a new release of the dataset on GitHub. Then # update the checksum in the MNE_DATASETS dict below, and change version -# here: ↓↓↓↓↓ ↓↓↓ -RELEASES = dict(testing='0.143', misc='0.24') +# here: ↓↓↓↓↓↓↓↓ +RELEASES = dict( + testing="0.156", + misc="0.27", + phantom_kit="0.2", + ucl_opm_auditory="0.2", +) TESTING_VERSIONED = f'mne-testing-data-{RELEASES["testing"]}' MISC_VERSIONED = f'mne-misc-data-{RELEASES["misc"]}' @@ -109,229 +113,259 @@ # of the downloaded dataset (ex: "MNE_DATASETS_EEGBCI_PATH"). # Testing and misc are at the top as they're updated most often -MNE_DATASETS['testing'] = dict( - archive_name=f'{TESTING_VERSIONED}.tar.gz', - hash='md5:6a3b6f69a65cd2547d2c3884ae7ca778', - url=('https://codeload.github.com/mne-tools/mne-testing-data/' - f'tar.gz/{RELEASES["testing"]}'), +MNE_DATASETS["testing"] = dict( + archive_name=f"{TESTING_VERSIONED}.tar.gz", + hash="md5:d94fe9f3abe949a507eaeb865fb84a3f", + url=( + "https://codeload.github.com/mne-tools/mne-testing-data/" + f'tar.gz/{RELEASES["testing"]}' + ), # In case we ever have to resort to osf.io again... 
# archive_name='mne-testing-data.tar.gz', # hash='md5:c805a5fed8ca46f723e7eec828d90824', # url='https://osf.io/dqfgy/download?version=1', # 0.136 - folder_name='MNE-testing-data', - config_key='MNE_DATASETS_TESTING_PATH', + folder_name="MNE-testing-data", + config_key="MNE_DATASETS_TESTING_PATH", +) +MNE_DATASETS["misc"] = dict( + archive_name=f"{MISC_VERSIONED}.tar.gz", # 'mne-misc-data', + hash="md5:e343d3a00cb49f8a2f719d14f4758afe", + url=( + "https://codeload.github.com/mne-tools/mne-misc-data/tar.gz/" + f'{RELEASES["misc"]}' + ), + folder_name="MNE-misc-data", + config_key="MNE_DATASETS_MISC_PATH", ) -MNE_DATASETS['misc'] = dict( - archive_name=f'{MISC_VERSIONED}.tar.gz', # 'mne-misc-data', - hash='md5:eb017a919939511932bd683f26f97490', - url=('https://codeload.github.com/mne-tools/mne-misc-data/tar.gz/' - f'{RELEASES["misc"]}'), - folder_name='MNE-misc-data', - config_key='MNE_DATASETS_MISC_PATH' + +MNE_DATASETS["fnirs_motor"] = dict( + archive_name="MNE-fNIRS-motor-data.tgz", + hash="md5:c4935d19ddab35422a69f3326a01fef8", + url="https://osf.io/dj3eh/download?version=1", + folder_name="MNE-fNIRS-motor-data", + config_key="MNE_DATASETS_FNIRS_MOTOR_PATH", +) + +MNE_DATASETS["ucl_opm_auditory"] = dict( + archive_name="auditory_OPM_stationary.zip", + hash="md5:b2d69aa2d656b960bd0c18968dc1a14d", + url="https://osf.io/download/tp324/?version=1", # original is mwrt3 + folder_name="auditory_OPM_stationary", + config_key="MNE_DATASETS_UCL_OPM_AUDITORY_PATH", ) -MNE_DATASETS['fnirs_motor'] = dict( - archive_name='MNE-fNIRS-motor-data.tgz', - hash='md5:c4935d19ddab35422a69f3326a01fef8', - url='https://osf.io/dj3eh/download?version=1', - folder_name='MNE-fNIRS-motor-data', - config_key='MNE_DATASETS_FNIRS_MOTOR_PATH', +MNE_DATASETS["kiloword"] = dict( + archive_name="MNE-kiloword-data.tar.gz", + hash="md5:3a124170795abbd2e48aae8727e719a8", + url="https://osf.io/qkvf9/download?version=1", + folder_name="MNE-kiloword-data", + config_key="MNE_DATASETS_KILOWORD_PATH", ) -MNE_DATASETS['ucl_opm_auditory'] = dict( - archive_name='auditory_OPM_stationary.zip', - hash='md5:9ed0d8d554894542b56f8e7c4c0041fe', - url='https://osf.io/download/mwrt3/?version=1', - folder_name='auditory_OPM_stationary', - config_key='MNE_DATASETS_UCL_OPM_AUDITORY_PATH', +MNE_DATASETS["multimodal"] = dict( + archive_name="MNE-multimodal-data.tar.gz", + hash="md5:26ec847ae9ab80f58f204d09e2c08367", + url="https://ndownloader.figshare.com/files/5999598", + folder_name="MNE-multimodal-data", + config_key="MNE_DATASETS_MULTIMODAL_PATH", ) -MNE_DATASETS['kiloword'] = dict( - archive_name='MNE-kiloword-data.tar.gz', - hash='md5:3a124170795abbd2e48aae8727e719a8', - url='https://osf.io/qkvf9/download?version=1', - folder_name='MNE-kiloword-data', - config_key='MNE_DATASETS_KILOWORD_PATH', +MNE_DATASETS["opm"] = dict( + archive_name="MNE-OPM-data.tar.gz", + hash="md5:370ad1dcfd5c47e029e692c85358a374", + url="https://osf.io/p6ae7/download?version=2", + folder_name="MNE-OPM-data", + config_key="MNE_DATASETS_OPM_PATH", ) -MNE_DATASETS['multimodal'] = dict( - archive_name='MNE-multimodal-data.tar.gz', - hash='md5:26ec847ae9ab80f58f204d09e2c08367', - url='https://ndownloader.figshare.com/files/5999598', - folder_name='MNE-multimodal-data', - config_key='MNE_DATASETS_MULTIMODAL_PATH', +MNE_DATASETS["phantom_kit"] = dict( + archive_name="MNE-phantom-KIT-data.tar.gz", + hash="md5:7bfdf40bbeaf17a66c99c695640e0740", + url="https://osf.io/fb6ya/download?version=1", + folder_name="MNE-phantom-KIT-data", + config_key="MNE_DATASETS_PHANTOM_KIT_PATH", ) 
-MNE_DATASETS['opm'] = dict( - archive_name='MNE-OPM-data.tar.gz', - hash='md5:370ad1dcfd5c47e029e692c85358a374', - url='https://osf.io/p6ae7/download?version=2', - folder_name='MNE-OPM-data', - config_key='MNE_DATASETS_OPM_PATH', +MNE_DATASETS["phantom_4dbti"] = dict( + archive_name="MNE-phantom-4DBTi.zip", + hash="md5:938a601440f3ffa780d20a17bae039ff", + url="https://osf.io/v2brw/download?version=2", + folder_name="MNE-phantom-4DBTi", + config_key="MNE_DATASETS_PHANTOM_4DBTI_PATH", ) -MNE_DATASETS['phantom_4dbti'] = dict( - archive_name='MNE-phantom-4DBTi.zip', - hash='md5:938a601440f3ffa780d20a17bae039ff', - url='https://osf.io/v2brw/download?version=2', - folder_name='MNE-phantom-4DBTi', - config_key='MNE_DATASETS_PHANTOM_4DBTI_PATH', +MNE_DATASETS["phantom_kernel"] = dict( + archive_name="MNE-phantom-kernel.tar.gz", + hash="md5:4e2ad987dac1a20f95bae8ffeb2d41d6", + url="https://osf.io/dj7wz/download?version=1", + folder_name="MNE-phantom-kernel-data", + config_key="MNE_DATASETS_PHANTOM_KERNEL_PATH", ) -MNE_DATASETS['sample'] = dict( - archive_name='MNE-sample-data-processed.tar.gz', - hash='md5:e8f30c4516abdc12a0c08e6bae57409c', - url='https://osf.io/86qa2/download?version=6', - folder_name='MNE-sample-data', - config_key='MNE_DATASETS_SAMPLE_PATH', +MNE_DATASETS["sample"] = dict( + archive_name="MNE-sample-data-processed.tar.gz", + hash="md5:e8f30c4516abdc12a0c08e6bae57409c", + url="https://osf.io/86qa2/download?version=6", + folder_name="MNE-sample-data", + config_key="MNE_DATASETS_SAMPLE_PATH", ) -MNE_DATASETS['somato'] = dict( - archive_name='MNE-somato-data.tar.gz', - hash='md5:32fd2f6c8c7eb0784a1de6435273c48b', - url='https://osf.io/tp4sg/download?version=7', - folder_name='MNE-somato-data', - config_key='MNE_DATASETS_SOMATO_PATH' +MNE_DATASETS["somato"] = dict( + archive_name="MNE-somato-data.tar.gz", + hash="md5:32fd2f6c8c7eb0784a1de6435273c48b", + url="https://osf.io/tp4sg/download?version=7", + folder_name="MNE-somato-data", + config_key="MNE_DATASETS_SOMATO_PATH", ) -MNE_DATASETS['spm'] = dict( - archive_name='MNE-spm-face.tar.gz', - hash='md5:9f43f67150e3b694b523a21eb929ea75', - url='https://osf.io/je4s8/download?version=2', - folder_name='MNE-spm-face', - config_key='MNE_DATASETS_SPM_FACE_PATH', +MNE_DATASETS["spm"] = dict( + archive_name="MNE-spm-face.tar.gz", + hash="md5:9f43f67150e3b694b523a21eb929ea75", + url="https://osf.io/je4s8/download?version=2", + folder_name="MNE-spm-face", + config_key="MNE_DATASETS_SPM_FACE_PATH", ) # Visual 92 categories has the dataset split into 2 files. # We define a dictionary holding the items with the same # value across both files: folder name and configuration key. 
-MNE_DATASETS['visual_92_categories'] = dict( - folder_name='MNE-visual_92_categories-data', - config_key='MNE_DATASETS_VISUAL_92_CATEGORIES_PATH', -) -MNE_DATASETS['visual_92_categories_1'] = dict( - archive_name='MNE-visual_92_categories-data-part1.tar.gz', - hash='md5:74f50bbeb65740903eadc229c9fa759f', - url='https://osf.io/8ejrs/download?version=1', - folder_name='MNE-visual_92_categories-data', - config_key='MNE_DATASETS_VISUAL_92_CATEGORIES_PATH', -) -MNE_DATASETS['visual_92_categories_2'] = dict( - archive_name='MNE-visual_92_categories-data-part2.tar.gz', - hash='md5:203410a98afc9df9ae8ba9f933370e20', - url='https://osf.io/t4yjp/download?version=1', - folder_name='MNE-visual_92_categories-data', - config_key='MNE_DATASETS_VISUAL_92_CATEGORIES_PATH', -) - -MNE_DATASETS['mtrf'] = dict( - archive_name='mTRF_1.5.zip', - hash='md5:273a390ebbc48da2c3184b01a82e4636', - url='https://osf.io/h85s2/download?version=1', - folder_name='mTRF_1.5', - config_key='MNE_DATASETS_MTRF_PATH' -) -MNE_DATASETS['refmeg_noise'] = dict( - archive_name='sample_reference_MEG_noise-raw.zip', - hash='md5:779fecd890d98b73a4832e717d7c7c45', - url='https://osf.io/drt6v/download?version=1', - folder_name='MNE-refmeg-noise-data', - config_key='MNE_DATASETS_REFMEG_NOISE_PATH' -) - -MNE_DATASETS['ssvep'] = dict( - archive_name='ssvep_example_data.zip', - hash='md5:af866bbc0f921114ac9d683494fe87d6', - url='https://osf.io/z8h6k/download?version=5', - folder_name='ssvep-example-data', - config_key='MNE_DATASETS_SSVEP_PATH' -) - -MNE_DATASETS['erp_core'] = dict( - archive_name='MNE-ERP-CORE-data.tar.gz', - hash='md5:5866c0d6213bd7ac97f254c776f6c4b1', - url='https://osf.io/rzgba/download?version=1', - folder_name='MNE-ERP-CORE-data', - config_key='MNE_DATASETS_ERP_CORE_PATH', -) - -MNE_DATASETS['epilepsy_ecog'] = dict( - archive_name='MNE-epilepsy-ecog-data.tar.gz', - hash='md5:ffb139174afa0f71ec98adbbb1729dea', - url='https://osf.io/z4epq/download?version=1', - folder_name='MNE-epilepsy-ecog-data', - config_key='MNE_DATASETS_EPILEPSY_ECOG_PATH', +MNE_DATASETS["visual_92_categories"] = dict( + folder_name="MNE-visual_92_categories-data", + config_key="MNE_DATASETS_VISUAL_92_CATEGORIES_PATH", +) +MNE_DATASETS["visual_92_categories_1"] = dict( + archive_name="MNE-visual_92_categories-data-part1.tar.gz", + hash="md5:74f50bbeb65740903eadc229c9fa759f", + url="https://osf.io/8ejrs/download?version=1", + folder_name="MNE-visual_92_categories-data", + config_key="MNE_DATASETS_VISUAL_92_CATEGORIES_PATH", +) +MNE_DATASETS["visual_92_categories_2"] = dict( + archive_name="MNE-visual_92_categories-data-part2.tar.gz", + hash="md5:203410a98afc9df9ae8ba9f933370e20", + url="https://osf.io/t4yjp/download?version=1", + folder_name="MNE-visual_92_categories-data", + config_key="MNE_DATASETS_VISUAL_92_CATEGORIES_PATH", +) + +MNE_DATASETS["mtrf"] = dict( + archive_name="mTRF_1.5.zip", + hash="md5:273a390ebbc48da2c3184b01a82e4636", + url="https://osf.io/h85s2/download?version=1", + folder_name="mTRF_1.5", + config_key="MNE_DATASETS_MTRF_PATH", +) +MNE_DATASETS["refmeg_noise"] = dict( + archive_name="sample_reference_MEG_noise-raw.zip", + hash="md5:779fecd890d98b73a4832e717d7c7c45", + url="https://osf.io/drt6v/download?version=1", + folder_name="MNE-refmeg-noise-data", + config_key="MNE_DATASETS_REFMEG_NOISE_PATH", +) + +MNE_DATASETS["ssvep"] = dict( + archive_name="ssvep_example_data.zip", + hash="md5:af866bbc0f921114ac9d683494fe87d6", + url="https://osf.io/z8h6k/download?version=5", + folder_name="ssvep-example-data", + 
config_key="MNE_DATASETS_SSVEP_PATH", +) + +MNE_DATASETS["erp_core"] = dict( + archive_name="MNE-ERP-CORE-data.tar.gz", + hash="md5:5866c0d6213bd7ac97f254c776f6c4b1", + url="https://osf.io/rzgba/download?version=1", + folder_name="MNE-ERP-CORE-data", + config_key="MNE_DATASETS_ERP_CORE_PATH", +) + +MNE_DATASETS["epilepsy_ecog"] = dict( + archive_name="MNE-epilepsy-ecog-data.tar.gz", + hash="md5:ffb139174afa0f71ec98adbbb1729dea", + url="https://osf.io/z4epq/download?version=1", + folder_name="MNE-epilepsy-ecog-data", + config_key="MNE_DATASETS_EPILEPSY_ECOG_PATH", ) # Fieldtrip CMC dataset -MNE_DATASETS['fieldtrip_cmc'] = dict( - archive_name='SubjectCMC.zip', - hash='md5:6f9fd6520f9a66e20994423808d2528c', - url='https://osf.io/j9b6s/download?version=1', - folder_name='MNE-fieldtrip_cmc-data', - config_key='MNE_DATASETS_FIELDTRIP_CMC_PATH' +MNE_DATASETS["fieldtrip_cmc"] = dict( + archive_name="SubjectCMC.zip", + hash="md5:6f9fd6520f9a66e20994423808d2528c", + url="https://osf.io/j9b6s/download?version=1", + folder_name="MNE-fieldtrip_cmc-data", + config_key="MNE_DATASETS_FIELDTRIP_CMC_PATH", ) # brainstorm datasets: -MNE_DATASETS['bst_auditory'] = dict( - archive_name='bst_auditory.tar.gz', - hash='md5:fa371a889a5688258896bfa29dd1700b', - url='https://osf.io/5t9n8/download?version=1', - folder_name='MNE-brainstorm-data', - config_key='MNE_DATASETS_BRAINSTORM_PATH', -) -MNE_DATASETS['bst_phantom_ctf'] = dict( - archive_name='bst_phantom_ctf.tar.gz', - hash='md5:80819cb7f5b92d1a5289db3fb6acb33c', - url='https://osf.io/sxr8y/download?version=1', - folder_name='MNE-brainstorm-data', - config_key='MNE_DATASETS_BRAINSTORM_PATH', -) -MNE_DATASETS['bst_phantom_elekta'] = dict( - archive_name='bst_phantom_elekta.tar.gz', - hash='md5:1badccbe17998d18cc373526e86a7aaf', - url='https://osf.io/dpcku/download?version=1', - folder_name='MNE-brainstorm-data', - config_key='MNE_DATASETS_BRAINSTORM_PATH', -) -MNE_DATASETS['bst_raw'] = dict( - archive_name='bst_raw.tar.gz', - hash='md5:fa2efaaec3f3d462b319bc24898f440c', - url='https://osf.io/9675n/download?version=2', - folder_name='MNE-brainstorm-data', - config_key='MNE_DATASETS_BRAINSTORM_PATH', -) -MNE_DATASETS['bst_resting'] = dict( - archive_name='bst_resting.tar.gz', - hash='md5:70fc7bf9c3b97c4f2eab6260ee4a0430', - url='https://osf.io/m7bd3/download?version=3', - folder_name='MNE-brainstorm-data', - config_key='MNE_DATASETS_BRAINSTORM_PATH', +MNE_DATASETS["bst_auditory"] = dict( + archive_name="bst_auditory.tar.gz", + hash="md5:fa371a889a5688258896bfa29dd1700b", + url="https://osf.io/5t9n8/download?version=1", + folder_name="MNE-brainstorm-data", + config_key="MNE_DATASETS_BRAINSTORM_PATH", +) +MNE_DATASETS["bst_phantom_ctf"] = dict( + archive_name="bst_phantom_ctf.tar.gz", + hash="md5:80819cb7f5b92d1a5289db3fb6acb33c", + url="https://osf.io/sxr8y/download?version=1", + folder_name="MNE-brainstorm-data", + config_key="MNE_DATASETS_BRAINSTORM_PATH", +) +MNE_DATASETS["bst_phantom_elekta"] = dict( + archive_name="bst_phantom_elekta.tar.gz", + hash="md5:1badccbe17998d18cc373526e86a7aaf", + url="https://osf.io/dpcku/download?version=1", + folder_name="MNE-brainstorm-data", + config_key="MNE_DATASETS_BRAINSTORM_PATH", +) +MNE_DATASETS["bst_raw"] = dict( + archive_name="bst_raw.tar.gz", + hash="md5:fa2efaaec3f3d462b319bc24898f440c", + url="https://osf.io/9675n/download?version=2", + folder_name="MNE-brainstorm-data", + config_key="MNE_DATASETS_BRAINSTORM_PATH", +) +MNE_DATASETS["bst_resting"] = dict( + archive_name="bst_resting.tar.gz", + 
hash="md5:70fc7bf9c3b97c4f2eab6260ee4a0430", + url="https://osf.io/m7bd3/download?version=3", + folder_name="MNE-brainstorm-data", + config_key="MNE_DATASETS_BRAINSTORM_PATH", ) # HF-SEF -MNE_DATASETS['hf_sef_raw'] = dict( - archive_name='hf_sef_raw.tar.gz', - hash='md5:33934351e558542bafa9b262ac071168', - url='https://zenodo.org/record/889296/files/hf_sef_raw.tar.gz', - folder_name='hf_sef', - config_key='MNE_DATASETS_HF_SEF_PATH', -) -MNE_DATASETS['hf_sef_evoked'] = dict( - archive_name='hf_sef_evoked.tar.gz', - hash='md5:13d34cb5db584e00868677d8fb0aab2b', - url=('https://zenodo.org/record/3523071/files/' - 'hf_sef_evoked.tar.gz'), - folder_name='hf_sef', - config_key='MNE_DATASETS_HF_SEF_PATH', +MNE_DATASETS["hf_sef_raw"] = dict( + archive_name="hf_sef_raw.tar.gz", + hash="md5:33934351e558542bafa9b262ac071168", + url="https://zenodo.org/record/889296/files/hf_sef_raw.tar.gz", + folder_name="hf_sef", + config_key="MNE_DATASETS_HF_SEF_PATH", +) +MNE_DATASETS["hf_sef_evoked"] = dict( + archive_name="hf_sef_evoked.tar.gz", + hash="md5:13d34cb5db584e00868677d8fb0aab2b", + # Zenodo can be slow, so we use the OSF mirror + # url=('https://zenodo.org/record/3523071/files/' + # 'hf_sef_evoked.tar.gz'), + url="https://osf.io/25f8d/download?version=2", + folder_name="hf_sef", + config_key="MNE_DATASETS_HF_SEF_PATH", ) # "fake" dataset (for testing) -MNE_DATASETS['fake'] = dict( - archive_name='foo.tgz', - hash='md5:3194e9f7b46039bb050a74f3e1ae9908', - url=('https://github.com/mne-tools/mne-testing-data/raw/master/' - 'datasets/foo.tgz'), - folder_name='foo', - config_key='MNE_DATASETS_FAKE_PATH' +MNE_DATASETS["fake"] = dict( + archive_name="foo.tgz", + hash="md5:3194e9f7b46039bb050a74f3e1ae9908", + url="https://github.com/mne-tools/mne-testing-data/raw/master/datasets/foo.tgz", + folder_name="foo", + config_key="MNE_DATASETS_FAKE_PATH", +) + +# eyelink dataset +MNE_DATASETS["eyelink"] = dict( + archive_name="MNE-eyelink-data.zip", + hash="md5:68a6323ef17d655f1a659c3290ee1c3f", + url=("https://osf.io/xsu4g/download?version=1"), + folder_name="MNE-eyelink-data", + config_key="MNE_DATASETS_EYELINK_PATH", ) From b5e9967a39839001d9912eb57f7dee2b87df34c5 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 28 Oct 2024 13:25:19 -0400 Subject: [PATCH 23/31] Empty commit for credit Co-authored-by: Robert Luke Co-authored-by: John Griffiths <397826+JohnGriffiths@users.noreply.github.com> Co-authored-by: Julien Dubois <93365261+julien-dubois-k@users.noreply.github.com> From 34dffbf519b2be2183b84109a80caebb4a2391d1 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 28 Oct 2024 13:41:42 -0400 Subject: [PATCH 24/31] DOC: Change --- doc/changes/devel/11064.newfeature.rst | 1 + doc/changes/names.inc | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 doc/changes/devel/11064.newfeature.rst diff --git a/doc/changes/devel/11064.newfeature.rst b/doc/changes/devel/11064.newfeature.rst new file mode 100644 index 00000000000..24f3819a85d --- /dev/null +++ b/doc/changes/devel/11064.newfeature.rst @@ -0,0 +1 @@ +Added basic support for TD fNIRS data, by :newcontrib:`Zahra Aghajan`, :newcontrib:`Julien Dubois`, :newcontrib:`John Griffiths`, `Robert Luke`_, and `Eric Larson`_. \ No newline at end of file diff --git a/doc/changes/names.inc b/doc/changes/names.inc index 2c5e8e1ee2c..e7ed051e652 100644 --- a/doc/changes/names.inc +++ b/doc/changes/names.inc @@ -137,6 +137,7 @@ .. _Joan Massich: https://github.com/massich .. _Johann Benerradi: https://github.com/HanBnrd .. 
_Johannes Niediek: https://github.com/jniediek +.. _John Griffiths: https://www.grifflab.com .. _John Samuelsson: https://github.com/johnsam7 .. _John Veillette: https://psychology.uchicago.edu/directory/john-veillette .. _Jon Houck: https://www.mrn.org/people/jon-m.-houck/principal-investigators @@ -150,6 +151,7 @@ .. _Judy D Zhu: https://github.com/JD-Zhu .. _Juergen Dammers: https://github.com/jdammers .. _Jukka Nenonen: https://www.linkedin.com/pub/jukka-nenonen/28/b5a/684 +.. _Julien Dubois: https://github.com/julien-dubois-k .. _Jussi Nurminen: https://github.com/jjnurminen .. _Kaisu Lankinen: http://bishoplab.berkeley.edu/Kaisu.html .. _Katarina Slama: https://github.com/katarinaslama @@ -314,5 +316,6 @@ .. _Yiping Zuo: https://github.com/frostime .. _Yousra Bekhti: https://www.linkedin.com/pub/yousra-bekhti/56/886/421 .. _Yu-Han Luo: https://github.com/yh-luo +.. _Zahra Aghajan: https://github.com/Zahra-M-Aghajan .. _Zhi Zhang: https://github.com/tczhangzhi/ .. _Zvi Baratz: https://github.com/ZviBaratz From d62dbb169b0181d9609c0f5c3ad35df12453b05b Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 28 Oct 2024 13:43:39 -0400 Subject: [PATCH 25/31] MAINT: Mailmap --- .mailmap | 1 + 1 file changed, 1 insertion(+) diff --git a/.mailmap b/.mailmap index f130380a955..da14027b52f 100644 --- a/.mailmap +++ b/.mailmap @@ -352,5 +352,6 @@ Yousra Bekhti Yoursa BEKHTI Yoursa BEKHTI Yousra Bekhti Yousra BEKHTI Yousra Bekhti yousrabk +Zahra M. Aghajan Zahra M. Aghajan Zhi Zhang <850734033@qq.com> ZHANG Zhi <850734033@qq.com> Zhi Zhang <850734033@qq.com> ZHANG Zhi From 880356e96eff3ba43a8ab229335b5054c69173da Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 28 Oct 2024 13:51:12 -0400 Subject: [PATCH 26/31] FIX: SI --- mne/defaults.py | 33 ++++++++++++++++++++++++++------- mne/io/snirf/_snirf.py | 4 ++-- 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/mne/defaults.py b/mne/defaults.py index af19725bc44..be4e200f054 100644 --- a/mne/defaults.py +++ b/mne/defaults.py @@ -38,6 +38,8 @@ whitened="k", gsr="#666633", temperature="#663333", + eyegaze="k", + pupil="k", ), si_units=dict( mag="T", @@ -60,13 +62,14 @@ fnirs_fd_ac_amplitude="V", fnirs_fd_phase="rad", fnirs_od="V", - # TODO: These units and scalings are wrong - fnirs_td_gated_amplitude="Au", - fnirs_td_moments_amplitude="Au", + fnirs_td_gated_amplitude="M", + fnirs_td_moments_amplitude="M", csd="V/m²", whitened="Z", gsr="S", temperature="C", + eyegaze="rad", + pupil="µm", ), units=dict( mag="fT", @@ -89,12 +92,14 @@ fnirs_fd_ac_amplitude="V", fnirs_fd_phase="rad", fnirs_od="V", - fnirs_td_gated_amplitude="Au", - fnirs_td_moments_amplitude="Au", + fnirs_td_gated_amplitude="µM", + fnirs_td_moments_amplitude="µM", csd="mV/m²", whitened="Z", gsr="S", temperature="C", + eyegaze="rad", + pupil="µm", ), # scalings for the units scalings=dict( @@ -118,12 +123,14 @@ fnirs_fd_ac_amplitude=1.0, fnirs_fd_phase=1.0, fnirs_od=1.0, - fnirs_td_gated_amplitude=1.0, - fnirs_td_moments_amplitude=1.0, + fnirs_td_gated_amplitude=1e6, + fnirs_td_moments_amplitude=1e6, csd=1e3, whitened=1.0, gsr=1.0, temperature=1.0, + eyegaze=1.0, + pupil=1e6, ), # rough guess for a good plot scalings_plot_raw=dict( @@ -159,6 +166,8 @@ gof=1e2, gsr=1.0, temperature=0.1, + eyegaze=2e-1, + pupil=1e-2, ), scalings_cov_rank=dict( mag=1e12, @@ -184,6 +193,8 @@ hbo=(0, 20), hbr=(0, 20), csd=(-50.0, 50.0), + eyegaze=(-1, 1), + pupil=(-1.0, 1.0), ), titles=dict( mag="Magnetometers", @@ -212,6 +223,14 @@ stim="Stimulus", gsr="Galvanic skin response", 
temperature="Temperature", + eyegaze="Eye-tracking (Gaze position)", + pupil="Eye-tracking (Pupil size)", + resp="Respiration monitoring channel", + chpi="Continuous head position indicator (HPI) coil channels", + exci="Flux excitation channel", + ias="Internal Active Shielding data (Triux systems)", + syst="System status channel information (Triux systems)", + whitened="Whitened data", ), mask_params=dict( marker="o", diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index 86c341c18ec..43afa53c3f2 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -637,10 +637,10 @@ def _get_dataunit_scaling(hbx_unit): return scalings[hbx_unit] except KeyError: raise RuntimeError( - f"The Hb unit {hbx_unit} is not supported " + f"The Hb unit {repr(hbx_unit)} is not supported " "by MNE. Please report this error as a GitHub " "issue to inform the developers." - ) + ) from None def _extract_sampling_rate(dat): From 4f0e8e6994c1ae35abae9eb72cb90f7d2006fe3b Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 28 Oct 2024 13:51:49 -0400 Subject: [PATCH 27/31] FIX: Oops --- mne/defaults.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mne/defaults.py b/mne/defaults.py index be4e200f054..418795b2ce0 100644 --- a/mne/defaults.py +++ b/mne/defaults.py @@ -99,7 +99,7 @@ gsr="S", temperature="C", eyegaze="rad", - pupil="µm", + pupil="m", ), # scalings for the units scalings=dict( From a5c97b8117253ed03b0ba88475745839d653e646 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 28 Oct 2024 13:52:09 -0400 Subject: [PATCH 28/31] FIX: Oops --- mne/defaults.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mne/defaults.py b/mne/defaults.py index 418795b2ce0..65a410e5b50 100644 --- a/mne/defaults.py +++ b/mne/defaults.py @@ -69,7 +69,7 @@ gsr="S", temperature="C", eyegaze="rad", - pupil="µm", + pupil="m", ), units=dict( mag="fT", @@ -99,7 +99,7 @@ gsr="S", temperature="C", eyegaze="rad", - pupil="m", + pupil="µm", ), # scalings for the units scalings=dict( From fba1ffa42cba8b5c8319f5a0a6842198329aba65 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 28 Oct 2024 13:59:03 -0400 Subject: [PATCH 29/31] FIX: idx --- mne/io/snirf/_snirf.py | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index 43afa53c3f2..566d610f803 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -17,6 +17,7 @@ from ...utils import ( NamedInt, _check_fname, + _check_option, _import_h5py, fill_doc, logger, @@ -144,16 +145,14 @@ def __init__(self, fname, optode_frame="unknown", preload=False, verbose=None): if (optode_frame == "unknown") & (manufacturer == "Gowerlabs"): optode_frame = "head" - snirf_data_type = np.array( - dat.get("nirs/data1/measurementList1/dataType") - ).item() - if snirf_data_type not in _AVAILABLE_SNIRF_DATA_TYPES: - raise RuntimeError( - "File does not contain a supported data type. " - "MNE only supports reading the following data types " - f"{_AVAILABLE_SNIRF_DATA_TYPES}, but received type " - f"{snirf_data_type}." 
- ) + snirf_data_type = _correct_shape( + np.array(dat.get("nirs/data1/measurementList1/dataType")) + )[0] + _check_option( + "SNIRF data type", + snirf_data_type, + list(_AVAILABLE_SNIRF_DATA_TYPES), + ) last_samps = dat.get("/nirs/data1/dataTimeSeries").shape[0] - 1 @@ -292,6 +291,7 @@ def natural_keys(text): 0 ] ) + # append wavelength ch_name = f"{ch_name} {fnirs_wavelengths[wve_idx - 1]}" if snirf_data_type == SNIRF_CW_AMPLITUDE: ch_type = "fnirs_cw_amplitude" @@ -301,6 +301,7 @@ def natural_keys(text): np.array(dat.get(f"{ch_root}/dataTypeIndex")) )[0] ) + # append time delay ch_name = f"{ch_name} bin{fnirs_time_delays[bin_idx - 1]}" ch_type = "fnirs_td_gated_amplitude" else: @@ -310,6 +311,7 @@ def natural_keys(text): np.array(dat.get(f"{ch_root}/dataTypeIndex")) )[0] ) + # append moment order ch_name = ( f"{ch_name} moment{fnirs_moment_orders[moment_idx - 1]}" ) @@ -336,6 +338,7 @@ def natural_keys(text): ch_type = dt_id chnames.append(ch_name) ch_types.append(ch_type) + del ch_root, ch_name, ch_type data_scale = None if need_data_scale: @@ -440,7 +443,8 @@ def natural_keys(text): )[0] ) info["chs"][idx]["loc"][10] = ( - fnirs_time_delays[bin_idx - 1] * fnirs_time_delay_widths + fnirs_time_delays[bin_idx - 1] + * fnirs_time_delay_widths[bin_idx - 1] ) else: assert snirf_data_type == SNIRF_TD_MOMENTS_AMPLITUDE From 9f4234d044392376a845c3c3699ce24c81ec9528 Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 28 Oct 2024 14:04:29 -0400 Subject: [PATCH 30/31] WIP --- mne/io/snirf/_snirf.py | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index 566d610f803..dba28c9a3b4 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -340,16 +340,18 @@ def natural_keys(text): ch_types.append(ch_type) del ch_root, ch_name, ch_type - data_scale = None + # Create mne structure + info = create_info(chnames, sampling_rate, ch_types=ch_types) + if need_data_scale: snirf_data_unit = np.array( dat.get("nirs/data1/measurementList1/dataUnit", b"M") ) snirf_data_unit = snirf_data_unit.item().decode("utf-8") - data_scale = _get_dataunit_scaling(snirf_data_unit) - - # Create mne structure - info = create_info(chnames, sampling_rate, ch_types=ch_types) + scale = _get_dataunit_scaling(snirf_data_unit) + if scale is not None: + for ch in info["chs"]: + ch["cal"] = 1.0 / scale subject_info = {} names = np.array(dat.get("nirs/metaDataTags/SubjectID")) @@ -562,13 +564,11 @@ def natural_keys(text): with info._unlock(): info["subject_info"]["birthday"] = birthday - raw_extras = dict(data_scale=data_scale) super().__init__( info, preload, filenames=[fname], last_samps=[last_samps], - raw_extras=[raw_extras], verbose=verbose, ) @@ -596,10 +596,6 @@ def _read_segment_file(self, data, idx, fi, start, stop, cals, mult): _mult_cal_one(data, one, idx, cals, mult) - data_scale = self._raw_extras[fi]["data_scale"] - if data_scale is not None: - one *= data_scale - # Helper function for when the numpy array has shape (), i.e. just one element. 
def _correct_shape(arr): From b3b3f2e809055b47b08a476241acf1481cb15d9f Mon Sep 17 00:00:00 2001 From: Eric Larson Date: Mon, 28 Oct 2024 14:20:02 -0400 Subject: [PATCH 31/31] FIX: Test --- mne/io/snirf/_snirf.py | 11 ++++++++++- mne/io/snirf/tests/test_snirf.py | 15 ++++++++++----- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/mne/io/snirf/_snirf.py b/mne/io/snirf/_snirf.py index dba28c9a3b4..f3582266b60 100644 --- a/mne/io/snirf/_snirf.py +++ b/mne/io/snirf/_snirf.py @@ -304,6 +304,7 @@ def natural_keys(text): # append time delay ch_name = f"{ch_name} bin{fnirs_time_delays[bin_idx - 1]}" ch_type = "fnirs_td_gated_amplitude" + need_data_scale = True else: assert snirf_data_type == SNIRF_TD_MOMENTS_AMPLITUDE moment_idx = int( @@ -316,6 +317,7 @@ def natural_keys(text): f"{ch_name} moment{fnirs_moment_orders[moment_idx - 1]}" ) ch_type = "fnirs_td_moments_amplitude" + need_data_scale = True elif snirf_data_type == SNIRF_PROCESSED: dt_id = _correct_shape( @@ -333,7 +335,14 @@ def natural_keys(text): ) suffix = str(fnirs_wavelengths[wve_idx - 1]) else: + if dt_id not in ("hbo", "hbr"): + raise RuntimeError( + "read_raw_snirf can only handle processed " + "data in the form of optical density or " + f"HbO/HbR, but got type f{dt_id}" + ) suffix = dt_id.lower() + need_data_scale = True ch_name = f"{ch_name} {suffix}" ch_type = dt_id chnames.append(ch_name) @@ -351,7 +360,7 @@ def natural_keys(text): scale = _get_dataunit_scaling(snirf_data_unit) if scale is not None: for ch in info["chs"]: - ch["cal"] = 1.0 / scale + ch["cal"] = scale subject_info = {} names = np.array(dat.get("nirs/metaDataTags/SubjectID")) diff --git a/mne/io/snirf/tests/test_snirf.py b/mne/io/snirf/tests/test_snirf.py index a3812feeff1..35f37a51a90 100644 --- a/mne/io/snirf/tests/test_snirf.py +++ b/mne/io/snirf/tests/test_snirf.py @@ -427,14 +427,19 @@ def test_snirf_kernel(kind): if kind == "hb": # Test data import assert raw._data.shape == (180 * 2, 14) - assert raw.copy().pick("hbo")._data.shape == (180, 14) - assert raw.copy().pick("hbr")._data.shape == (180, 14) + hbo_data = raw.get_data("hbo") + hbr_data = raw.get_data("hbr") + assert hbo_data.shape == hbr_data.shape == (180, 14) + hbo_norm = np.nanmedian(np.linalg.norm(hbo_data, axis=-1)) + hbr_norm = np.nanmedian(np.linalg.norm(hbr_data, axis=-1)) + assert 1 < hbr_norm < hbo_norm < 3 n_nan = 20 else: assert raw._data.shape == (1080, 14) - assert ( - raw.copy().pick("fnirs_td_moments_amplitude")._data.shape == raw._data.shape - ) + data = raw.get_data("fnirs_td_moments_amplitude") + assert data.shape == raw._data.shape + norm = np.nanmedian(np.linalg.norm(data, axis=-1)) + assert 1e5 < norm < 1e6 # TODO: 429256, is this reasonable Molars!?? n_nan = 60 assert_allclose(raw.info["sfreq"], 8.257638)
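
As exercised by the updated Kernel test above, TD-moment recordings are now read as "fnirs_td_moments_amplitude" channels and are deliberately not converted to HbO/HbR via optical_density()/beer_lambert_law(). A minimal usage sketch (illustrative only; it assumes the updated mne-testing-data dataset, which the test points at for td_moments.snirf, is available locally):

    import mne
    from mne.datasets.testing import data_path

    fname = (data_path() / "SNIRF" / "Kernel" / "Flow50"
             / "Portal_2021_11" / "td_moments.snirf")
    raw = mne.io.read_raw_snirf(fname, preload=True)
    print(set(raw.get_channel_types()))  # {'fnirs_td_moments_amplitude'}
    # Per the reader changes above, each channel's loc[9] holds the wavelength
    # and loc[10] the moment order.
    moments = raw.get_data("fnirs_td_moments_amplitude")  # (n_channels, n_times)
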