Add funloc dataset to test support for session-specific anat #1039

Open · wants to merge 9 commits into base: main
55 changes: 55 additions & 0 deletions .circleci/config.yml
@@ -297,6 +297,25 @@ jobs:
paths:
- ~/mne_data/eeg_matchingpennies

cache_MNE-funloc-data:
<<: *imageconfig
steps:
- attach_workspace:
at: ~/
- restore_cache:
keys:
- data-cache-MNE-funloc-data-1
- bash_env
- run:
name: Get MNE-funloc-data
command: |
$DOWNLOAD_DATA MNE-funloc-data
- codecov/upload
- save_cache:
key: data-cache-MNE-funloc-data-1
paths:
- ~/mne_data/MNE-funloc-data

cache_MNE-phantom-KIT-data:
<<: *imageconfig
steps:
@@ -784,6 +803,32 @@ jobs:
paths:
- mne_data/derivatives/mne-bids-pipeline/eeg_matchingpennies/*/*/*.html

test_MNE-funloc-data:
<<: *imageconfig
steps:
- attach_workspace:
at: ~/
- bash_env
- restore_cache:
keys:
- data-cache-MNE-funloc-data-1
- run:
name: test MNE-funloc-data
command: $RUN_TESTS MNE-funloc-data
- codecov/upload
- store_test_results:
path: ./test-results
- store_artifacts:
path: ./test-results
destination: test-results
- store_artifacts:
path: /home/circleci/reports/MNE-funloc-data
destination: reports/MNE-funloc-data
- persist_to_workspace:
root: ~/
paths:
- mne_data/derivatives/mne-bids-pipeline/MNE-funloc-data/*/*/*.html

test_MNE-phantom-KIT-data:
<<: *imageconfig
steps:
@@ -1244,6 +1289,15 @@ workflows:
- cache_eeg_matchingpennies
<<: *filter_tags

- cache_MNE-funloc-data:
requires:
- setup_env
<<: *filter_tags
- test_MNE-funloc-data:
requires:
- cache_MNE-funloc-data
<<: *filter_tags

- cache_MNE-phantom-KIT-data:
requires:
- setup_env
@@ -1304,6 +1358,7 @@ workflows:
- test_ds003392
- test_ds004229
- test_eeg_matchingpennies
- test_MNE-funloc-data
- test_MNE-phantom-KIT-data
- test_ERP_CORE_N400
- test_ERP_CORE_ERN
1 change: 1 addition & 0 deletions docs/mkdocs.yml
@@ -121,6 +121,7 @@ nav:
- examples/ds000248_no_mri.md
- examples/ds003104.md
- examples/eeg_matchingpennies.md
- examples/MNE-funloc-data.md
- examples/MNE-phantom-KIT-data.md
- examples/ds001810.md
- examples/ds000117.md
8 changes: 5 additions & 3 deletions mne_bids_pipeline/_download.py
@@ -43,16 +43,18 @@ def _download_from_web(*, ds_name: str, ds_path: Path) -> None:

ds_path.mkdir(parents=True, exist_ok=True)
path = ds_path.parent.resolve(strict=True)
- fname = f"{ds_name}.zip"
+ ext = "tar.gz" if options.get("processor") == "untar" else "zip"
+ processor = pooch.Untar if options.get("processor") == "untar" else pooch.Unzip
+ fname = f"{ds_name}.{ext}"
pooch.retrieve(
url=url,
path=path,
fname=fname,
- processor=pooch.Unzip(extract_dir="."), # relative to path
+ processor=processor(extract_dir="."), # relative to path
progressbar=True,
known_hash=known_hash,
)
- (path / f"{ds_name}.zip").unlink()
+ (path / f"{ds_name}.{ext}").unlink()


def _download_via_mne(*, ds_name: str, ds_path: Path) -> None:
36 changes: 36 additions & 0 deletions mne_bids_pipeline/tests/configs/config_funloc.py
@@ -0,0 +1,36 @@
"""Funloc data."""

from pathlib import Path

data_root = Path("~/mne_data").expanduser().resolve()
bids_root = data_root / "MNE-funloc-data"
deriv_root = data_root / "derivatives" / "mne-bids-pipeline" / "MNE-funloc-data"
subjects_dir = bids_root / "derivatives" / "freesurfer" / "subjects"
task = "funloc"
ch_types = ["meg", "eeg"]
data_type = "meg"

# filter
l_freq = None
h_freq = 50.0
# maxfilter
use_maxwell_filter: bool = True
mf_st_duration = 60.0
# SSP
n_proj_eog = dict(n_mag=1, n_grad=1, n_eeg=2)
n_proj_ecg = dict(n_mag=1, n_grad=1, n_eeg=0)

# Epochs
epochs_tmin = -0.2
epochs_tmax = 0.5
epochs_t_adjust = -4.0e-3 # TODO is there a way to do this in MBP?
Member (Author) commented:

@larsoner does MBP have a way of doing what t_adjust does in mnefun?

Member replied:

No, this should be fixed/adjusted when bidsifying the dataset, I think (the timings, event types, etc. in there should be as accurate as possible and not require fixing).
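For illustration only (not part of this PR), a minimal sketch of baking such a constant trigger-to-stimulus offset into the events at BIDS-conversion time, assuming a recent mne-bids where `write_raw_bids` accepts `events`/`event_id`; the file name and event mapping below are hypothetical:

```python
# Hypothetical sketch: apply the t_adjust offset when bidsifying, so the
# written *_events.tsv already carries corrected onsets and the pipeline
# never needs an epochs_t_adjust setting.
import mne
from mne_bids import BIDSPath, write_raw_bids

t_adjust = -4.0e-3  # assumed constant trigger-to-stimulus delay in seconds

raw = mne.io.read_raw_fif("sub-01_task-funloc_raw.fif")  # hypothetical file
events = mne.find_events(raw)
events[:, 0] += int(round(t_adjust * raw.info["sfreq"]))  # shift onsets in samples

bids_path = BIDSPath(subject="01", task="funloc", root="MNE-funloc-data")
write_raw_bids(
    raw,
    bids_path=bids_path,
    events=events,  # corrected events are written to *_events.tsv
    event_id={"auditory/standard": 1},  # hypothetical mapping
    overwrite=True,
)
```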

epochs_decim = 5 # 1000 -> 200 Hz
baseline = (None, 0)
conditions = [
"auditory/standard",
"auditory/deviant",
"visual/standard",
"visual/deviant",
]
# contrasts
contrasts = [("auditory", "visual")]
6 changes: 6 additions & 0 deletions mne_bids_pipeline/tests/datasets.py
@@ -15,6 +15,7 @@ class DATASET_OPTIONS_T(TypedDict, total=False):
include: list[str] # []
exclude: list[str] # []
hash: str # ""
processor: str # ""


DATASET_OPTIONS: dict[str, DATASET_OPTIONS_T] = {
@@ -118,4 +119,9 @@ class DATASET_OPTIONS_T(TypedDict, total=False):
"MNE-phantom-KIT-data": {
"mne": "phantom_kit",
},
"MNE-funloc-data": {
"web": "https://osf.io/2pemg/download?version=1",
"hash": "sha256:05a6b0e9d7a21ac3378236082de5e3b8f1a02315501b8e587179959bc5fafba2", # noqa: E501
"processor": "untar",
},
}
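For reference, a minimal standalone sketch (not part of the PR) of roughly what `_download_from_web` does for an entry declaring `"processor": "untar"`, with the URL and hash taken from the entry above and paths simplified:

```python
# Illustrative sketch only: fetch the tar.gz archive with pooch, extract it
# next to the archive, then drop the archive and keep the extracted tree.
from pathlib import Path

import pooch

path = Path("~/mne_data").expanduser()
fname = "MNE-funloc-data.tar.gz"
pooch.retrieve(
    url="https://osf.io/2pemg/download?version=1",
    path=path,
    fname=fname,
    processor=pooch.Untar(extract_dir="."),  # extract relative to path
    progressbar=True,
    known_hash="sha256:05a6b0e9d7a21ac3378236082de5e3b8f1a02315501b8e587179959bc5fafba2",
)
(path / fname).unlink()
```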
101 changes: 101 additions & 0 deletions mne_bids_pipeline/tests/test_run.py
@@ -9,7 +9,9 @@
from typing import Any, TypedDict

import pytest
from mne_bids import BIDSPath, get_bids_path_from_fname

from mne_bids_pipeline._config_import import _import_config
from mne_bids_pipeline._download import main as download_main
from mne_bids_pipeline._main import main

@@ -135,6 +137,10 @@ class _TestOptionsT(TypedDict, total=False):
"MNE-phantom-KIT-data": {
"config": "config_MNE_phantom_KIT_data.py",
},
"MNE-funloc-data": {
"config": "config_funloc.py",
"steps": ["init", "preprocessing", "sensor", "source"],
},
}
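The `steps` entry restricts the run to those pipeline stages. Illustratively, the test harness ends up invoking the pipeline roughly as below (testing-specific environment variables omitted); this mirrors the `sys.argv` construction used in `test_session_specific_mri` further down:

```python
# Roughly equivalent to what the test harness runs for this TEST_SUITE entry.
import sys

from mne_bids_pipeline._main import main

sys.argv = [
    "mne_bids_pipeline",
    "mne_bids_pipeline/tests/configs/config_funloc.py",
    "--steps=init,preprocessing,sensor,source",
]
main()
```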


@@ -274,3 +280,98 @@ def test_missing_sessions(
print()
with context:
main()


@pytest.mark.dataset_test
@pytest.mark.parametrize("dataset", ["MNE-funloc-data"])
def test_session_specific_mri(
dataset: str,
monkeypatch: pytest.MonkeyPatch,
dataset_test: Any,
tmp_path: Path,
capsys: pytest.CaptureFixture[str],
) -> None:
"""Test of (faked) session-specific MRIs."""
test_options = TEST_SUITE[dataset]
config = test_options.get("config", f"config_{dataset}.py")
config_path = BIDS_PIPELINE_DIR / "tests" / "configs" / config
# copy the dataset to a tmpdir
config_obj = _import_config(config_path=config_path)
# make it seem like there's only one subj with different MRIs for different sessions
new_bids_path = BIDSPath(root=tmp_path / dataset, subject="01", session="a")
# sub-01/* → sub-01/ses-a/*
src_dir = config_obj.bids_root / "sub-01"
dst_dir = new_bids_path.root / "sub-01" / "ses-a"
for root, dirs, files in src_dir.walk():
offset = root.relative_to(src_dir)
for _dir in dirs:
(dst_dir / offset / _dir).mkdir(parents=True)
for _file in files:
bp = get_bids_path_from_fname(root / _file)
bp.update(root=new_bids_path.root, session="a")
shutil.copyfile(src=root / _file, dst=dst_dir / offset / bp.basename)
# sub-02/* → sub-01/ses-b/*
src_dir = config_obj.bids_root / "sub-02"
dst_dir = new_bids_path.root / "sub-01" / "ses-b"
for root, dirs, files in src_dir.walk():
offset = root.relative_to(src_dir)
for _dir in dirs:
(dst_dir / offset / _dir).mkdir(parents=True)
for _file in files:
bp = get_bids_path_from_fname(root / _file)
bp.update(root=new_bids_path.root, subject="01", session="b")
shutil.copyfile(src=root / _file, dst=dst_dir / offset / bp.basename)
# emptyroom
src_dir = config_obj.bids_root / "sub-emptyroom"
dst_dir = new_bids_path.root / "sub-emptyroom"
shutil.copytree(src=src_dir, dst=dst_dir)
# root-level files (dataset description, etc)
src_dir = config_obj.bids_root
dst_dir = new_bids_path.root
files = [f for f in src_dir.iterdir() if f.is_file()]
for _file in files:
shutil.copyfile(src=_file, dst=dst_dir / _file.name)
# now move derivatives (freesurfer files)
src_dir = config_obj.bids_root / "derivatives" / "freesurfer" / "subjects"
dst_dir = new_bids_path.root / "derivatives" / "freesurfer" / "subjects"
dst_dir.mkdir(parents=True)
for root, dirs, files in src_dir.walk():
new_root = root
if "sub01" in root.parts:
new_root = Path(
*["sub-01_ses-a" if p == "sub01" else p for p in new_root.parts]
)
elif "sub02" in root.parts:
new_root = Path(
*["sub-01_ses-b" if p == "sub02" else p for p in new_root.parts]
)
offset = new_root.relative_to(src_dir)
for _dir in dirs:
if _dir == "sub01":
_dir = "sub-01_ses-a"
elif _dir == "sub02":
_dir = "sub-01_ses-b"
(dst_dir / offset / _dir).mkdir()
for _file in files:
dst_file = _file
if _file.startswith("sub01"):
dst_file = dst_file.replace("sub01", "sub-01_ses-a")
elif _file.startswith("sub02"):
dst_file = dst_file.replace("sub02", "sub-01_ses-b")
shutil.copyfile(src=root / _file, dst=dst_dir / offset / dst_file)
# print_dir_tree(new_bids_path.root) # for debugging
# hack in the new bids_root
extra_config = dict(bids_root=str(new_bids_path.root))
extra_path = tmp_path / "extra_config.py"
extra_path.write_text(str(extra_config))
monkeypatch.setenv("_MNE_BIDS_STUDY_TESTING_EXTRA_CONFIG", str(extra_path))
# Run the tests.
steps = test_options.get("steps", ())
command = ["mne_bids_pipeline", str(config_path), f"--steps={','.join(steps)}"]
if "--pdb" in sys.argv:
command.append("--n_jobs=1")
monkeypatch.setenv("_MNE_BIDS_STUDY_TESTING", "true")
monkeypatch.setattr(sys, "argv", command)
with capsys.disabled():
print()
main()