Commit 04a76b1

BUG: Fix bug with recomputation

larsoner committed Mar 14, 2024
1 parent 1c2a081 commit 04a76b1
Showing 13 changed files with 34 additions and 53 deletions.
docs/source/examples/gen_examples.py (2 changes: 1 addition & 1 deletion)
@@ -139,7 +139,7 @@ def _gen_demonstrated_funcs(example_config_path: Path) -> dict:
)
if dataset_name in all_demonstrated:
logger.warning(
f"Duplicate dataset name {test_dataset_name} -> {dataset_name}, " "skipping"
f"Duplicate dataset name {test_dataset_name} -> {dataset_name}, skipping"
)
continue
del test_dataset_options, test_dataset_name
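
Several hunks in this commit (here and in _config_import.py, _config_utils.py, _06_make_cov.py, and _02_make_bem_solution.py) simply merge adjacent string literals that an earlier line wrap left behind. A minimal sketch of why the split form is fragile; the variable values below are made up for illustration:

```python
dataset_name = "ds000248"  # hypothetical value, for illustration only
test_dataset_name = "ds000248_base"  # hypothetical value

# Python concatenates adjacent string literals at compile time, so both
# spellings build the same message, but the split form hides the join point
# and makes it easy to drop or duplicate a space when the line is re-wrapped.
split = f"Duplicate dataset name {test_dataset_name} -> {dataset_name}, " "skipping"
joined = f"Duplicate dataset name {test_dataset_name} -> {dataset_name}, skipping"
assert split == joined
```
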
docs/source/v1.8.md.inc (4 changes: 2 additions & 2 deletions)
@@ -14,9 +14,9 @@

[//]: # (- Whatever (#000 by @whoever))

- [//]: # (### :bug: Bug fixes)
+ ### :bug: Bug fixes

- [//]: # (- Whatever (#000 by @whoever))
+ - Fix bug where Maxwell filtered data was not handled properly in CSP (#890 by @larsoner)

### :medical_symbol: Code health

mne_bids_pipeline/_config_import.py (2 changes: 1 addition & 1 deletion)
@@ -434,7 +434,7 @@ def _check_misspellings_removals(
if user_name not in valid_names:
# find the closest match
closest_match = difflib.get_close_matches(user_name, valid_names, n=1)
msg = f"Found a variable named {repr(user_name)} in your custom " "config,"
msg = f"Found a variable named {repr(user_name)} in your custom config,"
if closest_match and closest_match[0] not in user_names:
this_msg = (
f"{msg} did you mean {repr(closest_match[0])}? "
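
For context, the "did you mean" hint above is built with the standard-library difflib.get_close_matches. A small self-contained illustration with made-up option names rather than the pipeline's real config keys:

```python
import difflib

valid_names = ["ch_types", "task", "runs", "sessions"]  # hypothetical options
user_name = "chtypes"  # hypothetical misspelling

# n=1 returns at most one candidate, ranked by similarity.
closest_match = difflib.get_close_matches(user_name, valid_names, n=1)
msg = f"Found a variable named {user_name!r} in your custom config,"
if closest_match:
    msg += f" did you mean {closest_match[0]!r}?"
print(msg)  # ... did you mean 'ch_types'?
```
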
mne_bids_pipeline/_config_utils.py (2 changes: 1 addition & 1 deletion)
@@ -403,7 +403,7 @@ def get_mf_ctc_fname(
root=config.bids_root,
).meg_crosstalk_fpath
if mf_ctc_fpath is None:
raise ValueError("Could not find Maxwell Filter cross-talk " "file.")
raise ValueError("Could not find Maxwell Filter cross-talk file.")
else:
mf_ctc_fpath = pathlib.Path(config.mf_ctc_fname).expanduser().absolute()
if not mf_ctc_fpath.exists():
mne_bids_pipeline/_import_data.py (23 changes: 8 additions & 15 deletions)
@@ -16,7 +16,7 @@
get_runs,
get_task,
)
- from ._io import _empty_room_match_path, _read_json
+ from ._io import _read_json
from ._logging import gen_log_kwargs, logger
from ._run import _update_for_splits
from .typing import PathLike
@@ -403,6 +403,7 @@ def import_experimental_data(
_fix_stim_artifact_func(cfg=cfg, raw=raw)

if bids_path_bads_in is not None:
run = "rest" if data_is_rest else run # improve logging
bads = _read_bads_tsv(cfg=cfg, bids_path_bads=bids_path_bads_in)
msg = f"Marking {len(bads)} channel{_pl(bads)} as bad."
logger.info(**gen_log_kwargs(message=msg))
@@ -701,6 +702,12 @@ def _get_mf_reference_run_path(
)


+ def _empty_room_match_path(run_path: BIDSPath, cfg: SimpleNamespace) -> BIDSPath:
+ return run_path.copy().update(
+ extension=".json", suffix="emptyroommatch", root=cfg.deriv_root
+ )
+
+
def _path_dict(
*,
cfg: SimpleNamespace,
@@ -725,20 +732,6 @@ def _path_dict(
return in_files


- def _auto_scores_path(
- *,
- cfg: SimpleNamespace,
- bids_path_in: BIDSPath,
- ) -> BIDSPath:
- return bids_path_in.copy().update(
- suffix="scores",
- extension=".json",
- root=cfg.deriv_root,
- split=None,
- check=False,
- )
-
-
def _bads_path(
*,
cfg: SimpleNamespace,
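
_empty_room_match_path moves into _import_data.py (and _auto_scores_path is dropped; see _01_data_quality.py below) so that _io.py no longer needs mne_bids or SimpleNamespace. A rough sketch of what the helper produces, assuming a run path created with check=False as the pipeline does, and with hypothetical subject and root values:

```python
from types import SimpleNamespace

from mne_bids import BIDSPath

# Hypothetical config namespace and raw-run path, for illustration only.
cfg = SimpleNamespace(deriv_root="/data/derivatives/mne-bids-pipeline")
run_path = BIDSPath(
    subject="01", task="rest", datatype="meg",
    suffix="meg", extension=".fif", root="/data/bids", check=False,
)

# Same derivation as _empty_room_match_path: a JSON sidecar in the
# derivatives tree recording which empty-room recording was matched.
match_path = run_path.copy().update(
    extension=".json", suffix="emptyroommatch", root=cfg.deriv_root
)
print(match_path.basename)  # sub-01_task-rest_emptyroommatch.json
```
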
mne_bids_pipeline/_io.py (9 changes: 0 additions & 9 deletions)
@@ -1,9 +1,6 @@
"""I/O helpers."""

- from types import SimpleNamespace
-
import json_tricks
- from mne_bids import BIDSPath

from .typing import PathLike

@@ -16,9 +13,3 @@ def _write_json(fname: PathLike, data: dict) -> None:
def _read_json(fname: PathLike) -> dict:
with open(fname, encoding="utf-8") as f:
return json_tricks.load(f)
-
-
- def _empty_room_match_path(run_path: BIDSPath, cfg: SimpleNamespace) -> BIDSPath:
- return run_path.copy().update(
- extension=".json", suffix="emptyroommatch", root=cfg.deriv_root
- )
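
With the helper gone, _io.py is reduced to the JSON read/write functions. For readers unfamiliar with it, json_tricks is used here instead of the standard json module because it round-trips NumPy arrays; a brief sketch with made-up contents:

```python
import json_tricks
import numpy as np

data = {"ch_names": ["MEG0111", "MEG0112"], "scores": np.array([0.1, 0.9])}

# The standard json module would raise TypeError on the ndarray;
# json_tricks serializes it and restores it as an ndarray on load.
text = json_tricks.dumps(data)
restored = json_tricks.loads(text)
print(type(restored["scores"]))  # <class 'numpy.ndarray'>
```
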
mne_bids_pipeline/_run.py (11 changes: 5 additions & 6 deletions)
@@ -67,13 +67,13 @@ def __mne_bids_pipeline_failsafe_wrapper__(*args, **kwargs):
# Find the limit / step where the error occurred
step_dir = pathlib.Path(__file__).parent / "steps"
tb = traceback.extract_tb(e.__traceback__)
- for fi, frame in enumerate(inspect.stack()):
+ for fi, frame in enumerate(tb):
is_step = pathlib.Path(frame.filename).parent.parent == step_dir
del frame
if is_step:
# omit everything before the "step" dir, which will
# generally be stuff from this file and joblib
- tb = tb[-fi:]
+ tb = tb[fi:]
break
tb = "".join(traceback.format_list(tb))

@@ -221,17 +221,16 @@ def wrapper(*args, **kwargs):
for key, (fname, this_hash) in out_files_hashes.items():
fname = pathlib.Path(fname)
if not fname.exists():
- msg = (
- f"Output file missing {str(fname)}, " "will recompute …"
- )
+ msg = f"Output file missing {str(fname)}, will recompute …"
emoji = "🧩"
bad_out_files = True
break
got_hash = hash_(key, fname, kind="out")[1]
if this_hash != got_hash:
msg = (
f"Output file {self.memory_file_method} mismatch for "
f"{str(fname)}, will recompute …"
f"{str(fname)} ({this_hash} != {got_hash}), will "
"recompute …"
)
emoji = "🚫"
bad_out_files = True
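
This hunk touches the recomputation check from the commit title: cached outputs are re-validated by comparing the stored hash of each file against the file currently on disk, and the mismatch message now also reports the two hashes. A minimal sketch of the idea, using hashlib in place of the pipeline's own hash_ helper and memory_file_method:

```python
import hashlib
import pathlib

def file_hash(fname: pathlib.Path) -> str:
    """Stand-in for the pipeline's hash_ helper."""
    return hashlib.sha256(fname.read_bytes()).hexdigest()

def outputs_are_stale(out_files_hashes: dict) -> bool:
    """Return True if any recorded output file is missing or changed on disk.

    out_files_hashes maps a key to a (fname, stored_hash) pair, mirroring
    the structure iterated over in the wrapper above.
    """
    for _key, (fname, stored_hash) in out_files_hashes.items():
        fname = pathlib.Path(fname)
        if not fname.exists():
            print(f"Output file missing {fname}, will recompute …")
            return True
        got_hash = file_hash(fname)
        if stored_hash != got_hash:
            print(
                f"Output file hash mismatch for {fname} "
                f"({stored_hash} != {got_hash}), will recompute …"
            )
            return True
    return False
```
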
mne_bids_pipeline/steps/init/_02_find_empty_room.py (3 changes: 2 additions & 1 deletion)
@@ -13,7 +13,8 @@
get_sessions,
get_subjects,
)
- from ..._io import _empty_room_match_path, _write_json
+ from ..._import_data import _empty_room_match_path
+ from ..._io import _write_json
from ..._logging import gen_log_kwargs, logger
from ..._run import _prep_out_files, _update_for_splits, failsafe_run, save_logs

mne_bids_pipeline/steps/preprocessing/_01_data_quality.py (12 changes: 7 additions & 5 deletions)
@@ -17,7 +17,6 @@
get_subjects,
)
from ..._import_data import (
- _auto_scores_path,
_bads_path,
_get_mf_reference_run_path,
_get_run_rest_noise_path,
@@ -159,7 +158,7 @@ def _find_bads_maxwell(
elif cfg.find_noisy_channels_meg and not cfg.find_flat_channels_meg:
msg = "Finding noisy channels using Maxwell filtering."
else:
msg = "Finding flat channels and noisy channels using " "Maxwell filtering."
msg = "Finding flat channels and noisy channels using Maxwell filtering."
logger.info(**gen_log_kwargs(message=msg))

if run is None and task == "noise":
@@ -232,9 +231,12 @@ def _find_bads_maxwell(
logger.info(**gen_log_kwargs(message=msg))

if cfg.find_noisy_channels_meg:
out_files["auto_scores"] = _auto_scores_path(
cfg=cfg,
bids_path_in=bids_path_in,
out_files["auto_scores"] = bids_path_in.copy().update(
suffix="scores",
extension=".json",
root=cfg.deriv_root,
split=None,
check=False,
)
if not out_files["auto_scores"].fpath.parent.exists():
out_files["auto_scores"].fpath.parent.mkdir(parents=True)
mne_bids_pipeline/steps/sensor/_05_decoding_csp.py (14 changes: 4 additions & 10 deletions)
@@ -79,21 +79,15 @@ def prepare_epochs_and_y(
*, epochs: mne.BaseEpochs, contrast: tuple[str, str], cfg, fmin: float, fmax: float
) -> tuple[mne.BaseEpochs, np.ndarray]:
"""Band-pass between, sub-select the desired epochs, and prepare y."""
- epochs_filt = epochs.copy().pick(["meg", "eeg"])
-
- # We only take mag to speed up computation
- # because the information is redundant between grad and mag
- if cfg.datatype == "meg" and cfg.use_maxwell_filter:
- epochs_filt.pick("mag")
-
# filtering out the conditions we are not interested in, to ensure here we
# have a valid partition between the condition of the contrast.
- #
+
# XXX Hack for handling epochs selection via metadata
+ # This also makes a copy
if contrast[0].startswith("event_name.isin"):
epochs_filt = epochs_filt[f"{contrast[0]} or {contrast[1]}"]
epochs_filt = epochs[f"{contrast[0]} or {contrast[1]}"]
else:
- epochs_filt = epochs_filt[contrast]
+ epochs_filt = epochs[contrast]

# Filtering is costly, so do it last, after the selection of the channels
# and epochs. We know that often the filter will be longer than the signal,
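
The hunk above removes the channel picking from prepare_epochs_and_y (which appears to be the Maxwell-filter/CSP fix noted in the changelog) and indexes the original epochs object directly; as the new comment says, that indexing already returns a copy. For readers unfamiliar with the metadata-query selection it relies on, a toy sketch with made-up channels and event names:

```python
import mne
import numpy as np
import pandas as pd

# Toy epochs: 4 epochs, 2 magnetometers, 100 samples at 100 Hz (all made up).
rng = np.random.default_rng(0)
data = rng.standard_normal((4, 2, 100)) * 1e-12
info = mne.create_info(["MEG0111", "MEG0121"], sfreq=100.0, ch_types="mag")
metadata = pd.DataFrame(
    {"event_name": ["incoherent", "coherent", "incoherent", "coherent"]}
)
epochs = mne.EpochsArray(data, info, metadata=metadata, verbose="error")

# Selection by metadata query (pandas query syntax): joining both halves of
# the contrast with "or" keeps exactly the epochs belonging to the contrast,
# and the indexing returns a copy of the selected epochs.
contrast = ("event_name.isin(['incoherent'])", "event_name.isin(['coherent'])")
epochs_filt = epochs[f"{contrast[0]} or {contrast[1]}"]
print(len(epochs_filt))  # 4
```
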
mne_bids_pipeline/steps/sensor/_06_make_cov.py (2 changes: 1 addition & 1 deletion)
@@ -184,7 +184,7 @@ def retrieve_custom_cov(
check=False,
)

msg = "Retrieving noise covariance matrix from custom user-supplied " "function"
msg = "Retrieving noise covariance matrix from custom user-supplied function"
logger.info(**gen_log_kwargs(message=msg))
msg = f'Output: {out_files["cov"].basename}'
logger.info(**gen_log_kwargs(message=msg))
mne_bids_pipeline/steps/source/_02_make_bem_solution.py (2 changes: 1 addition & 1 deletion)
@@ -99,7 +99,7 @@ def main(*, config) -> None:
return

if config.use_template_mri is not None:
msg = "Skipping, BEM solution computation not needed for " "MRI template …"
msg = "Skipping, BEM solution computation not needed for MRI template …"
logger.info(**gen_log_kwargs(message=msg, emoji="skip"))
if config.use_template_mri == "fsaverage":
# Ensure we have the BEM
mne_bids_pipeline/tests/configs/config_ds003392.py (1 change: 1 addition & 0 deletions)
@@ -37,6 +37,7 @@
decoding_time_generalization = True
decoding_time_generalization_decim = 4
contrasts = [("incoherent", "coherent")]
+ # decoding_csp = True

# Noise estimation
noise_cov = "emptyroom"
