diff --git a/.lgtm.yml b/.lgtm.yml
deleted file mode 100644
index 4a43aa25c57..00000000000
--- a/.lgtm.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-extraction:
-  javascript:
-    index:
-      filters:
-        - exclude: "**/*.js"
-queries:
-  - exclude: py/missing-equals
-  - exclude: py/import-and-import-from
diff --git a/doc/changes/devel.rst b/doc/changes/devel.rst
index 2a43097190f..d598e96e104 100644
--- a/doc/changes/devel.rst
+++ b/doc/changes/devel.rst
@@ -38,6 +38,7 @@ Enhancements
 - Add support for writing forward solutions to HDF5 and convenience function :meth:`mne.Forward.save` (:gh:`12036` by `Eric Larson`_)
 - Refactored internals of :func:`mne.read_annotations` (:gh:`11964` by `Paul Roujansky`_)
 - Add support for drawing MEG sensors in :ref:`mne coreg` (:gh:`12098` by `Eric Larson`_)
+- Bad channels are now colored gray in addition to being dashed when spatial colors are used in :func:`mne.viz.plot_evoked` and related functions (:gh:`12142` by `Eric Larson`_)
 - By default MNE-Python creates matplotlib figures with ``layout='constrained'`` rather than the default ``layout='tight'`` (:gh:`12050`, :gh:`12103` by `Mathieu Scheltienne`_ and `Eric Larson`_)
 - Enhance :func:`~mne.viz.plot_evoked_field` with a GUI that has controls for time, colormap, and contour lines (:gh:`11942` by `Marijn van Vliet`_)
 - Add :class:`mne.viz.ui_events.UIEvent` linking for interactive colorbars, allowing users to link figures and change the colormap and limits interactively. This supports :func:`~mne.viz.plot_evoked_topomap`, :func:`~mne.viz.plot_ica_components`, :func:`~mne.viz.plot_tfr_topomap`, :func:`~mne.viz.plot_projs_topomap`, :meth:`~mne.Evoked.plot_image`, and :meth:`~mne.Epochs.plot_image` (:gh:`12057` by `Santeri Ruuskanen`_)
@@ -72,6 +73,7 @@ Bugs
 - Fix :func:`~mne.viz.plot_volume_source_estimates` with :class:`~mne.VolSourceEstimate` which include a list of vertices (:gh:`12025` by `Mathieu Scheltienne`_)
 - Add support for non-ASCII characters in Annotations, Evoked comments, etc when saving to FIFF format (:gh:`12080` by `Daniel McCloy`_)
 - Correctly handle passing ``"eyegaze"`` or ``"pupil"`` to :meth:`mne.io.Raw.pick` (:gh:`12019` by `Scott Huberty`_)
+- Fix bug with :func:`mne.time_frequency.Spectrum.plot` and related functions where bad channels were not marked (:gh:`12142` by `Eric Larson`_)
 - Fix bug with :func:`~mne.viz.plot_raw` where changing ``MNE_BROWSER_BACKEND`` via :func:`~mne.set_config` would have no effect within a Python session (:gh:`12078` by `Santeri Ruuskanen`_)
 - Improve handling of ``method`` argument in the channel interpolation function to support :class:`str` and raise helpful error messages (:gh:`12113` by `Mathieu Scheltienne`_)
 - Fix combination of ``DIN`` event channels into a single synthetic trigger channel ``STI 014`` by the MFF reader of :func:`mne.io.read_raw_egi` (:gh:`12122` by `Mathieu Scheltienne`_)
diff --git a/doc/development/governance.rst b/doc/development/governance.rst
index f7d81ee8a85..f5b70e39485 100644
--- a/doc/development/governance.rst
+++ b/doc/development/governance.rst
@@ -69,7 +69,7 @@ BDFL
 ----
 
 The Project will have a BDFL (Benevolent Dictator for Life), who is currently
-Alexandre Gramfort. As Dictator, the BDFL has the authority to make all final
+Daniel McCloy. As Dictator, the BDFL has the authority to make all final
 decisions for The Project. As Benevolent, the BDFL, in practice, chooses to
 defer that authority to the consensus of the community discussion channels and
 the Steering Council (see below). It is expected, and in the past has been the
diff --git a/doc/overview/people.rst b/doc/overview/people.rst
index 14c20724095..3647aae978a 100644
--- a/doc/overview/people.rst
+++ b/doc/overview/people.rst
@@ -22,8 +22,6 @@ Steering Council
 * `Daniel McCloy`_
 * `Denis Engemann`_
 * `Eric Larson`_
-* `Guillaume Favelier`_
-* `Luke Bloy`_
 * `Mainak Jas`_
 * `Marijn van Vliet`_
 * `Mathieu Scheltienne`_
diff --git a/examples/preprocessing/eeg_bridging.py b/examples/preprocessing/eeg_bridging.py
index 7eadb7239d2..6d2c1aec165 100644
--- a/examples/preprocessing/eeg_bridging.py
+++ b/examples/preprocessing/eeg_bridging.py
@@ -10,7 +10,7 @@
 electrode connects with the gel conducting signal from another electrode
 "bridging" the two signals. This is undesirable because the signals from the
 two (or more) electrodes are not as independent as they would otherwise be;
-they are very similar to each other introducting additional
+they are very similar to each other introducing additional
 spatial smearing. An algorithm has been developed to detect electrode bridging
 :footcite:`TenkeKayser2001`, which has been implemented in EEGLAB
 :footcite:`DelormeMakeig2004`. Unfortunately, there is not a lot to be
diff --git a/mne/conftest.py b/mne/conftest.py
index 1357a6a7c4a..33396621890 100644
--- a/mne/conftest.py
+++ b/mne/conftest.py
@@ -92,8 +92,6 @@ def pytest_configure(config):
     # Fixtures
     for fixture in (
         "matplotlib_config",
-        "close_all",
-        "check_verbose",
         "qt_config",
         "protect_config",
     ):
@@ -268,10 +266,7 @@ def matplotlib_config():
     # functionality)
     plt.ioff()
     plt.rcParams["figure.dpi"] = 100
-    try:
-        plt.rcParams["figure.raise_window"] = False
-    except KeyError:  # MPL < 3.3
-        pass
+    plt.rcParams["figure.raise_window"] = False
 
     # Make sure that we always reraise exceptions in handlers
     orig = cbook.CallbackRegistry
diff --git a/mne/epochs.py b/mne/epochs.py
index 14c2a886a6b..510161f99bc 100644
--- a/mne/epochs.py
+++ b/mne/epochs.py
@@ -4114,12 +4114,13 @@ def _concatenate_epochs(
     event_id = deepcopy(out.event_id)
     selection = out.selection
     # offset is the last epoch + tmax + 10 second
-    shift = int((10 + tmax) * out.info["sfreq"])
+    shift = np.int64((10 + tmax) * out.info["sfreq"])
     # Allow reading empty epochs (ToDo: Maybe not anymore in the future)
     if out._allow_empty:
         events_offset = 0
     else:
         events_offset = int(np.max(events[0][:, 0])) + shift
+    events_offset = np.int64(events_offset)
     events_overflow = False
     warned = False
     for ii, epochs in enumerate(epochs_list[1:], 1):
diff --git a/mne/io/bti/read.py b/mne/io/bti/read.py
index 4af53112ae8..d05e2d9d941 100644
--- a/mne/io/bti/read.py
+++ b/mne/io/bti/read.py
@@ -48,12 +48,12 @@ def read_int8(fid):
 
 def read_uint16(fid):
     """Read unsigned 16bit integer from bti file."""
-    return _unpack_simple(fid, ">u2", np.uint16)
+    return _unpack_simple(fid, ">u2", np.uint32)
 
 
 def read_int16(fid):
     """Read 16bit integer from bti file."""
-    return _unpack_simple(fid, ">i2", np.int16)
+    return _unpack_simple(fid, ">i2", np.int32)
 
 
 def read_uint32(fid):
@@ -88,7 +88,13 @@ def read_double(fid):
 
 def read_int16_matrix(fid, rows, cols):
     """Read 16bit integer matrix from bti file."""
-    return _unpack_matrix(fid, rows, cols, dtype=">i2", out_dtype=np.int16)
+    return _unpack_matrix(
+        fid,
+        rows,
+        cols,
+        dtype=">i2",
+        out_dtype=np.int32,
+    )
 
 
 def read_float_matrix(fid, rows, cols):
diff --git a/mne/io/ctf/res4.py b/mne/io/ctf/res4.py
index b5c0f884c99..2ea2f619bcc 100644
--- a/mne/io/ctf/res4.py
+++ b/mne/io/ctf/res4.py
@@ -43,7 +43,7 @@ def _read_ustring(fid, n_bytes):
 
 
 def _read_int2(fid):
     """Read int from short."""
-    return np.fromfile(fid, ">i2", 1)[0]
+    return _auto_cast(np.fromfile(fid, ">i2", 1)[0])
 
 
 def _read_int(fid):
@@ -208,6 +208,9 @@ def _read_res4(dsdir):
             coil["area"] *= 1e-4
         # convert to dict
         chs = [dict(zip(chs.dtype.names, x)) for x in chs]
+        for ch in chs:
+            for key, val in ch.items():
+                ch[key] = _auto_cast(val)
         res["chs"] = chs
         for k in range(res["nchan"]):
             res["chs"][k]["ch_name"] = res["ch_names"][k]
@@ -216,3 +219,15 @@
         _read_comp_coeff(fid, res)
     logger.info(" res4 data read.")
     return res
+
+
+def _auto_cast(x):
+    # Upcast scalars
+    if isinstance(x, np.ScalarType):
+        if x.dtype.kind == "i":
+            if x.dtype != np.int64:
+                x = x.astype(np.int64)
+        elif x.dtype.kind == "f":
+            if x.dtype != np.float64:
+                x = x.astype(np.float64)
+    return x
diff --git a/mne/io/edf/edf.py b/mne/io/edf/edf.py
index 8831989860c..e507a651676 100644
--- a/mne/io/edf/edf.py
+++ b/mne/io/edf/edf.py
@@ -1106,7 +1106,7 @@ def _read_gdf_header(fname, exclude, include=None):
                 "Header information is incorrect for record length. "
                 "Default record length set to 1."
             )
-        nchan = np.fromfile(fid, UINT32, 1)[0]
+        nchan = int(np.fromfile(fid, UINT32, 1)[0])
         channels = list(range(nchan))
         ch_names = [_edf_str(fid.read(16)).strip() for ch in channels]
         exclude = _find_exclude_idx(ch_names, exclude, include)
@@ -1177,7 +1177,7 @@ def _read_gdf_header(fname, exclude, include=None):
         fid.seek(etp)
         etmode = np.fromfile(fid, UINT8, 1)[0]
         if etmode in (1, 3):
-            sr = np.fromfile(fid, UINT8, 3)
+            sr = np.fromfile(fid, UINT8, 3).astype(np.uint32)
            event_sr = sr[0]
            for i in range(1, len(sr)):
                event_sr = event_sr + sr[i] * 2 ** (i * 8)
@@ -1297,7 +1297,7 @@ def _read_gdf_header(fname, exclude, include=None):
                 "Header information is incorrect for record length. "
                 "Default record length set to 1."
             )
-        nchan = np.fromfile(fid, UINT16, 1)[0]
+        nchan = int(np.fromfile(fid, UINT16, 1)[0])
         fid.seek(2, 1)  # 2bytes reserved
 
         # Channels (variable header)
diff --git a/mne/io/egi/egi.py b/mne/io/egi/egi.py
index d6ba4b884f6..0bd669837a3 100644
--- a/mne/io/egi/egi.py
+++ b/mne/io/egi/egi.py
@@ -29,7 +29,7 @@ def _read_header(fid):
     )
 
     def my_fread(*x, **y):
-        return np.fromfile(*x, **y)[0]
+        return int(np.fromfile(*x, **y)[0])
 
     info = dict(
         version=version,
@@ -57,8 +57,8 @@ def my_fread(*x, **y):
         dict(
             n_categories=0,
             n_segments=1,
-            n_samples=np.fromfile(fid, ">i4", 1)[0],
-            n_events=np.fromfile(fid, ">i2", 1)[0],
+            n_samples=int(np.fromfile(fid, ">i4", 1)[0]),
+            n_events=int(np.fromfile(fid, ">i2", 1)[0]),
             event_codes=[],
             category_names=[],
             category_lengths=[],
diff --git a/mne/io/egi/egimff.py b/mne/io/egi/egimff.py
index fee53f2e589..1120324c58a 100644
--- a/mne/io/egi/egimff.py
+++ b/mne/io/egi/egimff.py
@@ -79,7 +79,7 @@ def _read_mff_header(filepath):
     # by what we need to (e.g., a sample rate of 500 means we can multiply
     # by 1 and divide by 2 rather than multiplying by 500 and dividing by
     # 1000)
-    numerator = signal_blocks["sfreq"]
+    numerator = int(signal_blocks["sfreq"])
     denominator = 1000
     this_gcd = math.gcd(numerator, denominator)
     numerator = numerator // this_gcd
diff --git a/mne/io/nihon/nihon.py b/mne/io/nihon/nihon.py
index ab1e476fc5d..b6b7e3179ff 100644
--- a/mne/io/nihon/nihon.py
+++ b/mne/io/nihon/nihon.py
@@ -207,7 +207,7 @@ def _read_nihon_header(fname):
             t_datablock["address"] = t_data_address
 
             fid.seek(t_data_address + 0x26)
-            t_n_channels = np.fromfile(fid, np.uint8, 1)[0]
+            t_n_channels = np.fromfile(fid, np.uint8, 1)[0].astype(np.int64)
             t_datablock["n_channels"] = t_n_channels
 
             t_channels = []
@@ -219,14 +219,14 @@ def _read_nihon_header(fname):
             t_datablock["channels"] = t_channels
 
             fid.seek(t_data_address + 0x1C)
-            t_record_duration = np.fromfile(fid, np.uint32, 1)[0]
+            t_record_duration = np.fromfile(fid, np.uint32, 1)[0].astype(np.int64)
             t_datablock["duration"] = t_record_duration
 
             fid.seek(t_data_address + 0x1A)
             sfreq = np.fromfile(fid, np.uint16, 1)[0] & 0x3FFF
-            t_datablock["sfreq"] = sfreq
+            t_datablock["sfreq"] = sfreq.astype(np.int64)
 
-            t_datablock["n_samples"] = int(t_record_duration * sfreq / 10)
+            t_datablock["n_samples"] = np.int64(t_record_duration * sfreq // 10)
             t_controlblock["datablocks"].append(t_datablock)
         controlblocks.append(t_controlblock)
     header["controlblocks"] = controlblocks
diff --git a/mne/io/nsx/nsx.py b/mne/io/nsx/nsx.py
index 5d3b2e7a659..a74bcd05f30 100644
--- a/mne/io/nsx/nsx.py
+++ b/mne/io/nsx/nsx.py
@@ -365,7 +365,7 @@ def _get_hdr_info(fname, stim_channel=True, eog=None, misc=None):
 
     stim_channel_idxs, _ = _check_stim_channel(stim_channel, ch_names)
 
-    nchan = nsx_info["channel_count"]
+    nchan = int(nsx_info["channel_count"])
     logger.info("Setting channel info structure...")
     chs = list()
     pick_mask = np.ones(len(ch_names))
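The comment quoted in the ``egimff.py`` hunk above reasons that reducing the ``sfreq``/1000 fraction by its GCD lets sample counts be rescaled with small integers (multiply by 1 and divide by 2 for a 500 Hz rate, instead of multiplying by 500 and dividing by 1000). A minimal sketch of that arithmetic, using the 500 Hz figure from the comment as an assumed example::

    import math

    sfreq = 500  # example sampling rate from the comment above (Hz)
    numerator, denominator = int(sfreq), 1000
    g = math.gcd(numerator, denominator)  # 500
    numerator //= g    # 1
    denominator //= g  # 2
    # 1000 samples at 500 Hz span 1000 * 1 // 2 = 500 ms of data
    assert (1000 * numerator) // denominator == 500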
diff --git a/mne/preprocessing/realign.py b/mne/preprocessing/realign.py
index 396e4ba33e6..09442ca9b1c 100644
--- a/mne/preprocessing/realign.py
+++ b/mne/preprocessing/realign.py
@@ -28,8 +28,9 @@ def realign_raw(raw, other, t_raw, t_other, verbose=None):
         The second raw instance. It will be resampled to match ``raw``.
     t_raw : array-like, shape (n_events,)
         The times of shared events in ``raw`` relative to ``raw.times[0]`` (0).
-        Typically these could be events on some TTL channel like
-        ``find_events(raw)[:, 0] - raw.first_samp``.
+        Typically these could be events on some TTL channel such as::
+
+            find_events(raw)[:, 0] / raw.info["sfreq"] - raw.first_time
     t_other : array-like, shape (n_events,)
         The times of shared events in ``other`` relative to ``other.times[0]``.
     %(verbose)s
@@ -92,11 +93,11 @@ def realign_raw(raw, other, t_raw, t_other, verbose=None):
         logger.info(f"Cropping {zero_ord:0.3f} s from the start of raw")
         raw.crop(zero_ord, None)
         t_raw -= zero_ord
-    else:  # need to crop start of other to match raw
-        t_crop = zero_ord / first_ord
+    elif zero_ord < 0:  # need to crop start of other to match raw
+        t_crop = -zero_ord / first_ord
         logger.info(f"Cropping {t_crop:0.3f} s from the start of other")
-        other.crop(-t_crop, None)
-        t_other += t_crop
+        other.crop(t_crop, None)
+        t_other -= t_crop
 
     # 3. Resample data using the first-order term
     nan_ch_names = [
diff --git a/mne/report/tests/test_report.py b/mne/report/tests/test_report.py
index 7577774e313..dd7ebfc34c8 100644
--- a/mne/report/tests/test_report.py
+++ b/mne/report/tests/test_report.py
@@ -877,6 +877,8 @@ def test_survive_pickle(tmp_path):
 def test_manual_report_2d(tmp_path, invisible_fig):
     """Simulate user manually creating report by adding one file at a time."""
     pytest.importorskip("sklearn")
+    pytest.importorskip("pandas")
+
     from sklearn.exceptions import ConvergenceWarning
 
     r = Report(title="My Report")
diff --git a/mne/surface.py b/mne/surface.py
index b042361305a..d0aac3abe0d 100644
--- a/mne/surface.py
+++ b/mne/surface.py
@@ -772,7 +772,7 @@ def _call_old(self, rr, n_jobs):
 
 def _fread3(fobj):
     """Read 3 bytes and adjust."""
-    b1, b2, b3 = np.fromfile(fobj, ">u1", 3)
+    b1, b2, b3 = np.fromfile(fobj, ">u1", 3).astype(np.int64)
     return (b1 << 16) + (b2 << 8) + b3
diff --git a/mne/time_frequency/tests/test_spectrum.py b/mne/time_frequency/tests/test_spectrum.py
index 7aaa5b40ea6..96fe89a2e6d 100644
--- a/mne/time_frequency/tests/test_spectrum.py
+++ b/mne/time_frequency/tests/test_spectrum.py
@@ -1,9 +1,9 @@
 from contextlib import nullcontext
 from functools import partial
 
-import matplotlib.pyplot as plt
 import numpy as np
 import pytest
+from matplotlib.colors import same_color
 from numpy.testing import assert_allclose, assert_array_equal
 
 from mne import Annotations, create_info, make_fixed_length_epochs
@@ -449,8 +449,16 @@ def test_plot_spectrum(kind, array, request):
         data, freqs = spectrum.get_data(return_freqs=True)
         Klass = SpectrumArray if kind == "raw" else EpochsSpectrumArray
         spectrum = Klass(data=data, info=spectrum.info, freqs=freqs)
+    spectrum.info["bads"] = spectrum.ch_names[:1]  # one grad channel
     spectrum.plot(average=True, amplitude=True, spatial_colors=True)
-    spectrum.plot(average=False, amplitude=False, spatial_colors=False)
+    spectrum.plot(average=True, amplitude=False, spatial_colors=False)
+    n_grad = sum(ch_type == "grad" for ch_type in spectrum.get_channel_types())
+    for amp, sc in ((True, True), (False, False)):
+        fig = spectrum.plot(average=False, amplitude=amp, spatial_colors=sc, exclude=())
+        lines = fig.axes[0].lines[2:]  # grads, ignore two vlines
+        assert len(lines) == n_grad
+        bad_color = "0.5" if sc else "r"
+        n_bad = sum(same_color(line.get_color(), bad_color) for line in lines)
+        assert n_bad == 1
     spectrum.plot_topo()
     spectrum.plot_topomap()
-    plt.close("all")
diff --git a/mne/utils/check.py b/mne/utils/check.py
index a26495106e4..2faa364b779 100644
--- a/mne/utils/check.py
+++ b/mne/utils/check.py
@@ -8,7 +8,6 @@
 import os
 import re
 from builtins import input  # no-op here but facilitates testing
-from collections.abc import Sequence
 from difflib import get_close_matches
 from importlib import import_module
 from importlib.metadata import version
@@ -542,7 +541,7 @@ def __instancecheck__(cls, other):
     "path-like": path_like,
     "int-like": (int_like,),
     "callable": (_Callable(),),
-    "array-like": (Sequence, np.ndarray),
+    "array-like": (list, tuple, set, np.ndarray),
 }
diff --git a/mne/utils/config.py b/mne/utils/config.py
index aa4e0b9dd90..37aca22bd03 100644
--- a/mne/utils/config.py
+++ b/mne/utils/config.py
@@ -205,6 +205,7 @@ def set_memmap_min_size(memmap_min_size):
     "MNE_DATASETS_FNIRS",  # mne-nirs
     "MNE_NIRS",  # mne-nirs
     "MNE_KIT2FIFF",  # mne-kit-gui
+    "MNE_ICALABEL",  # mne-icalabel
 )
diff --git a/mne/utils/tests/test_check.py b/mne/utils/tests/test_check.py
index 2856d9ea37b..2cda3188cd8 100644
--- a/mne/utils/tests/test_check.py
+++ b/mne/utils/tests/test_check.py
@@ -212,6 +212,11 @@ def test_validate_type():
     _validate_type(1, "int-like")
     with pytest.raises(TypeError, match="int-like"):
         _validate_type(False, "int-like")
+    _validate_type([1, 2, 3], "array-like")
+    _validate_type((1, 2, 3), "array-like")
+    _validate_type({1, 2, 3}, "array-like")
+    with pytest.raises(TypeError, match="array-like"):
+        _validate_type("123", "array-like")  # a string is not array-like
 
 
 def test_check_range():
diff --git a/mne/viz/evoked.py b/mne/viz/evoked.py
index 6abcbcc0d1d..1c6712a6bec 100644
--- a/mne/viz/evoked.py
+++ b/mne/viz/evoked.py
@@ -679,15 +679,17 @@ def _plot_lines(
                     _handle_spatial_colors(
                         colors, info, idx, this_type, psd, ax, sphere
                     )
+                    bad_color = (0.5, 0.5, 0.5)
                 else:
                     if isinstance(_spat_col, (tuple, str)):
                         col = [_spat_col]
                     else:
                         col = ["k"]
+                    bad_color = "r"
                     colors = col * len(idx)
-                    for i in bad_ch_idx:
-                        if i in idx:
-                            colors[idx.index(i)] = "r"
+                for i in bad_ch_idx:
+                    if i in idx:
+                        colors[idx.index(i)] = bad_color
 
                 if zorder == "std":
                     # find the channels with the least activity
diff --git a/mne/viz/tests/test_evoked.py b/mne/viz/tests/test_evoked.py
index c089b064d4a..51b83f222fa 100644
--- a/mne/viz/tests/test_evoked.py
+++ b/mne/viz/tests/test_evoked.py
@@ -16,6 +16,7 @@
 import pytest
 from matplotlib import gridspec
 from matplotlib.collections import PolyCollection
+from matplotlib.colors import same_color
 from mpl_toolkits.axes_grid1.parasite_axes import HostAxes  # spatial_colors
 from numpy.testing import assert_allclose
 
@@ -134,6 +135,12 @@ def test_plot_evoked():
     amplitudes = _get_amplitudes(fig)
     assert len(amplitudes) == len(default_picks)
     assert evoked.proj is False
+    assert evoked.info["bads"] == ["MEG 2641", "EEG 004"]
+    eeg_lines = fig.axes[2].lines
+    n_eeg = sum(ch_type == "eeg" for ch_type in evoked.get_channel_types())
+    assert len(eeg_lines) == n_eeg == 4
+    n_bad = sum(same_color(line.get_color(), "0.5") for line in eeg_lines)
+    assert n_bad == 1
     # Test a click
     ax = fig.get_axes()[0]
     line = ax.lines[0]
diff --git a/mne/viz/utils.py b/mne/viz/utils.py
index c81bdf354c2..e9c36281bae 100644
--- a/mne/viz/utils.py
+++ b/mne/viz/utils.py
@@ -2506,6 +2506,7 @@ def _plot_psd(
     if not average:
         picks = np.concatenate(picks_list)
         info = pick_info(inst.info, sel=picks, copy=True)
+        bad_ch_idx = [info["ch_names"].index(ch) for ch in info["bads"]]
         types = np.array(info.get_channel_types())
         ch_types_used = list()
         for this_type in _VALID_CHANNEL_TYPES:
@@ -2538,7 +2539,7 @@ def _plot_psd(
             xlim=(freqs[0], freqs[-1]),
             ylim=None,
             times=freqs,
-            bad_ch_idx=[],
+            bad_ch_idx=bad_ch_idx,
             titles=titles,
             ch_types_used=ch_types_used,
             selectable=True,
diff --git a/tools/azure_dependencies.sh b/tools/azure_dependencies.sh
index 072665d9c3c..5cf455cf4f4 100755
--- a/tools/azure_dependencies.sh
+++ b/tools/azure_dependencies.sh
@@ -9,7 +9,8 @@ elif [ "${TEST_MODE}" == "pip-pre" ]; then
   python -m pip install $STD_ARGS pip setuptools wheel packaging setuptools_scm
   python -m pip install $STD_ARGS --only-binary ":all:" --extra-index-url "https://www.riverbankcomputing.com/pypi/simple" PyQt6 PyQt6-sip PyQt6-Qt6
   echo "Numpy etc."
-  python -m pip install $STD_ARGS --only-binary ":all:" --extra-index-url "https://pypi.anaconda.org/scientific-python-nightly-wheels/simple" "numpy>=2.0.0.dev0" "scipy>=1.12.0.dev0" statsmodels pandas scikit-learn matplotlib
+  # As of 2023/10/25 no pandas (or statsmodels) because they pin to NumPy < 2
+  python -m pip install $STD_ARGS --only-binary ":all:" --extra-index-url "https://pypi.anaconda.org/scientific-python-nightly-wheels/simple" "numpy>=2.0.0.dev0" "scipy>=1.12.0.dev0" scikit-learn matplotlib
   echo "dipy"
   python -m pip install $STD_ARGS --only-binary ":all:" --extra-index-url "https://pypi.anaconda.org/scipy-wheels-nightly/simple" dipy
   echo "h5py"
diff --git a/tools/github_actions_dependencies.sh b/tools/github_actions_dependencies.sh
index d08e5727e77..65a64e05ae5 100755
--- a/tools/github_actions_dependencies.sh
+++ b/tools/github_actions_dependencies.sh
@@ -1,5 +1,7 @@
 #!/bin/bash -ef
 
+set -o pipefail
+
 STD_ARGS="--progress-bar off --upgrade"
 if [ ! -z "$CONDA_ENV" ]; then
   echo "Uninstalling MNE for CONDA_ENV=${CONDA_ENV}"
@@ -18,7 +20,8 @@ else
   echo "PyQt6"
   pip install $STD_ARGS --only-binary ":all:" --default-timeout=60 --extra-index-url https://www.riverbankcomputing.com/pypi/simple PyQt6
   echo "NumPy/SciPy/pandas etc."
-  pip install $STD_ARGS --only-binary ":all:" --default-timeout=60 --extra-index-url "https://pypi.anaconda.org/scientific-python-nightly-wheels/simple" "numpy>=2.0.0.dev0" scipy scikit-learn pandas matplotlib pillow statsmodels
+  # As of 2023/10/25 no pandas (or statsmodels, nilearn) because they pin to NumPy < 2
+  pip install $STD_ARGS --only-binary ":all:" --default-timeout=60 --extra-index-url "https://pypi.anaconda.org/scientific-python-nightly-wheels/simple" "numpy>=2.0.0.dev0" scipy scikit-learn matplotlib pillow
   echo "dipy"
   pip install $STD_ARGS --only-binary ":all:" --default-timeout=60 --extra-index-url "https://pypi.anaconda.org/scipy-wheels-nightly/simple" dipy
   echo "H5py"
@@ -27,7 +30,8 @@ else
   pip install $STD_ARGS --only-binary ":all:" --extra-index-url "https://test.pypi.org/simple" openmeeg
   # No Numba because it forces an old NumPy version
   echo "nilearn and openmeeg"
-  pip install $STD_ARGS git+https://github.com/nilearn/nilearn
+  # pip install $STD_ARGS git+https://github.com/nilearn/nilearn
+  pip install $STD_ARGS openmeeg
   echo "VTK"
   pip install $STD_ARGS --only-binary ":all:" --extra-index-url "https://wheels.vtk.org" vtk
   python -c "import vtk"
@@ -45,9 +49,12 @@ else
   pip install $STD_ARGS git+https://github.com/joblib/joblib@master
   echo "EDFlib-Python"
   pip install $STD_ARGS git+https://gitlab.com/Teuniz/EDFlib-Python@master
+  # Until Pandas is fixed, make sure we didn't install it
+  ! python -c "import pandas"
 fi
 echo ""
+
 # for compat_minimal and compat_old, we don't want to --upgrade
 if [ ! -z "$CONDA_DEPENDENCIES" ]; then
-z "$CONDA_DEPENDENCIES" ]; then echo "Installing dependencies for conda" diff --git a/tutorials/intro/70_report.py b/tutorials/intro/70_report.py index a7d3b02b2b3..951c82a5e6a 100644 --- a/tutorials/intro/70_report.py +++ b/tutorials/intro/70_report.py @@ -17,8 +17,7 @@ HTML pages it generates are self-contained and do not require a running Python environment. However, it is less flexible as you can't change code and re-run something directly within the browser. This tutorial covers the basics of -building a report. As usual, we will start by importing the modules and data we -need: +building a report. As usual, we will start by importing the modules and data we need: """ # %% diff --git a/tutorials/preprocessing/60_maxwell_filtering_sss.py b/tutorials/preprocessing/60_maxwell_filtering_sss.py index c1453528975..f07caa46257 100644 --- a/tutorials/preprocessing/60_maxwell_filtering_sss.py +++ b/tutorials/preprocessing/60_maxwell_filtering_sss.py @@ -326,7 +326,7 @@ # %% # Head position data can be computed using # :func:`mne.chpi.compute_chpi_locs` and :func:`mne.chpi.compute_head_pos`, -# or loaded with the:func:`mne.chpi.read_head_pos` function. The +# or loaded with the :func:`mne.chpi.read_head_pos` function. The # :ref:`example data ` doesn't include cHPI, so here we'll # load a :file:`.pos` file used for testing, just to demonstrate: