Skip to content

Commit

Permalink
Merge pull request #164 from netneurolab/update_register_cmap
Browse files Browse the repository at this point in the history
[FIX] Fix import error due to deprecated matplotlib function
  • Loading branch information
eric2302 authored May 22, 2024
2 parents 398ec94 + 6b65003 commit 4ff0dbf
Show file tree
Hide file tree
Showing 11 changed files with 60 additions and 40 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ jobs:
python -m pip install --upgrade pip
python -m pip install ruff
- name: Run style checks
run: ruff .
run: ruff check .
codespell:
runs-on: ubuntu-latest
steps:
Expand Down
16 changes: 8 additions & 8 deletions neuromaps/datasets/atlases.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ def _fetch_atlas(atlas, density, keys, url=None, data_dir=None, verbose=1):
return _bunch_outputs(keys, data, atlas != 'MNI152')


def fetch_civet(density='41k', url=None, data_dir=None, verbose=1): # noqa: D103
def fetch_civet(density='41k', url=None, data_dir=None, verbose=1): # noqa: D103
keys = ['white', 'midthickness', 'inflated', 'veryinflated', 'sphere']
return _fetch_atlas(
'civet', density, keys, url=url, data_dir=data_dir, verbose=verbose
Expand All @@ -151,7 +151,7 @@ def fetch_civet(density='41k', url=None, data_dir=None, verbose=1): # noqa: D103
""".format(**_atlas_docs, densities="', '".join(DENSITIES['civet']))


def fetch_fsaverage(density='41k', url=None, data_dir=None, verbose=1): # noqa: D103
def fetch_fsaverage(density='41k', url=None, data_dir=None, verbose=1): # noqa: D103
keys = ['white', 'pial', 'inflated', 'sphere']
return _fetch_atlas(
'fsaverage', density, keys, url=url, data_dir=data_dir, verbose=verbose
Expand All @@ -175,7 +175,7 @@ def fetch_fsaverage(density='41k', url=None, data_dir=None, verbose=1): # noqa:
""".format(**_atlas_docs, densities="', '".join(DENSITIES['fsaverage']))


def fetch_fslr(density='32k', url=None, data_dir=None, verbose=1): # noqa: D103
def fetch_fslr(density='32k', url=None, data_dir=None, verbose=1): # noqa: D103
keys = ['midthickness', 'inflated', 'veryinflated', 'sphere']
if density in ('4k', '8k'):
keys.remove('veryinflated')
Expand All @@ -201,7 +201,7 @@ def fetch_fslr(density='32k', url=None, data_dir=None, verbose=1): # noqa: D103
""".format(**_atlas_docs, densities="', '".join(DENSITIES['fsLR']))


def fetch_mni152(density='1mm', url=None, data_dir=None, verbose=1): # noqa: D103
def fetch_mni152(density='1mm', url=None, data_dir=None, verbose=1): # noqa: D103
keys = ['2009cAsym_T1w', '2009cAsym_T2w', '2009cAsym_PD',
'2009cAsym_brainmask', '2009cAsym_CSF', '2009cAsym_GM',
'2009cAsym_WM']
Expand Down Expand Up @@ -229,7 +229,7 @@ def fetch_mni152(density='1mm', url=None, data_dir=None, verbose=1): # noqa: D10
""".format(**_atlas_docs, densities="', '".join(DENSITIES['MNI152']))


def fetch_regfusion(atlas, url=None, data_dir=None, verbose=1): # noqa: D103
def fetch_regfusion(atlas, url=None, data_dir=None, verbose=1): # noqa: D103
atlas = _sanitize_atlas(atlas)
densities = DENSITIES[atlas].copy()
invalid = dict(civet=('164k',), fsLR=('4k', '8k'))
Expand Down Expand Up @@ -280,7 +280,7 @@ def fetch_regfusion(atlas, url=None, data_dir=None, verbose=1): # noqa: D103
""".format(**_atlas_docs)


def fetch_atlas(atlas, density, url=None, data_dir=None, verbose=1): # noqa: D103
def fetch_atlas(atlas, density, url=None, data_dir=None, verbose=1): # noqa: D103
atlas = _sanitize_atlas(atlas)
fetcher = globals()[f'fetch_{atlas.lower()}']
return fetcher(density, url=url, data_dir=data_dir, verbose=verbose)
Expand All @@ -305,7 +305,7 @@ def fetch_atlas(atlas, density, url=None, data_dir=None, verbose=1): # noqa: D10
""".format(**_atlas_docs, atlases="', '".join(DENSITIES.keys()))


def fetch_all_atlases(data_dir=None, verbose=1): # noqa: D103
def fetch_all_atlases(data_dir=None, verbose=1): # noqa: D103
atlases = {'regfusion': {}}
for key, resolutions in DENSITIES.items():
atlases[key] = {}
Expand Down Expand Up @@ -334,7 +334,7 @@ def fetch_all_atlases(data_dir=None, verbose=1): # noqa: D103
"""


def get_atlas_dir(atlas, data_dir=None): # noqa: D103
def get_atlas_dir(atlas, data_dir=None): # noqa: D103
try:
atlas = _sanitize_atlas(atlas)
except ValueError as err:
Expand Down
18 changes: 9 additions & 9 deletions neuromaps/nulls/nulls.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@
)


def alexander_bloch(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
def alexander_bloch(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
n_perm=1000, seed=None, spins=None, surfaces=None):
if spins is None:
if surfaces is None:
Expand Down Expand Up @@ -178,7 +178,7 @@ def alexander_bloch(data, atlas='fsaverage', density='10k', parcellation=None, #
vazquez_rodriguez = alexander_bloch


def vasa(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
def vasa(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
n_perm=1000, seed=None, spins=None, surfaces=None):
if parcellation is None:
raise ValueError('Cannot use `vasa()` null method without specifying '
Expand Down Expand Up @@ -229,7 +229,7 @@ def vasa(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
""".format(**_nulls_input_docs)


def hungarian(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
def hungarian(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
n_perm=1000, seed=None, spins=None, surfaces=None):
if parcellation is None:
raise ValueError('Cannot use `hungarian()` null method without '
Expand Down Expand Up @@ -278,7 +278,7 @@ def hungarian(data, atlas='fsaverage', density='10k', parcellation=None, # noqa:
""".format(**_nulls_input_docs)


def baum(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
def baum(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
n_perm=1000, seed=None, spins=None, surfaces=None):
if parcellation is None:
raise ValueError('Cannot use `baum()` null method without specifying '
Expand Down Expand Up @@ -326,7 +326,7 @@ def baum(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
""".format(**_nulls_input_docs)


def cornblath(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
def cornblath(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
n_perm=1000, seed=None, spins=None, surfaces=None):
if parcellation is None:
raise ValueError('Cannot use `cornblath()` null method without '
Expand Down Expand Up @@ -370,7 +370,7 @@ def cornblath(data, atlas='fsaverage', density='10k', parcellation=None, # noqa:
""".format(**_nulls_input_docs)


def _get_distmat(hemisphere, atlas='fsaverage', density='10k', # noqa: D103
def _get_distmat(hemisphere, atlas='fsaverage', density='10k', # noqa: D103
parcellation=None, drop=None, n_proc=1):
hemi = HEMI.get(hemisphere, hemisphere)
if hemi not in ('L', 'R'):
Expand Down Expand Up @@ -602,7 +602,7 @@ def _make_surrogates(data, method, atlas='fsaverage', density='10k',
""".format(**_nulls_input_docs)


def burt2018(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
def burt2018(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
n_perm=1000, seed=None, distmat=None, tempdir=None, n_proc=1,
**kwargs):
return _make_surrogates(data, 'burt2018', atlas=atlas, density=density,
Expand Down Expand Up @@ -641,7 +641,7 @@ def burt2018(data, atlas='fsaverage', density='10k', parcellation=None, # noqa:
""".format(**_nulls_input_docs)


def burt2020(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
def burt2020(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
n_perm=1000, seed=None, distmat=None, n_proc=1, tempdir=None,
**kwargs):
if not _brainsmash_avail:
Expand Down Expand Up @@ -689,7 +689,7 @@ def burt2020(data, atlas='fsaverage', density='10k', parcellation=None, # noqa:
""".format(**_nulls_input_docs)


def moran(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
def moran(data, atlas='fsaverage', density='10k', parcellation=None, # noqa: D103
n_perm=1000, seed=None, distmat=None, tempdir=None, n_proc=1,
**kwargs):
if not _brainspace_avail:
Expand Down
2 changes: 1 addition & 1 deletion neuromaps/nulls/spins.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ def get_parcel_centroids(surfaces, parcellation=None, method='surface',
centroids.append(vertices)
hemiid.extend([n] * len(vertices))

return np.row_stack(centroids), np.asarray(hemiid)
return np.vstack(centroids), np.asarray(hemiid)


def _gen_rotation(seed=None):
Expand Down
14 changes: 9 additions & 5 deletions neuromaps/plotting.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
"""Functionality for plotting."""

import matplotlib
from matplotlib import colors as mcolors, pyplot as plt
from mpl_toolkits.mplot3d import Axes3D # noqa
from nilearn.plotting import plot_surf
Expand All @@ -11,11 +12,14 @@
from neuromaps.transforms import _check_hemi

HEMI = dict(L='left', R='right')
plt.cm.register_cmap(
'caret_blueorange', mcolors.LinearSegmentedColormap.from_list('blend', [
'#00d2ff', '#009eff', '#006cfe', '#0043fe',
'#fd4604', '#fe6b01', '#ffd100', '#ffff04'
])

matplotlib.colormaps.register(
mcolors.LinearSegmentedColormap.from_list(
'caret_blueorange', [
'#00d2ff', '#009eff', '#006cfe', '#0043fe',
'#fd4604', '#fe6b01', '#ffd100', '#ffff04'
]),
name="caret_blueorange"
)


Expand Down
6 changes: 3 additions & 3 deletions neuromaps/points.py
Original file line number Diff line number Diff line change
Expand Up @@ -269,7 +269,7 @@ def make_surf_graph(vertices, faces, mask=None):
# get all (direct + indirect) edges from surface
direct_edges, direct_weights = get_direct_edges(vertices, faces)
indirect_edges, indirect_weights = get_indirect_edges(vertices, faces)
edges = np.row_stack((direct_edges, indirect_edges))
edges = np.vstack((direct_edges, indirect_edges))
weights = np.hstack((direct_weights, indirect_weights))

# remove edges that include a vertex in `mask`
Expand Down Expand Up @@ -374,13 +374,13 @@ def get_surface_distance(surface, parcellation=None, medial=None,

# calculate distance from each vertex to all other vertices
graph = make_surf_graph(vert, faces, mask=mask)
dist = np.row_stack(Parallel(n_jobs=n_proc, max_nbytes=None)(
dist = np.vstack(Parallel(n_jobs=n_proc, max_nbytes=None)(
delayed(_get_graph_distance)(n, graph, labels) for n in range(n_vert)
))

# average distance for all vertices within a parcel + set diagonal to 0
if labels is not None:
dist = np.row_stack([
dist = np.vstack([
dist[labels == lab].mean(axis=0) for lab in np.unique(labels)
])
dist[np.diag_indices_from(dist)] = 0
Expand Down
12 changes: 6 additions & 6 deletions neuromaps/resampling.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
)


def downsample_only(src, trg, src_space, trg_space, method='linear', hemi=None): # noqa: D103
def downsample_only(src, trg, src_space, trg_space, method='linear', hemi=None): # noqa: D103
src_den, trg_den = transforms._estimate_density((src, trg), hemi)
src_num, trg_num = int(src_den[:-1]), int(trg_den[:-1])
src_space, trg_space = src_space.lower(), trg_space.lower()
Expand Down Expand Up @@ -63,8 +63,8 @@ def downsample_only(src, trg, src_space, trg_space, method='linear', hemi=None):
""".format(**_resampling_docs)


def transform_to_src(src, trg, src_space, trg_space, method='linear', hemi=None): # noqa: D103
src_den, trg_den = transforms._estimate_density((src, trg), hemi)
def transform_to_src(src, trg, src_space, trg_space, method='linear', hemi=None): # noqa: D103
src_den, _ = transforms._estimate_density((src, trg), hemi)

func = getattr(transforms, f'{trg_space.lower()}_to_{src_space.lower()}')
trg = func(trg, src_den, hemi=hemi, method=method)
Expand All @@ -86,8 +86,8 @@ def transform_to_src(src, trg, src_space, trg_space, method='linear', hemi=None)
""".format(**_resampling_docs)


def transform_to_trg(src, trg, src_space, trg_space, hemi=None, method='linear'): # noqa: D103
src_den, trg_den = transforms._estimate_density((src, trg), hemi)
def transform_to_trg(src, trg, src_space, trg_space, hemi=None, method='linear'): # noqa: D103
_, trg_den = transforms._estimate_density((src, trg), hemi)

func = getattr(transforms, f'{src_space.lower()}_to_{trg_space.lower()}')
src = func(src, trg_den, hemi=hemi, method=method)
Expand Down Expand Up @@ -138,7 +138,7 @@ def transform_to_alt(src, trg, src_space, trg_space, method='linear', # noqa: D
""".format(**_resampling_docs)


def mni_transform(src, trg, src_space, trg_space, method='linear', hemi=None): # noqa: D103
def mni_transform(src, trg, src_space, trg_space, method='linear', hemi=None): # noqa: D103
if src_space != 'MNI152':
raise ValueError('Cannot perform MNI transformation when src_space is '
f'not "MNI152." Received: {src_space}.')
Expand Down
2 changes: 1 addition & 1 deletion neuromaps/stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,7 +233,7 @@ def efficient_pearsonr(a, b, ddof=1, nan_policy='propagate', return_pval=True):
If either input contains nan and nan_policy is set to 'omit', both arrays
will be masked to omit the nan entries.
"""
a, b, axis = _chk2_asarray(a, b, 0)
a, b, _ = _chk2_asarray(a, b, 0)
if len(a) != len(b):
raise ValueError('Provided arrays do not have same length')

Expand Down
10 changes: 10 additions & 0 deletions neuromaps/tests/test_plotting.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,13 @@
def test_plot_surf_template():
"""Test plotting a surface template."""
assert False


def test_register_cmap():
"""Test registering a colormap."""
import matplotlib
from neuromaps import plotting # noqa: F401
if "caret_blueorange" in matplotlib.colormaps:
assert True
else:
assert False
16 changes: 11 additions & 5 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ classifiers = [
dependencies = [
"numpy >=1.14.0",
"scipy >=0.17",
"matplotlib",
"matplotlib >=3.5.0",
"scikit-learn",
"nibabel >=3.0.0",
"nilearn"
Expand Down Expand Up @@ -73,8 +73,6 @@ tag_prefix = ""
parentdir_prefix = ""

[tool.ruff]
select = ["E", "F", "B", "D", "NPY"]
ignore = ["E402"]
line-length = 88
exclude = [
"setup.py",
Expand All @@ -85,10 +83,18 @@ exclude = [
]
target-version = "py38"

[tool.ruff.pydocstyle]
[tool.ruff.lint]
select = ["E", "F", "B", "D", "NPY"]
ignore = [
"E402"
]
extend-select = ["E302"]
preview = true

[tool.ruff.lint.pydocstyle]
convention = "numpy"

[tool.ruff.per-file-ignores]
[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["D104"]
"neuromaps/tests/*" = ["B011"]
"neuromaps/nulls/tests/*" = ["B011"]
Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
matplotlib
matplotlib>=3.5.0
nibabel>=3.0.0
nilearn
numpy>=1.14.0
Expand Down

0 comments on commit 4ff0dbf

Please sign in to comment.