Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add style check test (flake8) #25

Merged
merged 5 commits into from
Aug 26, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 23 additions & 2 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,22 @@ jobs:
paths:
- src/coverage/.coverage.py310

style_check:
docker:
- image: cimg/python:3.7
working_directory: /tmp/src/nigsp
resource_class: small
steps:
- checkout
- python/install-packages:
path-args: .[style]
pypi-cache: false
venv-cache: false
pkg-manager: pip-dist
- run:
name: Check style
command: flake8 ./nigsp

merge_coverage:
working_directory: /tmp/src/nigsp
docker:
Expand Down Expand Up @@ -113,8 +129,13 @@ workflows:
build_test: # This is the name of the workflow, feel free to change it to better match your workflow.
# Inside the workflow, you define the jobs you want to run.
jobs:
- test37
- test310
- style_check
- test37:
requires:
- style_check
- test310:
requires:
- style_check
- merge_coverage:
requires:
- test37
Expand Down
9 changes: 5 additions & 4 deletions nigsp/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ def check_mtx_dim(fname, data, shape=None):
-------
np.ndarray
If `data.ndim` = 2, returns data.
If `data.ndim` = 1 and `shape` == 'rectangle',
If `data.ndim` = 1 and `shape` == 'rectangle',
Returns data with added empty axis.

Raises
Expand Down Expand Up @@ -268,8 +268,8 @@ def load_mat(fname, shape=None):
"""
Read files in matlab format.

Assumes the existence of a matrix/vector in the mat file, rendered as
a numpy.ndarray. If there is more than one matrix, the one with the largest
Assumes the existence of a matrix/vector in the mat file, rendered as
a numpy.ndarray. If there is more than one matrix, the one with the largest
size will be selected.

Parameters
Expand Down Expand Up @@ -313,7 +313,8 @@ def load_mat(fname, shape=None):
# Check data key only if it's not hidden
# (skip '__header__', '__version__', '__global__')
if '__' not in k:
LGR.info(f'Checking {fname} key {str(k)} content for data (float array/matrices in MATLAB).')
LGR.info(f'Checking {fname} key {str(k)} content for data '
'(float array/matrices in MATLAB).')
if type(data[k]) is np.ndarray:
data_keys.append(k)

Expand Down
7 changes: 5 additions & 2 deletions nigsp/objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@


class SCGraph():
"""Main module object, containing all data representing the graph."""

def __init__(self, mtx, timeseries, atlas=None, filename=None, img=None,
eigenval=None, eigenvec=None, energy=None, lapl_mtx=None,
Expand Down Expand Up @@ -170,12 +171,14 @@ def test_significance(self, method='Bernoulli', p=0.05,
if self.sdi is not None:
surr_sdi = operations.sdi(self.surr_split, mean, keys=None)
self.sdi = operations.test_significance(surr=surr_sdi, data=self.sdi,
method=method, p=p, return_masked=return_masked,
method=method, p=p,
return_masked=return_masked,
mean=mean)
if self.gsdi is not None:
surr_sdi = operations.gsdi(self.surr_split, mean, keys=None)
self.gsdi = operations.test_significance(surr=surr_sdi, data=self.gsdi,
method=method, p=p, return_masked=return_masked,
method=method, p=p,
return_masked=return_masked,
mean=mean)
return self

Expand Down
2 changes: 1 addition & 1 deletion nigsp/operations/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
`nigsp` module.

For example, calling `nigsp.operations.metrics.sdi` is equivalent to calling
`nigsp.operations.sdi` or `nigsp.metrics.sdi`.
`nigsp.operations.sdi` or `nigsp.metrics.sdi`.
"""


Expand Down
4 changes: 2 additions & 2 deletions nigsp/operations/laplacian.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def symmetric_normalisation(mtx):
--------
https://en.wikipedia.org/wiki/Laplacian_matrix#Symmetrically_normalized_Laplacian_2
"""
d = np.diag(mtx.sum(axis=-1) ** (-1/2))
d = np.diag(mtx.sum(axis=-1) ** (-1 / 2))

symm_norm = (d @ mtx @ d)

Expand All @@ -61,7 +61,7 @@ def decomposition(mtx):

idx = np.argsort(eigenval)
eigenval = eigenval[idx]
# #!# Check that eigenvec has the right index and not inverted
# #!# Check that eigenvec has the right index and not inverted
eigenvec = eigenvec[:, idx]

return eigenval, eigenvec
Expand Down
2 changes: 1 addition & 1 deletion nigsp/operations/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def sdi(ts_split, mean=False, keys=None):
A dictionary containing two entries. If the two entries are "low" and
"high", then SDI will be computed as the norm of the high vs the norm
of the low, otherwise as the ratio between the second (second key in
sorted keys) and the first.
sorted keys) and the first.
mean : bool, optional
If True, compute mean over the last axis (e.g. between subjects)
keys : None or list of strings, optional
Expand Down
13 changes: 7 additions & 6 deletions nigsp/operations/nifti.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def apply_mask(data, mask):
if data.shape[:mask.ndim] != mask.shape:
raise ValueError(f'Cannot mask data with shape {data.shape} using mask '
f'with shape {mask.shape}')
if (data.ndim-mask.ndim) > 1:
if (data.ndim - mask.ndim) > 1:
LGR.warning(f'Returning volume with {data.ndim-mask.ndim+1} dimensions.')
else:
LGR.info(f'Returning {data.ndim-mask.ndim+1}D array.')
Expand Down Expand Up @@ -131,7 +131,7 @@ def unmask(data, mask, shape=None, asdata=None):
------
ValueError
If both `shape` and `asdata` are empty
If the first dimension of `data` and the number of available voxels in
If the first dimension of `data` and the number of available voxels in
mask do not match.
If the mask shape does not match the first (mask)
"""
Expand Down Expand Up @@ -193,21 +193,22 @@ def apply_atlas(data, atlas, mask=None):
# #!# Add nilearn's fetching atlases utility

if atlas.ndim > 3:
raise NotImplementedError(f'Files with {atlas.ndim} dimensions are not supported as atlases.')
raise NotImplementedError(f'Files with {atlas.ndim} dimensions are not '
'supported as atlases.')
if data.shape[:mask.ndim] != mask.shape:
raise ValueError(f'Cannot mask data with shape {data.shape} using mask '
f'with shape {mask.shape}')
if data.shape[:atlas.ndim] != atlas.shape:
raise ValueError(f'Cannot apply atlas with shape {atlas.shape} on data '
f'with shape {data.shape}')
if (data.ndim-atlas.ndim) > 1:
if (data.ndim - atlas.ndim) > 1:
LGR.warning(f'returning volume with {data.ndim-atlas.ndim+1} dimensions.')
else:
LGR.info(f'Returning {data.ndim-atlas.ndim+1}D array of signal averages '
f'in atlas {atlas}.')

# Mask data and atlas first
atlas = atlas*mask
atlas = atlas * mask
labels = np.unique(atlas)
labels = labels[labels > 0]
LGR.info(f'Labels: {labels}, numbers: {len(labels)}')
Expand Down Expand Up @@ -268,7 +269,7 @@ def unfold_atlas(data, atlas, mask=None):
if atlas.shape[:mask.ndim] != mask.shape:
raise ValueError(f'Cannot mask atlas with shape {atlas.shape} using mask '
f'with shape {mask.shape}')
atlas = atlas*mask
atlas = atlas * mask

labels = np.unique(atlas)
labels = labels[labels > 0]
Expand Down
8 changes: 4 additions & 4 deletions nigsp/operations/surrogates.py
Original file line number Diff line number Diff line change
Expand Up @@ -365,17 +365,17 @@ def test_significance(surr,
# the real data index (real_idx) is at the extremes of the matrix last axis
# (with tolerance on the extremes depending on p).
# real_idx serendipitously is the number of surrogates.
stat_mask = (reord_surr[..., :floor(real_idx * p) + 1].any(axis=-1) +
reord_surr[..., -floor(real_idx * p) - 1:].any(axis=-1))
stat_mask = (reord_surr[..., :floor(real_idx * p) + 1].any(axis=-1)
+ reord_surr[..., -floor(real_idx * p) - 1:].any(axis=-1))

if method == 'Bernoulli' and surr.shape[1] > 1 and surr.ndim >= 3:
# The following computes the CDF of a binomial distribution
# Difference with scipy's binom.cdf (100 samples) is: 5.066394802133445e-06
# #!# See if there is a quicker way to get this (probably invert testing)

def _pmf(x, n, p):
f = ((factorial(n) / (factorial(x) * factorial(n - x))) * p**x *
(1 - p)**(n - x))
f = ((factorial(n) / (factorial(x) * factorial(n - x))) * p**x
* (1 - p)**(n - x))
return f

x = np.arange(0, 100, 1)
Expand Down
16 changes: 8 additions & 8 deletions nigsp/operations/timeseries.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,8 +42,8 @@ def normalise_ts(timeseries):
'Returning it as is.')
return timeseries

z = ((timeseries - timeseries.mean(axis=1)[:, np.newaxis, ...]) /
timeseries.std(axis=1, ddof=1)[:, np.newaxis, ...])
z = ((timeseries - timeseries.mean(axis=1)[:, np.newaxis, ...])
/ timeseries.std(axis=1, ddof=1)[:, np.newaxis, ...])
z[np.isnan(z)] = 0

return z
Expand Down Expand Up @@ -133,7 +133,7 @@ def median_cutoff_frequency_idx(energy):

if energy.ndim == 2:
energy = energy.mean(axis=-1)
half_tot_auc = np.trapz(energy, axis=0)/2
half_tot_auc = np.trapz(energy, axis=0) / 2
LGR.debug(f'Total AUC = {half_tot_auc*2}, targetting half of total AUC')

# Compute the AUC from first to one to last frequency,
Expand Down Expand Up @@ -200,17 +200,17 @@ def graph_filter(timeseries, eigenvec, freq_idx, keys=['low', 'high']):
LGR.info(f'Splitting graph into {len(freq_idx)+1} parts')

# Check that there is the right amount of keys
if len(keys) > len(freq_idx)+1:
if len(keys) > len(freq_idx) + 1:
LGR.warning(f'The declared keys list ({keys}) has {len(keys)} elements. '
f'Since the frequency index list ({freq_idx}) has {len(freq_idx)}, '
f'any keys after {keys[len(freq_idx)]} will be ignored.')
keys = keys[:len(freq_idx)+1]
elif len(keys) < len(freq_idx)+1:
keys = keys[:len(freq_idx) + 1]
elif len(keys) < len(freq_idx) + 1:
LGR.warning(f'The declared keys list ({keys}) has {len(keys)} elements. '
f'Since the frequency index list ({freq_idx}) has {len(freq_idx)}, '
f'more keys will be created after {keys[len(freq_idx)]} .')

for i in range(len(keys), len(freq_idx)+1):
for i in range(len(keys), len(freq_idx) + 1):
keys = keys + [f'key-{i+1:03d}']

# Add 0 and None to freq_idx to have full indexes
Expand Down Expand Up @@ -287,7 +287,7 @@ def _fc(timeseries, mean=False):
elif timeseries.ndim == 2:
return np.corrcoef(timeseries)
else:
# reshape the array to allow reiteration on unknown dimensions of timeseries
# reshape the array to allow reiteration on unknown dimensions of timeseries
temp_ts, _ = prepare_ndim_iteration(timeseries, 2)
fcorr = np.empty(([temp_ts.shape[0]] * 2 + [temp_ts.shape[-1]]), dtype='float32')
for i in range(temp_ts.shape[-1]):
Expand Down
4 changes: 2 additions & 2 deletions nigsp/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

def pairwise(iterable):
"""
Recreate `itertools.pairwise()` behaviour for python < 3.10 compatibility.
Recreate `itertools.pairwise` behaviour for python < 3.10 compatibility.

Parameters
----------
Expand Down Expand Up @@ -125,7 +125,7 @@ def prepare_ndim_iteration(data, idx):
np.ndarray, np.ndarray
The reshaped data and an empty array like it.
"""
if data.ndim > idx+1:
if data.ndim > idx + 1:
new_shape = list(data.shape[:idx]) + [prod(data.shape[idx:])]
data = data.reshape(new_shape)

Expand Down
7 changes: 4 additions & 3 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -69,12 +69,13 @@ console_scripts =
[flake8]
doctest = True
exclude=
*build/
_version.py
./nigsp/cli/__init__.py
tests
ignore = E126, E402, W503
ignore = E126, E402, W503, F401, F811
max-line-length = 99
per-file-ignores =
*/__init__.py:F401
workflow.py:D401

[tool:pytest]
doctest_optionflags = NORMALIZE_WHITESPACE
Expand Down