Skip to content

Commit a2a54df

Browse files
committed
Fix formatting issues and remove code format checks from cron ci
1 parent d261b57 commit a2a54df

File tree

9 files changed

+158
-23
lines changed

9 files changed

+158
-23
lines changed

.github/workflows/cron_tests.yml

Lines changed: 138 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,138 @@
1+
name: (Auto) Tests and Coverage
2+
3+
on:
4+
# Run tests every week on Sundays
5+
schedule:
6+
- cron: "0 0 * * 0"
7+
8+
jobs:
9+
# Job (1): Run testing in parallel against multiple OSs and Python versions
10+
test:
11+
if: "!contains(github.event.head_commit.message, 'skip ci')"
12+
name: Test
13+
runs-on: ${{ matrix.os }}
14+
# Determines whether the entire workflow should pass/fail based on parallel jobs
15+
continue-on-error: ${{ matrix.ok-fail }}
16+
defaults:
17+
# This ensures each step gets properly configured bash shell for conda commands to work
18+
run:
19+
shell: bash -l {0}
20+
strategy:
21+
fail-fast: false
22+
matrix:
23+
# OSs to test
24+
os: [ubuntu-latest, macos-latest, windows-latest]
25+
# Python versions to test
26+
python-version: [3.7, 3.8]
27+
# By default everything should pass for the workflow to pass
28+
ok-fail: [false]
29+
# include:
30+
# Rather than include 3.9 in the python versions, do it here so we can ignore failures on mac and windows with 3.9 (they have install issues)
31+
# - os: ubuntu-latest
32+
# python-version: 3.9
33+
# ok-fail: false
34+
# - os: macos-latest
35+
# python-version: 3.9
36+
# ok-fail: true
37+
# - os: windows-latest
38+
# python-version: 3.9
39+
# ok-fail: true
40+
steps:
41+
# Set up Miniconda
42+
- name: Download and setup Miniconda
43+
uses: conda-incubator/setup-miniconda@v2
44+
with:
45+
miniconda-version: "latest"
46+
python-version: ${{ matrix.python-version }}
47+
48+
# Check out latest code on github
49+
- name: Checkout Code
50+
uses: actions/checkout@v2
51+
52+
# Install common sci-py packages via conda as well as testing packages and requirements
53+
# TODO: unpin pandas version when deepdish adds support for 1.2: https://github.com/uchicago-cs/deepdish/issues/45
54+
- name: Install Dependencies
55+
run: |
56+
conda activate test
57+
conda env list
58+
conda install -y pip "pandas>=1.1.0,<1.2" numpy scipy matplotlib seaborn scikit-learn
59+
conda install -y -c conda-forge pytest pytest-xdist pytest-sugar coveralls black
60+
pip install . -r requirements.txt
61+
pip install . -r optional-dependencies.txt
62+
63+
# Actually run the tests with coverage
64+
- name: Run Tests
65+
run: |
66+
conda activate test
67+
conda env list
68+
coverage run --source=nltools -m pytest -rs -n auto
69+
70+
# Send coverage to coveralls.io but waiting on parallelization to finish
71+
# Not using the official github action in the marketplace to upload because it requires a .lcov file, which pytest doesn't generate. It's just easier to use the coveralls python library which does the same thing, but works with pytest.
72+
- name: Upload Coverage
73+
# The coveralls python package has some 422 server issues with uploads from github-actions so try both service providers, for more see:
74+
# https://github.com/TheKevJames/coveralls-python/issues/252
75+
run: coveralls --service=github || coveralls --service=github-actions
76+
env:
77+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
78+
COVERALLS_FLAG_NAME: ${{ matrix.os }}-${{ matrix.python-version }}
79+
COVERALLS_PARALLEL: true
80+
81+
# Job (2): Send a finish notification to coveralls.io to integrate coverage across parallel tests
82+
coveralls:
83+
if: "!contains(github.event.head_commit.message, 'skip ci')"
84+
name: Coveralls.io Upload
85+
needs: test
86+
runs-on: ubuntu-latest
87+
container: python:3-slim
88+
continue-on-error: true
89+
steps:
90+
- name: Finished
91+
run: |
92+
pip3 install --upgrade coveralls
93+
coveralls --service=github --finish || coveralls --service=github-actions --finish
94+
env:
95+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
96+
97+
# Job (3): Build docs, but don't deploy. This is effectively another layer of testing because of our sphinx-gallery auto-examples
98+
docs:
99+
if: "!contains(github.event.head_commit.message, 'skip ci')"
100+
name: Build docs and auto-examples
101+
runs-on: ubuntu-latest
102+
steps:
103+
- name: Checkout Code
104+
uses: actions/checkout@v2
105+
106+
- name: Setup Python
107+
uses: actions/setup-python@v2
108+
with:
109+
python-version: "3.8"
110+
111+
- name: Upgrade pip
112+
run: |
113+
# install pip>=20.1 to use "pip cache dir"
114+
python3 -m pip install --upgrade pip
115+
116+
- name: Setup pip-cache
117+
id: pip-cache
118+
run: echo "::set-output name=dir::$(pip cache dir)"
119+
120+
- name: Cache deps
121+
uses: actions/cache@v2
122+
with:
123+
path: ${{ steps.pip-cache.outputs.dir }}
124+
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
125+
restore-keys: |
126+
${{ runner.os }}-pip-
127+
128+
- name: Install deps
129+
run: |
130+
python3 -m pip install . -r requirements.txt
131+
python3 -m pip install . -r requirements-dev.txt
132+
python3 -m pip install . -r optional-dependencies.txt
133+
134+
- name: Build docs
135+
run: |
136+
cd docs
137+
make clean
138+
make html

.github/workflows/tests_and_coverage.yml

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,9 +10,6 @@ on:
1010
branches:
1111
- main
1212
- master
13-
# Run tests every week on sundays
14-
schedule:
15-
- cron: "0 0 * * 0"
1613

1714
jobs:
1815
# Job (1): Run testing in parallel against multiples OSs and Python versions

nltools/analysis.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -311,7 +311,7 @@ def plot(self, plot_method="gaussian", balanced_acc=False, **kwargs):
311311
return fig
312312

313313
def summary(self):
314-
""" Display a formatted summary of ROC analysis. """
314+
"""Display a formatted summary of ROC analysis."""
315315

316316
print("------------------------")
317317
print(".:ROC Analysis Summary:.")

nltools/data/adjacency.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -338,7 +338,7 @@ def _test_is_single_matrix(data):
338338
return len(data.shape) == 1
339339

340340
def _import_single_data(self, data, matrix_type=None):
341-
""" Helper function to import single data matrix."""
341+
"""Helper function to import single data matrix."""
342342

343343
if isinstance(data, str) or isinstance(data, Path):
344344
if os.path.isfile(data):
@@ -595,11 +595,11 @@ def median(self, axis=0):
595595
return np.nanmedian(self.data, axis=axis)
596596

597597
def shape(self):
598-
""" Calculate shape of data. """
598+
"""Calculate shape of data."""
599599
return self.data.shape
600600

601601
def square_shape(self):
602-
""" Calculate shape of squareform data. """
602+
"""Calculate shape of squareform data."""
603603
if self.matrix_type == "empty":
604604
return np.array([])
605605
else:
@@ -609,7 +609,7 @@ def square_shape(self):
609609
return self[0].squareform().shape
610610

611611
def copy(self):
612-
""" Create a copy of Adjacency object."""
612+
"""Create a copy of Adjacency object."""
613613
return deepcopy(self)
614614

615615
def append(self, data):
@@ -788,7 +788,7 @@ def r_to_z(self):
788788
return out
789789

790790
def z_to_r(self):
791-
""" Convert z score back into r value for each element of data object"""
791+
"""Convert z score back into r value for each element of data object"""
792792

793793
out = self.copy()
794794
out.data = fisher_z_to_r(out.data)

nltools/data/brain_data.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -384,7 +384,7 @@ def __iter__(self):
384384
yield self[x]
385385

386386
def shape(self):
387-
""" Get images by voxels shape. """
387+
"""Get images by voxels shape."""
388388

389389
return self.data.shape
390390

@@ -463,7 +463,7 @@ def std(self, axis=0):
463463
return out
464464

465465
def sum(self):
466-
""" Sum over voxels."""
466+
"""Sum over voxels."""
467467

468468
out = deepcopy(self)
469469
if len(self.shape()) > 1:
@@ -475,7 +475,7 @@ def sum(self):
475475
return out
476476

477477
def to_nifti(self):
478-
""" Convert Brain_Data Instance into Nifti Object """
478+
"""Convert Brain_Data Instance into Nifti Object"""
479479

480480
return self.nifti_masker.inverse_transform(self.data)
481481

@@ -872,7 +872,7 @@ def append(self, data, **kwargs):
872872
return out
873873

874874
def empty(self, data=True, Y=True, X=True):
875-
""" Initalize Brain_Data.data as empty """
875+
"""Initalize Brain_Data.data as empty"""
876876

877877
tmp = deepcopy(self)
878878
if data:
@@ -884,7 +884,7 @@ def empty(self, data=True, Y=True, X=True):
884884
return tmp
885885

886886
def isempty(self):
887-
""" Check if Brain_Data.data is empty """
887+
"""Check if Brain_Data.data is empty"""
888888

889889
if isinstance(self.data, np.ndarray):
890890
boolean = False if self.data.size else True
@@ -1651,7 +1651,7 @@ def detrend(self, method="linear"):
16511651
return out
16521652

16531653
def copy(self):
1654-
""" Create a copy of a Brain_Data instance. """
1654+
"""Create a copy of a Brain_Data instance."""
16551655
return deepcopy(self)
16561656

16571657
def upload_neurovault(
@@ -1751,7 +1751,7 @@ def r_to_z(self):
17511751
return out
17521752

17531753
def z_to_r(self):
1754-
""" Convert z score back into r value for each element of data object"""
1754+
"""Convert z score back into r value for each element of data object"""
17551755

17561756
out = self.copy()
17571757
out.data = fisher_z_to_r(out.data)
@@ -1791,7 +1791,7 @@ def filter(self, sampling_freq=None, high_pass=None, low_pass=None, **kwargs):
17911791
return out
17921792

17931793
def dtype(self):
1794-
""" Get data type of Brain_Data.data."""
1794+
"""Get data type of Brain_Data.data."""
17951795
return self.data.dtype
17961796

17971797
def astype(self, dtype):

nltools/datasets.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@
3333

3434

3535
def download_nifti(url, data_dir=None):
36-
""" Download a image to a nifti file."""
36+
"""Download a image to a nifti file."""
3737
local_filename = url.split("/")[-1]
3838
if data_dir is not None:
3939
if not os.path.isdir(data_dir):

nltools/prefs.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121

2222

2323
def resolve_mni_path(MNI_Template):
24-
""" Helper function to resolve MNI path based on MNI_Template prefs setting."""
24+
"""Helper function to resolve MNI path based on MNI_Template prefs setting."""
2525

2626
res = MNI_Template["resolution"]
2727
m = MNI_Template["mask_type"]

nltools/stats.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -454,14 +454,14 @@ def upsample(
454454

455455

456456
def fisher_r_to_z(r):
457-
""" Use Fisher transformation to convert correlation to z score """
457+
"""Use Fisher transformation to convert correlation to z score"""
458458

459459
# return .5*np.log((1 + r)/(1 - r))
460460
return np.arctanh(r)
461461

462462

463463
def fisher_z_to_r(z):
464-
""" Use Fisher transformation to convert correlation to z score """
464+
"""Use Fisher transformation to convert correlation to z score"""
465465
return np.tanh(z)
466466

467467

nltools/utils.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ def _df_meta_to_arr(df):
5454

5555

5656
def get_resource_path():
57-
""" Get path to nltools resource directory. """
57+
"""Get path to nltools resource directory."""
5858
return join(dirname(__file__), "resources") + pathsep
5959

6060

@@ -231,7 +231,7 @@ def load_class(import_string):
231231

232232

233233
def isiterable(obj):
234-
""" Returns True if the object is one of allowable iterable types. """
234+
"""Returns True if the object is one of allowable iterable types."""
235235
return isinstance(obj, (list, tuple, GeneratorType))
236236

237237

0 commit comments

Comments
 (0)