ENH: Set-up testing framework
Close nipreps#1.

Opening the way for nipreps#6.
oesteban committed Aug 6, 2019
1 parent bba7b35 commit 48d5ba2
Showing 9 changed files with 349 additions and 137 deletions.
108 changes: 108 additions & 0 deletions .circleci/config.yml
@@ -65,6 +65,100 @@ jobs:
          paths:
            - /tmp/cache/docker.tar.gz

  cache_test_data:
    machine:
      image: circleci/classic:201711-01
    working_directory: /tmp/data
    steps:
      - restore_cache:
          keys:
            - data-v0-{{ .Branch }}-
            - data-v0-master-
            - data-v0-
      - run:
          name: Setup git-annex
          command: |
            mkdir -p /tmp/cache
            if [[ ! -e "/tmp/cache/git-annex-standalone.tar.gz" ]]; then
              wget -O- http://neuro.debian.net/lists/trusty.us-ca.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list
              sudo apt-key adv --recv-keys --keyserver hkp://pool.sks-keyservers.net:80 0xA5D32F012649A5A9
              sudo apt update && sudo apt-get install -y --no-install-recommends git-annex-standalone
              mkdir -p /tmp/cache
              tar czvf /tmp/cache/git-annex-standalone.tar.gz /usr/bin/git-annex /usr/bin/git-annex-shell /usr/lib/git-annex.linux
            else
              sudo tar xzfv /tmp/cache/git-annex-standalone.tar.gz -C /
            fi
            git config --global user.name 'CRN'
            git config --global user.email '[email protected]'
      - run:
          name: Setup DataLad
          command: |
            pyenv global 3.5.2
            virtualenv venv
            pip install --no-cache-dir -U pip
            pip install --no-cache-dir -U datalad
      - run:
          name: Install ds001600
          command: |
            datalad install -r https://github.com/OpenNeuroDatasets/ds001600.git
            datalad update ds001600/
            datalad get -r ds001600/sub-1/fmap/*
      - run:
          name: Get testdata
          command: |
            if [[ ! -d /tmp/data/testdata ]]; then
              wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q \
                -O testdata.zip "https://files.osf.io/v1/resources/9sy2a/providers/osfstorage/5d44b940bcd6d900198ed6be/?zip="
              unzip testdata.zip -d /tmp/data/testdata
            fi
      - save_cache:
          key: data-v0-{{ .Branch }}-{{ .BuildNum }}
          paths:
            - "/opt/circleci/.pyenv/versions/3.5.2"
            - /tmp/data
            - /tmp/cache

      - persist_to_workspace:
          root: /tmp
          paths:
            - data

  test_sdcflows:
    machine:
      image: circleci/classic:201711-01
    working_directory: /tmp/tests
    steps:
      - attach_workspace:
          at: /tmp
      - checkout:
          path: /tmp/src/sdcflows
      - run:
          name: Load Docker image layer cache
          no_output_timeout: 30m
          command: |
            docker info
            set +o pipefail
            if [ -f /tmp/cache/docker.tar.gz ]; then
              sudo apt update && sudo apt -y install pigz
              pigz -d --stdout /tmp/cache/docker.tar.gz | docker load
              docker images
            fi
      - run:
          name: Run tests
          command: |
            docker run -it --rm=false -e TEST_DATA_HOME=/data/ -e TEST_OUTPUT_DIR=/out \
              -v /tmp/data:/data:ro -v /tmp/src:/src -v /tmp/tests:/out -w /src/sdcflows \
              poldracklab/sdcflows:latest \
              pytest -n auto --junit-xml=/out/pytest.xml sdcflows
      - store_artifacts:
          path: /tmp/tests

      - store_test_results:
          path: /tmp/tests

  deploy_docker:
    machine:
      image: circleci/classic:201711-01
@@ -207,6 +301,20 @@ workflows:
          filters:
            tags:
              only: /.*/
      - cache_test_data:
          filters:
            tags:
              only: /.*/

      - test_sdcflows:
          requires:
            - build
            - cache_test_data
          filters:
            tags:
              only: /.*/


      - test_package:
          filters:
            tags:
1 change: 0 additions & 1 deletion Dockerfile
@@ -117,7 +117,6 @@ ENV IS_DOCKER_8395080871=1

RUN ldconfig
WORKDIR /tmp/
-ENTRYPOINT ["/usr/local/miniconda/bin/sdcflows"]

ARG BUILD_DATE
ARG VCS_REF
30 changes: 17 additions & 13 deletions sdcflows/cli/run.py
@@ -31,11 +31,17 @@ def get_parser():
    # Options that affect how pyBIDS is configured
    g_bids = parser.add_argument_group('Options for filtering BIDS queries')
    g_bids.add_argument('--participant-label', action='store', type=str,
-                       nargs='*', help='process only particular subjects')
+                       nargs='*', dest='subject', help='process only particular subjects')
    g_bids.add_argument('--task', action='store', type=str, nargs='*',
                        help='select a specific task to be processed')
+   g_bids.add_argument('--dir', action='store', type=str, nargs='*',
+                       help='select a specific direction entity to be processed')
+   g_bids.add_argument('--acq', action='store', type=str, nargs='*', dest='acquisition',
+                       help='select a specific acquisition entity to be processed')
+   g_bids.add_argument('--run', action='store', type=int, nargs='*',
+                       help='select a specific run identifier to be processed')
+   g_bids.add_argument('--suffix', action='store', type=str, nargs='*', default='bold',
+                       help='select a specific image suffix to be processed')

    g_perfm = parser.add_argument_group('Options to handle performance')
    g_perfm.add_argument("-v", "--verbose", dest="verbose_count", action="count", default=0,
@@ -58,6 +64,7 @@ def main():
    from multiprocessing import set_start_method
    from bids.layout import BIDSLayout
    from nipype import logging as nlogging
+   from ..workflows.base import init_sdc_wf
    set_start_method('forkserver')

    opts = get_parser().parse_args()
@@ -88,21 +95,18 @@ def main():
    if not nthreads or nthreads < 1:
        nthreads = cpu_count()

-   derivatives_dir = opts.derivatives_dir.resolve()
-   bids_dir = opts.bids_dir or derivatives_dir.parent
+   output_dir = opts.output_dir.resolve()
+   bids_dir = opts.bids_dir or output_dir.parent

-   # Get absolute path to BIDS directory
-   bids_dir = opts.bids_dir.resolve()
-   layout = BIDSLayout(str(bids_dir), validate=False, derivatives=str(derivatives_dir))
-   query = {'domains': 'derivatives', 'desc': 'preproc',
-            'suffix': 'bold', 'extensions': ['.nii', '.nii.gz']}
-
-   if opts.participant_label:
-       query['subject'] = '|'.join(opts.participant_label)
-   if opts.run:
-       query['run'] = '|'.join(opts.run)
-   if opts.task:
-       query['task'] = '|'.join(opts.task)
+   layout = BIDSLayout(str(bids_dir), validate=False, derivatives=str(output_dir))
+   query = {'suffix': opts.suffix, 'extension': ['.nii', '.nii.gz']}
+
+   for entity in ('subject', 'task', 'dir', 'acquisition', 'run'):
+       arg = getattr(opts, entity, None)
+       if arg is not None:
+           query[entity] = arg


if __name__ == '__main__':
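Note that the new loop only assembles the entity query; how it is consumed is not shown in this diff. A minimal sketch of the intended usage, assuming the pybids BIDSLayout.get() keyword-filter interface and that the installed pybids accepts the 'extension' filter used in the dict above (paths and values are illustrative, not part of the commit):

from bids.layout import BIDSLayout

# Illustrative dataset path and filter values -- placeholders only.
layout = BIDSLayout('/data/ds001600', validate=False)
query = {'suffix': 'bold', 'extension': ['.nii', '.nii.gz'], 'subject': ['1']}

# BIDSLayout.get() takes entities as keyword filters and returns BIDSFile objects.
for bold_file in layout.get(**query):
    print(bold_file.path)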
38 changes: 38 additions & 0 deletions sdcflows/conftest.py
@@ -0,0 +1,38 @@
"""py.test configuration"""
import os
from pathlib import Path
import numpy
import pytest
from bids.layout import BIDSLayout

test_data_env = os.getenv('TEST_DATA_HOME', str(Path.home() / 'sdcflows-tests'))
test_output_dir = os.getenv('TEST_OUTPUT_DIR')

layouts = {p.name: BIDSLayout(str(p), validate=False)
           for p in Path(test_data_env).glob('*') if p.is_dir()}


def pytest_report_header(config):
    msg = "Datasets found: %s" % ', '.join([v.root for v in layouts.values()])
    if test_output_dir is not None:
        msg += '\nOutput folder: %s' % Path(test_output_dir).resolve()
    return msg


@pytest.fixture(autouse=True)
def add_np(doctest_namespace):
    doctest_namespace['np'] = numpy
    doctest_namespace['os'] = os
    doctest_namespace['Path'] = Path
    for key, val in list(layouts.items()):
        doctest_namespace[key] = Path(val.root)


@pytest.fixture
def output_path():
    return None if test_output_dir is None else Path(test_output_dir)


@pytest.fixture
def bids_layouts():
    return layouts
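These fixtures are what test modules will consume. A minimal sketch of such a test, assuming the ds001600 dataset cached by the CircleCI job is available under TEST_DATA_HOME and that its fieldmap files carry the 'epi' suffix (the module name and assertions are hypothetical, not part of this commit):

"""Hypothetical sdcflows/tests/test_layouts.py -- a sketch, not part of this commit."""


def test_datasets_discovered(bids_layouts):
    # conftest.py indexes every directory found under TEST_DATA_HOME.
    assert 'ds001600' in bids_layouts


def test_fieldmaps_present(bids_layouts, output_path):
    layout = bids_layouts['ds001600']
    # The CircleCI job fetches sub-1's fmap/ folder with DataLad; 'epi' suffix assumed.
    epi_files = layout.get(subject='1', suffix='epi', extension=['.nii', '.nii.gz'])
    assert epi_files
    if output_path is not None:
        (output_path / 'discovered_epi.txt').write_text(
            '\n'.join(f.path for f in epi_files))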
