
Commit 5b235af

Merge remote-tracking branch 'upstream/master' into enh/SphinxExtension-workflows
2 parents: 5e7fac9 + f85dd1f

6 files changed: +62 −46 lines changed

.circle/tests.sh

Lines changed: 13 additions & 13 deletions
@@ -13,27 +13,27 @@ if [ "${CIRCLE_NODE_TOTAL:-}" != "4" ]; then
     exit 1
 fi
 
-# These tests are manually balanced based on previous build timings.
+# These tests are manually balanced based on previous build timings.
 # They may need to be rebalanced in the future.
 case ${CIRCLE_NODE_INDEX} in
   0)
-    docker run --rm=false -it -e CODECOV_TOKEN=${CODECOV_TOKEN} -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_pytests.sh && \
-    docker run --rm=false -it -e CODECOV_TOKEN=${CODECOV_TOKEN} -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_pytests.sh && \
-    docker run --rm=false -it -e CODECOV_TOKEN=${CODECOV_TOKEN} -v $WORKDIR:/work -w /src/nipype/doc nipype/nipype:py35 /usr/bin/run_builddocs.sh && \
-    docker run --rm=false -it -e CODECOV_TOKEN=${CODECOV_TOKEN} -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow3d && \
-    docker run --rm=false -it -e CODECOV_TOKEN=${CODECOV_TOKEN} -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow4d
+    docker run --rm=false -it -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_pytests.sh && \
+    docker run --rm=false -it -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_pytests.sh && \
+    docker run --rm=false -it -v $WORKDIR:/work -w /src/nipype/doc nipype/nipype:py35 /usr/bin/run_builddocs.sh && \
+    docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow3d && \
+    docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow4d
     ;;
   1)
-    docker run --rm=false -it -e CODECOV_TOKEN=${CODECOV_TOKEN} -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ level1 && \
-    docker run --rm=false -it -e CODECOV_TOKEN=${CODECOV_TOKEN} -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ l2pipeline
+    docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ level1 && \
+    docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ l2pipeline
     ;;
   2)
-    docker run --rm=false -it -e CODECOV_TOKEN=${CODECOV_TOKEN} -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \
-    docker run --rm=false -it -e CODECOV_TOKEN=${CODECOV_TOKEN} -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ l2pipeline
+    docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \
+    docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ l2pipeline
     ;;
   3)
-    docker run --rm=false -it -e CODECOV_TOKEN=${CODECOV_TOKEN} -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \
-    docker run --rm=false -it -e CODECOV_TOKEN=${CODECOV_TOKEN} -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_fsl_feeds Linear /data/examples/ l1pipeline && \
-    docker run --rm=false -it -e CODECOV_TOKEN=${CODECOV_TOKEN} -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_fsl_reuse Linear /data/examples/ level1_workflow
+    docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \
+    docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_fsl_feeds Linear /data/examples/ l1pipeline && \
+    docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_fsl_reuse Linear /data/examples/ level1_workflow
     ;;
 esac

Dockerfile

Lines changed: 0 additions & 4 deletions
@@ -71,10 +71,6 @@ RUN conda config --add channels conda-forge; sync && \
 RUN sed -i 's/\(backend *: \).*$/\1Agg/g' /usr/local/miniconda/lib/python${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}/site-packages/matplotlib/mpl-data/matplotlibrc && \
     python -c "from matplotlib import font_manager"
 
-# Install codecov inside container
-RUN curl -so /usr/bin/codecov.io https://codecov.io/bash && \
-    chmod 755 /usr/bin/codecov.io
-
 # Install CI scripts
 COPY docker/files/run_* /usr/bin/
 RUN chmod +x /usr/bin/run_*

circle.yml

Lines changed: 8 additions & 3 deletions
@@ -17,16 +17,18 @@ dependencies:
     - "~/docker"
     - "~/examples"
     - "~/.apt-cache"
+    - "~/bin"
 
   pre:
     # Let CircleCI cache the apt archive
     - mkdir -p ~/.apt-cache/partial && sudo rm -rf /var/cache/apt/archives && sudo ln -s ~/.apt-cache /var/cache/apt/archives
     - sudo apt-get -y update && sudo apt-get install -y wget bzip2
     # Create work folder and force group permissions
     - mkdir -p $WORKDIR && sudo setfacl -d -m group:ubuntu:rwx $WORKDIR && sudo setfacl -m group:ubuntu:rwx $WORKDIR
-    - mkdir -p $HOME/docker $HOME/examples $WORKDIR/tests $WORKDIR/crashfiles $WORKDIR/logs ${CIRCLE_TEST_REPORTS}/tests/
+    - mkdir -p $HOME/docker $HOME/examples $WORKDIR/tests $WORKDIR/logs $WORKDIR/crashfiles ${CIRCLE_TEST_REPORTS}/tests/
+    - if [[ ! -e "$HOME/bin/codecov" ]]; then mkdir -p $HOME/bin; curl -so $HOME/bin/codecov https://codecov.io/bash && chmod 755 $HOME/bin/codecov; fi
   override:
-    - if [[ -e $HOME/docker/cache.tar ]]; then docker load --input $HOME/docker/cache.tar; fi :
+    - if [[ -e "$HOME/docker/cache.tar" ]]; then docker load --input $HOME/docker/cache.tar; fi :
         timeout: 6000
     - docker images
     - docker pull nipype/base:latest
@@ -50,7 +52,10 @@ test:
   parallel: true
   post:
     # Place reports in the appropriate folder
-    - cp ${WORKDIR}/tests/*.xml ${CIRCLE_TEST_REPORTS}/tests/
+    - cp ${WORKDIR}/tests/*.xml ${CIRCLE_TEST_REPORTS}/tests/
+    # Send coverage data to codecov.io
+    - codecov -f "coverage*.xml" -s "${WORKDIR}/tests/" -R "${HOME}/nipype/" -F unittests -v -K
+    - codecov -f "smoketest*.xml" -s "${WORKDIR}/tests/" -R "${HOME}/nipype/" -F smoketests -v -K
 
 general:
   artifacts:

docker/files/run_examples.sh

Lines changed: 1 addition & 6 deletions
@@ -25,13 +25,8 @@ fi
 coverage run /src/nipype/tools/run_examples.py $@
 exit_code=$?
 
-# Generate coverage report and submit if token is defined
+# Collect crashfiles and generate xml report
 coverage xml -o ${WORKDIR}/tests/smoketest_${example_id}.xml
-if [ "${CODECOV_TOKEN}" != "" ]; then
-  codecov.io -f ${WORKDIR}/tests/smoketest_${example_id}.xml -t "${CODECOV_TOKEN}" -F smoketests
-fi
-
-# Collect crashfiles
 find /work -name "crash-*" -maxdepth 1 -exec mv {} ${WORKDIR}/crashfiles/ \;
 exit $exit_code

docker/files/run_pytests.sh

Lines changed: 2 additions & 9 deletions
@@ -28,23 +28,16 @@ export COVERAGE_FILE=${WORKDIR}/tests/.coverage.py${PYTHON_VERSION}
 py.test -v --junitxml=${WORKDIR}/tests/pytests_py${PYTHON_VERSION}.xml --cov nipype --cov-config /src/nipype/.coveragerc --cov-report xml:${WORKDIR}/tests/coverage_py${PYTHON_VERSION}.xml ${TESTPATH}
 exit_code=$?
 
-if [ "${CODECOV_TOKEN}" != "" ]; then
-  codecov.io -f ${WORKDIR}/tests/coverage_py${PYTHON_VERSION}.xml -t "${CODECOV_TOKEN}" -F unittests
-fi
-
 # Workaround: run here the profiler tests in python 3
 if [[ "${PYTHON_VERSION}" -ge "30" ]]; then
   echo '[execution]' >> ${HOME}/.nipype/nipype.cfg
   echo 'profile_runtime = true' >> ${HOME}/.nipype/nipype.cfg
   export COVERAGE_FILE=${WORKDIR}/tests/.coverage.py${PYTHON_VERSION}_extra
   py.test -v --junitxml=${WORKDIR}/tests/pytests_py${PYTHON_VERSION}_extra.xml --cov nipype --cov-report xml:${WORKDIR}/tests/coverage_py${PYTHON_VERSION}_extra.xml /src/nipype/nipype/interfaces/tests/test_runtime_profiler.py /src/nipype/nipype/pipeline/plugins/tests/test_multiproc*.py
   exit_code=$(( $exit_code + $? ))
-
-  if [ "${CODECOV_TOKEN}" != "" ]; then
-    codecov.io -f ${WORKDIR}/tests/coverage_py${PYTHON_VERSION}_extra.xml -t "${CODECOV_TOKEN}" -F unittests
-  fi
 fi
 
 # Collect crashfiles
-find /work -name "crash-*" -maxdepth 1 -exec mv {} ${WORKDIR}/crashfiles/ \;
+find ${WORKDIR} -name "crash-*" -maxdepth 1 -exec mv {} ${WORKDIR}/crashfiles/ \;
+
 exit ${exit_code}

nipype/interfaces/freesurfer/utils.py

Lines changed: 38 additions & 11 deletions
@@ -17,6 +17,7 @@
 import re
 import shutil
 
+from ... import logging
 from ...utils.filemanip import fname_presuffix, split_filename
 from ..base import (TraitedSpec, File, traits, OutputMultiPath, isdefined,
                     CommandLine, CommandLineInputSpec)
@@ -29,13 +30,15 @@
                afni='brik', brik='brik', bshort='bshort',
                spm='img', analyze='img', analyze4d='img',
                bfloat='bfloat', nifti1='img', nii='nii',
-               niigz='nii.gz')
+               niigz='nii.gz', gii='gii')
 
 filetypes = ['cor', 'mgh', 'mgz', 'minc', 'analyze',
              'analyze4d', 'spm', 'afni', 'brik', 'bshort',
              'bfloat', 'sdt', 'outline', 'otl', 'gdf',
             'nifti1', 'nii', 'niigz']
+implicit_filetypes = ['gii']
 
+logger = logging.getLogger('interface')
 
 def copy2subjdir(cls, in_file, folder=None, basename=None, subject_id=None):
     """Method to copy an input to the subjects directory"""
@@ -151,7 +154,8 @@ class SampleToSurfaceInputSpec(FSTraitedSpec):
     frame = traits.Int(argstr="--frame %d", desc="save only one frame (0-based)")
 
     out_file = File(argstr="--o %s", genfile=True, desc="surface file to write")
-    out_type = traits.Enum(filetypes, argstr="--out_type %s", desc="output file type")
+    out_type = traits.Enum(filetypes + implicit_filetypes,
+                           argstr="--out_type %s", desc="output file type")
     hits_file = traits.Either(traits.Bool, File(exists=True), argstr="--srchit %s",
                               desc="save image with number of hits at each voxel")
     hits_type = traits.Enum(filetypes, argstr="--srchit_type", desc="hits file type")
@@ -201,12 +205,6 @@ class SampleToSurface(FSCommand):
     input_spec = SampleToSurfaceInputSpec
     output_spec = SampleToSurfaceOutputSpec
 
-    filemap = dict(cor='cor', mgh='mgh', mgz='mgz', minc='mnc',
-                   afni='brik', brik='brik', bshort='bshort',
-                   spm='img', analyze='img', analyze4d='img',
-                   bfloat='bfloat', nifti1='img', nii='nii',
-                   niigz='nii.gz')
-
     def _format_arg(self, name, spec, value):
         if name == "sampling_method":
             range = self.inputs.sampling_range
@@ -226,16 +224,29 @@ def _format_arg(self, name, spec, value):
             return spec.argstr % self.inputs.subject_id
         if name in ["hits_file", "vox_file"]:
             return spec.argstr % self._get_outfilename(name)
+        if name == "out_type":
+            if isdefined(self.inputs.out_file):
+                _, base, ext = split_filename(self._get_outfilename())
+                if ext != filemap[value]:
+                    if ext in filemap.values():
+                        raise ValueError(
+                            "Cannot create {} file with extension "
+                            "{}".format(value, ext))
+                    else:
+                        logger.warn("Creating {} file with extension {}: "
+                                    "{}{}".format(value, ext, base, ext))
+            if value in implicit_filetypes:
+                return ""
         return super(SampleToSurface, self)._format_arg(name, spec, value)
 
     def _get_outfilename(self, opt="out_file"):
         outfile = getattr(self.inputs, opt)
         if not isdefined(outfile) or isinstance(outfile, bool):
             if isdefined(self.inputs.out_type):
                 if opt == "hits_file":
-                    suffix = '_hits.' + self.filemap[self.inputs.out_type]
+                    suffix = '_hits.' + filemap[self.inputs.out_type]
                 else:
-                    suffix = '.' + self.filemap[self.inputs.out_type]
+                    suffix = '.' + filemap[self.inputs.out_type]
             elif opt == "hits_file":
                 suffix = "_hits.mgz"
             else:
@@ -365,7 +376,7 @@ class SurfaceTransformInputSpec(FSTraitedSpec):
     source_type = traits.Enum(filetypes, argstr='--sfmt %s',
                               requires=['source_file'],
                               desc="source file format")
-    target_type = traits.Enum(filetypes, argstr='--tfmt %s',
+    target_type = traits.Enum(filetypes + implicit_filetypes, argstr='--tfmt %s',
                               desc="output format")
     reshape = traits.Bool(argstr="--reshape",
                           desc="reshape output surface to conform with Nifti")
@@ -402,6 +413,22 @@ class SurfaceTransform(FSCommand):
     input_spec = SurfaceTransformInputSpec
     output_spec = SurfaceTransformOutputSpec
 
+    def _format_arg(self, name, spec, value):
+        if name == "target_type":
+            if isdefined(self.inputs.out_file):
+                _, base, ext = split_filename(self._list_outputs()['out_file'])
+                if ext != filemap[value]:
+                    if ext in filemap.values():
+                        raise ValueError(
+                            "Cannot create {} file with extension "
+                            "{}".format(value, ext))
+                    else:
+                        logger.warn("Creating {} file with extension {}: "
+                                    "{}{}".format(value, ext, base, ext))
+            if value in implicit_filetypes:
+                return ""
+        return super(SurfaceTransform, self)._format_arg(name, spec, value)
+
     def _list_outputs(self):
         outputs = self._outputs().get()
         outputs["out_file"] = self.inputs.out_file
