Skip to content

can't pickle instancemethod objects during MCFLIRT #2910

Open
@Amyunimus

Description

@Amyunimus

Summary & Actual behavior

Motion correction (using fsl.MCFLIRT) in a workflow returns TypeError: can't pickle instancemethod objects.

I have tried splicing apart each section of the workflow and it seems like there is something about MCFLIRT when combined with MapNode that is giving rise to the issue (MCFLIRT on its own runs without issue, as best I can tell).

I suspect that as I build out the workflow, there will continue to be issues with pickling instance methods -- that is, I don't think this problem is specific to MCFLIRT.

Platform details:

{'commit_hash': '%h',
 'commit_source': 'archive substitution',
 'networkx_version': '2.2',
 'nibabel_version': '2.4.0',
 'nipype_version': '1.1.9',
 'numpy_version': '1.15.4',
 'pkg_path': '/anaconda2/lib/python2.7/site-packages/nipype',
 'scipy_version': '1.1.0',
 'sys_executable': '/anaconda2/bin/python',
 'sys_platform': 'darwin',
 'sys_version': '2.7.15 |Anaconda, Inc.| (default, Dec 14 2018, 13:10:39) \n[GCC 4.2.1 Compatible Clang 4.0.1 (tags/RELEASE_401/final)]',
 'traits_version': '5.0.0'}

Execution environment

macOS mojave
python 2.7

Script/Workflow details/How to replicate the behavior

import os                                    # system functions
import json                                  # reading exp info

import nipype.interfaces.io as nio           # Data i/o
import nipype.interfaces.fsl as fsl          # fsl
import nipype.interfaces.utility as util     # utility
import nipype.pipeline.engine as pe          # pypeline engine

from nipype import config

# Set the default output file format for every FSL interface in this script
# to compressed NIfTI (.nii.gz), instead of relying on the FSLOUTPUTTYPE
# environment variable being set correctly.
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

# --- Study layout and iteration parameters ----------------------------------

# Input dataset (ds000224, release R1.0.2), resolved to an absolute path.
data_dir = os.path.abspath('../ds000224_R1.0.2')
# Destination for the DataSink outputs.
withinSubjectResults_dir = os.path.abspath('../fsl_datasink')
# Scratch space where nipype keeps intermediate node results.
workingdir = os.path.abspath('../fsl_working_dir/working_dir')
# Where nipype writes crash files when a node fails.
crashRecordsDir = os.path.abspath('../fsl_working_dir/crash_dumps')

# Subjects the meta workflow iterates over.
subject_list = ['MSC01']

# Task label used to locate the BOLD runs and their JSON sidecar.
task = 'memoryfaces'

# Functional run numbers.
func_scan = [1, 2]

# TR of the functional images, read from the task's BIDS JSON sidecar
# (e.g. <data_dir>/task-memoryfaces_bold.json).
_bold_sidecar = os.path.join(data_dir, 'task-' + task + '_bold.json')
with open(_bold_sidecar, 'rt') as fp:
    task_info = json.load(fp)
TR = task_info['RepetitionTime']  # seconds, per the BIDS spec


# FSL standard-space templates and registration configuration.
# MNI152 2 mm T1 template (with skull).
mfxTemplateBrain = '/usr/local/fsl/data/standard/MNI152_T1_2mm.nii.gz'
# Skull-stripped version of the same template.
strippedmfxTemplateBrain = '/usr/local/fsl/data/standard/MNI152_T1_2mm_brain.nii.gz'
# Name of the bundled FLIRT/FNIRT config for T1 -> MNI152 2 mm registration.
mniConfig = 'T1_2_MNI152_2mm'
# Dilated brain mask in MNI152 2 mm space.
mniMask = '/usr/local/fsl/data/standard/MNI152_T1_2mm_brain_mask_dil.nii.gz'


# ## Functions


# Connection helper: pick the first run out of a list of files.
def pickfirst(files):
    """Return the first item of *files* (raises IndexError when empty)."""
    first_file = files[0]
    return first_file

# Connection helper: index of the middle volume of the first functional run,
# used as ExtractROI's 't_min' (a 0-based volume index).
def getmiddlevolume(func):
    """Return the 0-based index of the middle time point of the first run.

    Parameters
    ----------
    func : list of str
        Paths to the functional runs; only the first one is inspected.

    Returns
    -------
    int
        ``(timepoints // 2) - 1``, suitable for ``ExtractROI.t_min``.
    """
    from nibabel import load
    funcfile = func[0]
    # ``get_shape()`` was deprecated in nibabel 2.1 and removed in 3.0;
    # ``.shape`` is the supported accessor.  The 4th axis is time.
    timepoints = load(funcfile).shape[3]
    # Floor division keeps the result an int under Python 3 as well
    # (the original ``/`` would hand a float to ExtractROI.t_min).
    return (timepoints // 2) - 1


# ## Preprocessing Workflow

# Container workflow for the per-subject preprocessing steps.
preproc = pe.Workflow(name='preproc')

# Identity node that fans the selected files ('func', 'struct') out to the
# downstream preprocessing nodes.
inputnode = pe.Node(interface=util.IdentityInterface(fields=['func',
                                                             'struct',]),
                    name='inputspec')

# Extract a single volume (t_size=1) to serve as the motion-correction
# reference.  'in_file' and 't_min' are wired up in connect() below via the
# pickfirst / getmiddlevolume helpers.
extract_ref = pe.Node(interface=fsl.ExtractROI(t_size=1),
                      name = 'extractref')

# Realign each functional run to the reference volume: the MapNode runs one
# MCFLIRT per entry of 'in_file'.  save_mats/save_plots keep the transform
# matrices and the motion-parameter (.par) files for the DataSink.
# NOTE(review): the "can't pickle instancemethod objects" error reported in
# this issue is tied to this MapNode under Python 2.7 — presumably resolved
# by running under Python 3, where instance methods pickle; confirm.
motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats = True,
                                                  save_plots = True),
                            name='motion_correct',
                            iterfield = ['in_file'])
                            

# ## META workflow

# infosource iterates the workflow over each 'subject_id' in subject_list;
# nipype expands the iterable into one sub-graph per subject.
infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                     name="infosource")
infosource.iterables = ('subject_id', subject_list)


# SelectFiles templates (alternative to DataGrabber): {subject_id} is filled
# from infosource; the ses-func* globs match every functional session.
struct = 'sub-{subject_id}/ses-struct01/anat/sub-{subject_id}_ses-struct01_run-01_T1w.nii.gz'
func =   'sub-{subject_id}/ses-func*/func/sub-{subject_id}_ses-func*_task-memoryfaces_bold.nii.gz'
evs =    'sub-{subject_id}/ses-func*/func/sub-{subject_id}_ses-func*_task-memoryfaces_events.tsv'

templates = {'struct': struct,
             'func': func,
             'evs': evs}
selectfiles = pe.Node(interface=nio.SelectFiles(templates,
                               base_directory=data_dir),
                   name="selectfiles")

# DataSink: stores the outputs connected to it (motion-correction results)
# under withinSubjectResults_dir/<container>, one container per subject.
datasink = pe.Node(interface=nio.DataSink(base_directory= withinSubjectResults_dir,),
                   name="datasink")


# Run from the dedicated working directory (defined above but previously
# unused) so intermediate node results are kept out of the cwd.
preproc.base_dir = workingdir

# Wire the graph.  Duplicate (src, dst) pairs from the original script are
# merged into single entries; the field pairs are unchanged.
preproc.connect([
    (infosource, selectfiles, [('subject_id', 'subject_id')]),
    (infosource, datasink, [('subject_id', 'container')]),
    (selectfiles, inputnode, [('struct', 'struct'),
                              ('func', 'func')]),
    # Both ExtractROI inputs derive from 'func' via the helper functions:
    # the first run becomes 'in_file', its middle volume index 't_min'.
    (inputnode, extract_ref, [(('func', pickfirst), 'in_file'),
                              (('func', getmiddlevolume), 't_min')]),
    (inputnode, motion_correct, [('func', 'in_file')]),
    (extract_ref, motion_correct, [('roi_file', 'ref_file')]),
    (motion_correct, datasink, [('par_file', 'motion_correct.par_file'),
                                ('out_file', 'motion_correct.out_file')]),
])


# Write the colored workflow graph, then execute.
# NOTE(review): the reported TypeError ("can't pickle instancemethod
# objects") surfaces during preproc.run() on Python 2.7; instance methods
# are picklable on Python 3, so rerunning there is the usual workaround —
# confirm against the nipype issue tracker.
preproc.write_graph(graph2use='colored', format='png', simple_form=True,
                    dotfilename='./graph_preproc.dot')
preproc.run()

Metadata

Metadata

Assignees

No one assigned

    Labels

    No labels
    No labels

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions