From e113349bb5ac343dcb7442b269ed5260de1e9981 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 9 May 2018 17:56:57 -0400 Subject: [PATCH 01/56] enh: allow dcm2niix configuration --- heudiconv/cli/run.py | 10 ++++------ heudiconv/convert.py | 37 +++++++++++++++++++++++++++++-------- 2 files changed, 33 insertions(+), 14 deletions(-) diff --git a/heudiconv/cli/run.py b/heudiconv/cli/run.py index 5b766d5a..8c3f8fd8 100644 --- a/heudiconv/cli/run.py +++ b/heudiconv/cli/run.py @@ -207,6 +207,8 @@ def get_parser(): 'jsons') parser.add_argument('--random-seed', type=int, default=None, help='Random seed to initialize RNG') + parser.add_argument('--dcmconfig', default=None, + help='JSON file for additional dcm2niix configuration') submission = parser.add_argument_group('Conversion submission options') submission.add_argument('-q', '--queue', default=None, help='select batch system to submit jobs to instead' @@ -226,14 +228,9 @@ def process_args(args): outdir = op.abspath(args.outdir) - if args.command: - process_extra_commands(outdir, args) - return - lgr.info(INIT_MSG(packname=__packagename__, version=__version__)) - # # Load heuristic -- better do it asap to make sure it loads correctly # @@ -327,7 +324,8 @@ def process_args(args): bids=args.bids, seqinfo=seqinfo, min_meta=args.minmeta, - overwrite=args.overwrite,) + overwrite=args.overwrite, + dcmconfig=args.dcmconfig,) lgr.info("PROCESSING DONE: {0}".format( str(dict(subject=sid, outdir=study_outdir, session=session)))) diff --git a/heudiconv/convert.py b/heudiconv/convert.py index afd2bd2a..03b74e50 100644 --- a/heudiconv/convert.py +++ b/heudiconv/convert.py @@ -79,7 +79,7 @@ def conversion_info(subject, outdir, info, filegroup, ses): def prep_conversion(sid, dicoms, outdir, heuristic, converter, anon_sid, anon_outdir, with_prov, ses, bids, seqinfo, min_meta, - overwrite): + overwrite, dcmconfig): if dicoms: lgr.info("Processing %d dicoms", len(dicoms)) elif seqinfo: @@ -195,7 +195,8 @@ def prep_conversion(sid, dicoms, outdir, heuristic, converter, anon_sid, bids=bids, outdir=tdir, min_meta=min_meta, - overwrite=overwrite,) + overwrite=overwrite, + dcmconfig=dcmconfig,) for item_dicoms in filegroup.values(): clear_temp_dicoms(item_dicoms) @@ -212,7 +213,8 @@ def prep_conversion(sid, dicoms, outdir, heuristic, converter, anon_sid, def convert(items, converter, scaninfo_suffix, custom_callable, with_prov, - bids, outdir, min_meta, overwrite, symlink=True, prov_file=None): + bids, outdir, min_meta, overwrite, symlink=True, prov_file=None, + dcmconfig=None): """Perform actual conversion (calls to converter etc) given info from heuristic's `infotodict` @@ -279,7 +281,7 @@ def convert(items, converter, scaninfo_suffix, custom_callable, with_prov, # run conversion through nipype res, prov_file = nipype_convert(item_dicoms, prefix, with_prov, - bids, tmpdir) + bids, tmpdir, dcmconfig) bids_outfiles = save_converted_files(res, item_dicoms, bids, outtype, prefix, @@ -383,8 +385,25 @@ def convert_dicom(item_dicoms, bids, prefix, shutil.copyfile(filename, outfile) -def nipype_convert(item_dicoms, prefix, with_prov, bids, tmpdir): - """ """ +def nipype_convert(item_dicoms, prefix, with_prov, bids, tmpdir, dcmconfig=None): + """ + Converts DICOMs grouped from heuristic using Nipype's Dcm2niix interface. 
+ + Parameters + ---------- + item_dicoms : List + DICOM files to convert + prefix : String + Heuristic output path + with_prov : Bool + Store provenance information + bids : Bool + Output BIDS sidecar JSONs + tmpdir : Directory + Conversion working directory + dcmconfig : File (optional) + JSON file used for additional Dcm2niix configuration + """ import nipype if with_prov: from nipype import config @@ -394,9 +413,11 @@ def nipype_convert(item_dicoms, prefix, with_prov, bids, tmpdir): item_dicoms = list(map(op.abspath, item_dicoms)) # absolute paths - dicom_dir = op.dirname(item_dicoms[0]) if item_dicoms else None + fromfile = dcmconfig if dcmconfig else None + if fromfile: + lgr.info("Using custom config file %s", fromfile) - convertnode = Node(Dcm2niix(), name='convert') + convertnode = Node(Dcm2niix(from_file=fromfile), name='convert') convertnode.base_dir = tmpdir convertnode.inputs.source_names = item_dicoms convertnode.inputs.out_filename = op.basename(op.dirname(prefix)) From c78d25b701e47b11f6356baa7a620d96b9794ec0 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 9 May 2018 18:00:23 -0400 Subject: [PATCH 02/56] enh: display version before acting --- heudiconv/cli/run.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/heudiconv/cli/run.py b/heudiconv/cli/run.py index 8c3f8fd8..899dca5b 100644 --- a/heudiconv/cli/run.py +++ b/heudiconv/cli/run.py @@ -231,6 +231,9 @@ def process_args(args): lgr.info(INIT_MSG(packname=__packagename__, version=__version__)) + if args.command: + process_extra_commands(outdir, args) + return # # Load heuristic -- better do it asap to make sure it loads correctly # From f053c0bbbd767747db9094d56ff29fc2aa64b624 Mon Sep 17 00:00:00 2001 From: pvelasco Date: Mon, 17 Sep 2018 15:01:56 -0400 Subject: [PATCH 03/56] Handles multi-echo data. It handles both ME-EPI and ME-MPRAGE. For multi-echo functional data, it generates a single _events.tsv file (following BIDS spec 1.1.0). --- heudiconv/bids.py | 16 ++++++++ heudiconv/convert.py | 97 +++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 111 insertions(+), 2 deletions(-) diff --git a/heudiconv/bids.py b/heudiconv/bids.py index 80875e56..33b03360 100644 --- a/heudiconv/bids.py +++ b/heudiconv/bids.py @@ -88,6 +88,22 @@ def populate_bids_templates(path, defaults={}): # create a stub onsets file for each one of those suf = '_bold.json' assert fpath.endswith(suf) + # specify the name of the '_events.tsv' file: + if ( '_echo-' in fpath ): + # multi-echo sequence: bids (1.1.0) specifies just one '_events.tsv' + # file, common for all echoes. The name will not include _echo-. 
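As an illustration of this naming rule (hypothetical filename, not taken from the patch), the echo-1 sidecar collapses onto the single shared events file:

    # hypothetical example of the rule implemented below
    fpath = 'sub-01/func/sub-01_task-rest_echo-1_bold.json'
    head, tail = fpath.split('_echo-', 1)   # 'sub-01/func/sub-01_task-rest', '1_bold.json'
    echo, rest = tail.split('_', 1)         # '1', 'bold.json'
    if echo == '1':
        fpath = head + '_' + rest           # 'sub-01/func/sub-01_task-rest_bold.json'
    events_file = fpath[:-len('_bold.json')] + '_events.tsv'
    # -> 'sub-01/func/sub-01_task-rest_events.tsv'; echoes 2 and above are skipped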
+ # So, find out the echo number: + fpath_split = fpath.split('_echo-') # split fpath using '_echo-' + fpath_split_2 = fpath_split[1].split('_') # split the second part of fpath_split using '_' + echoNo = fpath_split_2[0] # get echo number + if ( echoNo == '1' ): + # we modify fpath to exclude '_echo-' + echoNo: + fpath = fpath_split[0] + '_' + fpath_split_2[1] + else: + # for echoNo greater than 1, don't create the events file, so go to + # the next for loop iteration: + continue + events_file = fpath[:-len(suf)] + '_events.tsv' # do not touch any existing thing, it may be precious if not op.lexists(events_file): diff --git a/heudiconv/convert.py b/heudiconv/convert.py index d1749164..bca94a0b 100644 --- a/heudiconv/convert.py +++ b/heudiconv/convert.py @@ -441,6 +441,9 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, """ from nipype.interfaces.base import isdefined + prefix_dirname = op.dirname(prefix + '.ext') + prefix_basename = op.basename(prefix) + bids_outfiles = [] res_files = res.outputs.converted_files @@ -473,11 +476,101 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, if len(res.outputs.bids) == len(res_files) else [None] * len(res_files)) + ### Do we have a multi-echo series? ### + # Some Siemens sequences (e.g. CMRR's MB-EPI) set the label 'TE1', + # 'TE2', etc. in the 'ImageType' field. However, other seqs do not + # (e.g. MGH ME-MPRAGE). They do set a 'EchoNumber', but not for the + # first echo. To compound the problem, the echoes are NOT in order, + # so the first NIfTI file does not correspond to echo-1, etc. So, we + # need to know, beforehand, whether we are dealing with a multi-echo + # series. To do that, the most straightforward way is to read the + # echo times for all bids_files and see if they are all the same or not. + + # Get the echo times: + echoTimes = [load_json(bids_file).get('EchoTime') for bids_file in bids_files] + + # To see if the echo times are the same, convert it to a set and see if + # only one remains: + if ( len(set(echoTimes)) == 1 ): multiecho = False + else : multiecho = True + + ### Loop through the bids_files, set the output name and save files ### + for fl, suffix, bids_file in zip(res_files, suffixes, bids_files): - outname = "%s%s.%s" % (prefix, suffix, outtype) + # load the json file info: + fileinfo = load_json(bids_file) + + # set the prefix basename for this specific file (we'll modify it, and + # we don't want to modify it for all the bids_files): + this_prefix_basename = prefix_basename + + # _sbref sequences reconstructing magnitude and phase generate + # two NIfTI files IN THE SAME SERIES, so we cannot just add + # the suffix, if we want to be bids compliant: + if ( bids and (this_prefix_basename[-6:] == '_sbref') ): + # Check to see if it is magnitude or phase reconstruction: + if ('M' in fileinfo.get('ImageType')): mag_or_phase = 'magnitude' + elif ('P' in fileinfo.get('ImageType')): mag_or_phase = 'phase' + else : mag_or_phase = suffix + + # If "_rec-'mag_or_phase'" is not already there, check where to insert it: + if not (("_rec-%s" % mag_or_phase) in this_prefix_basename): + + # If "_rec-" is specified, append the 'mag_or_phase' value. 
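For a hypothetical heuristic prefix, the intended outcome of this magnitude/phase handling is:

    # hypothetical outcome of the '_rec-' insertion implemented below
    #   heuristic prefix : sub-01_task-rest_sbref
    #   magnitude image  : sub-01_task-rest_rec-magnitude_sbref.nii.gz
    #   phase image      : sub-01_task-rest_rec-phase_sbref.nii.gz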
+ if ('_rec-' in this_prefix_basename): + spt = this_prefix_basename.split('_rec-',1) + # grab the reconstruction type (grab whatever we have before the next "_"): + spt_spt = spt[1].split('_',1) + # update 'this_prefix_basename': + this_prefix_basename = "%s_rec-%s-%s_%s" % (spt[0], spt_spt[0], mag_or_phase, spt_spt[1]) + + # If not, insert "_rec-" + 'mag_or_phase' into the prefix_basename + # **before** "_run", "_echo" or "_sbref", whichever appears first: + else: + for my_str in ['_run', '_echo', '_sbref']: + if (my_str in this_prefix_basename): + spt = this_prefix_basename.split(my_str, 1) + this_prefix_basename = "%s_rec-%s%s%s" % (spt[0], mag_or_phase, my_str, spt[1]) + break + + # Now check if this run is multi-echo (Note: it can be _sbref and multiecho, so + # don't use "elif"): + # For multi-echo sequences, we have to specify the echo number in the file name: + if ( bids and multiecho ): + # Get the EchoNumber from json file info. If not present, it's echo-1 + echoNumber=( fileinfo.get('EchoNumber') or 1 ) + + # Now, decide where to insert it. + # Insert it **before** the following string(s), whichever appears first. + # (Note: If you decide to support multi-echo for other sequences (e.g. + # ME-MPRAGE), add the string before which you want to add the echo number + # to the list below): + for my_str in ['_bold', '_sbref', '_T1w']: + if (my_str in this_prefix_basename): + spt = this_prefix_basename.split(my_str, 1) + this_prefix_basename = "%s_echo-%s%s%s" % (spt[0], echoNumber, my_str, spt[1]) + break + + # For Scout runs with multiple NIfTI images per run: + if ( bids and ('scout' in this_prefix_basename.lower()) ): + # in some cases (more than one slice slab), there are several + # NIfTI images in the scout run, so distinguish them with "_acq-" + spt = this_prefix_basename.split('_acq-Scout', 1) + this_prefix_basename = "%s%s%s%s" % (spt[0], '_acq-Scout', suffix, spt[1]) + + # Fallback option: + # If we have failed to modify this_prefix_basename, because it didn't fall + # into any of the options above, just add the suffix at the end: + if ( this_prefix_basename == prefix_basename ): + this_prefix_basename = "%s%s" % (this_prefix_basename, suffix) + + # Finally, form the outname by stitch the directory and outtype: + outname = "%s/%s.%s" % (prefix_dirname, this_prefix_basename, outtype) + + # Write the files needed: safe_copyfile(fl, outname, overwrite) if bids_file: - outname_bids_file = "%s%s.json" % (prefix, suffix) + outname_bids_file = "%s.json" % (outname.strip(outtype)) safe_copyfile(bids_file, outname_bids_file, overwrite) bids_outfiles.append(outname_bids_file) # res_files is not a list From 3250283ff7be3f478b35b1fb4e504cec42403ddc Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 6 Nov 2018 17:11:10 -0500 Subject: [PATCH 04/56] rf+enh: ME support, custom BIDS exception --- heudiconv/bids.py | 5 +++ heudiconv/convert.py | 100 ++++++++++++++++++++++++------------------- 2 files changed, 61 insertions(+), 44 deletions(-) diff --git a/heudiconv/bids.py b/heudiconv/bids.py index 33b03360..8af08f22 100644 --- a/heudiconv/bids.py +++ b/heudiconv/bids.py @@ -24,6 +24,11 @@ lgr = logging.getLogger(__name__) + +class BIDSException(Exception): + pass + + def populate_bids_templates(path, defaults={}): """Premake BIDS text files with templates""" diff --git a/heudiconv/convert.py b/heudiconv/convert.py index bca94a0b..4518e9bb 100644 --- a/heudiconv/convert.py +++ b/heudiconv/convert.py @@ -24,6 +24,7 @@ save_scans_key, tuneup_bids_json_files, add_participant_record, + 
BIDSException ) from .dicoms import ( group_dicoms_into_seqinfos, @@ -486,73 +487,83 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, # series. To do that, the most straightforward way is to read the # echo times for all bids_files and see if they are all the same or not. - # Get the echo times: - echoTimes = [load_json(bids_file).get('EchoTime') for bids_file in bids_files] + # Get the echo times while not breaking non-BIDS compliance + echo_times = [] + for bids_file in bids_files: + if bids_file: + echo_times.append(load_json(bids_file).get('EchoTime')) # To see if the echo times are the same, convert it to a set and see if # only one remains: - if ( len(set(echoTimes)) == 1 ): multiecho = False - else : multiecho = True + multiecho = False + if echo_times: + multiecho = len(set(echo_times)) == 1 - ### Loop through the bids_files, set the output name and save files ### + ### Loop through the bids_files, set the output name and save files for fl, suffix, bids_file in zip(res_files, suffixes, bids_files): # load the json file info: - fileinfo = load_json(bids_file) + # TODO: time performance + if bids_file: + fileinfo = load_json(bids_file) - # set the prefix basename for this specific file (we'll modify it, and - # we don't want to modify it for all the bids_files): + # set the prefix basename for this specific file (we'll modify it, + # and we don't want to modify it for all the bids_files): this_prefix_basename = prefix_basename # _sbref sequences reconstructing magnitude and phase generate # two NIfTI files IN THE SAME SERIES, so we cannot just add # the suffix, if we want to be bids compliant: - if ( bids and (this_prefix_basename[-6:] == '_sbref') ): + if (bids_file and (this_prefix_basename.endswith('_sbref'))): # Check to see if it is magnitude or phase reconstruction: - if ('M' in fileinfo.get('ImageType')): mag_or_phase = 'magnitude' - elif ('P' in fileinfo.get('ImageType')): mag_or_phase = 'phase' - else : mag_or_phase = suffix + if 'M' in fileinfo.get('ImageType'): + mag_or_phase = 'magnitude' + elif 'P' in fileinfo.get('ImageType'): + mag_or_phase = 'phase' + else: + mag_or_phase = suffix - # If "_rec-'mag_or_phase'" is not already there, check where to insert it: + # Insert reconstruction label if not (("_rec-%s" % mag_or_phase) in this_prefix_basename): - # If "_rec-" is specified, append the 'mag_or_phase' value. + # If "_rec-" is specified, prepend the 'mag_or_phase' value. 
if ('_rec-' in this_prefix_basename): - spt = this_prefix_basename.split('_rec-',1) - # grab the reconstruction type (grab whatever we have before the next "_"): - spt_spt = spt[1].split('_',1) - # update 'this_prefix_basename': - this_prefix_basename = "%s_rec-%s-%s_%s" % (spt[0], spt_spt[0], mag_or_phase, spt_spt[1]) + raise BIDSException( + "Reconstruction label for multi-echo single-band" + " reference images will be automatically set, remove" + " from heuristic" + ) # If not, insert "_rec-" + 'mag_or_phase' into the prefix_basename # **before** "_run", "_echo" or "_sbref", whichever appears first: - else: - for my_str in ['_run', '_echo', '_sbref']: - if (my_str in this_prefix_basename): - spt = this_prefix_basename.split(my_str, 1) - this_prefix_basename = "%s_rec-%s%s%s" % (spt[0], mag_or_phase, my_str, spt[1]) - break - - # Now check if this run is multi-echo (Note: it can be _sbref and multiecho, so - # don't use "elif"): - # For multi-echo sequences, we have to specify the echo number in the file name: - if ( bids and multiecho ): + for label in ['_run', '_echo', '_sbref']: + if (label in this_prefix_basename): + this_prefix_basename = this_prefix_basename.replace( + label, "_rec-%s%s" % (mag_or_phase, label) + ) + break + + # Now check if this run is multi-echo + # (Note: it can be _sbref and multiecho, so don't use "elif"): + # For multi-echo sequences, we have to specify the echo number in + # the file name: + if bids and multiecho: # Get the EchoNumber from json file info. If not present, it's echo-1 - echoNumber=( fileinfo.get('EchoNumber') or 1 ) + echo_number = fileinfo.get('EchoNumber', 1) + + supported_multiecho = ['_bold', '_sbref', '_T1w'] # epi? # Now, decide where to insert it. # Insert it **before** the following string(s), whichever appears first. - # (Note: If you decide to support multi-echo for other sequences (e.g. - # ME-MPRAGE), add the string before which you want to add the echo number - # to the list below): - for my_str in ['_bold', '_sbref', '_T1w']: - if (my_str in this_prefix_basename): - spt = this_prefix_basename.split(my_str, 1) - this_prefix_basename = "%s_echo-%s%s%s" % (spt[0], echoNumber, my_str, spt[1]) + for imgtype in ['_bold', '_sbref', '_T1w']: + if (imgtype in this_prefix_basename): + this_prefix_basename = this_prefix_basename.replace( + imgtype, "_echo-%d%s" % (echo_number, imgtype) + ) break # For Scout runs with multiple NIfTI images per run: - if ( bids and ('scout' in this_prefix_basename.lower()) ): + if (bids and ('scout' in this_prefix_basename.lower())): # in some cases (more than one slice slab), there are several # NIfTI images in the scout run, so distinguish them with "_acq-" spt = this_prefix_basename.split('_acq-Scout', 1) @@ -562,15 +573,16 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, # If we have failed to modify this_prefix_basename, because it didn't fall # into any of the options above, just add the suffix at the end: if ( this_prefix_basename == prefix_basename ): - this_prefix_basename = "%s%s" % (this_prefix_basename, suffix) + this_prefix_basename += suffix - # Finally, form the outname by stitch the directory and outtype: - outname = "%s/%s.%s" % (prefix_dirname, this_prefix_basename, outtype) + # Finally, form the outname by stitching the directory and outtype: + outname = op.join(prefix_dirname, this_prefix_basename) + outfile = outname + '.' 
+ outtype # Write the files needed: - safe_copyfile(fl, outname, overwrite) + safe_copyfile(fl, outfile, overwrite) if bids_file: - outname_bids_file = "%s.json" % (outname.strip(outtype)) + outname_bids_file = "%s.json" % (outname) safe_copyfile(bids_file, outname_bids_file, overwrite) bids_outfiles.append(outname_bids_file) # res_files is not a list From 266de4a9a5d755455f348cb66ef025337a8d3b9a Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 8 Jan 2019 12:09:00 -0500 Subject: [PATCH 05/56] ref+tst: some refactoring, intial testing --- heudiconv/convert.py | 45 ++++++++++++++-------------- heudiconv/heuristics/bids-ME.py | 24 +++++++++++++++ heudiconv/tests/test_regression.py | 48 ++++++++++++++++++++++++++---- heudiconv/tests/utils.py | 46 +++++++++++++++++++++++----- 4 files changed, 128 insertions(+), 35 deletions(-) create mode 100644 heudiconv/heuristics/bids-ME.py diff --git a/heudiconv/convert.py b/heudiconv/convert.py index bbd8f18f..be6db96f 100644 --- a/heudiconv/convert.py +++ b/heudiconv/convert.py @@ -242,7 +242,7 @@ def convert(items, converter, scaninfo_suffix, custom_callable, with_prov, if not isinstance(outtypes, (list, tuple)): outtypes = (outtypes,) - prefix_dirname = op.dirname(prefix + '.ext') + prefix_dirname = op.dirname(prefix) outname_bids = prefix + '.json' bids_outfiles = [] lgr.info('Converting %s (%d DICOMs) -> %s . ' @@ -442,8 +442,7 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, """ from nipype.interfaces.base import isdefined - prefix_dirname = op.dirname(prefix + '.ext') - prefix_basename = op.basename(prefix) + prefix_dirname, prefix_basename = op.split(prefix) bids_outfiles = [] res_files = res.outputs.converted_files @@ -475,8 +474,8 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, # Also copy BIDS files although they might need to # be merged/postprocessed later bids_files = sorted(res.outputs.bids - if len(res.outputs.bids) == len(res_files) - else [None] * len(res_files)) + if len(res.outputs.bids) == len(res_files) + else [None] * len(res_files)) ### Do we have a multi-echo series? ### # Some Siemens sequences (e.g. CMRR's MB-EPI) set the label 'TE1', @@ -488,23 +487,24 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, # series. To do that, the most straightforward way is to read the # echo times for all bids_files and see if they are all the same or not. 
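A minimal sketch of that detection idea (sidecar names are hypothetical; `load_json` is the helper already used throughout heudiconv):

    from heudiconv.utils import load_json
    sidecars = ['e1_bold.json', 'e2_bold.json', 'e3_bold.json']   # hypothetical
    echo_times = {load_json(f).get('EchoTime') for f in sidecars if f}
    # more than one distinct EchoTime across the series -> multi-echo
    multiecho = len(echo_times) > 1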
- # Get the echo times while not breaking non-BIDS compliance - echo_times = [] + # Check for echotime information + echo_times = set() + for bids_file in bids_files: if bids_file: - echo_times.append(load_json(bids_file).get('EchoTime')) + # check for varying EchoTimes + echot = load_json(bids_file).get('EchoTime', None) + if echot is not None: + echo_times.add(echot) # To see if the echo times are the same, convert it to a set and see if - # only one remains: - multiecho = False - if echo_times: - multiecho = len(set(echo_times)) == 1 + # only one remains: + is_multiecho = len(echo_times) >= 1 if echo_times else False ### Loop through the bids_files, set the output name and save files - for fl, suffix, bids_file in zip(res_files, suffixes, bids_files): - # load the json file info: - # TODO: time performance + + # TODO: monitor conversion duration if bids_file: fileinfo = load_json(bids_file) @@ -515,7 +515,7 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, # _sbref sequences reconstructing magnitude and phase generate # two NIfTI files IN THE SAME SERIES, so we cannot just add # the suffix, if we want to be bids compliant: - if (bids_file and (this_prefix_basename.endswith('_sbref'))): + if bids_file and this_prefix_basename.endswith('_sbref'): # Check to see if it is magnitude or phase reconstruction: if 'M' in fileinfo.get('ImageType'): mag_or_phase = 'magnitude' @@ -525,7 +525,7 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, mag_or_phase = suffix # Insert reconstruction label - if not (("_rec-%s" % mag_or_phase) in this_prefix_basename): + if not ("_rec-%s" % mag_or_phase) in this_prefix_basename: # If "_rec-" is specified, prepend the 'mag_or_phase' value. if ('_rec-' in this_prefix_basename): @@ -548,15 +548,15 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, # (Note: it can be _sbref and multiecho, so don't use "elif"): # For multi-echo sequences, we have to specify the echo number in # the file name: - if bids and multiecho: + if bids_file and is_multiecho: # Get the EchoNumber from json file info. If not present, it's echo-1 echo_number = fileinfo.get('EchoNumber', 1) - supported_multiecho = ['_bold', '_sbref', '_T1w'] # epi? + supported_multiecho = ['_bold', '_epi', '_sbref', '_T1w'] # Now, decide where to insert it. # Insert it **before** the following string(s), whichever appears first. 
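For example (hypothetical prefix), an EchoNumber of 2 turns the heuristic output into an echo-labelled name:

    # hypothetical example of the insertion performed by the loop below
    # 'sub-01_task-rest_run-1_bold'  ->  'sub-01_task-rest_run-1_echo-2_bold'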
- for imgtype in ['_bold', '_sbref', '_T1w']: + for imgtype in supported_multiecho: if (imgtype in this_prefix_basename): this_prefix_basename = this_prefix_basename.replace( imgtype, "_echo-%d%s" % (echo_number, imgtype) @@ -564,7 +564,7 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, break # For Scout runs with multiple NIfTI images per run: - if (bids and ('scout' in this_prefix_basename.lower())): + if bids and 'scout' in this_prefix_basename.lower(): # in some cases (more than one slice slab), there are several # NIfTI images in the scout run, so distinguish them with "_acq-" spt = this_prefix_basename.split('_acq-Scout', 1) @@ -573,7 +573,7 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, # Fallback option: # If we have failed to modify this_prefix_basename, because it didn't fall # into any of the options above, just add the suffix at the end: - if ( this_prefix_basename == prefix_basename ): + if this_prefix_basename == prefix_basename: this_prefix_basename += suffix # Finally, form the outname by stitching the directory and outtype: @@ -586,6 +586,7 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, outname_bids_file = "%s.json" % (outname) safe_copyfile(bids_file, outname_bids_file, overwrite) bids_outfiles.append(outname_bids_file) + # res_files is not a list else: outname = "{}.{}".format(prefix, outtype) diff --git a/heudiconv/heuristics/bids-ME.py b/heudiconv/heuristics/bids-ME.py new file mode 100644 index 00000000..dd88bc69 --- /dev/null +++ b/heudiconv/heuristics/bids-ME.py @@ -0,0 +1,24 @@ +import os + +def create_key(template, outtype=('nii.gz',), annotation_classes=None): + if template is None or not template: + raise ValueError('Template must be a valid format string') + return template, outtype, annotation_classes + +def infotodict(seqinfo): + """Heuristic evaluator for determining which runs belong where + + allowed template fields - follow python string module: + + item: index within category + subject: participant id + seqitem: run number during scanning + subindex: sub index within group + """ + bold = create_key('sub-{subject}/func/sub-{subject}_task-test_run-{item}_bold') + + info = {bold: []} + for s in seqinfo: + if '_ME_' in s.series_description: + info[bold].append(s.series_id) + return info diff --git a/heudiconv/tests/test_regression.py b/heudiconv/tests/test_regression.py index a4c98687..9bb2ecef 100644 --- a/heudiconv/tests/test_regression.py +++ b/heudiconv/tests/test_regression.py @@ -1,6 +1,7 @@ """Testing conversion with conversion saved on datalad""" import json from glob import glob +import os.path as op import pytest @@ -11,8 +12,8 @@ except ImportError: have_datalad = False -import heudiconv from heudiconv.cli.run import main as runner +from heudiconv.utils import load_json # testing utilities from .utils import fetch_data, gen_heudiconv_args @@ -24,12 +25,19 @@ def test_conversion(tmpdir, subject, heuristic, anon_cmd): tmpdir.chdir() try: - datadir = fetch_data(tmpdir.strpath, subject) + datadir = fetch_data(tmpdir.strpath, + "dbic/QA", # path from datalad database root + getpath=op.join('sourcedata', subject)) except IncompleteResultsError as exc: pytest.skip("Failed to fetch test data: %s" % str(exc)) outdir = tmpdir.mkdir('out').strpath - args = gen_heudiconv_args(datadir, outdir, subject, heuristic, anon_cmd) + args = gen_heudiconv_args(datadir, + outdir, + subject, + heuristic, + anon_cmd, + 
template=op.join('sourcedata/{subject}/*/*/*.tgz')) runner(args) # run conversion # verify functionals were converted @@ -38,8 +46,38 @@ def test_conversion(tmpdir, subject, heuristic, anon_cmd): # compare some json metadata json_ = '{}/task-rest_acq-24mm64sl1000tr32te600dyn_bold.json'.format - orig, conv = (json.load(open(json_(datadir))), - json.load(open(json_(outdir)))) + orig, conv = (load_json(json_(datadir)), + load_json(json_(outdir))) keys = ['EchoTime', 'MagneticFieldStrength', 'Manufacturer', 'SliceTiming'] for key in keys: assert orig[key] == conv[key] + +@pytest.mark.skipif(not have_datalad, reason="no datalad") +def test_multiecho(tmpdir, subject='MEEPI', heuristic='bids-ME.py'): + tmpdir.chdir() + try: + datadir = fetch_data(tmpdir.strpath, "dicoms/velasco/MEEPI") + except IncompleteResultsError as exc: + pytest.skip("Failed to fetch test data: %s" % str(exc)) + + outdir = tmpdir.mkdir('out').strpath + args = gen_heudiconv_args(datadir, outdir, subject, heuristic) + runner(args) # run conversion + + # check if we have echo functionals + echoes = glob(op.join('out', 'sub-' + subject, 'func', '*echo*nii.gz')) + assert len(echoes) == 3 + + # check EchoTime of each functional + # ET1 < ET2 < ET3 + prev_echo = 0 + for echo in sorted(echoes): + _json = echo.replace('.nii.gz', '.json') + assert _json + echotime = load_json(_json).get('EchoTime', None) + assert echotime > prev_echo + prev_echo = echotime + + events = glob(op.join('out', 'sub-' + subject, 'func', '*events.tsv')) + for event in events: + assert 'echo-' not in event diff --git a/heudiconv/tests/utils.py b/heudiconv/tests/utils.py index 2749cd51..25f85bab 100644 --- a/heudiconv/tests/utils.py +++ b/heudiconv/tests/utils.py @@ -5,14 +5,24 @@ TESTS_DATA_PATH = op.join(op.dirname(__file__), 'data') -def gen_heudiconv_args(datadir, outdir, subject, heuristic_file, anon_cmd=None, xargs=None): +def gen_heudiconv_args(datadir, outdir, subject, heuristic_file, + anon_cmd=None, template=None, xargs=None): heuristic = op.realpath(op.join(HEURISTICS_PATH, heuristic_file)) - args = ["-d", op.join(datadir, 'sourcedata/{subject}/*/*/*.tgz'), + + if template: + # use --dicom_dir_template + args = ["-d", op.join(datadir, template)] + else: + args = ["--files", datadir] + + args.extend([ "-c", "dcm2niix", "-o", outdir, "-s", subject, "-f", heuristic, - "--bids",] + "--bids", + "--minmeta",] + ) if anon_cmd: args += ["--anon-cmd", op.join(op.dirname(__file__), anon_cmd), "-a", outdir] if xargs: @@ -21,10 +31,30 @@ def gen_heudiconv_args(datadir, outdir, subject, heuristic_file, anon_cmd=None, return args -def fetch_data(tmpdir, subject): - """Fetches some test dicoms using datalad""" +def fetch_data(tmpdir, dataset, getpath=None): + """ + Utility function to interface with datalad database. + Performs datalad `install` and datalad `get` operations. 
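A usage sketch of `fetch_data` (hypothetical paths), mirroring the calls made in test_regression.py above:

    datadir = fetch_data('/tmp/work', 'dbic/QA', getpath='sourcedata/sub-xx')
    # installs http://datasets-tests.datalad.org/dbic/QA under /tmp/work/QA
    # and fetches only the 'sourcedata/sub-xx' subpath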
+ + Parameters + ---------- + tmpdir : str + directory to temporarily store data + dataset : str + dataset path from `http://datasets-tests.datalad.org` + getpath : str [optional] + exclusive path to get + + Returns + ------- + targetdir : str + directory with installed dataset + """ from datalad import api - targetdir = op.join(tmpdir, 'QA') - api.install(path=targetdir, source='http://datasets-tests.datalad.org/dbic/QA') - api.get('{}/sourcedata/{}'.format(targetdir, subject)) + targetdir = op.join(tmpdir, op.basename(dataset)) + api.install(path=targetdir, + source='http://datasets-tests.datalad.org/{}'.format(dataset)) + + getdir = targetdir + (op.sep + getpath if getpath is not None else '') + api.get(getdir) return targetdir From f30fc9254017f1456d7f13f7421cfa43bfa19ff8 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 8 Jan 2019 12:11:53 -0500 Subject: [PATCH 06/56] fix: allow --files usage without infotoids --- heudiconv/parser.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/heudiconv/parser.py b/heudiconv/parser.py index 6d470c5c..1adf8904 100644 --- a/heudiconv/parser.py +++ b/heudiconv/parser.py @@ -168,9 +168,18 @@ def get_study_sessions(dicom_dir_template, files_opt, heuristic, outdir, grouping=grouping) if not getattr(heuristic, 'infotoids', None): - raise NotImplementedError( - "For now, if no subj template is provided, requiring " - "heuristic to have infotoids") + lgr.warn("Heuristic is missing an `infotoids` method, assigning " + "empty method. For best results, define an `infotoids`") + def infotoids(seqinfos, outdir): + return { + 'locator': None, + 'session': None, + 'subject': None + } + heuristic.infotoids = infotoids + # raise NotImplementedError( + # "For now, if no subj template is provided, requiring " + # "heuristic to have infotoids") if sids: if not (len(sids) == 1 and len(seqinfo_dict) == 1): From 74f8ceb2760b668ab6910e392531be0de725f8d8 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 8 Jan 2019 13:51:39 -0500 Subject: [PATCH 07/56] fix+tst: --files heuristic flexiblility --- heudiconv/parser.py | 31 ++++++++++++++++-------------- heudiconv/tests/test_heuristics.py | 5 ++++- 2 files changed, 21 insertions(+), 15 deletions(-) diff --git a/heudiconv/parser.py b/heudiconv/parser.py index 1adf8904..2f38e2cb 100644 --- a/heudiconv/parser.py +++ b/heudiconv/parser.py @@ -167,20 +167,6 @@ def get_study_sessions(dicom_dir_template, files_opt, heuristic, outdir, dcmfilter=getattr(heuristic, 'filter_dicom', None), grouping=grouping) - if not getattr(heuristic, 'infotoids', None): - lgr.warn("Heuristic is missing an `infotoids` method, assigning " - "empty method. 
For best results, define an `infotoids`") - def infotoids(seqinfos, outdir): - return { - 'locator': None, - 'session': None, - 'subject': None - } - heuristic.infotoids = infotoids - # raise NotImplementedError( - # "For now, if no subj template is provided, requiring " - # "heuristic to have infotoids") - if sids: if not (len(sids) == 1 and len(seqinfo_dict) == 1): raise RuntimeError( @@ -193,6 +179,23 @@ def infotoids(seqinfos, outdir): else: sid = None + if not getattr(heuristic, 'infotoids', None): + # allow bypass with subject override + if not sid: + raise NotImplementedError("Cannot guarantee subject id - add " + "`infotoids` to heuristic file or " + "provide `--subject` argument") + lgr.warn("Heuristic is missing an `infotoids` method, assigning " + "empty method and using provided subject id", sid, + "Provide `session` and `locator` fields for best results.") + def infotoids(seqinfos, outdir): + return { + 'locator': None, + 'session': None, + 'subject': None + } + heuristic.infotoids = infotoids + for studyUID, seqinfo in seqinfo_dict.items(): # so we have a single study, we need to figure out its # locator, session, subject diff --git a/heudiconv/tests/test_heuristics.py b/heudiconv/tests/test_heuristics.py index 41f0b821..ad8749e4 100644 --- a/heudiconv/tests/test_heuristics.py +++ b/heudiconv/tests/test_heuristics.py @@ -64,9 +64,12 @@ def test_reproin_largely_smoke(tmpdir, heuristic, invocation): runner(args + ['--subjects', 'sub1', 'sub2']) if heuristic != 'reproin': - # none other heuristic has mighty infotoids atm + # if subject is not overriden, raise error with pytest.raises(NotImplementedError): runner(args) + + # but run successful when provided + runner(args + ['--subjects', 'sub1']) return runner(args) ds = Dataset(str(tmpdir)) From 8f61c519c4a86a7199c45290bd04c362404bf701 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 8 Jan 2019 13:57:15 -0500 Subject: [PATCH 08/56] fix: do not run when differing sequences present --- heudiconv/tests/test_heuristics.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/heudiconv/tests/test_heuristics.py b/heudiconv/tests/test_heuristics.py index ad8749e4..3b6d44e9 100644 --- a/heudiconv/tests/test_heuristics.py +++ b/heudiconv/tests/test_heuristics.py @@ -68,9 +68,6 @@ def test_reproin_largely_smoke(tmpdir, heuristic, invocation): with pytest.raises(NotImplementedError): runner(args) - # but run successful when provided - runner(args + ['--subjects', 'sub1']) - return runner(args) ds = Dataset(str(tmpdir)) assert ds.is_installed() From c8c046b2d0acf14ece6eafa531af28637e744a09 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 8 Jan 2019 14:07:56 -0500 Subject: [PATCH 09/56] fix: exit test early, typo --- heudiconv/parser.py | 2 +- heudiconv/tests/test_heuristics.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/heudiconv/parser.py b/heudiconv/parser.py index 2f38e2cb..7d48397d 100644 --- a/heudiconv/parser.py +++ b/heudiconv/parser.py @@ -184,7 +184,7 @@ def get_study_sessions(dicom_dir_template, files_opt, heuristic, outdir, if not sid: raise NotImplementedError("Cannot guarantee subject id - add " "`infotoids` to heuristic file or " - "provide `--subject` argument") + "provide `--subjects` option") lgr.warn("Heuristic is missing an `infotoids` method, assigning " "empty method and using provided subject id", sid, "Provide `session` and `locator` fields for best results.") diff --git a/heudiconv/tests/test_heuristics.py b/heudiconv/tests/test_heuristics.py index 3b6d44e9..37cd0d63 100644 --- 
a/heudiconv/tests/test_heuristics.py +++ b/heudiconv/tests/test_heuristics.py @@ -67,6 +67,7 @@ def test_reproin_largely_smoke(tmpdir, heuristic, invocation): # if subject is not overriden, raise error with pytest.raises(NotImplementedError): runner(args) + return runner(args) ds = Dataset(str(tmpdir)) From f14dee6dc8f4993a77d3c34a6451909e4286d0db Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Fri, 11 Jan 2019 09:47:52 -0500 Subject: [PATCH 10/56] [DATALAD RUNCMD] RF: minor - use Error not Exception suffix for custom exceptions === Do not change lines below === { "chain": [], "cmd": "git-sedi BIDSException BIDSError", "exit": 0, "inputs": [], "outputs": [ "heudiconv/*py" ], "pwd": "." } ^^^ Do not change lines above ^^^ --- heudiconv/bids.py | 2 +- heudiconv/convert.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/heudiconv/bids.py b/heudiconv/bids.py index f4166546..f0d7a7ad 100644 --- a/heudiconv/bids.py +++ b/heudiconv/bids.py @@ -26,7 +26,7 @@ lgr = logging.getLogger(__name__) -class BIDSException(Exception): +class BIDSError(Exception): pass diff --git a/heudiconv/convert.py b/heudiconv/convert.py index be6db96f..2c419574 100644 --- a/heudiconv/convert.py +++ b/heudiconv/convert.py @@ -24,7 +24,7 @@ save_scans_key, tuneup_bids_json_files, add_participant_record, - BIDSException + BIDSError ) from .dicoms import ( group_dicoms_into_seqinfos, @@ -529,7 +529,7 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, # If "_rec-" is specified, prepend the 'mag_or_phase' value. if ('_rec-' in this_prefix_basename): - raise BIDSException( + raise BIDSError( "Reconstruction label for multi-echo single-band" " reference images will be automatically set, remove" " from heuristic" From 83fdd1d70cea445a29f6e87780d1be0ca77e6905 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Fri, 11 Jan 2019 10:13:01 -0500 Subject: [PATCH 11/56] BF: minor typo in logger call --- heudiconv/parser.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/heudiconv/parser.py b/heudiconv/parser.py index 7d48397d..2ed0c049 100644 --- a/heudiconv/parser.py +++ b/heudiconv/parser.py @@ -186,8 +186,9 @@ def get_study_sessions(dicom_dir_template, files_opt, heuristic, outdir, "`infotoids` to heuristic file or " "provide `--subjects` option") lgr.warn("Heuristic is missing an `infotoids` method, assigning " - "empty method and using provided subject id", sid, - "Provide `session` and `locator` fields for best results.") + "empty method and using provided subject id %s." + "Provide `session` and `locator` fields for best results." + , sid) def infotoids(seqinfos, outdir): return { 'locator': None, From 5bb9bcb26ef69bf434c0e06905b07b746469411a Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Fri, 11 Jan 2019 10:13:31 -0500 Subject: [PATCH 12/56] ENH: restrict splitting to only 2 components while parsing for _echo --- heudiconv/bids.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/heudiconv/bids.py b/heudiconv/bids.py index f0d7a7ad..40cd0749 100644 --- a/heudiconv/bids.py +++ b/heudiconv/bids.py @@ -119,14 +119,17 @@ def populate_aggregated_jsons(path): suf = '_bold.json' assert fpath.endswith(suf) # specify the name of the '_events.tsv' file: - if ( '_echo-' in fpath ): + if '_echo-' in fpath: # multi-echo sequence: bids (1.1.0) specifies just one '_events.tsv' # file, common for all echoes. The name will not include _echo-. 
+ # TODO: RF to use re.match for better readability/robustness # So, find out the echo number: - fpath_split = fpath.split('_echo-') # split fpath using '_echo-' - fpath_split_2 = fpath_split[1].split('_') # split the second part of fpath_split using '_' - echoNo = fpath_split_2[0] # get echo number - if ( echoNo == '1' ): + fpath_split = fpath.split('_echo-', 1) # split fpath using '_echo-' + fpath_split_2 = fpath_split[1].split('_', 1) # split the second part of fpath_split using '_' + echoNo = fpath_split_2[0] # get echo number + if echoNo == '1': + if len(fpath_split_2) != 2: + raise ValueError("Found no trailer after _echo-") # we modify fpath to exclude '_echo-' + echoNo: fpath = fpath_split[0] + '_' + fpath_split_2[1] else: From b024f7daaa9a5b81b23e3dcabd6d3e98731823af Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Fri, 11 Jan 2019 10:14:43 -0500 Subject: [PATCH 13/56] RF: bids-ME.py -> bids_ME.py to assure imports to work etc --- heudiconv/heuristics/{bids-ME.py => bids_ME.py} | 0 heudiconv/tests/test_regression.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename heudiconv/heuristics/{bids-ME.py => bids_ME.py} (100%) diff --git a/heudiconv/heuristics/bids-ME.py b/heudiconv/heuristics/bids_ME.py similarity index 100% rename from heudiconv/heuristics/bids-ME.py rename to heudiconv/heuristics/bids_ME.py diff --git a/heudiconv/tests/test_regression.py b/heudiconv/tests/test_regression.py index 9bb2ecef..4f68d055 100644 --- a/heudiconv/tests/test_regression.py +++ b/heudiconv/tests/test_regression.py @@ -53,7 +53,7 @@ def test_conversion(tmpdir, subject, heuristic, anon_cmd): assert orig[key] == conv[key] @pytest.mark.skipif(not have_datalad, reason="no datalad") -def test_multiecho(tmpdir, subject='MEEPI', heuristic='bids-ME.py'): +def test_multiecho(tmpdir, subject='MEEPI', heuristic='bids_ME.py'): tmpdir.chdir() try: datadir = fetch_data(tmpdir.strpath, "dicoms/velasco/MEEPI") From 3c989deb0c6ba3d17c866f473de6e7fe35c70288 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Fri, 11 Jan 2019 10:18:09 -0500 Subject: [PATCH 14/56] DOC: A little description for bids_ME --- heudiconv/heuristics/bids_ME.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/heudiconv/heuristics/bids_ME.py b/heudiconv/heuristics/bids_ME.py index dd88bc69..71664206 100644 --- a/heudiconv/heuristics/bids_ME.py +++ b/heudiconv/heuristics/bids_ME.py @@ -1,4 +1,9 @@ -import os +"""Heuristic demonstrating conversion of the Multi-Echo sequences. + +It only cares about converting sequences which have _ME_ in their +series_description and outputs to BIDS. +""" + def create_key(template, outtype=('nii.gz',), annotation_classes=None): if template is None or not template: From 083f460e62f240672d9f905e541e037768a8a9b1 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Sat, 12 Jan 2019 09:53:47 -0500 Subject: [PATCH 15/56] Show must go on! --- CHANGELOG.md | 17 +++++++++++++++++ heudiconv/info.py | 2 +- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c6a76d5d..1889a4b3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,23 @@ All notable changes to this project will be documented (for humans) in this file The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
+## [0.5.4] - Date + +TODO Summary + +### Added + +### Changed + +### Deprecated + +### Fixed + +### Removed + +### Security + + ## [0.5.3] - 2019-01-12 Minor hot bugfix release diff --git a/heudiconv/info.py b/heudiconv/info.py index 8b575ff1..69c291b0 100644 --- a/heudiconv/info.py +++ b/heudiconv/info.py @@ -1,4 +1,4 @@ -__version__ = "0.5.3" +__version__ = "0.5.4.dev1" __author__ = "HeuDiConv team and contributors" __url__ = "https://github.com/nipy/heudiconv" __packagename__ = 'heudiconv' From 956928280537427c7d358c454cbdafc83bdce973 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Fri, 18 Jan 2019 15:33:54 -0500 Subject: [PATCH 16/56] fix: remove scout special parsing --- heudiconv/convert.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/heudiconv/convert.py b/heudiconv/convert.py index 2c419574..42f5de81 100644 --- a/heudiconv/convert.py +++ b/heudiconv/convert.py @@ -563,13 +563,6 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids, ) break - # For Scout runs with multiple NIfTI images per run: - if bids and 'scout' in this_prefix_basename.lower(): - # in some cases (more than one slice slab), there are several - # NIfTI images in the scout run, so distinguish them with "_acq-" - spt = this_prefix_basename.split('_acq-Scout', 1) - this_prefix_basename = "%s%s%s%s" % (spt[0], '_acq-Scout', suffix, spt[1]) - # Fallback option: # If we have failed to modify this_prefix_basename, because it didn't fall # into any of the options above, just add the suffix at the end: From c7aca41ceb4ba9c8472712c1638be013205f459a Mon Sep 17 00:00:00 2001 From: Franklin Feingold <35307458+franklin-feingold@users.noreply.github.com> Date: Wed, 30 Jan 2019 10:35:15 -0800 Subject: [PATCH 17/56] Add tutorial to README.md Added tutorial from the Center for Reproducible Neuroscience blog --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index adfb4ba8..1b454398 100644 --- a/README.md +++ b/README.md @@ -76,6 +76,8 @@ dicomstack. 
- YouTube: - ["Heudiconv Example"](https://www.youtube.com/watch?v=O1kZAuR7E00) by [James Kent](https://github.com/jdkent) +- Blog post: + - ["BIDS Tutorial Series: HeuDiConv Walkthrough"](http://reproducibility.stanford.edu/bids-tutorial-series-part-2a/) by the [Stanford Center for Reproducible Neuroscience](http://reproducibility.stanford.edu/) ## How it works (in some more detail) From b25a7ae375f5f015fbcd200b5e7c9115e22db8bf Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Mon, 4 Feb 2019 13:09:36 -0500 Subject: [PATCH 18/56] fix: queue --- heudiconv/cli/run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/heudiconv/cli/run.py b/heudiconv/cli/run.py index 01979127..fd840571 100644 --- a/heudiconv/cli/run.py +++ b/heudiconv/cli/run.py @@ -293,7 +293,7 @@ def process_args(args): queue_conversion(progname, args.queue, - study_outdir, + outdir, heuristic.filename, dicoms, sid, From fefc26baca858772fc4b45eb1efa9cfa8a73d860 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 4 Feb 2019 19:13:55 -0500 Subject: [PATCH 19/56] rf: queue support --- heudiconv/cli/run.py | 54 +++++++++++----------- heudiconv/queue.py | 87 +++++++++++++++++++++++------------ heudiconv/tests/test_queue.py | 47 +++++++++++++++++++ 3 files changed, 132 insertions(+), 56 deletions(-) create mode 100644 heudiconv/tests/test_queue.py diff --git a/heudiconv/cli/run.py b/heudiconv/cli/run.py index fd840571..0d984fcb 100644 --- a/heudiconv/cli/run.py +++ b/heudiconv/cli/run.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python + import os import os.path as op from argparse import ArgumentParser @@ -215,12 +217,11 @@ def get_parser(): parser.add_argument('--dcmconfig', default=None, help='JSON file for additional dcm2niix configuration') submission = parser.add_argument_group('Conversion submission options') - submission.add_argument('-q', '--queue', default=None, - help='select batch system to submit jobs to instead' - ' of running the conversion serially') - submission.add_argument('--sbargs', dest='sbatch_args', default=None, - help='Additional sbatch arguments if running with ' - 'queue arg') + submission.add_argument('-q', '--queue', choices=("SLURM", None), + default=None, + help='batch system to submit jobs in parallel') + submission.add_argument('--queue-args', dest='queue_args', default=None, + help='Additional queue arguments') return parser @@ -281,27 +282,28 @@ def process_args(args): continue if args.queue: - if seqinfo and not dicoms: - # flatten them all and provide into batching, which again - # would group them... heh - dicoms = sum(seqinfo.values(), []) - raise NotImplementedError( - "we already grouped them so need to add a switch to avoid " - "any grouping, so no outdir prefix doubled etc") - - progname = op.abspath(inspect.getfile(inspect.currentframe())) - - queue_conversion(progname, + # if seqinfo and not dicoms: + # # flatten them all and provide into batching, which again + # # would group them... 
heh + # dicoms = sum(seqinfo.values(), []) + # raise NotImplementedError( + # "we already grouped them so need to add a switch to avoid " + # "any grouping, so no outdir prefix doubled etc") + + pyscript = op.abspath(inspect.getfile(inspect.currentframe())) + + studyid = sid + if session: + studyid += "-%s" % session + if locator: + studyid += "-%s" % locator + # remove any separators + studyid = studyid.replace(op.sep, '_') + + queue_conversion(pyscript, args.queue, - outdir, - heuristic.filename, - dicoms, - sid, - args.anon_cmd, - args.converter, - session, - args.with_prov, - args.bids) + studyid, + args.queue_args) continue anon_sid = anonymize_sid(sid, args.anon_cmd) if args.anon_cmd else None diff --git a/heudiconv/queue.py b/heudiconv/queue.py index 89c912f2..4a05ccde 100644 --- a/heudiconv/queue.py +++ b/heudiconv/queue.py @@ -1,35 +1,62 @@ -import os -import os.path as op +import subprocess +import sys import logging lgr = logging.getLogger(__name__) -# start with SLURM but extend past that #TODO -def queue_conversion(progname, queue, outdir, heuristic, dicoms, sid, - anon_cmd, converter, session,with_prov, bids): - - # Rework this... - convertcmd = ' '.join(['python', progname, - '-o', outdir, - '-f', heuristic, - '-s', sid, - '--anon-cmd', anon_cmd, - '-c', converter]) - if session: - convertcmd += " --ses '%s'" % session - if with_prov: - convertcmd += " --with-prov" - if bids: - convertcmd += " --bids" - if dicoms: - convertcmd += " --files" - convertcmd += [" '%s'" % f for f in dicoms] - - script_file = 'dicom-%s.sh' % sid - with open(script_file, 'wt') as fp: - fp.writelines(['#!/bin/bash\n', convertcmd]) - outcmd = 'sbatch -J dicom-%s -p %s -N1 -c2 --mem=20G %s' \ - % (sid, queue, script_file) - - os.system(outcmd) +def queue_conversion(pyscript, queue, studyid, queue_args=None): + """ + Write out conversion arguments to file and submit to a job scheduler. + Parses `sys.argv` for heudiconv arguments. 
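Sketch of what `queue_conversion` produces for a hypothetical invocation (paths and arguments are illustrative only):

    # heudiconv --files /data/dcm -f reproin -b -o /out --queue SLURM
    # writes heudiconv-SLURM.sh containing roughly:
    #   #!/bin/bash
    #   /usr/bin/python /path/to/heudiconv/cli/run.py --files /data/dcm -f reproin -b -o /out
    # and then submits it with: sbatch heudiconv-SLURM.sh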
+ + Parameters + ---------- + pyscript: file + path to `heudiconv` script + queue: string + batch scheduler to use + studyid: string + identifier for conversion + queue_args: string (optional) + additional queue arguments for job submission + + Returns + ------- + proc: int + Queue submission exit code + """ + + SUPPORTED_QUEUES = {'SLURM': 'sbatch'} + if queue not in SUPPORTED_QUEUES: + raise NotImplementedError("Queuing with %s is not supported", queue) + + args = sys.argv[1:] + print(sys.argv) + # search args for queue flag + for i, arg in enumerate(args): + if arg in ["-q", "--queue"]: + break + if i == len(args) - 1: + raise RuntimeError( + "Queue flag not found (must be provided as a command-line arg)" + ) + # remove queue flag and value + del args[i:i+2] + + # make arguments executable again + args.insert(0, pyscript) + pypath = sys.executable or "python" + args.insert(0, pypath) + convertcmd = " ".join(args) + + # will overwrite across subjects + queue_file = 'heudiconv-%s.sh' % queue + with open(queue_file, 'wt') as fp: + fp.writelines(['#!/bin/bash\n', convertcmd, '\n']) + + cmd = [SUPPORTED_QUEUES[queue], queue_file] + if queue_args: + cmd.insert(1, queue_args) + proc = subprocess.call(cmd) + return proc diff --git a/heudiconv/tests/test_queue.py b/heudiconv/tests/test_queue.py new file mode 100644 index 00000000..38f94d3b --- /dev/null +++ b/heudiconv/tests/test_queue.py @@ -0,0 +1,47 @@ +import os +import sys +import subprocess + +from heudiconv.cli.run import main as runner +from .utils import TESTS_DATA_PATH +import pytest +from nipype.utils.filemanip import which + +@pytest.mark.skipif(which("sbatch"), reason="skip a real slurm call") +@pytest.mark.parametrize( + 'invocation', [ + "--files %s/01-fmap_acq-3mm" % TESTS_DATA_PATH, # our new way with automated groupping + "-d %s/{subject}/* -s 01-fmap_acq-3mm" % TESTS_DATA_PATH # "old" way specifying subject + ]) +def test_queue_no_slurm(tmpdir, invocation): + tmpdir.chdir() + hargs = invocation.split(" ") + hargs.extend(["-f", "reproin", "-b", "--minmeta", "--queue", "SLURM"]) + print(hargs) + + # simulate command-line call + _sys_args = sys.argv + sys.argv = ['heudiconv'] + hargs + + try: + with pytest.raises(FileNotFoundError): + runner(hargs) + # should have generated a slurm submission script + slurm_cmd_file = tmpdir / 'heudiconv-SLURM.sh' + assert slurm_cmd_file + # check contents and ensure args match + with open(slurm_cmd_file) as fp: + lines = fp.readlines() + assert lines[0] == "#!/bin/bash\n" + cmd = lines[1] + + # check that all flags we gave still being called + for arg in hargs: + # except --queue + if arg in ['--queue', 'SLURM']: + assert arg not in cmd + else: + assert arg in cmd + finally: + # revert before breaking something + sys.argv = _sys_args From 86607d796c96837bd1f11c9a3b00a595acd31ff9 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 5 Feb 2019 13:04:24 -0500 Subject: [PATCH 20/56] fix: more general error, path as string --- heudiconv/queue.py | 4 ++-- heudiconv/tests/test_queue.py | 5 ++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/heudiconv/queue.py b/heudiconv/queue.py index 4a05ccde..ba8ad662 100644 --- a/heudiconv/queue.py +++ b/heudiconv/queue.py @@ -1,5 +1,6 @@ import subprocess import sys +import os import logging @@ -32,7 +33,6 @@ def queue_conversion(pyscript, queue, studyid, queue_args=None): raise NotImplementedError("Queuing with %s is not supported", queue) args = sys.argv[1:] - print(sys.argv) # search args for queue flag for i, arg in enumerate(args): if arg in 
["-q", "--queue"]: @@ -51,7 +51,7 @@ def queue_conversion(pyscript, queue, studyid, queue_args=None): convertcmd = " ".join(args) # will overwrite across subjects - queue_file = 'heudiconv-%s.sh' % queue + queue_file = os.path.abspath('heudiconv-%s.sh' % queue) with open(queue_file, 'wt') as fp: fp.writelines(['#!/bin/bash\n', convertcmd, '\n']) diff --git a/heudiconv/tests/test_queue.py b/heudiconv/tests/test_queue.py index 38f94d3b..a90dd9b5 100644 --- a/heudiconv/tests/test_queue.py +++ b/heudiconv/tests/test_queue.py @@ -17,17 +17,16 @@ def test_queue_no_slurm(tmpdir, invocation): tmpdir.chdir() hargs = invocation.split(" ") hargs.extend(["-f", "reproin", "-b", "--minmeta", "--queue", "SLURM"]) - print(hargs) # simulate command-line call _sys_args = sys.argv sys.argv = ['heudiconv'] + hargs try: - with pytest.raises(FileNotFoundError): + with pytest.raises(OSError): runner(hargs) # should have generated a slurm submission script - slurm_cmd_file = tmpdir / 'heudiconv-SLURM.sh' + slurm_cmd_file = (tmpdir / 'heudiconv-SLURM.sh').strpath assert slurm_cmd_file # check contents and ensure args match with open(slurm_cmd_file) as fp: From 8b16e63b12ab8f2dab26d6a3ae54fa0a3fe51e78 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 5 Feb 2019 13:17:38 -0500 Subject: [PATCH 21/56] sty: removed top-level import, fix log spacing --- heudiconv/dicoms.py | 1 - heudiconv/parser.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/heudiconv/dicoms.py b/heudiconv/dicoms.py index 1206894c..b94013f7 100644 --- a/heudiconv/dicoms.py +++ b/heudiconv/dicoms.py @@ -4,7 +4,6 @@ import logging from collections import OrderedDict import tarfile -from nibabel.nicom import csareader from heudiconv.external.pydicom import dcm from .utils import SeqInfo, load_json, set_readonly diff --git a/heudiconv/parser.py b/heudiconv/parser.py index 2ed0c049..8ceba641 100644 --- a/heudiconv/parser.py +++ b/heudiconv/parser.py @@ -186,7 +186,7 @@ def get_study_sessions(dicom_dir_template, files_opt, heuristic, outdir, "`infotoids` to heuristic file or " "provide `--subjects` option") lgr.warn("Heuristic is missing an `infotoids` method, assigning " - "empty method and using provided subject id %s." + "empty method and using provided subject id %s. " "Provide `session` and `locator` fields for best results." 
, sid) def infotoids(seqinfos, outdir): From f1d7f44de26d7b94af6d7217c5afa661717674c6 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 6 Feb 2019 14:10:50 -0500 Subject: [PATCH 22/56] fix: embedding issue --- heudiconv/dicoms.py | 17 +++++++--------- heudiconv/tests/test_dicoms.py | 37 +++++++++++++++++++++++++++++++++- 2 files changed, 43 insertions(+), 11 deletions(-) diff --git a/heudiconv/dicoms.py b/heudiconv/dicoms.py index b94013f7..13a200b1 100644 --- a/heudiconv/dicoms.py +++ b/heudiconv/dicoms.py @@ -353,7 +353,7 @@ def _assign_dicom_time(ti): return outtar -def embed_nifti(dcmfiles, niftifile, infofile, bids_info, force, min_meta): +def embed_nifti(dcmfiles, niftifile, infofile, bids_info, min_meta): """ If `niftifile` doesn't exist, it gets created out of the `dcmfiles` stack, @@ -370,7 +370,6 @@ def embed_nifti(dcmfiles, niftifile, infofile, bids_info, force, min_meta): niftifile infofile bids_info - force min_meta Returns @@ -387,10 +386,11 @@ def embed_nifti(dcmfiles, niftifile, infofile, bids_info, force, min_meta): if not min_meta: import dcmstack as ds - stack = ds.parse_and_stack(dcmfiles, force=force).values() + stack = ds.parse_and_stack(dcmfiles, force=True).values() if len(stack) > 1: raise ValueError('Found multiple series') - stack = stack[0] + # may be odict now - iter to be safe + stack = next(iter(stack)) #Create the nifti image using the data array if not op.exists(niftifile): @@ -458,7 +458,7 @@ def embed_metadata_from_dicoms(bids, item_dicoms, outname, outname_bids, item_dicoms = list(map(op.abspath, item_dicoms)) embedfunc = Node(Function(input_names=['dcmfiles', 'niftifile', 'infofile', - 'bids_info', 'force', 'min_meta'], + 'bids_info', 'min_meta'], output_names=['outfile', 'meta'], function=embed_nifti), name='embedder') @@ -466,13 +466,10 @@ def embed_metadata_from_dicoms(bids, item_dicoms, outname, outname_bids, embedfunc.inputs.niftifile = op.abspath(outname) embedfunc.inputs.infofile = op.abspath(scaninfo) embedfunc.inputs.min_meta = min_meta - if bids: - embedfunc.inputs.bids_info = load_json(op.abspath(outname_bids)) - else: - embedfunc.inputs.bids_info = None - embedfunc.inputs.force = True + embedfunc.inputs.bids_info = load_json(op.abspath(outname_bids)) if bids else None embedfunc.base_dir = tmpdir cwd = os.getcwd() + lgr.debug("Embedding into %s based on dicoms[0]=%s for nifti %s", scaninfo, item_dicoms[0], outname) try: diff --git a/heudiconv/tests/test_dicoms.py b/heudiconv/tests/test_dicoms.py index 536d9d42..de8e9a1a 100644 --- a/heudiconv/tests/test_dicoms.py +++ b/heudiconv/tests/test_dicoms.py @@ -1,10 +1,11 @@ import os.path as op +import json import pytest from heudiconv.external.pydicom import dcm from heudiconv.cli.run import main as runner -from heudiconv.dicoms import parse_private_csa_header +from heudiconv.dicoms import parse_private_csa_header, embed_nifti from .utils import TESTS_DATA_PATH # Public: Private DICOM tags @@ -23,3 +24,37 @@ def test_private_csa_header(tmpdir): assert parse_private_csa_header(dcm_data, pub, priv) != '' # and quickly run heudiconv with no conversion runner(['--files', dcm_file, '-c' 'none', '-f', 'reproin']) + + +def test_nifti_embed(tmpdir): + """Test dcmstack's additional fields""" + tmpdir.chdir() + # set up testing files + dcmfiles = [op.join(TESTS_DATA_PATH, 'axasc35.dcm')] + infofile = 'infofile.json' + + # 1) nifti does not exist + out = embed_nifti(dcmfiles, 'nifti.nii', 'infofile.json', None, False) + # string -> json + out = json.loads(out) + # should have created nifti file + assert 
op.exists('nifti.nii') + + # 2) nifti exists + nifti, info = embed_nifti(dcmfiles, 'nifti.nii', 'infofile.json', None, False) + assert op.exists(nifti) + assert op.exists(info) + with open(info) as fp: + out2 = json.load(fp) + + assert out == out2 + + # 3) with existing metadata + bids = {"existing": "data"} + nifti, info = embed_nifti(dcmfiles, 'nifti.nii', 'infofile.json', bids, False) + with open(info) as fp: + out3 = json.load(fp) + + assert out3["existing"] + del out3["existing"] + assert out3 == out2 == out From 6259724f5e3b71dd34e8ea1850fd05a1b81350d3 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Wed, 13 Feb 2019 16:52:59 -0500 Subject: [PATCH 23/56] ENH: remove unused variable --- heudiconv/convert.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/heudiconv/convert.py b/heudiconv/convert.py index 1ebeaa9b..fb5b4f07 100644 --- a/heudiconv/convert.py +++ b/heudiconv/convert.py @@ -261,8 +261,6 @@ def convert(items, converter, scaninfo_suffix, custom_callable, with_prov, len(item_dicoms), outtype, overwrite) lgr.debug("Includes the following dicoms: %s", item_dicoms) - seqtype = op.basename(op.dirname(prefix)) if bids else None - # set empty outname and scaninfo in case we only want dicoms outname = '' scaninfo = '' From c219c0fbdb3624a43e204a43594f5a899a0632e9 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Wed, 13 Feb 2019 16:58:13 -0500 Subject: [PATCH 24/56] BF: convert - move out resetting of outname and scaninfo outside of the loop Otherwise, if outtype had nii.gz BEFORE dicom by the heuristic (or like it is done in our tests) - embedding etc simply would be skipped!not ... --- heudiconv/convert.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/heudiconv/convert.py b/heudiconv/convert.py index fb5b4f07..c1150395 100644 --- a/heudiconv/convert.py +++ b/heudiconv/convert.py @@ -247,6 +247,9 @@ def convert(items, converter, scaninfo_suffix, custom_callable, with_prov, prefix_dirname = op.dirname(prefix) outname_bids = prefix + '.json' bids_outfiles = [] + # set empty outname and scaninfo in case we only want dicoms + outname = '' + scaninfo = '' lgr.info('Converting %s (%d DICOMs) -> %s . ' 'Converter: %s . Output types: %s', prefix, len(item_dicoms), prefix_dirname, converter, outtypes) @@ -261,9 +264,6 @@ def convert(items, converter, scaninfo_suffix, custom_callable, with_prov, len(item_dicoms), outtype, overwrite) lgr.debug("Includes the following dicoms: %s", item_dicoms) - # set empty outname and scaninfo in case we only want dicoms - outname = '' - scaninfo = '' if outtype == 'dicom': convert_dicom(item_dicoms, bids, prefix, outdir, tempdirs, symlink, overwrite) From 7578097273b09b2721e05f255544686d09bd26ad Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Wed, 13 Feb 2019 17:02:19 -0500 Subject: [PATCH 25/56] Changelog entry --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1889a4b3..421ecb25 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,10 @@ TODO Summary ### Fixed +- correctly handle the case when `outtype` of heuristic has "dicom" + before '.nii.gz'. 
Previously would have lead to absent additional metadata + extraction etc + ### Removed ### Security From 35dd1c32ba2303af234ecc6a4fae914025135064 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Fri, 1 Mar 2019 14:45:53 -0500 Subject: [PATCH 26/56] Original version of the script to link issues from datalad 0.11.3-26-g8af6b67d --- utils/link_issues_CHANGELOG | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100755 utils/link_issues_CHANGELOG diff --git a/utils/link_issues_CHANGELOG b/utils/link_issues_CHANGELOG new file mode 100755 index 00000000..c0b4186f --- /dev/null +++ b/utils/link_issues_CHANGELOG @@ -0,0 +1,14 @@ +#!/bin/bash + +in=CHANGELOG.md + +# Replace them with Markdown references +sed -i -e 's/(\(#[0-9]\+\))/([\1])/g' "$in" + +# Populate references +cat "$in" | sponge | sed -n -e 's/.*(\[#\([0-9]\+\)\]).*/\1/gp' | sort | uniq \ +| while read issue; do + # remove old one if exists + sed -i -e "/^\[#$issue\]:.*/d" "$in" + echo "[#$issue]: https://github.com/datalad/datalad/issues/$issue" >> "$in"; +done \ No newline at end of file From 27bd7c08a1e6be3e416db7162c7d6987f5e07779 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Fri, 1 Mar 2019 14:46:33 -0500 Subject: [PATCH 27/56] Make link_issues_CHANGELOG work for heudiconv --- CHANGELOG.md | 3 ++- utils/link_issues_CHANGELOG | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 421ecb25..71964331 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,9 +16,10 @@ TODO Summary ### Fixed +- TODO: fix: embedding issue ??? - correctly handle the case when `outtype` of heuristic has "dicom" before '.nii.gz'. Previously would have lead to absent additional metadata - extraction etc + extraction etc (#310) ### Removed diff --git a/utils/link_issues_CHANGELOG b/utils/link_issues_CHANGELOG index c0b4186f..fa77e70e 100755 --- a/utils/link_issues_CHANGELOG +++ b/utils/link_issues_CHANGELOG @@ -10,5 +10,5 @@ cat "$in" | sponge | sed -n -e 's/.*(\[#\([0-9]\+\)\]).*/\1/gp' | sort | uniq \ | while read issue; do # remove old one if exists sed -i -e "/^\[#$issue\]:.*/d" "$in" - echo "[#$issue]: https://github.com/datalad/datalad/issues/$issue" >> "$in"; -done \ No newline at end of file + echo "[#$issue]: https://github.com/nipy/heudiconv/issues/$issue" >> "$in"; +done From 96adb7d534af6e647ed960bd563ce7dfb495b6f8 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Fri, 1 Mar 2019 14:49:16 -0500 Subject: [PATCH 28/56] Plug of "summaries" about merges --- CHANGELOG.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 71964331..b060259e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,7 +16,11 @@ TODO Summary ### Fixed -- TODO: fix: embedding issue ??? +- TODO: fix: embedding issue (#306) +- TODO: (#304) from mgxd/fix/queue +- TODO: (#301) from franklin-feingold/master +- TODO: fix/queue +- TODO: (#293) from mgxd/multiecho - correctly handle the case when `outtype` of heuristic has "dicom" before '.nii.gz'. 
Previously would have lead to absent additional metadata extraction etc (#310) From 5de1fea887ece19ba692642efd89237c28b8613e Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Fri, 1 Mar 2019 14:50:44 -0500 Subject: [PATCH 29/56] ignore virtualenvs directory I have --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 0d4bc8e0..9189a5c8 100755 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ .coverage *.egg-info/ .idea/ +venvs/ From f7d16697bd35bcbb1da0890b42dbced92a85b5e8 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Fri, 1 Mar 2019 14:51:55 -0500 Subject: [PATCH 30/56] [DATALAD RUNCMD] utils/link_issues_CHANGELOG === Do not change lines below === { "chain": [], "cmd": "utils/link_issues_CHANGELOG", "exit": 0, "extra_inputs": [], "inputs": [ "CHANGELOG.md" ], "outputs": [ "CHANGELOG.md" ], "pwd": "." } ^^^ Do not change lines above ^^^ --- CHANGELOG.md | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b060259e..c0df8923 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,14 +16,14 @@ TODO Summary ### Fixed -- TODO: fix: embedding issue (#306) -- TODO: (#304) from mgxd/fix/queue -- TODO: (#301) from franklin-feingold/master +- TODO: fix: embedding issue ([#306]) +- TODO: ([#304]) from mgxd/fix/queue +- TODO: ([#301]) from franklin-feingold/master - TODO: fix/queue -- TODO: (#293) from mgxd/multiecho +- TODO: ([#293]) from mgxd/multiecho - correctly handle the case when `outtype` of heuristic has "dicom" before '.nii.gz'. Previously would have lead to absent additional metadata - extraction etc (#310) + extraction etc ([#310]) ### Removed @@ -178,3 +178,8 @@ TODO Summary [DBIC]: http://dbic.dartmouth.edu [datalad]: http://datalad.org [dcm2niix]: https://github.com/rordenlab/dcm2niix +[#293]: https://github.com/nipy/heudiconv/issues/293 +[#301]: https://github.com/nipy/heudiconv/issues/301 +[#304]: https://github.com/nipy/heudiconv/issues/304 +[#306]: https://github.com/nipy/heudiconv/issues/306 +[#310]: https://github.com/nipy/heudiconv/issues/310 From 2d07d3be32cdac32e6bcecc45fdb70862ffaaf69 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 6 Mar 2019 13:22:49 -0500 Subject: [PATCH 31/56] doc: document changes --- CHANGELOG.md | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c0df8923..0768872b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,22 +6,32 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [0.5.4] - Date -TODO Summary +This release includes fixes to BIDS multi-echo conversions, the + re-implementation of queuing support (currently just SLURM), as well as + some bugfixes. + + Starting today, we will (finally) push versioned releases to DockerHub. + Finally, to more accurately reflect on-going development, the `latest` + tag has been renamed to `unstable`. ### Added +- Additional Stanford tutorial added to README ([#301]) ### Changed +- `--sbargs` argument was renamed to `--queue-args` ([#304]) +- Allow usage of `--files` with basic heuristics. This requires + use of `--subject` flag, and is limited to one subject. 
([#293]) ### Deprecated ### Fixed -- TODO: fix: embedding issue ([#306]) -- TODO: ([#304]) from mgxd/fix/queue -- TODO: ([#301]) from franklin-feingold/master -- TODO: fix/queue -- TODO: ([#293]) from mgxd/multiecho -- correctly handle the case when `outtype` of heuristic has "dicom" +- Fixed an issue where generated BIDS sidecar files were missing additional + information - treating all conversions as if the `--minmeta` flag was + used ([#306]) +- Re-enable SLURM queuing support ([#304]) +- BIDS multi-echo support for EPI + T1 images ([#293]) +- Correctly handle the case when `outtype` of heuristic has "dicom" before '.nii.gz'. Previously would have lead to absent additional metadata extraction etc ([#310]) From 5b7c33ecfb9f93c547fa5d0a38cbd22e05ea0ebd Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Mon, 11 Mar 2019 17:14:40 -0400 Subject: [PATCH 32/56] DOC: issue template clarity --- .github/ISSUE_TEMPLATE.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 98b55187..184824c5 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -1,3 +1,8 @@ + + ### Summary - Heudiconv version: - + From ef65e7854086a39bf2a38ed47e1887a7aab81aa6 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Mon, 11 Mar 2019 17:14:40 -0400 Subject: [PATCH 33/56] DOC: issue template clarity --- .github/ISSUE_TEMPLATE.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 98b55187..184824c5 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -1,3 +1,8 @@ + + ### Summary - Heudiconv version: - + From 2f7a75eb7ef05aa6436bcd58bfd9962d2708826f Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 25 Mar 2019 17:21:48 -0400 Subject: [PATCH 34/56] doc+wip: re-implementation of rtd --- README.rst | 35 +++++++++ docs/Makefile | 19 +++++ docs/conf.py | 178 ++++++++++++++++++++++++++++++++++++++++++ docs/heuristics.rst | 82 +++++++++++++++++++ docs/index.rst | 19 +++++ docs/installation.rst | 38 +++++++++ docs/tutorials.rst | 13 +++ docs/usage.rst | 27 +++++++ 8 files changed, 411 insertions(+) create mode 100644 README.rst create mode 100644 docs/Makefile create mode 100644 docs/conf.py create mode 100644 docs/heuristics.rst create mode 100644 docs/index.rst create mode 100644 docs/installation.rst create mode 100644 docs/tutorials.rst create mode 100644 docs/usage.rst diff --git a/README.rst b/README.rst new file mode 100644 index 00000000..764dff43 --- /dev/null +++ b/README.rst @@ -0,0 +1,35 @@ +heudiconv: A Heuristic-centric DICOM Converter + +.. image:: https://img.shields.io/badge/docker-nipy/heudiconv:unstable-brightgreen.svg?logo=docker&style=flat + :target: https://hub.docker.com/r/nipy/heudiconv/tags/ + :alt: Our Docker image + +.. image:: https://travis-ci.org/nipy/heudiconv.svg?branch=master + :target: https://travis-ci.org/nipy/heudiconv + :alt: TravisCI + +.. image:: https://codecov.io/gh/nipy/heudiconv/branch/master/graph/badge.svg + :target: https://codecov.io/gh/nipy/heudiconv + :alt: CodeCoverage + +.. image:: https://readthedocs.org/projects/heudiconv/badge/?version=latest + :target: http://heudiconv.readthedocs.io/en/latest/?badge=latest + :alt: Readthedocs + +About +----- + +``heudiconv`` is a flexible DICOM converter for organizing brain imaging data + into structured directory layouts. 
+ +- it allows flexible directory layouts and naming schemes through + customizable heuristics implementations +- it only converts the necessary DICOMs, not everything in a directory +- you can keep links to DICOM files in the participant layout +- using dcm2niix under the hood, it's fast +- it can track the provenance of the conversion from DICOM to NIfTI in W3C + PROV format +- it provides assistance in converting to `BIDS ` +- it integrates with `DataLad ` to place converted and + original data under git/git-annex version control, while automatically + annotating files with sensitive information (e.g., non-defaced anatomicals, etc) diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..298ea9e2 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,19 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000..3c5f2b61 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- + +project = 'heudiconv' +copyright = '2019, Heudiconv team' +author = 'Heudiconv team' + +# The short X.Y version +version = '' +# The full version, including alpha/beta/rc tags +release = '0.5.4' + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'recommonmark', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. 
+language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = None + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'heudiconvdoc' + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'heudiconv.tex', 'heudiconv Documentation', + 'Heudiconv team', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'heudiconv', 'heudiconv Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'heudiconv', 'heudiconv Documentation', + author, 'heudiconv', 'One line description of project.', + 'Miscellaneous'), +] + + +# -- Options for Epub output ------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. +# +# epub_uid = '' + +# A list of files that should not be packed into the epub file. 
+epub_exclude_files = ['search.html'] + + +# -- Extension configuration ------------------------------------------------- diff --git a/docs/heuristics.rst b/docs/heuristics.rst new file mode 100644 index 00000000..f035ae03 --- /dev/null +++ b/docs/heuristics.rst @@ -0,0 +1,82 @@ +## The heuristic file + +The heuristic file controls how information about the dicoms is used to convert +to a file system layout (e.g., BIDS). This is a python file that must have the +function `infotodict`, which takes a single argument `seqinfo`. + +### `seqinfo` and the `s` variable + +`seqinfo` is a list of namedtuple objects, each containing the following fields: + +* total_files_till_now +* example_dcm_file +* series_id +* dcm_dir_name +* unspecified2 +* unspecified3 +* dim1 +* dim2 +* dim3 +* dim4 +* TR +* TE +* protocol_name +* is_motion_corrected +* is_derived +* patient_id +* study_description +* referring_physician_name +* series_description +* image_type + +``` +128 125000-1-1.dcm 1 - - +- 160 160 128 1 0.00315 1.37 AAHScout False +``` + +### The dictionary returned by `infotodict` + +This dictionary contains as keys a 3-tuple `(template, a tuple of output types, + annotation classes)`. + +template - how the file should be relative to the base directory +tuple of output types - what format of output should be created - nii.gz, dicom, + etc.,. +annotation classes - unused + +``` +Example: ('func/sub-{subject}_task-face_run-{item:02d}_acq-PA_bold', ('nii.gz', + 'dicom'), None) +``` + +A few fields are defined by default and can be used in the template: + +- item: index within category +- subject: participant id +- seqitem: run number during scanning +- subindex: sub index within group +- session: session info for multi-session studies and when session has been + defined as a parameter for heudiconv + +Additional variables may be added and can be returned in the value of the +dictionary returned from the function. + +`info[some_3-tuple] = [12, 14, 16]` would assign dicom sequence groups 12, 14 +and 16 to be converted using the template specified in `some_3-tuple`. + +if the template contained a non-sanctioned variable, it would have to be +provided in the values for that key. + +``` +some_3_tuple = ('func/sub-{subject}_task-face_run-{item:02d}_acq-{acq}_bold', ('nii.gz', + 'dicom'), None) +``` + +In the above example `{acq}` is not a standard variable. In this case, values +for this variable needs to be added. + +``` +info[some_3-tuple] = [{'item': 12, 'acq': 'AP'}, + {'item': 14, 'acq': 'AP'}, + {'item': 16, 'acq': 'PA'}] +``` diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000..805f1626 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,19 @@ +.. heudiconv documentation master file, created by + sphinx-quickstart on Mon Mar 25 15:42:31 2019. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +.. include:: ../README.rst + +Contents +-------- + +.. toctree:: + :maxdepth: 2 + + installation + ../CHANGELOG.md + usage + heuristics + tutorials + reference diff --git a/docs/installation.rst b/docs/installation.rst new file mode 100644 index 00000000..96701eec --- /dev/null +++ b/docs/installation.rst @@ -0,0 +1,38 @@ + +Installation +------------ + +``Heudiconv`` is packaged and available from many different sources. + + +Local +----- +Released versions of HeuDiConv are available on `PyPI ` + and `conda `. 
+ If installing through ``PyPI``, eg:: + + pip install heudiconv[all] + +Manual installation of `dcm2niix ` + is required. + + On Debian-based systems we recommend using `NeuroDebian ` + which provides the `heudiconv package `. + + +Docker +------ +If `Docker ` is available on your system, you + can visit `our page on Docker Hub ` + to view available releases. To pull the latest release, run:: + + $ docker pull nipy/heudiconv:0.5.4 + + +Singularity +----------- +If `Singularity ` is available on your system, + you can use it to pull and convert our Docker images! For example, to pull and + build the latest release, you can run:: + + $ singularity pull docker://nipy/heudiconv:0.5.4 diff --git a/docs/tutorials.rst b/docs/tutorials.rst new file mode 100644 index 00000000..27d06e31 --- /dev/null +++ b/docs/tutorials.rst @@ -0,0 +1,13 @@ +Luckily(?), we live in an era of plentiful information. Below are some links to + other users' tutorials covering their experience with ``heudiconv``. + + - `YouTube tutorial ` by + `James Kent ` + - `Walkthrough ` + by the `Standard Center for Reproducible Neuroscience ` + - `Sample Conversion: Coastal Coding 2019 ` + - `U of A Neuroimaging Core ` + by `Dianne Patterson ` + + * Note: some of these tutorials may not be 100% up to date with the latest releases + of ``heudiconv``, so refer to this documentation first. diff --git a/docs/usage.rst b/docs/usage.rst new file mode 100644 index 00000000..2374c663 --- /dev/null +++ b/docs/usage.rst @@ -0,0 +1,27 @@ +## How it works (in some more detail) + +Call `heudiconv` like this: + + heudiconv -d '{subject}*.tar*' -s xx05 -f ~/myheuristics/convertall.py + +where `-d '{subject}*tar*'` is an expression used to find DICOM files +(`{subject}` expands to a subject ID so that the expression will match any +`.tar` files, compressed or not that start with the subject ID in their name). +An additional flag for session (`{session}`) can be included in the expression +as well. `-s od05` specifies a subject ID for the conversion (this could be a +list of multiple IDs), and `-f ~/myheuristics/convertall.py` identifies a +heuristic implementation for this conversion (see below) for details. + +This call will locate the DICOMs (in any number of matching tarballs), extract +them to a temporary directory, search for any DICOM series it can find, and +attempts a conversion storing output in the current directory. The output +directory will contain a subdirectory per subject, which in turn contains an +`info` directory with a full protocol of detected DICOM series, and how their +are converted. + + +To generate lean BIDS output, consider using both the `-b` and the `--minmeta` flags +to your heudiconv command. The `-b` flag generates a json file with BIDS keys, while +the `--minmeta` flag restricts the json file to only BIDS keys. Without `--minmeta`, +the json file and the associated Nifti file contains DICOM metadata extracted using +dicomstack. From dc44b32cc443805b28f588d64d1e4dc1cfad5adf Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 25 Mar 2019 17:51:03 -0400 Subject: [PATCH 35/56] fix: links + allow markdown as source --- README.rst | 6 +++--- docs/conf.py | 4 ++-- docs/index.rst | 2 +- docs/installation.rst | 16 ++++++++-------- docs/tutorials.rst | 16 ++++++---------- 5 files changed, 20 insertions(+), 24 deletions(-) diff --git a/README.rst b/README.rst index 764dff43..844e45f2 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -heudiconv: A Heuristic-centric DICOM Converter +.. 
topic:: `HeuDiConv`: a heuristic-centric DICOM converter .. image:: https://img.shields.io/badge/docker-nipy/heudiconv:unstable-brightgreen.svg?logo=docker&style=flat :target: https://hub.docker.com/r/nipy/heudiconv/tags/ @@ -29,7 +29,7 @@ About - using dcm2niix under the hood, it's fast - it can track the provenance of the conversion from DICOM to NIfTI in W3C PROV format -- it provides assistance in converting to `BIDS ` -- it integrates with `DataLad ` to place converted and +- it provides assistance in converting to `BIDS `_. +- it integrates with `DataLad `_ to place converted and original data under git/git-annex version control, while automatically annotating files with sensitive information (e.g., non-defaced anatomicals, etc) diff --git a/docs/conf.py b/docs/conf.py index 3c5f2b61..de52eb37 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -49,8 +49,8 @@ # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ['.rst', '.md'] +# source_suffix = '.rst' # The master toctree document. master_doc = 'index' diff --git a/docs/index.rst b/docs/index.rst index 805f1626..2442992a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -9,7 +9,7 @@ Contents -------- .. toctree:: - :maxdepth: 2 + :maxdepth: 1 installation ../CHANGELOG.md diff --git a/docs/installation.rst b/docs/installation.rst index 96701eec..0b19158e 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -7,23 +7,23 @@ Installation Local ----- -Released versions of HeuDiConv are available on `PyPI ` - and `conda `. +Released versions of HeuDiConv are available on `PyPI `_ + and `conda `_. If installing through ``PyPI``, eg:: pip install heudiconv[all] -Manual installation of `dcm2niix ` +Manual installation of `dcm2niix `_ is required. - On Debian-based systems we recommend using `NeuroDebian ` - which provides the `heudiconv package `. + On Debian-based systems we recommend using `NeuroDebian `_ + which provides the `heudiconv package `_. Docker ------ -If `Docker ` is available on your system, you - can visit `our page on Docker Hub ` +If `Docker `_ is available on your system, you + can visit `our page on Docker Hub `_ to view available releases. To pull the latest release, run:: $ docker pull nipy/heudiconv:0.5.4 @@ -31,7 +31,7 @@ If `Docker ` is available on your system, you Singularity ----------- -If `Singularity ` is available on your system, +If `Singularity `_ is available on your system, you can use it to pull and convert our Docker images! For example, to pull and build the latest release, you can run:: diff --git a/docs/tutorials.rst b/docs/tutorials.rst index 27d06e31..a46f490f 100644 --- a/docs/tutorials.rst +++ b/docs/tutorials.rst @@ -1,13 +1,9 @@ Luckily(?), we live in an era of plentiful information. Below are some links to - other users' tutorials covering their experience with ``heudiconv``. +other users' tutorials covering their experience with ``heudiconv``. - - `YouTube tutorial ` by - `James Kent ` - - `Walkthrough ` - by the `Standard Center for Reproducible Neuroscience ` - - `Sample Conversion: Coastal Coding 2019 ` - - `U of A Neuroimaging Core ` - by `Dianne Patterson ` + - `YouTube tutorial `_ by `James Kent `_. + - `Walkthrough `_ by the `Standard Center for Reproducible Neuroscience `_. + - `Sample Conversion: Coastal Coding 2019 `_. + - `U of A Neuroimaging Core `_ by `Dianne Patterson `_. 
- * Note: some of these tutorials may not be 100% up to date with the latest releases - of ``heudiconv``, so refer to this documentation first. +** Note: some of these tutorials may not be 100% up to date with the latest releases of ``heudiconv``, so refer to this documentation first. From 3598e6db05994a99be5516779a29877bb098f18c Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 26 Mar 2019 14:27:45 -0400 Subject: [PATCH 36/56] doc: styling, heuristics --- docs/heuristics.rst | 153 ++++++++++++++++++++---------------------- docs/index.rst | 8 +-- docs/installation.rst | 10 +-- docs/tutorials.rst | 23 +++++-- 4 files changed, 98 insertions(+), 96 deletions(-) diff --git a/docs/heuristics.rst b/docs/heuristics.rst index f035ae03..b50acbdd 100644 --- a/docs/heuristics.rst +++ b/docs/heuristics.rst @@ -1,82 +1,71 @@ -## The heuristic file - -The heuristic file controls how information about the dicoms is used to convert -to a file system layout (e.g., BIDS). This is a python file that must have the -function `infotodict`, which takes a single argument `seqinfo`. - -### `seqinfo` and the `s` variable - -`seqinfo` is a list of namedtuple objects, each containing the following fields: - -* total_files_till_now -* example_dcm_file -* series_id -* dcm_dir_name -* unspecified2 -* unspecified3 -* dim1 -* dim2 -* dim3 -* dim4 -* TR -* TE -* protocol_name -* is_motion_corrected -* is_derived -* patient_id -* study_description -* referring_physician_name -* series_description -* image_type - -``` -128 125000-1-1.dcm 1 - - -- 160 160 128 1 0.00315 1.37 AAHScout False -``` - -### The dictionary returned by `infotodict` - -This dictionary contains as keys a 3-tuple `(template, a tuple of output types, - annotation classes)`. - -template - how the file should be relative to the base directory -tuple of output types - what format of output should be created - nii.gz, dicom, - etc.,. -annotation classes - unused - -``` -Example: ('func/sub-{subject}_task-face_run-{item:02d}_acq-PA_bold', ('nii.gz', - 'dicom'), None) -``` - -A few fields are defined by default and can be used in the template: - -- item: index within category -- subject: participant id -- seqitem: run number during scanning -- subindex: sub index within group -- session: session info for multi-session studies and when session has been - defined as a parameter for heudiconv - -Additional variables may be added and can be returned in the value of the -dictionary returned from the function. - -`info[some_3-tuple] = [12, 14, 16]` would assign dicom sequence groups 12, 14 -and 16 to be converted using the template specified in `some_3-tuple`. - -if the template contained a non-sanctioned variable, it would have to be -provided in the values for that key. - -``` -some_3_tuple = ('func/sub-{subject}_task-face_run-{item:02d}_acq-{acq}_bold', ('nii.gz', - 'dicom'), None) -``` - -In the above example `{acq}` is not a standard variable. In this case, values -for this variable needs to be added. - -``` -info[some_3-tuple] = [{'item': 12, 'acq': 'AP'}, - {'item': 14, 'acq': 'AP'}, - {'item': 16, 'acq': 'PA'}] -``` +========= +Heuristic +========= + +The heuristic file controls how information about the DICOMs is used to convert +to a file system layout (e.g., BIDS). ``heudiconv`` includes some built-in +heuristics, including `ReproIn `_ +(which is great to adopt if you will be starting your data collection!). + +However, there is a large variety of data out there, and not all DICOMs will be +covered by the existing heuristics. 
This section will outline what makes up a +heuristic file, and some useful functions available when making one. + + +Components +========== + +--------------------- +`infotodict(seqinfos)` +--------------------- + +The only required function for a heuristic, `infotodict` is used to both define +the conversion outputs and specify the criteria for scan to output association. +Conversion outputs are defined as keys, a `tuple` consisting of a template path +used for the basis of outputs, as well as a `tuple` of output types. Valid types +include `nii`, `nii.gz`, and `dicom`. + +.. note:: + + An example conversion key: + ('sub-{subject}/func/sub-{subject}_task-test_run-{item}_bold', ('nii.gz', 'dicom')) + + +The `seqinfos` parameter is a list of namedtuples which serves as a grouped and +stacked record of the DICOMs passed in. Each item in `seqinfo` contains DICOM +metadata that can be used to isolate the series, and assign it to a conversion +key. + +A dictionary of {conversion key: seqinfo} is returned. + +------------------------------- +`create_key(template, outtype)` +------------------------------- + +A common helper function used to create the conversion key in `infotodict`. + +------------------ +`filter_files(fl)` +------------------ + +A utility function used to filter any input files. + +If this function is included, every file found will go through this filter. Any +files where this function returns `True` will be filtered out. + +------------------------ +`filter_dicom(dcm_data)` +------------------------ + +A utility function used to filter any DICOMs. + +If this function is included, every DICOM found will go through this filter. Any +DICOMs where this function returns `True` will be filtered out. + +---------------------------- +`infotoids(seqinfos, outdir)` +---------------------------- + +Further processing on `seqinfos` to deduce/customize subject, session, and locator. + +A dictionary of {"locator": locator, "session": session, "subject": subject} is returned. diff --git a/docs/index.rst b/docs/index.rst index 2442992a..177e3056 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -9,11 +9,11 @@ Contents -------- .. toctree:: - :maxdepth: 1 - + :maxdepth: 2 + installation - ../CHANGELOG.md + ../CHANGELOG usage heuristics tutorials - reference + API diff --git a/docs/installation.rst b/docs/installation.rst index 0b19158e..ca081e50 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -1,12 +1,12 @@ - +============ Installation ------------- +============ ``Heudiconv`` is packaged and available from many different sources. Local ------ +===== Released versions of HeuDiConv are available on `PyPI `_ and `conda `_. If installing through ``PyPI``, eg:: @@ -21,7 +21,7 @@ Manual installation of `dcm2niix Docker ------- +====== If `Docker `_ is available on your system, you can visit `our page on Docker Hub `_ to view available releases. To pull the latest release, run:: @@ -30,7 +30,7 @@ If `Docker `_ is available on your system, you Singularity ------------ +=========== If `Singularity `_ is available on your system, you can use it to pull and convert our Docker images! For example, to pull and build the latest release, you can run:: diff --git a/docs/tutorials.rst b/docs/tutorials.rst index a46f490f..b5ac9d73 100644 --- a/docs/tutorials.rst +++ b/docs/tutorials.rst @@ -1,9 +1,22 @@ +============== +User Tutorials +============== + Luckily(?), we live in an era of plentiful information. 
Below are some links to other users' tutorials covering their experience with ``heudiconv``. - - `YouTube tutorial `_ by `James Kent `_. - - `Walkthrough `_ by the `Standard Center for Reproducible Neuroscience `_. - - `Sample Conversion: Coastal Coding 2019 `_. - - `U of A Neuroimaging Core `_ by `Dianne Patterson `_. +- `YouTube tutorial `_ by +`James Kent `_. + +- `Walkthrough `_ +by the `Standard Center for Reproducible Neuroscience `_. + +- `U of A Neuroimaging Core `_ +by `Dianne Patterson `_. + +- `Sample Conversion: Coastal Coding 2019 `_. + +.. note:: -** Note: some of these tutorials may not be 100% up to date with the latest releases of ``heudiconv``, so refer to this documentation first. + Keep in mind, some of these tutorials may not be 100% up to date with + the latest releases of ``heudiconv``. From e4e3a272cde8217064c4407d73b64b54d396fe23 Mon Sep 17 00:00:00 2001 From: shotgunosine Date: Tue, 26 Mar 2019 15:29:03 -0400 Subject: [PATCH 37/56] update dcm2niix to v1.0.20181125 --- Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 4c864e7b..1bfa5c70 100644 --- a/Dockerfile +++ b/Dockerfile @@ -39,7 +39,7 @@ RUN export ND_ENTRYPOINT="/neurodocker/startup.sh" \ ENTRYPOINT ["/neurodocker/startup.sh"] -ENV PATH="/opt/dcm2niix-v1.0.20180622/bin:$PATH" +ENV PATH="/opt/dcm2niix-v1.0.20181125/bin:$PATH" RUN apt-get update -qq \ && apt-get install -y -q --no-install-recommends \ cmake \ @@ -54,10 +54,10 @@ RUN apt-get update -qq \ && git clone https://github.com/rordenlab/dcm2niix /tmp/dcm2niix \ && cd /tmp/dcm2niix \ && git fetch --tags \ - && git checkout v1.0.20180622 \ + && git checkout v1.0.20181125 \ && mkdir /tmp/dcm2niix/build \ && cd /tmp/dcm2niix/build \ - && cmake -DCMAKE_INSTALL_PREFIX:PATH=/opt/dcm2niix-v1.0.20180622 .. \ + && cmake -DCMAKE_INSTALL_PREFIX:PATH=/opt/dcm2niix-v1.0.20181125 .. \ && make \ && make install \ && rm -rf /tmp/dcm2niix From a02798923c7956703effaa524ad749964b9f7b48 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 26 Mar 2019 18:18:56 -0400 Subject: [PATCH 38/56] sty+enh: try new style, add more content --- docs/changes.rst | 1 + docs/heuristics.rst | 48 +++++++++++++++++++++---------------------- docs/index.rst | 4 ++-- docs/installation.rst | 26 +++++++++++------------ docs/tutorials.rst | 14 +++++-------- docs/usage.rst | 42 ++++++++++++++++++++++++++++++++++++- 6 files changed, 86 insertions(+), 49 deletions(-) create mode 100644 docs/changes.rst diff --git a/docs/changes.rst b/docs/changes.rst new file mode 100644 index 00000000..b56065ad --- /dev/null +++ b/docs/changes.rst @@ -0,0 +1 @@ +.. include:: ../CHANGELOG.md \ No newline at end of file diff --git a/docs/heuristics.rst b/docs/heuristics.rst index b50acbdd..79b5bed3 100644 --- a/docs/heuristics.rst +++ b/docs/heuristics.rst @@ -15,9 +15,9 @@ heuristic file, and some useful functions available when making one. Components ========== ---------------------- -`infotodict(seqinfos)` ---------------------- +------------------------ +``infotodict(seqinfos)`` +------------------------ The only required function for a heuristic, `infotodict` is used to both define the conversion outputs and specify the criteria for scan to output association. @@ -26,46 +26,46 @@ used for the basis of outputs, as well as a `tuple` of output types. Valid types include `nii`, `nii.gz`, and `dicom`. .. 
note:: - - An example conversion key: - ('sub-{subject}/func/sub-{subject}_task-test_run-{item}_bold', ('nii.gz', 'dicom')) + An example conversion key + + ``('sub-{subject}/func/sub-{subject}_task-test_run-{item}_bold', ('nii.gz', 'dicom'))`` -The `seqinfos` parameter is a list of namedtuples which serves as a grouped and +The ``seqinfos`` parameter is a list of namedtuples which serves as a grouped and stacked record of the DICOMs passed in. Each item in `seqinfo` contains DICOM metadata that can be used to isolate the series, and assign it to a conversion key. -A dictionary of {conversion key: seqinfo} is returned. +A dictionary of {``conversion key``: ``seqinfo``} is returned. -------------------------------- -`create_key(template, outtype)` -------------------------------- +--------------------------------- +``create_key(template, outtype)`` +--------------------------------- -A common helper function used to create the conversion key in `infotodict`. +A common helper function used to create the conversion key in ``infotodict``. ------------------- -`filter_files(fl)` ------------------- +-------------------- +``filter_files(fl)`` +-------------------- A utility function used to filter any input files. If this function is included, every file found will go through this filter. Any -files where this function returns `True` will be filtered out. +files where this function returns ``True`` will be filtered out. ------------------------- -`filter_dicom(dcm_data)` ------------------------- +-------------------------- +``filter_dicom(dcm_data)`` +-------------------------- A utility function used to filter any DICOMs. If this function is included, every DICOM found will go through this filter. Any -DICOMs where this function returns `True` will be filtered out. +DICOMs where this function returns ``True`` will be filtered out. ----------------------------- -`infotoids(seqinfos, outdir)` ----------------------------- +------------------------------- +``infotoids(seqinfos, outdir)`` +------------------------------- -Further processing on `seqinfos` to deduce/customize subject, session, and locator. +Further processing on ``seqinfos`` to deduce/customize subject, session, and locator. A dictionary of {"locator": locator, "session": session, "subject": subject} is returned. diff --git a/docs/index.rst b/docs/index.rst index 177e3056..8c68c07c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -12,8 +12,8 @@ Contents :maxdepth: 2 installation - ../CHANGELOG + changes usage heuristics tutorials - API + api diff --git a/docs/installation.rst b/docs/installation.rst index ca081e50..89b736e9 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -7,32 +7,32 @@ Installation Local ===== -Released versions of HeuDiConv are available on `PyPI `_ - and `conda `_. - If installing through ``PyPI``, eg:: +Released versions of HeuDiConv are available on `PyPI `_ +and `conda `_. +If installing through ``PyPI``, eg:: pip install heudiconv[all] -Manual installation of `dcm2niix `_ - is required. +Manual installation of `dcm2niix `_ +is required. - On Debian-based systems we recommend using `NeuroDebian `_ - which provides the `heudiconv package `_. +On Debian-based systems we recommend using `NeuroDebian `_ +which provides the `heudiconv package `_. Docker ====== -If `Docker `_ is available on your system, you - can visit `our page on Docker Hub `_ - to view available releases. 
To pull the latest release, run:: +If `Docker `_ is available on your system, you +can visit `our page on Docker Hub `_ +to view available releases. To pull the latest release, run:: $ docker pull nipy/heudiconv:0.5.4 Singularity =========== -If `Singularity `_ is available on your system, - you can use it to pull and convert our Docker images! For example, to pull and - build the latest release, you can run:: +If `Singularity `_ is available on your system, +you can use it to pull and convert our Docker images! For example, to pull and +build the latest release, you can run:: $ singularity pull docker://nipy/heudiconv:0.5.4 diff --git a/docs/tutorials.rst b/docs/tutorials.rst index b5ac9d73..07a509fc 100644 --- a/docs/tutorials.rst +++ b/docs/tutorials.rst @@ -5,18 +5,14 @@ User Tutorials Luckily(?), we live in an era of plentiful information. Below are some links to other users' tutorials covering their experience with ``heudiconv``. -- `YouTube tutorial `_ by -`James Kent `_. +- `YouTube tutorial `_ by `James Kent `_. -- `Walkthrough `_ -by the `Standard Center for Reproducible Neuroscience `_. +- `Walkthrough `_ by the `Standard Center for Reproducible Neuroscience `_. -- `U of A Neuroimaging Core `_ -by `Dianne Patterson `_. +- `U of A Neuroimaging Core `_ by `Dianne Patterson `_. - `Sample Conversion: Coastal Coding 2019 `_. -.. note:: - - Keep in mind, some of these tutorials may not be 100% up to date with +.. caution:: + Some of these tutorials may not be up to date with the latest releases of ``heudiconv``. diff --git a/docs/usage.rst b/docs/usage.rst index 2374c663..ee5de698 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -1,4 +1,44 @@ -## How it works (in some more detail) +===== +Usage +===== + +``heudiconv`` processes DICOM files and converts the output into user defined +paths. + +CommandLine Arguments +====================== + +.. argparse:: + :ref: heudiconv.cli.run.get_parser + :prog: heudiconv + :nodefault: + :nodefaultconst: + + +Support +======= + +All bugs, concerns and enhancement requests for this software can be submitted here: +https://github.com/nipy/heudiconv/issues. + +If you have a problem or would like to ask a question about how to use ``heudiconv``, +please submit a question to `NeuroStars.org `_ with a ``heudiconv`` tag. +NeuroStars.org is a platform similar to StackOverflow but dedicated to neuroinformatics. + +All previous ``heudiconv`` questions are available here: +http://neurostars.org/tags/heudiconv/ + + +Batch example +============= + +``heudiconv`` can greatly facilitate large scale conversions + + +******************************************************************************************* +******************************************************************************************* +******************************************************************************************* +******************************************************************************************* Call `heudiconv` like this: From 76b21f1c6ee837316a2e9f98c8c4a4826ceb92b8 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 27 Mar 2019 14:52:41 -0400 Subject: [PATCH 39/56] sty: whitespace + sphinx argparser --- README.rst | 18 +++++++++--------- docs/conf.py | 1 + 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/README.rst b/README.rst index 844e45f2..713f66c6 100644 --- a/README.rst +++ b/README.rst @@ -19,17 +19,17 @@ About ----- -``heudiconv`` is a flexible DICOM converter for organizing brain imaging data - into structured directory layouts. 
+``heudiconv`` is a flexible DICOM converter for organizing brain imaging data +into structured directory layouts. -- it allows flexible directory layouts and naming schemes through - customizable heuristics implementations +- it allows flexible directory layouts and naming schemes through +customizable heuristics implementations - it only converts the necessary DICOMs, not everything in a directory - you can keep links to DICOM files in the participant layout - using dcm2niix under the hood, it's fast -- it can track the provenance of the conversion from DICOM to NIfTI in W3C - PROV format +- it can track the provenance of the conversion from DICOM to NIfTI in W3C +PROV format - it provides assistance in converting to `BIDS `_. -- it integrates with `DataLad `_ to place converted and - original data under git/git-annex version control, while automatically - annotating files with sensitive information (e.g., non-defaced anatomicals, etc) +- it integrates with `DataLad `_ to place converted and +original data under git/git-annex version control, while automatically +annotating files with sensitive information (e.g., non-defaced anatomicals, etc) diff --git a/docs/conf.py b/docs/conf.py index de52eb37..990e0a50 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -41,6 +41,7 @@ extensions = [ 'sphinx.ext.autodoc', 'recommonmark', + 'sphinx-argparse', ] # Add any paths that contain templates here, relative to this directory. From 4030e00c449d658b183b9b0e44f710d7c32e1112 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 27 Mar 2019 15:18:04 -0400 Subject: [PATCH 40/56] doc: doc-building dependencies --- docs/requirements.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 docs/requirements.txt diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 00000000..7cafd8a4 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,2 @@ +sphinx-argparse +recommonmark From 8300db710b1c79894e86c7f33476c700ff82f5f3 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 27 Mar 2019 15:25:07 -0400 Subject: [PATCH 41/56] fix: extension name --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 990e0a50..30d19789 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -40,8 +40,8 @@ # ones. extensions = [ 'sphinx.ext.autodoc', + 'sphinxarg.ext', 'recommonmark', - 'sphinx-argparse', ] # Add any paths that contain templates here, relative to this directory. 
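With the pieces from the last few patches in place (the ``docs/`` Makefile from PATCH 34, ``docs/requirements.txt`` from PATCH 40, and the ``sphinxarg.ext`` extension name fixed in PATCH 41), a local documentation build presumably reduces to something like the following — a sketch that is not part of any patch in this series and assumes Sphinx itself is already installed::

    pip install -r docs/requirements.txt   # doc-building dependencies added in PATCH 40
    make -C docs html                       # drives sphinx-build via the Makefile added in PATCH 34
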
From 893731bea822c3160013f250fef74d86303ce2a0 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 27 Mar 2019 15:46:44 -0400 Subject: [PATCH 42/56] doc: install all project dependencies on rtd --- docs/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/requirements.txt b/docs/requirements.txt index 7cafd8a4..fc152f04 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,3 @@ sphinx-argparse recommonmark +-r ../dev-requirements.txt From 2ef877f58f3aa8d92363543f4ca874b53933a9e3 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 27 Mar 2019 18:00:36 -0400 Subject: [PATCH 43/56] doc: finalize rtd --- .gitignore | 1 + README.rst | 18 +++++------ docs/api.rst | 13 ++++++++ docs/api/bids.rst | 5 ++++ docs/api/convert.rst | 5 ++++ docs/api/dicoms.rst | 5 ++++ docs/api/parser.rst | 5 ++++ docs/api/queue.rst | 5 ++++ docs/api/utils.rst | 5 ++++ docs/changes.rst | 6 +++- docs/conf.py | 4 ++- docs/heuristics.rst | 33 ++++++++++---------- docs/index.rst | 2 +- docs/requirements.txt | 2 +- docs/usage.rst | 70 +++++++++++++++++++++++++------------------ 15 files changed, 120 insertions(+), 59 deletions(-) create mode 100644 docs/api.rst create mode 100644 docs/api/bids.rst create mode 100644 docs/api/convert.rst create mode 100644 docs/api/dicoms.rst create mode 100644 docs/api/parser.rst create mode 100644 docs/api/queue.rst create mode 100644 docs/api/utils.rst diff --git a/.gitignore b/.gitignore index 9189a5c8..6641a666 100755 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ *.egg-info/ .idea/ venvs/ +_build/ diff --git a/README.rst b/README.rst index 713f66c6..3656af14 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,8 @@ -.. topic:: `HeuDiConv`: a heuristic-centric DICOM converter +============= +**HeuDiConv** +============= + +`a heuristic-centric DICOM converter` .. image:: https://img.shields.io/badge/docker-nipy/heudiconv:unstable-brightgreen.svg?logo=docker&style=flat :target: https://hub.docker.com/r/nipy/heudiconv/tags/ @@ -19,17 +23,13 @@ About ----- -``heudiconv`` is a flexible DICOM converter for organizing brain imaging data +``heudiconv`` is a flexible DICOM converter for organizing brain imaging data into structured directory layouts. -- it allows flexible directory layouts and naming schemes through -customizable heuristics implementations +- it allows flexible directory layouts and naming schemes through customizable heuristics implementations - it only converts the necessary DICOMs, not everything in a directory - you can keep links to DICOM files in the participant layout - using dcm2niix under the hood, it's fast -- it can track the provenance of the conversion from DICOM to NIfTI in W3C -PROV format +- it can track the provenance of the conversion from DICOM to NIfTI in W3C PROV format - it provides assistance in converting to `BIDS `_. -- it integrates with `DataLad `_ to place converted and -original data under git/git-annex version control, while automatically -annotating files with sensitive information (e.g., non-defaced anatomicals, etc) +- it integrates with `DataLad `_ to place converted and original data under git/git-annex version control, while automatically annotating files with sensitive information (e.g., non-defaced anatomicals, etc) diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 00000000..91d3b44f --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,13 @@ +============= +API Reference +============= + +.. 
toctree:: + :maxdepth: 1 + + api/bids + api/convert + api/dicoms + api/parser + api/queue + api/util diff --git a/docs/api/bids.rst b/docs/api/bids.rst new file mode 100644 index 00000000..64a82b5d --- /dev/null +++ b/docs/api/bids.rst @@ -0,0 +1,5 @@ +==== +BIDS +==== + +.. automodule:: heudiconv.bids diff --git a/docs/api/convert.rst b/docs/api/convert.rst new file mode 100644 index 00000000..77ce2bac --- /dev/null +++ b/docs/api/convert.rst @@ -0,0 +1,5 @@ +========== +Conversion +========== + +.. automodule:: heudiconv.convert diff --git a/docs/api/dicoms.rst b/docs/api/dicoms.rst new file mode 100644 index 00000000..dc67cc46 --- /dev/null +++ b/docs/api/dicoms.rst @@ -0,0 +1,5 @@ +====== +DICOMS +====== + +.. automodule:: heudiconv.dicoms diff --git a/docs/api/parser.rst b/docs/api/parser.rst new file mode 100644 index 00000000..184a7e06 --- /dev/null +++ b/docs/api/parser.rst @@ -0,0 +1,5 @@ +======= +Parsing +======= + +.. automodule:: heudiconv.parser diff --git a/docs/api/queue.rst b/docs/api/queue.rst new file mode 100644 index 00000000..d0b84ad1 --- /dev/null +++ b/docs/api/queue.rst @@ -0,0 +1,5 @@ +============= +Batch Queuing +============= + +.. automodule:: heudiconv.queue diff --git a/docs/api/utils.rst b/docs/api/utils.rst new file mode 100644 index 00000000..cc6ae1f3 --- /dev/null +++ b/docs/api/utils.rst @@ -0,0 +1,5 @@ +======= +Utility +======= + +.. automodule:: heudiconv.utils diff --git a/docs/changes.rst b/docs/changes.rst index b56065ad..3d2cfd1a 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -1 +1,5 @@ -.. include:: ../CHANGELOG.md \ No newline at end of file +======= +Changes +======= + +.. mdinclude:: ../CHANGELOG.md diff --git a/docs/conf.py b/docs/conf.py index 30d19789..d1b7f918 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -41,7 +41,7 @@ extensions = [ 'sphinx.ext.autodoc', 'sphinxarg.ext', - 'recommonmark', + 'm2r', ] # Add any paths that contain templates here, relative to this directory. @@ -177,3 +177,5 @@ # -- Extension configuration ------------------------------------------------- +autodoc_default_options={ + 'members': None} diff --git a/docs/heuristics.rst b/docs/heuristics.rst index 79b5bed3..aee4f354 100644 --- a/docs/heuristics.rst +++ b/docs/heuristics.rst @@ -2,13 +2,13 @@ Heuristic ========= -The heuristic file controls how information about the DICOMs is used to convert +The heuristic file controls how information about the DICOMs is used to convert to a file system layout (e.g., BIDS). ``heudiconv`` includes some built-in -heuristics, including `ReproIn `_ +heuristics, including `ReproIn `_ (which is great to adopt if you will be starting your data collection!). -However, there is a large variety of data out there, and not all DICOMs will be -covered by the existing heuristics. This section will outline what makes up a +However, there is a large variety of data out there, and not all DICOMs will be +covered by the existing heuristics. This section will outline what makes up a heuristic file, and some useful functions available when making one. @@ -19,21 +19,20 @@ Components ``infotodict(seqinfos)`` ------------------------ -The only required function for a heuristic, `infotodict` is used to both define -the conversion outputs and specify the criteria for scan to output association. -Conversion outputs are defined as keys, a `tuple` consisting of a template path -used for the basis of outputs, as well as a `tuple` of output types. 
Valid types +The only required function for a heuristic, `infotodict` is used to both define +the conversion outputs and specify the criteria for scan to output association. +Conversion outputs are defined as keys, a `tuple` consisting of a template path +used for the basis of outputs, as well as a `tuple` of output types. Valid types include `nii`, `nii.gz`, and `dicom`. -.. note:: - An example conversion key - +.. note:: An example conversion key + ``('sub-{subject}/func/sub-{subject}_task-test_run-{item}_bold', ('nii.gz', 'dicom'))`` -The ``seqinfos`` parameter is a list of namedtuples which serves as a grouped and -stacked record of the DICOMs passed in. Each item in `seqinfo` contains DICOM -metadata that can be used to isolate the series, and assign it to a conversion +The ``seqinfos`` parameter is a list of namedtuples which serves as a grouped and +stacked record of the DICOMs passed in. Each item in `seqinfo` contains DICOM +metadata that can be used to isolate the series, and assign it to a conversion key. A dictionary of {``conversion key``: ``seqinfo``} is returned. @@ -42,7 +41,7 @@ A dictionary of {``conversion key``: ``seqinfo``} is returned. ``create_key(template, outtype)`` --------------------------------- -A common helper function used to create the conversion key in ``infotodict``. +A common helper function used to create the conversion key in ``infotodict``. -------------------- ``filter_files(fl)`` @@ -50,7 +49,7 @@ A common helper function used to create the conversion key in ``infotodict``. A utility function used to filter any input files. -If this function is included, every file found will go through this filter. Any +If this function is included, every file found will go through this filter. Any files where this function returns ``True`` will be filtered out. -------------------------- @@ -59,7 +58,7 @@ files where this function returns ``True`` will be filtered out. A utility function used to filter any DICOMs. -If this function is included, every DICOM found will go through this filter. Any +If this function is included, every DICOM found will go through this filter. Any DICOMs where this function returns ``True`` will be filtered out. ------------------------------- diff --git a/docs/index.rst b/docs/index.rst index 8c68c07c..621bd2b2 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -10,7 +10,7 @@ Contents .. toctree:: :maxdepth: 2 - + installation changes usage diff --git a/docs/requirements.txt b/docs/requirements.txt index fc152f04..46f4b641 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,3 +1,3 @@ sphinx-argparse -recommonmark +m2r -r ../dev-requirements.txt diff --git a/docs/usage.rst b/docs/usage.rst index ee5de698..2d7a527c 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -2,7 +2,7 @@ Usage ===== -``heudiconv`` processes DICOM files and converts the output into user defined +``heudiconv`` processes DICOM files and converts the output into user defined paths. CommandLine Arguments @@ -13,7 +13,7 @@ CommandLine Arguments :prog: heudiconv :nodefault: :nodefaultconst: - + Support ======= @@ -29,39 +29,51 @@ All previous ``heudiconv`` questions are available here: http://neurostars.org/tags/heudiconv/ -Batch example -============= +Batch jobs +========== + +``heudiconv`` can natively handle multi-subject, multi-session conversions, +although it will process these linearly. To speed this up, multiple ``heudiconv`` +processes can be spawned concurrently, each converting a different subject and/or +session. 
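One minimal sketch of such concurrent spawning on a single multi-core machine — not taken from these patches; the subject IDs, DICOM path template, and output directory below are made-up placeholders — is a plain shell loop::

    #!/bin/bash
    # launch one independent heudiconv process per subject, in the background
    for subj in S01 S02 S03; do
        heudiconv -d '/dicom/storage/{subject}/*/*.dcm' -s "$subj" \
                  -f reproin -c dcm2niix -b -o /converted/data &
    done
    wait    # block until all background conversions finish

A job scheduler scales the same idea to a cluster, as in the SLURM example that follows.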
+ +The following example uses SLURM and Singularity to submit every subjects' +DICOMs as an independent ``heudiconv`` execution. + +The first script aggregates the DICOM directories and submits them to +``run_heudiconv.sh`` with SLURM as a job array.:: + + #!/bin/bash + + set -eu -``heudiconv`` can greatly facilitate large scale conversions + # where the DICOMs are located + DCMROOT=/dicom/storage/voice + # where we want to output the data + OUTPUT=/converted/data/voice + # find all DICOM directories that start with "voice" + DCMDIRS=(`find ${DCMROOT} -maxdepth 1 -name voice* -type d`) -******************************************************************************************* -******************************************************************************************* -******************************************************************************************* -******************************************************************************************* + # submit to another script as a job array on SLURM + sbatch --array=0-$len run_heudiconv.sh ${OUTPUT} ${DCMDIRS[@]} -Call `heudiconv` like this: - heudiconv -d '{subject}*.tar*' -s xx05 -f ~/myheuristics/convertall.py +The second script processes a DICOM directory with ``heudiconv`` using the built-in +`reproin` heuristic.:: -where `-d '{subject}*tar*'` is an expression used to find DICOM files -(`{subject}` expands to a subject ID so that the expression will match any -`.tar` files, compressed or not that start with the subject ID in their name). -An additional flag for session (`{session}`) can be included in the expression -as well. `-s od05` specifies a subject ID for the conversion (this could be a -list of multiple IDs), and `-f ~/myheuristics/convertall.py` identifies a -heuristic implementation for this conversion (see below) for details. + #!/bin/bash + set -eu -This call will locate the DICOMs (in any number of matching tarballs), extract -them to a temporary directory, search for any DICOM series it can find, and -attempts a conversion storing output in the current directory. The output -directory will contain a subdirectory per subject, which in turn contains an -`info` directory with a full protocol of detected DICOM series, and how their -are converted. + OUTDIR=${1} + # receive all directories, and index them per job array + DCMDIRS=(${@:2}) + DCMDIR=${DCMDIRS[${SLURM_ARRAY_TASK_ID}]} + echo Submitted directory: ${DCMDIR} + IMG="/singularity-images/heudiconv-0.5.4-dev.sif" + CMD="singularity run -B ${DCMDIR}:/dicoms:ro -B ${OUTDIR}:/output -e ${IMG} --files /dicoms/ -o /output -f reproin -c dcm2niix -b --minmeta -l ." -To generate lean BIDS output, consider using both the `-b` and the `--minmeta` flags -to your heudiconv command. The `-b` flag generates a json file with BIDS keys, while -the `--minmeta` flag restricts the json file to only BIDS keys. Without `--minmeta`, -the json file and the associated Nifti file contains DICOM metadata extracted using -dicomstack. 
+ printf "Command:\n${CMD}\n" + ${CMD} + echo "Successful process" From 9409ce591251da629ce490924742b32879b82caa Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 27 Mar 2019 18:07:10 -0400 Subject: [PATCH 44/56] fix: batch example --- docs/usage.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/usage.rst b/docs/usage.rst index 2d7a527c..7fcdd44f 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -56,7 +56,7 @@ The first script aggregates the DICOM directories and submits them to DCMDIRS=(`find ${DCMROOT} -maxdepth 1 -name voice* -type d`) # submit to another script as a job array on SLURM - sbatch --array=0-$len run_heudiconv.sh ${OUTPUT} ${DCMDIRS[@]} + sbatch --array=0-`expr ${#DCMDIRS[@]} - 1` run_heudiconv.sh ${OUTPUT} ${DCMDIRS[@]} The second script processes a DICOM directory with ``heudiconv`` using the built-in From 5242f55475f701478d83a80f814a12d3b68c4b80 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 28 Mar 2019 11:36:50 -0400 Subject: [PATCH 45/56] fix: utils api link --- docs/api.rst | 2 +- docs/usage.rst | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index 91d3b44f..a0ed9133 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -10,4 +10,4 @@ API Reference api/dicoms api/parser api/queue - api/util + api/utils diff --git a/docs/usage.rst b/docs/usage.rst index 7fcdd44f..b64f8abc 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -41,7 +41,9 @@ The following example uses SLURM and Singularity to submit every subjects' DICOMs as an independent ``heudiconv`` execution. The first script aggregates the DICOM directories and submits them to -``run_heudiconv.sh`` with SLURM as a job array.:: +``run_heudiconv.sh`` with SLURM as a job array. + +.. code:: shell #!/bin/bash @@ -60,7 +62,9 @@ The first script aggregates the DICOM directories and submits them to The second script processes a DICOM directory with ``heudiconv`` using the built-in -`reproin` heuristic.:: +`reproin` heuristic. + +.. 
code:: shell #!/bin/bash set -eu From bae6dfd6a1423aaf5b1037b962f59cda830d1922 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Mon, 1 Apr 2019 11:08:42 -0400 Subject: [PATCH 46/56] Updated link_issues_CHANGELOG with new version from datalad 0.12.0rc2-125-gc350b96e It should generate markdown links as [LINKID][] to follow original markdown (not github flavor) and so pandoc and possibly other tools do not barf --- utils/link_issues_CHANGELOG | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/utils/link_issues_CHANGELOG b/utils/link_issues_CHANGELOG index fa77e70e..4810ca90 100755 --- a/utils/link_issues_CHANGELOG +++ b/utils/link_issues_CHANGELOG @@ -3,11 +3,12 @@ in=CHANGELOG.md # Replace them with Markdown references -sed -i -e 's/(\(#[0-9]\+\))/([\1])/g' "$in" +sed -i -e 's/(\(#[0-9]\+\))/([\1][])/g' "$in" # Populate references -cat "$in" | sponge | sed -n -e 's/.*(\[#\([0-9]\+\)\]).*/\1/gp' | sort | uniq \ +tr ' ,' '\n\n' < "$in" | sponge | sed -n -e 's/.*(\[#\([0-9]\+\)\]\(\[\]*\)).*/\1/gp' | sort | uniq \ | while read issue; do + #echo "issue $issue" # remove old one if exists sed -i -e "/^\[#$issue\]:.*/d" "$in" echo "[#$issue]: https://github.com/nipy/heudiconv/issues/$issue" >> "$in"; From d80a46f96cce57b6446eda5eb6915385343ce316 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Mon, 1 Apr 2019 11:12:31 -0400 Subject: [PATCH 47/56] ENH: manually fixed up already existing MD links to contain empty trailing [] --- CHANGELOG.md | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c0df8923..13922e60 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,14 +16,14 @@ TODO Summary ### Fixed -- TODO: fix: embedding issue ([#306]) -- TODO: ([#304]) from mgxd/fix/queue -- TODO: ([#301]) from franklin-feingold/master +- TODO: fix: embedding issue (([#306][])) +- TODO: ([#304][]) from mgxd/fix/queue +- TODO: ([#301][]) from franklin-feingold/master - TODO: fix/queue -- TODO: ([#293]) from mgxd/multiecho +- TODO: ([#293][]) from mgxd/multiecho - correctly handle the case when `outtype` of heuristic has "dicom" before '.nii.gz'. 
Previously would have lead to absent additional metadata - extraction etc ([#310]) + extraction etc ([#310][]) ### Removed @@ -127,28 +127,28 @@ The first release after major refactoring: - Many other various issues ## [0.4] - 2017-10-15 -A usable release to support [DBIC] use-case +A usable release to support [DBIC][] use-case ### Added - more testing ### Changes -- Dockerfile updates (added pigz, progressed forward [dcm2niix]) +- Dockerfile updates (added pigz, progressed forward [dcm2niix][]) ### Fixed - correct date/time in BIDS `_scans` files - sort entries in `_scans` by date and then filename ## [0.3] - 2017-07-10 -A somewhat working release on the way to support [DBIC] use-case +A somewhat working release on the way to support [DBIC][] use-case ### Added - more tests - groupping of dicoms by series if provided - many more features and fixes ## [0.2] - 2016-10-20 -An initial release on the way to support [DBIC] use-case +An initial release on the way to support [DBIC][] use-case ### Added - basic Python project assets (`setup.py`, etc) - basic tests -- [datalad] support +- [datalad][] support - dbic_bids heuristic - `--dbg` command line flag to enter `pdb` environment upon failure ## Fixed From b2b1d2b22752f6b16c6659b68b5df4fdde0bd9f0 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 1 Apr 2019 15:34:12 -0400 Subject: [PATCH 48/56] rf: move to absolute import --- heudiconv/__init__.py | 2 +- heudiconv/bids.py | 4 ++-- heudiconv/cli/run.py | 16 ++++++++-------- heudiconv/convert.py | 6 +++--- heudiconv/dicoms.py | 4 ++-- heudiconv/external/dcmstack.py | 2 +- heudiconv/parser.py | 4 ++-- 7 files changed, 19 insertions(+), 19 deletions(-) diff --git a/heudiconv/__init__.py b/heudiconv/__init__.py index e6025296..005d74e8 100644 --- a/heudiconv/__init__.py +++ b/heudiconv/__init__.py @@ -1,7 +1,7 @@ # set logger handler import logging import os -from .info import (__version__, __packagename__) +from heudiconv.info import (__version__, __packagename__) # Rudimentary logging support. lgr = logging.getLogger(__name__) diff --git a/heudiconv/bids.py b/heudiconv/bids.py index 40cd0749..3f8d923d 100644 --- a/heudiconv/bids.py +++ b/heudiconv/bids.py @@ -13,8 +13,8 @@ from heudiconv.external.pydicom import dcm -from .parser import find_files -from .utils import ( +from heudiconv.parser import find_files +from heudiconv.utils import ( load_json, save_json, create_file_if_missing, diff --git a/heudiconv/cli/run.py b/heudiconv/cli/run.py index 0d984fcb..d3c16f05 100644 --- a/heudiconv/cli/run.py +++ b/heudiconv/cli/run.py @@ -5,12 +5,12 @@ from argparse import ArgumentParser import sys -from .. 
import __version__, __packagename__ -from ..parser import get_study_sessions -from ..utils import load_heuristic, anonymize_sid, treat_infofile, SeqInfo -from ..convert import prep_conversion -from ..bids import populate_bids_templates, tuneup_bids_json_files -from ..queue import queue_conversion +from heudiconv import __version__, __packagename__ +from heudiconv.parser import get_study_sessions +from heudiconv.utils import load_heuristic, anonymize_sid, treat_infofile, SeqInfo +from heudiconv.convert import prep_conversion +from heudiconv.bids import populate_bids_templates, tuneup_bids_json_files +from heudiconv.queue import queue_conversion import inspect import logging @@ -84,11 +84,11 @@ def process_extra_commands(outdir, args): elif args.command == 'sanitize-jsons': tuneup_bids_json_files(args.files) elif args.command == 'heuristics': - from ..utils import get_known_heuristics_with_descriptions + from heudiconv.utils import get_known_heuristics_with_descriptions for name_desc in get_known_heuristics_with_descriptions().items(): print("- %s: %s" % name_desc) elif args.command == 'heuristic-info': - from ..utils import get_heuristic_description, get_known_heuristic_names + from heudiconv.utils import get_heuristic_description, get_known_heuristic_names if not args.heuristic: raise ValueError("Specify heuristic using -f. Known are: %s" % ', '.join(get_known_heuristic_names())) diff --git a/heudiconv/convert.py b/heudiconv/convert.py index c1150395..5fc0bc94 100644 --- a/heudiconv/convert.py +++ b/heudiconv/convert.py @@ -4,7 +4,7 @@ import shutil import sys -from .utils import ( +from heudiconv.utils import ( read_config, load_json, save_json, @@ -18,7 +18,7 @@ assure_no_file_exists, file_md5sum ) -from .bids import ( +from heudiconv.bids import ( convert_sid_bids, populate_bids_templates, save_scans_key, @@ -26,7 +26,7 @@ add_participant_record, BIDSError ) -from .dicoms import ( +from heudiconv.dicoms import ( group_dicoms_into_seqinfos, embed_metadata_from_dicoms, compress_dicoms diff --git a/heudiconv/dicoms.py b/heudiconv/dicoms.py index 13a200b1..5a265f66 100644 --- a/heudiconv/dicoms.py +++ b/heudiconv/dicoms.py @@ -4,9 +4,9 @@ import logging from collections import OrderedDict import tarfile -from heudiconv.external.pydicom import dcm -from .utils import SeqInfo, load_json, set_readonly +from heudiconv.external.pydicom import dcm +from heudiconv.utils import SeqInfo, load_json, set_readonly lgr = logging.getLogger(__name__) diff --git a/heudiconv/external/dcmstack.py b/heudiconv/external/dcmstack.py index 80ebc33f..8ce721f4 100644 --- a/heudiconv/external/dcmstack.py +++ b/heudiconv/external/dcmstack.py @@ -2,7 +2,7 @@ from __future__ import absolute_import -from .pydicom import dcm # to assure that we have it one way or another +from heudiconv.external.pydicom import dcm # to assure that we have it one way or another try: import dcmstack as ds diff --git a/heudiconv/parser.py b/heudiconv/parser.py index 8ceba641..986d5df7 100644 --- a/heudiconv/parser.py +++ b/heudiconv/parser.py @@ -9,8 +9,8 @@ import tarfile from tempfile import mkdtemp -from .dicoms import group_dicoms_into_seqinfos -from .utils import ( +from heudiconv.dicoms import group_dicoms_into_seqinfos +from heudiconv.utils import ( docstring_parameter, StudySessionInfo, ) From 5c5b5d1af4b43b5331321ec3c2d6575f1c05ba3b Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 1 Apr 2019 17:32:11 -0400 Subject: [PATCH 49/56] fix: better support for queue args --- heudiconv/cli/run.py | 4 +- heudiconv/queue.py | 114 
++++++++++++++++++---------------- heudiconv/tests/test_queue.py | 17 +++++ 3 files changed, 82 insertions(+), 53 deletions(-) diff --git a/heudiconv/cli/run.py b/heudiconv/cli/run.py index d3c16f05..7b9813cd 100644 --- a/heudiconv/cli/run.py +++ b/heudiconv/cli/run.py @@ -221,7 +221,9 @@ def get_parser(): default=None, help='batch system to submit jobs in parallel') submission.add_argument('--queue-args', dest='queue_args', default=None, - help='Additional queue arguments') + help='Additional queue arguments passed as ' + 'single string of Argument=Value pairs space ' + 'separated.') return parser diff --git a/heudiconv/queue.py b/heudiconv/queue.py index ba8ad662..7fe9fda1 100644 --- a/heudiconv/queue.py +++ b/heudiconv/queue.py @@ -7,56 +7,66 @@ lgr = logging.getLogger(__name__) def queue_conversion(pyscript, queue, studyid, queue_args=None): - """ - Write out conversion arguments to file and submit to a job scheduler. - Parses `sys.argv` for heudiconv arguments. - - Parameters - ---------- - pyscript: file - path to `heudiconv` script - queue: string - batch scheduler to use - studyid: string - identifier for conversion - queue_args: string (optional) - additional queue arguments for job submission - - Returns - ------- - proc: int - Queue submission exit code - """ - - SUPPORTED_QUEUES = {'SLURM': 'sbatch'} - if queue not in SUPPORTED_QUEUES: - raise NotImplementedError("Queuing with %s is not supported", queue) - - args = sys.argv[1:] - # search args for queue flag - for i, arg in enumerate(args): - if arg in ["-q", "--queue"]: - break - if i == len(args) - 1: - raise RuntimeError( - "Queue flag not found (must be provided as a command-line arg)" - ) - # remove queue flag and value - del args[i:i+2] - - # make arguments executable again - args.insert(0, pyscript) - pypath = sys.executable or "python" - args.insert(0, pypath) - convertcmd = " ".join(args) - - # will overwrite across subjects - queue_file = os.path.abspath('heudiconv-%s.sh' % queue) - with open(queue_file, 'wt') as fp: - fp.writelines(['#!/bin/bash\n', convertcmd, '\n']) - - cmd = [SUPPORTED_QUEUES[queue], queue_file] + """ + Write out conversion arguments to file and submit to a job scheduler. + Parses `sys.argv` for heudiconv arguments. 
+ + Parameters + ---------- + pyscript: file + path to `heudiconv` script + queue: string + batch scheduler to use + studyid: string + identifier for conversion + queue_args: string (optional) + additional queue arguments for job submission + + Returns + ------- + proc: int + Queue submission exit code + """ + + SUPPORTED_QUEUES = {'SLURM': 'sbatch'} + if queue not in SUPPORTED_QUEUES: + raise NotImplementedError("Queuing with %s is not supported", queue) + + args = clean_args(sys.argv[1:]) + # make arguments executable + args.insert(0, pyscript) + pypath = sys.executable or "python" + args.insert(0, pypath) + convertcmd = " ".join(args) + + # will overwrite across subjects + queue_file = os.path.abspath('heudiconv-%s.sh' % queue) + with open(queue_file, 'wt') as fp: + fp.write("#!/bin/bash\n") if queue_args: - cmd.insert(1, queue_args) - proc = subprocess.call(cmd) - return proc + for qarg in queue_args.split(): + fp.write("#SBATCH %s\n" % qarg) + fp.write(convertcmd + "\n") + + cmd = [SUPPORTED_QUEUES[queue], queue_file] + proc = subprocess.call(cmd) + return proc + +def clean_args(hargs, keys=['-q', '--queue', '--queue-args']): + """ + Filters out unwanted arguments + + :param hargs: Arguments passed + :type hargs: Iterable + :param keys: Unwanted arguments + :type keys: Iterable + :return: Filtered arguments + """ + indicies = [] + for i, arg in enumerate(hargs): + if arg in keys: + indicies.extend([i, i+1]) + for j in sorted(indicies, reverse=True): + del hargs[j] + return hargs + diff --git a/heudiconv/tests/test_queue.py b/heudiconv/tests/test_queue.py index a90dd9b5..612851ed 100644 --- a/heudiconv/tests/test_queue.py +++ b/heudiconv/tests/test_queue.py @@ -3,6 +3,7 @@ import subprocess from heudiconv.cli.run import main as runner +from heudiconv.queue import clean_args from .utils import TESTS_DATA_PATH import pytest from nipype.utils.filemanip import which @@ -44,3 +45,19 @@ def test_queue_no_slurm(tmpdir, invocation): finally: # revert before breaking something sys.argv = _sys_args + +def test_argument_filtering(tmpdir): + cmdargs = [ + 'heudiconv', + '--files', + '/fake/path/to/files', + '-f', + 'convertall', + '-q', + 'SLURM', + '--queue-args', + '--cpus-per-task=4 --contiguous --time=10' + ] + filtered = cmdargs[:-4] + + assert(clean_args(cmdargs) == filtered) From 66bc53f2cc8df9cc92692ba3636c9a3d7ab9c2cc Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 2 Apr 2019 12:17:47 -0400 Subject: [PATCH 50/56] fix: revert to explicit relative imports, use heudiconv executable for queue submissions --- heudiconv/__init__.py | 2 +- heudiconv/bids.py | 6 +++--- heudiconv/cli/run.py | 28 +++++++++------------------- heudiconv/convert.py | 6 +++--- heudiconv/dicoms.py | 10 +++++----- heudiconv/external/dcmstack.py | 4 ++-- heudiconv/parser.py | 4 ++-- heudiconv/queue.py | 12 +++++------- heudiconv/tests/test_queue.py | 3 +-- heudiconv/utils.py | 14 ++++++++------ 10 files changed, 39 insertions(+), 50 deletions(-) diff --git a/heudiconv/__init__.py b/heudiconv/__init__.py index 005d74e8..e6025296 100644 --- a/heudiconv/__init__.py +++ b/heudiconv/__init__.py @@ -1,7 +1,7 @@ # set logger handler import logging import os -from heudiconv.info import (__version__, __packagename__) +from .info import (__version__, __packagename__) # Rudimentary logging support. 
lgr = logging.getLogger(__name__) diff --git a/heudiconv/bids.py b/heudiconv/bids.py index 3f8d923d..42283e60 100644 --- a/heudiconv/bids.py +++ b/heudiconv/bids.py @@ -11,10 +11,10 @@ from random import sample from glob import glob -from heudiconv.external.pydicom import dcm +from .external.pydicom import dcm -from heudiconv.parser import find_files -from heudiconv.utils import ( +from .parser import find_files +from .utils import ( load_json, save_json, create_file_if_missing, diff --git a/heudiconv/cli/run.py b/heudiconv/cli/run.py index 7b9813cd..eaf6e629 100644 --- a/heudiconv/cli/run.py +++ b/heudiconv/cli/run.py @@ -5,12 +5,12 @@ from argparse import ArgumentParser import sys -from heudiconv import __version__, __packagename__ -from heudiconv.parser import get_study_sessions -from heudiconv.utils import load_heuristic, anonymize_sid, treat_infofile, SeqInfo -from heudiconv.convert import prep_conversion -from heudiconv.bids import populate_bids_templates, tuneup_bids_json_files -from heudiconv.queue import queue_conversion +from .. import __version__, __packagename__ +from ..parser import get_study_sessions +from ..utils import load_heuristic, anonymize_sid, treat_infofile, SeqInfo +from ..convert import prep_conversion +from ..bids import populate_bids_templates, tuneup_bids_json_files +from ..queue import queue_conversion import inspect import logging @@ -84,11 +84,11 @@ def process_extra_commands(outdir, args): elif args.command == 'sanitize-jsons': tuneup_bids_json_files(args.files) elif args.command == 'heuristics': - from heudiconv.utils import get_known_heuristics_with_descriptions + from .utils import get_known_heuristics_with_descriptions for name_desc in get_known_heuristics_with_descriptions().items(): print("- %s: %s" % name_desc) elif args.command == 'heuristic-info': - from heudiconv.utils import get_heuristic_description, get_known_heuristic_names + from .utils import get_heuristic_description, get_known_heuristic_names if not args.heuristic: raise ValueError("Specify heuristic using -f. Known are: %s" % ', '.join(get_known_heuristic_names())) @@ -284,15 +284,6 @@ def process_args(args): continue if args.queue: - # if seqinfo and not dicoms: - # # flatten them all and provide into batching, which again - # # would group them... 
heh - # dicoms = sum(seqinfo.values(), []) - # raise NotImplementedError( - # "we already grouped them so need to add a switch to avoid " - # "any grouping, so no outdir prefix doubled etc") - - pyscript = op.abspath(inspect.getfile(inspect.currentframe())) studyid = sid if session: @@ -302,8 +293,7 @@ def process_args(args): # remove any separators studyid = studyid.replace(op.sep, '_') - queue_conversion(pyscript, - args.queue, + queue_conversion(args.queue, studyid, args.queue_args) continue diff --git a/heudiconv/convert.py b/heudiconv/convert.py index 5fc0bc94..c1150395 100644 --- a/heudiconv/convert.py +++ b/heudiconv/convert.py @@ -4,7 +4,7 @@ import shutil import sys -from heudiconv.utils import ( +from .utils import ( read_config, load_json, save_json, @@ -18,7 +18,7 @@ assure_no_file_exists, file_md5sum ) -from heudiconv.bids import ( +from .bids import ( convert_sid_bids, populate_bids_templates, save_scans_key, @@ -26,7 +26,7 @@ add_participant_record, BIDSError ) -from heudiconv.dicoms import ( +from .dicoms import ( group_dicoms_into_seqinfos, embed_metadata_from_dicoms, compress_dicoms diff --git a/heudiconv/dicoms.py b/heudiconv/dicoms.py index 5a265f66..3e0491f0 100644 --- a/heudiconv/dicoms.py +++ b/heudiconv/dicoms.py @@ -5,8 +5,8 @@ from collections import OrderedDict import tarfile -from heudiconv.external.pydicom import dcm -from heudiconv.utils import SeqInfo, load_json, set_readonly +from .external.pydicom import dcm +from .utils import SeqInfo, load_json, set_readonly lgr = logging.getLogger(__name__) @@ -55,10 +55,10 @@ def group_dicoms_into_seqinfos(files, file_filter, dcmfilter, grouping): lgr.info('Filtering out {0} dicoms based on their filename'.format( nfl_before-nfl_after)) for fidx, filename in enumerate(files): - from heudiconv.external.dcmstack import ds + import nibabel.nicom.dicomwrappers as dw # TODO after getting a regression test check if the same behavior # with stop_before_pixels=True - mw = ds.wrapper_from_data(dcm.read_file(filename, force=True)) + mw = dw.wrapper_from_data(dcm.read_file(filename, force=True)) for sig in ('iop', 'ICE_Dims', 'SequenceName'): try: @@ -385,7 +385,7 @@ def embed_nifti(dcmfiles, niftifile, infofile, bids_info, min_meta): import re if not min_meta: - import dcmstack as ds + from .external.dcmstack import ds stack = ds.parse_and_stack(dcmfiles, force=True).values() if len(stack) > 1: raise ValueError('Found multiple series') diff --git a/heudiconv/external/dcmstack.py b/heudiconv/external/dcmstack.py index 8ce721f4..4e00eb25 100644 --- a/heudiconv/external/dcmstack.py +++ b/heudiconv/external/dcmstack.py @@ -2,12 +2,12 @@ from __future__ import absolute_import -from heudiconv.external.pydicom import dcm # to assure that we have it one way or another +from .pydicom import dcm # to assure that we have it one way or another try: import dcmstack as ds except ImportError as e: - from heudiconv import lgr + from .. 
import lgr # looks different between py2 and 3 so we go for very rudimentary matching e_str = str(e) # there were changes from how diff --git a/heudiconv/parser.py b/heudiconv/parser.py index 986d5df7..8ceba641 100644 --- a/heudiconv/parser.py +++ b/heudiconv/parser.py @@ -9,8 +9,8 @@ import tarfile from tempfile import mkdtemp -from heudiconv.dicoms import group_dicoms_into_seqinfos -from heudiconv.utils import ( +from .dicoms import group_dicoms_into_seqinfos +from .utils import ( docstring_parameter, StudySessionInfo, ) diff --git a/heudiconv/queue.py b/heudiconv/queue.py index 7fe9fda1..2a24b909 100644 --- a/heudiconv/queue.py +++ b/heudiconv/queue.py @@ -1,20 +1,19 @@ import subprocess import sys import os - import logging +from .utils import which + lgr = logging.getLogger(__name__) -def queue_conversion(pyscript, queue, studyid, queue_args=None): +def queue_conversion(queue, studyid, queue_args=None): """ Write out conversion arguments to file and submit to a job scheduler. Parses `sys.argv` for heudiconv arguments. Parameters ---------- - pyscript: file - path to `heudiconv` script queue: string batch scheduler to use studyid: string @@ -34,9 +33,8 @@ def queue_conversion(pyscript, queue, studyid, queue_args=None): args = clean_args(sys.argv[1:]) # make arguments executable - args.insert(0, pyscript) - pypath = sys.executable or "python" - args.insert(0, pypath) + heudiconv_exec = which("heudiconv") or "heudiconv" + args.insert(0, heudiconv_exec) convertcmd = " ".join(args) # will overwrite across subjects diff --git a/heudiconv/tests/test_queue.py b/heudiconv/tests/test_queue.py index 612851ed..aea40931 100644 --- a/heudiconv/tests/test_queue.py +++ b/heudiconv/tests/test_queue.py @@ -3,10 +3,9 @@ import subprocess from heudiconv.cli.run import main as runner -from heudiconv.queue import clean_args +from heudiconv.queue import clean_args, which from .utils import TESTS_DATA_PATH import pytest -from nipype.utils.filemanip import which @pytest.mark.skipif(which("sbatch"), reason="skip a real slurm call") @pytest.mark.parametrize( diff --git a/heudiconv/utils.py b/heudiconv/utils.py index a4cc1c93..847dfafd 100644 --- a/heudiconv/utils.py +++ b/heudiconv/utils.py @@ -12,6 +12,9 @@ from pathlib import Path from collections import namedtuple from glob import glob +from subprocess import check_output + +from nipype.utils.filemanip import which import logging lgr = logging.getLogger(__name__) @@ -103,18 +106,17 @@ def dec(obj): def anonymize_sid(sid, anon_sid_cmd): - import sys - from subprocess import check_output - + cmd = [anon_sid_cmd, sid] shell_return = check_output(cmd) - ### Handle subprocess returning a bytes literal string to a python3 interpreter - if all([sys.version_info[0] > 2, isinstance(shell_return, bytes), isinstance(sid, str)]): + if all([sys.version_info[0] > 2, + isinstance(shell_return, bytes), + isinstance(sid, str)]): anon_sid = shell_return.decode() else: anon_sid = shell_return - + return anon_sid.strip() From 9255e2b13a8b9305922a1b991880ddd85653bead Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 3 Apr 2019 14:24:47 -0400 Subject: [PATCH 51/56] fix: imports, outdated argument help --- heudiconv/cli/run.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/heudiconv/cli/run.py b/heudiconv/cli/run.py index eaf6e629..eabd71af 100644 --- a/heudiconv/cli/run.py +++ b/heudiconv/cli/run.py @@ -84,11 +84,11 @@ def process_extra_commands(outdir, args): elif args.command == 'sanitize-jsons': tuneup_bids_json_files(args.files) elif args.command 
== 'heuristics': - from .utils import get_known_heuristics_with_descriptions + from ..utils import get_known_heuristics_with_descriptions for name_desc in get_known_heuristics_with_descriptions().items(): print("- %s: %s" % name_desc) elif args.command == 'heuristic-info': - from .utils import get_heuristic_description, get_known_heuristic_names + from ..utils import get_heuristic_description, get_known_heuristic_names if not args.heuristic: raise ValueError("Specify heuristic using -f. Known are: %s" % ', '.join(get_known_heuristic_names())) @@ -142,7 +142,7 @@ def get_parser(): group.add_argument('--files', nargs='*', help='Files (tarballs, dicoms) or directories ' 'containing files to process. Cannot be provided if ' - 'using --dicom_dir_template or --subjects') + 'using --dicom_dir_template.') parser.add_argument('-s', '--subjects', dest='subjs', type=str, nargs='*', help='list of subjects - required for dicom template. ' 'If not provided, DICOMS would first be "sorted" and ' @@ -173,8 +173,6 @@ def get_parser(): 'single argument and return a single anonymized ID. ' 'Also see --conv-outdir') parser.add_argument('-f', '--heuristic', dest='heuristic', - # some commands might not need heuristic - # required=True, help='Name of a known heuristic or path to the Python' 'script containing heuristic') parser.add_argument('-p', '--with-prov', action='store_true', From 96d69deeb3c5ec462c26e99f38888b5d6609d7ec Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 3 Apr 2019 17:32:28 -0400 Subject: [PATCH 52/56] fix: minimize processing before batch submission --- heudiconv/cli/run.py | 22 ++----- heudiconv/queue.py | 121 +++++++++++++++++++++++----------- heudiconv/tests/test_queue.py | 55 ++++++++++++---- 3 files changed, 132 insertions(+), 66 deletions(-) diff --git a/heudiconv/cli/run.py b/heudiconv/cli/run.py index eabd71af..f51d167f 100644 --- a/heudiconv/cli/run.py +++ b/heudiconv/cli/run.py @@ -246,6 +246,13 @@ def process_args(args): if not args.heuristic: raise RuntimeError("No heuristic specified - add to arguments and rerun") + if args.queue: + lgr.info("Queuing %s conversion", args.queue) + iterarg, iterables = ("files", len(args.files)) if args.files else \ + ("subjects", len(args.subjs)) + queue_conversion(args.queue, iterarg, iterables, args.queue_args) + sys.exit(0) + heuristic = load_heuristic(args.heuristic) study_sessions = get_study_sessions(args.dicom_dir_template, args.files, @@ -281,21 +288,6 @@ def process_args(args): lgr.warning("Skipping unknown locator dataset") continue - if args.queue: - - studyid = sid - if session: - studyid += "-%s" % session - if locator: - studyid += "-%s" % locator - # remove any separators - studyid = studyid.replace(op.sep, '_') - - queue_conversion(args.queue, - studyid, - args.queue_args) - continue - anon_sid = anonymize_sid(sid, args.anon_cmd) if args.anon_cmd else None if args.anon_cmd: lgr.info('Anonymized {} to {}'.format(sid, anon_sid)) diff --git a/heudiconv/queue.py b/heudiconv/queue.py index 2a24b909..98498782 100644 --- a/heudiconv/queue.py +++ b/heudiconv/queue.py @@ -7,7 +7,7 @@ lgr = logging.getLogger(__name__) -def queue_conversion(queue, studyid, queue_args=None): +def queue_conversion(queue, iterarg, iterables, queue_args=None): """ Write out conversion arguments to file and submit to a job scheduler. Parses `sys.argv` for heudiconv arguments. 
@@ -15,56 +15,99 @@ def queue_conversion(queue, studyid, queue_args=None): Parameters ---------- queue: string - batch scheduler to use - studyid: string - identifier for conversion + Batch scheduler to use + iterarg: str + Multi-argument to index (`subjects` OR `files`) + iterables: int + Number of `iterarg` arguments queue_args: string (optional) - additional queue arguments for job submission + Additional queue arguments for job submission - Returns - ------- - proc: int - Queue submission exit code """ SUPPORTED_QUEUES = {'SLURM': 'sbatch'} if queue not in SUPPORTED_QUEUES: raise NotImplementedError("Queuing with %s is not supported", queue) - args = clean_args(sys.argv[1:]) - # make arguments executable - heudiconv_exec = which("heudiconv") or "heudiconv" - args.insert(0, heudiconv_exec) - convertcmd = " ".join(args) - - # will overwrite across subjects - queue_file = os.path.abspath('heudiconv-%s.sh' % queue) - with open(queue_file, 'wt') as fp: - fp.write("#!/bin/bash\n") - if queue_args: - for qarg in queue_args.split(): - fp.write("#SBATCH %s\n" % qarg) - fp.write(convertcmd + "\n") - - cmd = [SUPPORTED_QUEUES[queue], queue_file] - proc = subprocess.call(cmd) - return proc - -def clean_args(hargs, keys=['-q', '--queue', '--queue-args']): + for i in range(iterables): + args = clean_args(sys.argv[1:], iterarg, i) + # make arguments executable + heudiconv_exec = which("heudiconv") or "heudiconv" + args.insert(0, heudiconv_exec) + convertcmd = " ".join(args) + + print(convertcmd) + # will overwrite across subjects + queue_file = os.path.abspath('heudiconv-%s.sh' % queue) + with open(queue_file, 'wt') as fp: + fp.write("#!/bin/bash\n") + if queue_args: + for qarg in queue_args.split(): + fp.write("#SBATCH %s\n" % qarg) + fp.write(convertcmd + "\n") + + cmd = [SUPPORTED_QUEUES[queue], queue_file] + proc = subprocess.call(cmd) + lgr.info("Submitted %d jobs", iterables) + +def clean_args(hargs, iterarg, iteridx): """ - Filters out unwanted arguments + Filters arguments for batch submission. + + Parameters + ---------- + hargs: list + Command-line arguments + iterarg: str + Multi-argument to index (`subjects` OR `files`) + iteridx: int + `iterarg` index to submit + + Returns + ------- + cmdargs : list + Filtered arguments for batch submission - :param hargs: Arguments passed - :type hargs: Iterable - :param keys: Unwanted arguments - :type keys: Iterable - :return: Filtered arguments + Example + -------- + >>> from heudiconv.queue import clean_args + >>> cmd = ['heudiconv', '-d', '/some/{subject}/path', + ... '-q', 'SLURM', + ... 
'-s', 'sub-1', 'sub-2', 'sub-3', 'sub-4'] + >>> clean_args(cmd, 'subjects', 0) + ['heudiconv', '-d', '/some/{subject}/path', '-s', 'sub-1'] """ + + if iterarg == "subjects": + iterarg = ['-s', '--subjects'] + elif iterarg == "files": + iterarg = ['--files'] + else: + raise ValueError("Cannot index %s" % iterarg) + + # remove these or cause an infinite loop + queue_args = ['-q', '--queue', '--queue-args'] + + # control variables for multi-argument parsing + is_iterarg = False + itercount = 0 + indicies = [] + cmdargs = hargs[:] + for i, arg in enumerate(hargs): - if arg in keys: + if arg.startswith('-') and is_iterarg: + # moving on to another argument + is_iterarg = False + if is_iterarg: + if iteridx != itercount: + indicies.append(i) + itercount += 1 + if arg in iterarg: + is_iterarg = True + if arg in queue_args: indicies.extend([i, i+1]) - for j in sorted(indicies, reverse=True): - del hargs[j] - return hargs + for j in sorted(indicies, reverse=True): + del cmdargs[j] + return cmdargs diff --git a/heudiconv/tests/test_queue.py b/heudiconv/tests/test_queue.py index aea40931..8d80448d 100644 --- a/heudiconv/tests/test_queue.py +++ b/heudiconv/tests/test_queue.py @@ -23,7 +23,7 @@ def test_queue_no_slurm(tmpdir, invocation): sys.argv = ['heudiconv'] + hargs try: - with pytest.raises(OSError): + with pytest.raises(OSError): # SLURM should not be installed runner(hargs) # should have generated a slurm submission script slurm_cmd_file = (tmpdir / 'heudiconv-SLURM.sh').strpath @@ -46,17 +46,48 @@ def test_queue_no_slurm(tmpdir, invocation): sys.argv = _sys_args def test_argument_filtering(tmpdir): - cmdargs = [ - 'heudiconv', - '--files', - '/fake/path/to/files', - '-f', - 'convertall', - '-q', - 'SLURM', - '--queue-args', + cmd_files = [ + 'heudiconv', + '--files', + '/fake/path/to/files', + '/another/fake/path', + '-f', + 'convertall', + '-q', + 'SLURM', + '--queue-args', '--cpus-per-task=4 --contiguous --time=10' ] - filtered = cmdargs[:-4] + filtered = [ + 'heudiconv', + '--files', + '/another/fake/path', + '-f', + 'convertall', + ] + assert clean_args(cmd_files, 'files', 1) == filtered - assert(clean_args(cmdargs) == filtered) + cmd_subjects = [ + 'heudiconv', + '-d', + '/some/{subject}/path', + '--queue', + 'SLURM', + '--subjects', + 'sub1', + 'sub2', + 'sub3', + 'sub4', + '-f', + 'convertall' + ] + filtered = [ + 'heudiconv', + '-d', + '/some/{subject}/path', + '--subjects', + 'sub3', + '-f', + 'convertall' + ] + assert clean_args(cmd_subjects, 'subjects', 2) == filtered From da45311100d613a2409112b17a19679618da6bfc Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 3 Apr 2019 17:47:13 -0400 Subject: [PATCH 53/56] fix: remove print statement --- heudiconv/queue.py | 1 - 1 file changed, 1 deletion(-) diff --git a/heudiconv/queue.py b/heudiconv/queue.py index 98498782..8e091ca3 100644 --- a/heudiconv/queue.py +++ b/heudiconv/queue.py @@ -36,7 +36,6 @@ def queue_conversion(queue, iterarg, iterables, queue_args=None): args.insert(0, heudiconv_exec) convertcmd = " ".join(args) - print(convertcmd) # will overwrite across subjects queue_file = os.path.abspath('heudiconv-%s.sh' % queue) with open(queue_file, 'wt') as fp: From 462142302798353484cb981a44f83b722d4fd7a7 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 4 Apr 2019 11:19:49 -0400 Subject: [PATCH 54/56] fix: avoid relative import in nipype node --- heudiconv/dicoms.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/heudiconv/dicoms.py b/heudiconv/dicoms.py index 3e0491f0..9ef9b9c9 100644 --- a/heudiconv/dicoms.py 
+++ b/heudiconv/dicoms.py @@ -385,7 +385,7 @@ def embed_nifti(dcmfiles, niftifile, infofile, bids_info, min_meta): import re if not min_meta: - from .external.dcmstack import ds + from heudiconv.external.dcmstack import ds stack = ds.parse_and_stack(dcmfiles, force=True).values() if len(stack) > 1: raise ValueError('Found multiple series') From c79d151855a93ea1f2025b8e7fecab5a538b2680 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Fri, 26 Apr 2019 15:28:41 -0400 Subject: [PATCH 55/56] bump: dcm2niix to latest release v1.0.20190410 --- Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 1bfa5c70..49a763bb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -39,7 +39,7 @@ RUN export ND_ENTRYPOINT="/neurodocker/startup.sh" \ ENTRYPOINT ["/neurodocker/startup.sh"] -ENV PATH="/opt/dcm2niix-v1.0.20181125/bin:$PATH" +ENV PATH="/opt/dcm2niix-v1.0.20190410/bin:$PATH" RUN apt-get update -qq \ && apt-get install -y -q --no-install-recommends \ cmake \ @@ -54,10 +54,10 @@ RUN apt-get update -qq \ && git clone https://github.com/rordenlab/dcm2niix /tmp/dcm2niix \ && cd /tmp/dcm2niix \ && git fetch --tags \ - && git checkout v1.0.20181125 \ + && git checkout v1.0.20190410 \ && mkdir /tmp/dcm2niix/build \ && cd /tmp/dcm2niix/build \ - && cmake -DCMAKE_INSTALL_PREFIX:PATH=/opt/dcm2niix-v1.0.20181125 .. \ + && cmake -DCMAKE_INSTALL_PREFIX:PATH=/opt/dcm2niix-v1.0.20190410 .. \ && make \ && make install \ && rm -rf /tmp/dcm2niix @@ -115,7 +115,7 @@ RUN echo '{ \ \n [ \ \n "dcm2niix", \ \n { \ - \n "version": "v1.0.20180622", \ + \n "version": "v1.0.20190410", \ \n "method": "source" \ \n } \ \n ], \ From 17fdb9c80ad14aeb7ffc83707424a26d8c069c4c Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 29 Apr 2019 11:43:22 -0400 Subject: [PATCH 56/56] rel: 0.5.4 --- heudiconv/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/heudiconv/info.py b/heudiconv/info.py index 69c291b0..19a93d0d 100644 --- a/heudiconv/info.py +++ b/heudiconv/info.py @@ -1,4 +1,4 @@ -__version__ = "0.5.4.dev1" +__version__ = "0.5.4" __author__ = "HeuDiConv team and contributors" __url__ = "https://github.com/nipy/heudiconv" __packagename__ = 'heudiconv'
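
With the per-subject submission rework above, one invocation can now queue a
separate SLURM job for every subject. A minimal sketch of such a call (the
paths, subject IDs, and ``convertall`` heuristic are placeholders, and
``sbatch`` is assumed to be available)::

    heudiconv -d '/dicom/storage/{subject}/*/*.dcm' \
        -s qa01 qa02 qa03 \
        -f convertall -c dcm2niix -b -o /converted/data \
        -q SLURM --queue-args '--cpus-per-task=4 --time=10'

For each subject, ``queue_conversion`` writes a ``heudiconv-SLURM.sh`` stub
whose ``#SBATCH`` lines are built from ``--queue-args``, drops the
``-q``/``--queue-args`` flags and the remaining subjects via ``clean_args``,
and submits the script with ``sbatch``.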