From 1374b8901681431e84ecdee09419a9bd622a8f14 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Mon, 4 Feb 2019 17:00:12 -0500
Subject: [PATCH] ENH: Calculate TA and store in run_info

---
 bids/analysis/analysis.py  | 13 +++++++++++--
 bids/variables/entities.py |  8 +++++---
 bids/variables/io.py       | 26 +++++++++++++++++++++++---
 3 files changed, 39 insertions(+), 8 deletions(-)

diff --git a/bids/analysis/analysis.py b/bids/analysis/analysis.py
index fd3c28660..814d45f1b 100644
--- a/bids/analysis/analysis.py
+++ b/bids/analysis/analysis.py
@@ -388,13 +388,22 @@ def get_design_matrix(self, names=None, format='long', mode='both',
         if sampling_rate == 'TR':
             trs = {var.run_info[0].tr for var in
                    self.collection.variables.values()}
+            tas = {var.run_info[0].ta for var in self.collection.variables.values()}
             if not trs:
                 raise ValueError("Repetition time unavailable; specify sampling_rate "
                                  "explicitly")
             elif len(trs) > 1:
                 raise ValueError("Non-unique Repetition times found ({!r}); specify "
-                                 "sampling_rate explicitly")
-            sampling_rate = 1. / trs.pop()
+                                 "sampling_rate explicitly".format(trs))
+            TR = trs.pop()
+            if not tas:
+                warnings.warn("Acquisition time unavailable; assuming TA = TR")
+                tas = {TR}
+            elif len(tas) > 1:
+                raise ValueError("Non-unique acquisition times found ({!r})".format(tas))
+
+            sampling_rate = 1. / TR
+            acquisition_time = tas.pop()
         elif sampling_rate == 'highest':
             sampling_rate = None
         dense_df = coll.to_df(names, format='wide',
diff --git a/bids/variables/entities.py b/bids/variables/entities.py
index a1060f6f3..d46da3b36 100644
--- a/bids/variables/entities.py
+++ b/bids/variables/entities.py
@@ -36,23 +36,25 @@ class RunNode(Node):
         image_file (str): The full path to the corresponding nifti image.
         duration (float): Duration of the run, in seconds.
         repetition_time (float): TR for the run.
+        acquisition_time (float): TA for the run.
         task (str): The task name for this run.
     '''

-    def __init__(self, entities, image_file, duration, repetition_time):
+    def __init__(self, entities, image_file, duration, repetition_time, acquisition_time):
         self.image_file = image_file
         self.duration = duration
         self.repetition_time = repetition_time
+        self.acquisition_time = acquisition_time
         super(RunNode, self).__init__('run', entities)

     def get_info(self):

         return RunInfo(self.entities, self.duration, self.repetition_time,
-                       self.image_file)
+                       self.acquisition_time, self.image_file)


 # Stores key information for each Run.
-RunInfo = namedtuple('RunInfo', ['entities', 'duration', 'tr', 'image'])
+RunInfo = namedtuple('RunInfo', ['entities', 'duration', 'tr', 'ta', 'image'])


 class NodeIndex(Node):
diff --git a/bids/variables/io.py b/bids/variables/io.py
index d35c32094..8a0c516c5 100644
--- a/bids/variables/io.py
+++ b/bids/variables/io.py
@@ -82,6 +82,24 @@ def load_variables(layout, types=None, levels=None, skip_empty=True,
     return dataset


+def _get_timing_info(img_md):
+    if 'RepetitionTime' in img_md:
+        tr = img_md['RepetitionTime']
+        if 'DelayTime' in img_md:
+            ta = tr - img_md['DelayTime']
+        elif 'SliceTiming' in img_md:
+            slicetimes = sorted(img_md['SliceTiming'])
+            # a, b ... z
+            # z = final slice onset, b - a = slice duration
+            ta = slicetimes[-1] + slicetimes[1] - slicetimes[0]
+        else:
+            ta = tr
+    elif 'VolumeTiming' in img_md:
+        return NotImplemented
+
+    return tr, ta
+
+
 def _load_time_variables(layout, dataset=None, columns=None, scan_length=None,
                          drop_na=True, events=True, physio=True, stim=True,
                          regressors=True, skip_empty=True, **selectors):
@@ -141,8 +159,9 @@ def _load_time_variables(layout, dataset=None, columns=None, scan_length=None,
         if 'run' in entities:
             entities['run'] = int(entities['run'])

-        tr = layout.get_metadata(img_f, suffix='bold', domains=domains,
-                                 full_search=True)['RepetitionTime']
+        img_md = layout.get_metadata(img_f, suffix='bold', domains=domains,
+                                     full_search=True)
+        tr, ta = _get_timing_info(img_md)

         # Get duration of run: first try to get it directly from the image
         # header; if that fails, try to get NumberOfVolumes from the
@@ -162,7 +181,8 @@ def _load_time_variables(layout, dataset=None, columns=None, scan_length=None,
             raise ValueError(msg)

         run = dataset.get_or_create_node('run', entities, image_file=img_f,
-                                         duration=duration, repetition_time=tr)
+                                         duration=duration, repetition_time=tr,
+                                         acquisition_time=ta)
         run_info = run.get_info()

         # Process event files
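
A minimal usage sketch of the new helper: the metadata dicts below are invented
examples, and the import assumes this patch has been applied, since
_get_timing_info is a private function added to bids/variables/io.py.

    from bids.variables.io import _get_timing_info

    # TA from DelayTime: TA = TR - DelayTime
    print(_get_timing_info({'RepetitionTime': 2.0, 'DelayTime': 0.5}))
    # -> (2.0, 1.5)

    # TA from SliceTiming: final slice onset plus one slice duration
    print(_get_timing_info({'RepetitionTime': 2.0,
                            'SliceTiming': [0.0, 0.5, 1.0, 1.5]}))
    # -> (2.0, 2.0)

    # No DelayTime or SliceTiming: fall back to TA = TR
    print(_get_timing_info({'RepetitionTime': 2.0}))
    # -> (2.0, 2.0)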