|
27 | 27 | from ..interfaces.base import (traits, TraitedSpec, BaseInterface, |
28 | 28 | BaseInterfaceInputSpec, File, isdefined, |
29 | 29 | InputMultiPath) |
30 | | -from nipype.utils import NUMPY_MMAP |
| 30 | +from ..utils import NUMPY_MMAP |
| 31 | +from ..utils.misc import normalize_mc_params |
31 | 32 |
|
32 | 33 | IFLOG = logging.getLogger('interface') |
33 | 34 |
|
@@ -206,7 +207,10 @@ def _list_outputs(self): |
206 | 207 |
|
207 | 208 |
|
208 | 209 | class FramewiseDisplacementInputSpec(BaseInterfaceInputSpec): |
209 | | - in_plots = File(exists=True, mandatory=True, desc='motion parameters as written by FSL MCFLIRT') |
| 210 | + in_file = File(exists=True, mandatory=True, desc='motion parameters') |
| 211 | + parameter_source = traits.Enum("FSL", "AFNI", "SPM", "FSFAST", |
| 212 | + desc="Source of movement parameters", |
| 213 | + mandatory=True) |
210 | 214 | radius = traits.Float(50, usedefault=True, |
211 | 215 | desc='radius in mm to calculate angular FDs, 50mm is the ' |
212 | 216 | 'default since it is used in Power et al. 2012') |
@@ -260,9 +264,12 @@ class FramewiseDisplacement(BaseInterface): |
260 | 264 | }] |
261 | 265 |
|
262 | 266 | def _run_interface(self, runtime): |
263 | | - mpars = np.loadtxt(self.inputs.in_plots) # mpars is N_t x 6 |
264 | | - diff = mpars[:-1, :] - mpars[1:, :] |
265 | | - diff[:, :3] *= self.inputs.radius |
| 267 | + mpars = np.loadtxt(self.inputs.in_file) # mpars is N_t x 6 |
| 268 | + mpars = np.apply_along_axis(func1d=normalize_mc_params, |
| 269 | + axis=1, arr=mpars, |
| 270 | + source=self.inputs.parameter_source) |
| 271 | + diff = mpars[:-1, :6] - mpars[1:, :6] |
| 272 | + diff[:, 3:6] *= self.inputs.radius |
266 | 273 | fd_res = np.abs(diff).sum(axis=1) |
267 | 274 |
|
268 | 275 | self._results = { |
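A minimal usage sketch of the updated interface, assuming this file is nipype/algorithms/confounds.py and using an illustrative MCFLIRT parameter file; parameter_source lets normalize_mc_params rewrite the six columns into a common order (translations first, then rotations in radians), which is why columns 3:6 are the ones scaled by the radius:

    from nipype.algorithms.confounds import FramewiseDisplacement

    fd = FramewiseDisplacement()
    fd.inputs.in_file = 'mcflirt.par'       # illustrative path; N_t x 6 motion parameters
    fd.inputs.parameter_source = 'FSL'      # mandatory after this change
    res = fd.run()                          # computes FD as in Power et al. 2012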
@@ -572,6 +579,79 @@ def _list_outputs(self): |
572 | 579 | outputs['detrended_file'] = op.abspath(self.inputs.detrended_file) |
573 | 580 | return outputs |
574 | 581 |
|
| 582 | + |
| 583 | +class NonSteadyStateDetectorInputSpec(BaseInterfaceInputSpec): |
| 584 | + in_file = File(exists=True, mandatory=True, desc='4D NIFTI EPI file') |
| 585 | + |
| 586 | + |
| 587 | +class NonSteadyStateDetectorOutputSpec(TraitedSpec): |
| 588 | + n_volumes_to_discard = traits.Int(desc='Number of non-steady state volumes ' |
| 589 | + 'detected at the beginning of the scan.') |
| 590 | + |
| 591 | + |
| 592 | +class NonSteadyStateDetector(BaseInterface): |
| 593 | + """ |
| 594 | + Returns the number of non-steady state volumes detected at the beginning |
| 595 | + of the scan. |
| 596 | + """ |
| 597 | + |
| 598 | + input_spec = NonSteadyStateDetectorInputSpec |
| 599 | + output_spec = NonSteadyStateDetectorOutputSpec |
| 600 | + |
| 601 | + def _run_interface(self, runtime): |
| 602 | + in_nii = nb.load(self.inputs.in_file) |
| 603 | + global_signal = in_nii.get_data()[:, :, :, :50].mean(axis=0).mean(axis=0).mean(axis=0) |
| 604 | + |
| 605 | + self._results = { |
| 606 | + 'n_volumes_to_discard': _is_outlier(global_signal) |
| 607 | + } |
| 608 | + |
| 609 | + return runtime |
| 610 | + |
| 611 | + def _list_outputs(self): |
| 612 | + return self._results |
| 613 | + |
| 614 | +def _is_outlier(points, thresh=3.5): |
| 615 | + """ |
| 616 | + Returns the number of leading observations whose modified z-score |
| 617 | + classifies them as outliers. |
| 618 | +
|
| 619 | + Parameters: |
| 620 | + ----------- |
| 621 | + points : A numobservations by numdimensions array of observations |
| 622 | + thresh : The modified z-score to use as a threshold. Observations with |
| 623 | + a modified z-score (based on the median absolute deviation) greater |
| 624 | + than this value will be classified as outliers. |
| 625 | +
|
| 626 | + Returns: |
| 627 | + -------- |
| 628 | + int : Number of leading observations classified as outliers. |
| 629 | +
|
| 630 | + References: |
| 631 | + ----------- |
| 632 | + Boris Iglewicz and David Hoaglin (1993), "Volume 16: How to Detect and |
| 633 | + Handle Outliers", The ASQC Basic References in Quality Control: |
| 634 | + Statistical Techniques, Edward F. Mykytka, Ph.D., Editor. |
| 635 | + """ |
| 636 | + if len(points.shape) == 1: |
| 637 | + points = points[:, None] |
| 638 | + median = np.median(points, axis=0) |
| 639 | + diff = np.sum((points - median) ** 2, axis=-1) |
| 640 | + diff = np.sqrt(diff) |
| 641 | + med_abs_deviation = np.median(diff) |
| 642 | + |
| 643 | + modified_z_score = 0.6745 * diff / med_abs_deviation |
| 644 | + |
| 645 | + timepoints_to_discard = 0 |
| 646 | + for i in range(len(modified_z_score)): |
| 647 | + if modified_z_score[i] <= thresh: |
| 648 | + break |
| 649 | + else: |
| 650 | + timepoints_to_discard += 1 |
| 651 | + |
| 652 | + return timepoints_to_discard |
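A worked example of the heuristic with made-up global-signal means; only the first two values lie far enough above the median for their modified z-score to exceed the 3.5 threshold, so the function returns 2:

    import numpy as np

    signal = np.array([900., 720., 510., 503., 505., 501., 504.])  # hypothetical means
    median = np.median(signal)                   # 505.0
    mad = np.median(np.abs(signal - median))     # 4.0 (median absolute deviation)
    z = 0.6745 * np.abs(signal - median) / mad   # [66.6, 36.3, 0.8, 0.3, 0.0, 0.7, 0.2]
    # leading entries with z > 3.5: the first two, so 2 volumes are discarded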
| 653 | + |
| 654 | + |
575 | 655 | def regress_poly(degree, data, remove_mean=True, axis=-1): |
576 | 656 | ''' returns data with degree polynomial regressed out. |
577 | 657 | Be default it is calculated along the last axis (usu. time). |
|