
Source Code for Module mvpa.measures.noiseperturbation

# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
#   See COPYING file distributed along with the PyMVPA package for the
#   copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""This is a `FeaturewiseDatasetMeasure` that uses a scalar `DatasetMeasure` and
selective noise perturbation to compute a sensitivity map.
"""

__docformat__ = 'restructuredtext'

if __debug__:
    from mvpa.base import debug

from mvpa.support.copy import deepcopy

import numpy as N

from mvpa.measures.base import FeaturewiseDatasetMeasure

class NoisePerturbationSensitivity(FeaturewiseDatasetMeasure):
    """This is a `FeaturewiseDatasetMeasure` that uses a scalar
    `DatasetMeasure` and selective noise perturbation to compute a sensitivity
    map.

    First the scalar `DatasetMeasure` is computed on the original dataset. Next
    the data measure is computed multiple times, each time with a single
    feature in the dataset perturbed by noise. The resulting difference in the
    scalar `DatasetMeasure` is used as the sensitivity for the respective
    perturbed feature. Large differences are treated as an indicator of a
    feature having great impact on the scalar `DatasetMeasure`.

    The computed sensitivity map might have positive and negative values!
    """
    def __init__(self, datameasure,
                 noise=N.random.normal):
        """Cheap initialization.

        :Parameters:
          datameasure: `DatasetMeasure` that is used to quantify the effect of
            noise perturbation.
          noise: Functor to generate noise. The noise generator has to return
            a 1d array of n values when called with the `size=n` keyword
            argument. This is the default interface of the random number
            generators in NumPy's `random` module.
        """
        # init base classes first
        FeaturewiseDatasetMeasure.__init__(self)

        self.__datameasure = datameasure
        self.__noise = noise


    def _call(self, dataset):
        """Compute the sensitivity map.

        Returns a 1d array of sensitivities for all features in `dataset`.
        """
        # first cast to floating point dtype, because noise is most likely
        # floating point as well and '+=' on int would not do the right thing
        # XXX should we already deepcopy here to keep orig dtype?
        if not N.issubdtype(dataset.samples.dtype, N.float):
            dataset.setSamplesDType('float32')

        if __debug__:
            nfeatures = dataset.nfeatures

        sens_map = []

        # compute the datameasure on the original dataset
        # this is used as a baseline
        orig_measure = self.__datameasure(dataset)

        # do for every _single_ feature in the dataset
        for feature in xrange(dataset.nfeatures):
            if __debug__:
                debug('PSA', "Analyzing %i features: %i [%i%%]" \
                      % (nfeatures,
                         feature+1,
                         float(feature+1)/nfeatures*100,), cr=True)

            # make a copy of the dataset to preserve data integrity
            wdata = deepcopy(dataset)

            # add noise to current feature
            wdata.samples[:, feature] += self.__noise(size=wdata.nsamples)

            # compute the datameasure on the perturbed dataset
            perturbed_measure = self.__datameasure(wdata)

            # difference from original datameasure is sensitivity
            sens_map.append(perturbed_measure - orig_measure)

        if __debug__:
            debug('PSA', '')

        return N.array(sens_map)
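
For illustration, a minimal usage sketch follows. It assumes the PyMVPA 0.4-era `Dataset(samples=..., labels=...)` constructor and a `labels` attribute, and it treats `datameasure` as any callable that maps a dataset to a scalar, which is all the `_call` implementation above requires; the measure function and dataset shapes are hypothetical and not part of this module.

import numpy as N

from mvpa.datasets import Dataset
from mvpa.measures.noiseperturbation import NoisePerturbationSensitivity

# hypothetical scalar measure: correlation of the first feature with the labels
def first_feature_correlation(dataset):
    return N.corrcoef(dataset.samples[:, 0], dataset.labels)[0, 1]

# small synthetic dataset: 50 samples, 5 features, binary labels
# (Dataset constructor assumed from the PyMVPA 0.4-era API)
ds = Dataset(samples=N.random.normal(size=(50, 5)),
             labels=N.repeat([0, 1], 25))

# perturb each feature with Gaussian noise and record the change in the measure
sens = NoisePerturbationSensitivity(first_feature_correlation,
                                    noise=N.random.normal)
sens_map = sens(ds)   # 1d array with one sensitivity value per feature

Because the measure here depends only on the first feature, perturbing that feature should change it noticeably, while the remaining features should yield sensitivities close to zero.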