"""This is a `FeaturewiseDatasetMeasure` that uses a scalar `DatasetMeasure` and
selective noise perturbation to compute a sensitivity map.
"""

__docformat__ = 'restructuredtext'

if __debug__:
    from mvpa.misc import debug

from copy import deepcopy

import numpy as N

from mvpa.measures.base import FeaturewiseDatasetMeasure


class NoisePerturbationSensitivity(FeaturewiseDatasetMeasure):
    """This is a `FeaturewiseDatasetMeasure` that uses a scalar
    `DatasetMeasure` and selective noise perturbation to compute a sensitivity
    map.

    First, the scalar `DatasetMeasure` is computed on the original dataset.
    Next, the measure is recomputed multiple times, each time with a single
    feature in the dataset perturbed by noise. The resulting difference in the
    scalar `DatasetMeasure` is used as the sensitivity of the respective
    perturbed feature. Large differences are treated as an indicator of a
    feature having a great impact on the scalar `DatasetMeasure`.

    The computed sensitivity map might have positive and negative values!
    """
    def __init__(self, datameasure,
                 noise=N.random.normal):
        """Cheap initialization.

        Parameters
        ----------
        - `datameasure`: `DatasetMeasure` that is used to quantify the effect
          of noise perturbation.
        - `noise`: Functor to generate noise. The noise generator has to
          return a 1d array of n values when called with the `size=n` keyword
          argument. This is the default interface of the random number
          generators in NumPy's `random` module.
        """

        # initialize the base class first
        FeaturewiseDatasetMeasure.__init__(self)

        self.__datameasure = datameasure
        self.__noise = noise


    def _call(self, dataset):
        """Compute the sensitivity map.

        Returns a 1d array of sensitivities for all features in `dataset`.
        """
        # cast to a floating point dtype first: the noise is most likely
        # floating point and an in-place '+=' on integer samples would not
        # do the right thing
        if not N.issubdtype(dataset.samples.dtype, N.float):
            dataset.setSamplesDType('float32')

        if __debug__:
            nfeatures = dataset.nfeatures

        sens_map = []

        # compute the datameasure on the original dataset; this serves as
        # the baseline for all perturbed measurements
        orig_measure = self.__datameasure(dataset)

        # recompute the measure once per feature, each time with noise added
        # to that single feature only
        for feature in xrange(dataset.nfeatures):
            if __debug__:
                debug('PSA', "Analyzing %i features: %i [%i%%]" \
                      % (nfeatures,
                         feature+1,
                         float(feature+1)/nfeatures*100,), cr=True)

            # work on a copy of the dataset to preserve the original data
            wdata = deepcopy(dataset)

            # perturb the current feature by adding noise to it
            wdata.samples[:, feature] += self.__noise(size=wdata.nsamples)

            # recompute the datameasure on the perturbed dataset
            perturbed_measure = self.__datameasure(wdata)

            # the difference from the baseline measure is the sensitivity
            sens_map.append(perturbed_measure - orig_measure)

        if __debug__:
            debug('PSA', '')

        return N.array(sens_map)
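

# Illustrative sketch of a custom noise functor: any callable that accepts a
# `size=n` keyword argument and returns a 1d array of n values can be passed
# as the `noise` argument described in `__init__` above. The name
# `uniform_noise` and the chosen range are arbitrary examples, not part of
# the original interface.
def uniform_noise(size):
    """Return `size` uniform random values drawn from [-0.5, 0.5)."""
    return N.random.uniform(low=-0.5, high=0.5, size=size)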
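

if __name__ == '__main__':
    # Minimal usage sketch. It relies only on the interface `_call` uses
    # above: a dataset object exposing `samples`, `nsamples`, `nfeatures` and
    # `setSamplesDType`, plus a scalar datameasure callable on such an object.
    # `_ToyDataset` and `toy_measure` are stand-ins made up for this demo and
    # are not part of the mvpa API.
    class _ToyDataset(object):
        def __init__(self, samples):
            self.samples = samples

        nsamples = property(lambda self: self.samples.shape[0])
        nfeatures = property(lambda self: self.samples.shape[1])

        def setSamplesDType(self, dtype):
            self.samples = self.samples.astype(dtype)

    def toy_measure(ds):
        # a scalar summary of the dataset: variance of the per-sample means;
        # adding noise to any single feature shifts this value
        return N.mean(ds.samples, axis=1).var()

    data = _ToyDataset(N.random.normal(size=(20, 5)))
    sens_analyzer = NoisePerturbationSensitivity(toy_measure)
    # calling the measure returns a 1d array with one sensitivity per feature
    print sens_analyzer(data)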