Package mvpa :: Package clfs :: Module ridge
[hide private]
[frames] | [no frames]

Source Code for Module mvpa.clfs.ridge

 1  #emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*- 
 2  #ex: set sts=4 ts=4 sw=4 et: 
 3  ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 
 4  # 
 5  #   See COPYING file distributed along with the PyMVPA package for the 
 6  #   copyright and license terms. 
 7  # 
 8  ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 
 9  """Ridge regression classifier.""" 
10   
11  __docformat__ = 'restructuredtext' 
12   
13   
14  import numpy as N 
15  from scipy.linalg import lstsq 
16   
17  from mvpa.clfs.base import Classifier 
18   
19   
class RidgeReg(Classifier):
    """Ridge regression `Classifier`.

    This ridge regression adds an intercept term so your labels do not
    have to be zero-centered.
    """

    def __init__(self, lm=None, **kwargs):
        """
        Initialize a ridge regression analysis.

        :Parameters:
          lm : float
            the penalty term lambda.
            (Defaults to .05*nFeatures)

        """
        # let the base classifier configure itself first
        Classifier.__init__(self, **kwargs)

        # weight vector, filled in by _train() (also keeps pylint happy)
        self.w = None

        # a confusion matrix is meaningless for a regression output,
        # so do not collect one while training
        self.states.enable('training_confusion', False)

        # penalty strength; None means "derive from the feature count"
        self.__lm = lm


    def __repr__(self):
        """String summary of the object
        """
        if self.__lm is not None:
            return """Ridge(lm=%f, enable_states=%s)""" % \
                   (self.__lm, str(self.states.enabled))
        return """Ridge(lm=.05*nfeatures, enable_states=%s)""" % \
               (str(self.states.enabled))


    def _train(self, data):
        """Train the classifier using `data` (`Dataset`).
        """
        nfeat = data.nfeatures

        # build the diagonal penalty matrix; when no lambda was given,
        # scale the default with the number of features
        if self.__lm is None:
            penalty = .05 * nfeat * N.eye(nfeat)
        else:
            penalty = self.__lm * N.eye(nfeat)

        # augment the design matrix with a column of ones (intercept)
        # and stack the penalty rows underneath; the intercept column of
        # the penalty rows is zero, so the bias term is not shrunk
        design = N.concatenate((data.samples, N.ones((data.nsamples, 1))), 1)
        penalty_rows = N.concatenate((penalty, N.zeros((nfeat, 1))), 1)
        a = N.concatenate((design, penalty_rows))
        b = N.concatenate((data.labels, N.zeros(nfeat)))

        # the least-squares solution of the augmented system gives the
        # ridge weights (intercept is the last element of w)
        # NOTE(review): lstsq minimizes the squared residual of these
        # rows, i.e. the effective penalty is lm**2 rather than lm --
        # confirm this matches the intended parameterization
        self.w = lstsq(a, b)[0]


    def _predict(self, data):
        """
        Predict the output for the provided data.
        """
        # append the intercept column and apply the learned weights
        augmented = N.concatenate((data, N.ones((len(data), 1))), 1)
        return N.dot(augmented, self.w)
92