diff --git a/requirements.txt b/requirements.txt
index 0f18f53..7fd3ec9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,3 +2,4 @@
 numpy
 scipy
 scikit-learn
+Cython
diff --git a/skbayes/linear_models/precision_inversion_tester.py b/skbayes/linear_models/precision_inversion_tester.py
index 5d931d2..7ff347d 100644
--- a/skbayes/linear_models/precision_inversion_tester.py
+++ b/skbayes/linear_models/precision_inversion_tester.py
@@ -37,22 +37,22 @@ def inversion_checker(X,alpha,beta):
 
 beta = 1000
 print('\n Example 1: beta = {0} \n'.format(beta))
 S,v1, v2 = inversion_checker(X, alpha, beta)
-print "Previous inversion method \n"
+print("Previous inversion method \n")
 print (v1)
-print '\n Current inversion method \n'
+print("\n Current inversion method \n")
 print (v2)
 
 # large beta case
 beta = 1e+16
 print('\n Example 2: beta = {0} \n'.format(beta))
 S,v1, v2 = inversion_checker(X, alpha, beta)
-print "Previous inversion method \n"
+print("Previous inversion method \n")
 print (v1)
-print '\n Current inversion method \n'
+print("\n Current inversion method \n")
 print (v2)
 
 X = np.random.random([5,5]) + 0.00000001*np.eye(5)
 #print np.linalg.inv(X)
 #print pinvh(X)
-
\ No newline at end of file
+
diff --git a/skbayes/linear_models/variational_regression.py b/skbayes/linear_models/variational_regression.py
index 58aafeb..f8227b3 100644
--- a/skbayes/linear_models/variational_regression.py
+++ b/skbayes/linear_models/variational_regression.py
@@ -131,7 +131,7 @@ def fit(self,X,y):
             # --------- Convergence Check ---------
             
             if self.verbose is True:
-                print "Iteration {0} is completed".format(i)
+                print("Iteration {0} is completed".format(i))
             
             # check convergence
             converged = self._check_convergence(Mw,Mw_old)
diff --git a/skbayes/rvm_ard_models/fast_rvm.py b/skbayes/rvm_ard_models/fast_rvm.py
index 99529b5..8b256de 100644
--- a/skbayes/rvm_ard_models/fast_rvm.py
+++ b/skbayes/rvm_ard_models/fast_rvm.py
@@ -596,7 +596,7 @@ def predict_proba(self,X):
         ----------
         X: array-like of size [n_samples_test,n_features]
            Matrix of explanatory variables (test set)
-           
+
         Returns
         -------
         probs: numpy array of size [n_samples_test]
@@ -618,7 +618,7 @@ def predict_proba(self,X):
         prob = pr / np.reshape(np.sum(pr, axis = 1), (pr.shape[0],1))
         return prob
-        
+
     
     def _predict_proba(self,X,y_hat,sigma):
         '''
         Calculates predictive distribution
@@ -627,26 +627,34 @@ def _predict_proba(self,X,y_hat,sigma):
         ks = 1. / ( 1. + np.pi * var/ 8)**0.5
         pr = expit(y_hat * ks)
         return pr
-        
-    def _sparsity_quality(self,X,Xa,y,B,A,Aa,active,Sn):
-        '''
-        Calculates sparsity & quality parameters for each feature
-        '''
-        XB  = X.T*B
-        YB  = y*B
-        XSX = np.dot(np.dot(Xa,Sn),Xa.T)
-        bxy = np.dot(XB,y)
-        Q   = bxy - np.dot( np.dot(XB,XSX), YB)
-        S   = np.sum( XB*X.T,1 ) - np.sum( np.dot( XB,XSX )*XB,1 )
-        qi  = np.copy(Q)
-        si  = np.copy(S)
-        Qa,Sa = Q[active], S[active]
-        qi[active] = Aa * Qa / (Aa - Sa )
-        si[active] = Aa * Sa / (Aa - Sa )
-        return [si,qi,S,Q]
-        
-    
+
+    def _sparsity_quality(self, X, Xa, y, B, A, Aa, active, Sn):
+        '''Calculates sparsity & quality parameters for each feature.'''
+        XB = X.T*B
+        XSX = np.dot(Xa, Sn)
+        XSX = np.dot(XSX, Xa.T)
+
+        S = np.dot(XB, XSX)
+        del XSX
+
+        Q = -np.dot(S, y*B)
+        Q += np.dot(XB, y)
+
+        S *= XB
+        S = -np.sum(S, 1)
+        S += np.sum(XB*X.T, 1)
+        del XB
+
+        qi = np.copy(Q)
+        si = np.copy(S)
+        Qa, Sa = Q[active], S[active]
+        qi[active] = Aa * Qa / (Aa - Sa)
+        si[active] = Aa * Sa / (Aa - Sa)
+
+        return [si, qi, S, Q]
+
+
     
     def _posterior_dist(self,X,y,A,intercept_prior):
         '''
         Uses Laplace approximation for calculating posterior distribution
diff --git a/skbayes/rvm_ard_models/vrvm.py b/skbayes/rvm_ard_models/vrvm.py
index eb9bc05..703e7f6 100644
--- a/skbayes/rvm_ard_models/vrvm.py
+++ b/skbayes/rvm_ard_models/vrvm.py
@@ -155,11 +155,11 @@ def fit(self,X,y):
             
             # print progress report if required
             if self.verbose is True:
-                print "Iteration {0} is completed, lower bound equals {1}".format(i,self.lower_bound[-1])
+                print("Iteration {0} is completed, lower bound equals {1}".format(i,self.lower_bound[-1]))
             
             if np.sum( abs(Mw - Mw0) > self.tol) == 0 or i == self.n_iter - 1:
                 if self.verbose is True:
-                    print "Mean Field Approximation completed"
+                    print("Mean Field Approximation completed")
                 break
             
             Mw0 = Mw
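The fast_rvm.py hunk above restructures _sparsity_quality so that S and Q are built up incrementally and the large n_samples-by-n_samples temporary (XSX, and later XB) is released with del as soon as it is no longer needed; the underlying algebra is unchanged. Below is a minimal sketch of an equivalence check between the removed and added formulations; the shapes and random test data are assumptions chosen for illustration only and are not part of the repository.

# Equivalence check (sketch): the refactored _sparsity_quality from the diff
# should return the same S and Q as the original formulation.
import numpy as np

rng = np.random.RandomState(0)
n, m, k = 20, 7, 3                                  # samples, features, active features (arbitrary)
X  = rng.randn(n, m)
y  = rng.randn(n)
B  = rng.rand(n) + 0.1                              # positive weights, stand-in for the noise terms
Xa = X[:, :k]                                       # active columns
Sn = np.linalg.inv(np.dot(Xa.T, Xa) + np.eye(k))    # any symmetric positive definite matrix will do

# original formulation (removed lines)
XB  = X.T*B
YB  = y*B
XSX = np.dot(np.dot(Xa, Sn), Xa.T)
Q_old = np.dot(XB, y) - np.dot(np.dot(XB, XSX), YB)
S_old = np.sum(XB*X.T, 1) - np.sum(np.dot(XB, XSX)*XB, 1)

# refactored formulation (added lines)
XB  = X.T*B
XSX = np.dot(Xa, Sn)
XSX = np.dot(XSX, Xa.T)
S_new = np.dot(XB, XSX)
Q_new = -np.dot(S_new, y*B)
Q_new += np.dot(XB, y)
S_new *= XB
S_new = -np.sum(S_new, 1)
S_new += np.sum(XB*X.T, 1)

assert np.allclose(Q_old, Q_new) and np.allclose(S_old, S_new)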