This repository has been archived by the owner on Apr 2, 2020. It is now read-only.

Commit

Renamed resid methods to residuals, w to whitened_ (#425)
* Renamed resid methods to residuals

* Fixed typo and call

* Added residuals and resid to tests

* Renamed norm_resid to normalized_residuals

* Undid inadvertent option string change

* Renamed *_resid to *_residuals and added deprecation warning

* Renamed *_resid to *_residuals and .resid to .residuals

* Added tests for renamed attributes and associated warnings

 - Reduced verbosity in warning message.
 - Renamed wresiduals to whitened_residuals.

* Renamed wX and wY to whitened_X, whitened_Y

* Replaced wdesign with whitened_design, improved tests
kchawla-pi authored Jan 24, 2020
1 parent 5be4c0d commit 4c3d0e0
Showing 6 changed files with 174 additions and 50 deletions.
2 changes: 1 addition & 1 deletion nistats/contrasts.py
@@ -90,7 +90,7 @@ def compute_contrast(labels, regression_result, con_val, contrast_type=None):
effect_[:, label_mask] = wcbeta
var_[label_mask] = rss

dof_ = regression_result[label_].df_resid
dof_ = regression_result[label_].df_residuals
return Contrast(effect=effect_, variance=var_, dim=dim, dof=dof_,
contrast_type=contrast_type)

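For downstream code, the old spelling keeps working during the deprecation period but emits a FutureWarning. A self-contained sketch of the consumer-side behaviour (the `_FakeResult` class and its warning text are stand-ins for illustration, not nistats objects):

```python
import warnings

class _FakeResult:
    """Stand-in mimicking the deprecation alias; not a nistats class."""
    df_residuals = 12

    @property
    def df_resid(self):
        warnings.warn("'df_resid' has been deprecated and will be removed. "
                      "Please use 'df_residuals'.", FutureWarning)
        return self.df_residuals

res = _FakeResult()
dof_new = res.df_residuals              # new name, no warning
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    dof_old = res.df_resid              # old name still works, but warns
assert dof_new == dof_old
assert issubclass(caught[0].category, FutureWarning)
```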
4 changes: 2 additions & 2 deletions nistats/first_level_model.py
@@ -133,8 +133,8 @@ def run_glm(Y, X, noise_model='ar1', bins=100, n_jobs=1, verbose=0):

if noise_model == 'ar1':
# compute and discretize the AR1 coefs
ar1 = ((ols_result.resid[1:] * ols_result.resid[:-1]).sum(axis=0) /
(ols_result.resid ** 2).sum(axis=0))
ar1 = ((ols_result.residuals[1:] * ols_result.residuals[:-1]).sum(axis=0) /
(ols_result.residuals ** 2).sum(axis=0))
del ols_result
ar1 = (ar1 * bins).astype(np.int) * 1. / bins
# Fit the AR model acccording to current AR(1) estimates
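The AR(1) coefficient above is just the lag-1 autocorrelation of the OLS residuals, discretized into `bins` levels so that voxels with the same value can share a single AR fit. A minimal numpy sketch of that computation (random residuals used as stand-in data):

```python
import numpy as np

rng = np.random.default_rng(0)
residuals = rng.standard_normal((200, 5))   # time points x voxels, stand-in for OLS residuals
bins = 100

# lag-1 autocorrelation per voxel, mirroring run_glm above
ar1 = ((residuals[1:] * residuals[:-1]).sum(axis=0) /
       (residuals ** 2).sum(axis=0))

# discretize so that voxels sharing a value can share one ARModel fit
ar1 = (ar1 * bins).astype(int) / bins
```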
25 changes: 17 additions & 8 deletions nistats/model.py
@@ -3,6 +3,7 @@
Author: Bertrand Thirion, 2011--2015
"""
import warnings

import numpy as np

@@ -72,8 +73,16 @@ def __init__(self, theta, Y, model, cov=None, dispersion=1., nuisance=None,
self.df_total = Y.shape[0]
self.df_model = model.df_model
# put this as a parameter of LikelihoodModel
self.df_resid = self.df_total - self.df_model

self.df_residuals = self.df_total - self.df_model

@setattr_on_read
def df_resid(self):
warnings.warn("'df_resid' from LikelihoodModelResults "
"has been deprecated and will be removed. "
"Please use 'df_residuals'.",
FutureWarning)
return self.df_residuals

@setattr_on_read
def logL(self):
"""
@@ -200,7 +209,7 @@ def Tcontrast(self, matrix, store=('t', 'effect', 'sd'), dispersion=None):
if 't' in store:
st_t = np.squeeze(effect * positive_reciprocal(sd))
return TContrastResults(effect=st_effect, t=st_t, sd=st_sd,
df_den=self.df_resid)
df_den=self.df_residuals)

def Fcontrast(self, matrix, dispersion=None, invcov=None):
""" Compute an Fcontrast for a contrast matrix `matrix`.
@@ -262,7 +271,7 @@ def Fcontrast(self, matrix, dispersion=None, invcov=None):
return FContrastResults(
effect=ctheta, covariance=self.vcov(
matrix=matrix, dispersion=dispersion[np.newaxis]),
F=F, df_den=self.df_resid, df_num=invcov.shape[0])
F=F, df_den=self.df_residuals, df_num=invcov.shape[0])

def conf_int(self, alpha=.05, cols=None, dispersion=None):
''' The confidence interval of the specified theta estimates.
@@ -306,18 +315,18 @@ def conf_int(self, alpha=.05, cols=None, dispersion=None):
'''
if cols is None:
lower = self.theta - inv_t_cdf(1 - alpha / 2, self.df_resid) *\
lower = self.theta - inv_t_cdf(1 - alpha / 2, self.df_residuals) * \
np.sqrt(np.diag(self.vcov(dispersion=dispersion)))
upper = self.theta + inv_t_cdf(1 - alpha / 2, self.df_resid) *\
upper = self.theta + inv_t_cdf(1 - alpha / 2, self.df_residuals) * \
np.sqrt(np.diag(self.vcov(dispersion=dispersion)))
else:
lower, upper = [], []
for i in cols:
lower.append(
self.theta[i] - inv_t_cdf(1 - alpha / 2, self.df_resid) *
self.theta[i] - inv_t_cdf(1 - alpha / 2, self.df_residuals) *
np.sqrt(self.vcov(column=i, dispersion=dispersion)))
upper.append(
self.theta[i] + inv_t_cdf(1 - alpha / 2, self.df_resid) *
self.theta[i] + inv_t_cdf(1 - alpha / 2, self.df_residuals) *
np.sqrt(self.vcov(column=i, dispersion=dispersion)))
return np.asarray(list(zip(lower, upper)))
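conf_int builds a standard t-based interval, theta plus or minus t(1 - alpha/2, df_residuals) times the standard error. A minimal sketch with scipy (inv_t_cdf in the source presumably computes the inverse Student-t CDF, which is what scipy.stats.t.ppf does here; the numbers below are placeholders):

```python
import numpy as np
from scipy.stats import t

theta = np.array([1.2, -0.4])        # parameter estimates (placeholders)
var_theta = np.array([0.05, 0.02])   # diagonal of the parameter covariance
df_residuals = 40
alpha = 0.05

crit = t.ppf(1 - alpha / 2, df_residuals)
lower = theta - crit * np.sqrt(var_theta)
upper = theta + crit * np.sqrt(var_theta)
conf_int = np.asarray(list(zip(lower, upper)))
```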

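The alias-plus-FutureWarning pattern above recurs throughout this commit: the canonical value lives under the new name, and the old name is kept as a read-only attribute that warns. A minimal sketch of the pattern; `setattr_on_read` comes from nibabel.onetime and caches the computed value on first access, so the warning typically fires only once per instance (this is an illustration, not the nistats code itself):

```python
import warnings
from nibabel.onetime import setattr_on_read

class Results:
    def __init__(self, df_total, df_model):
        self.df_residuals = df_total - df_model   # canonical, new name

    @setattr_on_read
    def df_resid(self):
        # old name: still readable, but deprecated
        warnings.warn("'df_resid' has been deprecated and will be removed. "
                      "Please use 'df_residuals'.", FutureWarning)
        return self.df_residuals
```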
144 changes: 116 additions & 28 deletions nistats/regression.py
@@ -18,13 +18,16 @@

__docformat__ = 'restructuredtext en'

import warnings

import numpy as np

from nibabel.onetime import setattr_on_read
from numpy.linalg import matrix_rank
import scipy.linalg as spl

from .model import LikelihoodModelResults
from nistats._utils.helpers import replace_parameters
from .utils import positive_reciprocal


@@ -47,8 +50,8 @@ class OLSModel(object):
design : ndarray
This is the design, or X, matrix.
wdesign : ndarray
This is the whitened design matrix. `design` == `wdesign` by default
whitened_design : ndarray
This is the whitened design matrix. `design` == `whitened_design` by default
for the OLSModel, though models that inherit from the OLSModel will
whiten the design.
@@ -58,7 +61,7 @@ class OLSModel(object):
normalized_cov_beta : ndarray
``np.dot(calc_beta, calc_beta.T)``
df_resid : scalar
df_residuals : scalar
Degrees of freedom of the residuals. Number of observations less the
rank of the design.
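Concretely, as computed in `initialize` below, the residual degrees of freedom are the number of rows of the design minus its numerical rank. A small self-contained sketch:

```python
import numpy as np
from numpy.linalg import matrix_rank

design = np.random.standard_normal((128, 6))      # observations x regressors
eps = np.abs(design).sum() * np.finfo(float).eps  # rank tolerance, as in initialize
df_total = design.shape[0]
df_model = matrix_rank(design, eps)
df_residuals = df_total - df_model                # 122 for this full-rank design
```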
@@ -82,15 +85,31 @@ def initialize(self, design):
# PLEASE don't assume we have a constant...
# TODO: handle case for noconstant regression
self.design = design
self.wdesign = self.whiten(self.design)
self.calc_beta = spl.pinv(self.wdesign)
self.whitened_design = self.whiten(self.design)
self.calc_beta = spl.pinv(self.whitened_design)
self.normalized_cov_beta = np.dot(self.calc_beta,
np.transpose(self.calc_beta))
self.df_total = self.wdesign.shape[0]
self.df_total = self.whitened_design.shape[0]

eps = np.abs(self.design).sum() * np.finfo(np.float).eps
self.df_model = matrix_rank(self.design, eps)
self.df_resid = self.df_total - self.df_model
self.df_residuals = self.df_total - self.df_model

@setattr_on_read
def df_resid(self):
warnings.warn("'df_resid' from OLSModel"
"has been deprecated and will be removed. "
"Please use 'df_residuals'.",
FutureWarning)
return self.df_residuals

@setattr_on_read
def wdesign(self):
warnings.warn("'wdesign' from OLSModel"
"has been deprecated and will be removed. "
"Please use 'whitened_design'.",
FutureWarning)
return self.whitened_design

def logL(self, beta, Y, nuisance=None):
r''' Returns the value of the loglikelihood function at beta.
@@ -145,7 +164,7 @@ def logL(self, beta, Y, nuisance=None):
.. [1] W. Green. "Econometric Analysis," 5th ed., Pearson, 2003.
'''
# This is overwriting an abstract method of LikelihoodModel
X = self.wdesign
X = self.whitened_design
wY = self.whiten(Y)
r = wY - np.dot(X, beta)
n = self.df_total
@@ -167,7 +186,7 @@ def whiten(self, X):
Returns
-------
wX : array
whitened_X : array
This matrix is the matrix whose pseudoinverse is ultimately
used in estimating the coefficients. For OLSModel, it is
does nothing. For WLSmodel, ARmodel, it pre-applies
@@ -195,9 +214,9 @@ def fit(self, Y):
# squares models assume covariance is diagonal, i.e. heteroscedastic).
wY = self.whiten(Y)
beta = np.dot(self.calc_beta, wY)
wresid = wY - np.dot(self.wdesign, beta)
dispersion = np.sum(wresid ** 2, 0) / (self.wdesign.shape[0] -
self.wdesign.shape[1])
wresid = wY - np.dot(self.whitened_design, beta)
dispersion = np.sum(wresid ** 2, 0) / (self.whitened_design.shape[0] -
self.whitened_design.shape[1])
lfit = RegressionResults(beta, Y, self,
wY, wresid, dispersion=dispersion,
cov=self.normalized_cov_beta)
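For the plain OLS model, whitening is the identity, so the fit above reduces to a pseudo-inverse solve plus a residual-variance (dispersion) estimate. A minimal, self-contained sketch of those steps on synthetic data:

```python
import numpy as np
import scipy.linalg as spl

rng = np.random.default_rng(0)
X = rng.standard_normal((100, 3))                          # design matrix
Y = X @ np.array([1.0, -2.0, 0.5]) + 0.1 * rng.standard_normal(100)

calc_beta = spl.pinv(X)                                    # pinv of the (whitened) design
beta = calc_beta @ Y                                       # parameter estimates
whitened_residuals = Y - X @ beta
dispersion = (whitened_residuals ** 2).sum(0) / (X.shape[0] - X.shape[1])
```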
@@ -248,14 +267,14 @@ def whiten(self, X):
Returns
-------
wX : ndarray
whitened_X : ndarray
X whitened with order self.order AR
"""
X = np.asarray(X, np.float64)
_X = X.copy()
whitened_X = X.copy()
for i in range(self.order):
_X[(i + 1):] = _X[(i + 1):] - self.rho[i] * X[0: - (i + 1)]
return _X
whitened_X[(i + 1):] = whitened_X[(i + 1):] - self.rho[i] * X[0: - (i + 1)]
return whitened_X
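The whitening loop above applies the estimated AR coefficients row-wise: for an AR(1) model, each row t after the first becomes x[t] - rho * x[t-1]. A small numpy sketch with a made-up coefficient:

```python
import numpy as np

rho = [0.3]                                   # AR coefficients, one per lag (made up)
X = np.arange(12, dtype=float).reshape(6, 2)  # toy array to whiten

whitened_X = X.copy()
for i, r in enumerate(rho):
    whitened_X[(i + 1):] = whitened_X[(i + 1):] - r * X[:-(i + 1)]
```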


class RegressionResults(LikelihoodModelResults):
@@ -264,8 +283,8 @@ class RegressionResults(LikelihoodModelResults):
It handles the output of contrasts, estimates of covariance, etc.
"""

def __init__(self, theta, Y, model, wY, wresid, cov=None, dispersion=1.,
@replace_parameters({'wresid': 'whitened_residuals', 'wY': 'whitened_Y'}, lib_name='Nistats')
def __init__(self, theta, Y, model, whitened_Y, whitened_residuals, cov=None, dispersion=1.,
nuisance=None):
"""See LikelihoodModelResults constructor.
@@ -274,19 +293,64 @@ def __init__(self, theta, Y, model, wY, wresid, cov=None, dispersion=1.,
"""
LikelihoodModelResults.__init__(self, theta, Y, model, cov,
dispersion, nuisance)
self.wY = wY
self.wresid = wresid
self.wdesign = model.wdesign
self.whitened_Y = whitened_Y
self.whitened_residuals = whitened_residuals
self.whitened_design = model.whitened_design

@setattr_on_read
def wdesign(self):
warnings.warn("'wdesign' from RegressionResults"
"has been deprecated and will be removed. "
"Please use 'whitened_design'.",
FutureWarning)
return self.whitened_design


@setattr_on_read
def wY(self):
warnings.warn("'wY' from RegressionResults "
"has been deprecated and will be removed. "
"Please use 'whitened_Y' instead.",
FutureWarning,
)
return self.whitened_Y

@setattr_on_read
def wresid(self):
warnings.warn("'wresid' from RegressionResults "
"has been deprecated and will be removed. "
"Please use 'whitened_residuals' instead.",
FutureWarning,
)
return self.whitened_residuals

@setattr_on_read
def resid(self):
warnings.warn("'resid' from RegressionResults "
"has been deprecated and will be removed. "
"Please use 'residuals' instead.",
FutureWarning,
)
return self.residuals

@setattr_on_read
def residuals(self):
"""
Residuals from the fit.
"""
return self.Y - self.predicted

@setattr_on_read
def norm_resid(self):
warnings.warn("'norm_resid' from RegressionResults "
"has been deprecated and will be removed. "
"Please use 'normalized_residuals' instead.",
FutureWarning,
)
return self.normalized_residuals

@setattr_on_read
def normalized_residuals(self):
"""
Residuals, normalized to have unit length.
@@ -303,34 +367,34 @@ def norm_resid(self):
See: Montgomery and Peck 3.2.1 p. 68
Davidson and MacKinnon 15.2 p 662
"""
return self.resid * positive_reciprocal(np.sqrt(self.dispersion))
return self.residuals * positive_reciprocal(np.sqrt(self.dispersion))

@setattr_on_read
def predicted(self):
""" Return linear predictor values from a design matrix.
"""
beta = self.theta
# the LikelihoodModelResults has parameters named 'theta'
X = self.wdesign
X = self.whitened_design
return np.dot(X, beta)

@setattr_on_read
def SSE(self):
"""Error sum of squares. If not from an OLS model this is "pseudo"-SSE.
"""
return (self.wresid ** 2).sum(0)
return (self.whitened_residuals ** 2).sum(0)

@setattr_on_read
def r_square(self):
"""Proportion of explained variance.
If not from an OLS model this is "pseudo"-R2.
"""
return np.var(self.predicted, 0) / np.var(self.wY, 0)
return np.var(self.predicted, 0) / np.var(self.whitened_Y, 0)

@setattr_on_read
def MSE(self):
""" Mean square (error) """
return self.SSE / self.df_resid
return self.SSE / self.df_residuals
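Taken together, the renamed results API reads like the sketch below (this assumes the nistats version from this repository is importable; the data is synthetic):

```python
import numpy as np
from nistats.regression import OLSModel

rng = np.random.default_rng(1)
X = np.column_stack([np.ones(50), rng.standard_normal(50)])
Y = 2.0 + 3.0 * X[:, 1] + rng.standard_normal(50)

results = OLSModel(X).fit(Y)
print(results.residuals[:3])              # was .resid
print(results.normalized_residuals[:3])   # was .norm_resid
print(results.whitened_design.shape)      # was .wdesign
print(results.r_square, results.MSE)      # unchanged names
print(results.df_residuals)               # was .df_resid
```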


class SimpleRegressionResults(LikelihoodModelResults):
@@ -353,7 +417,7 @@ def __init__(self, results):
self.df_total = results.Y.shape[0]
self.df_model = results.model.df_model
# put this as a parameter of LikelihoodModel
self.df_resid = self.df_total - self.df_model
self.df_residuals = self.df_total - self.df_model

def logL(self, Y):
"""
@@ -362,12 +426,36 @@ def logL(self, Y):
raise ValueError('can not use this method for simple results')

def resid(self, Y):
warnings.warn("'resid()' from SimpleRegressionResults"
" has been deprecated and will be removed. "
"Please use 'residuals()'.",
FutureWarning,
)
return self.residuals(Y)

def residuals(self, Y):
"""
Residuals from the fit.
"""
return Y - self.predicted

@setattr_on_read
def df_resid(self):
warnings.warn("The attribute 'df_resid' from OLSModel"
"has been deprecated and will be removed. "
"Please use 'df_residuals'.",
FutureWarning)
return self.df_residuals

def norm_resid(self, Y):
warnings.warn("'SimpleRegressionResults.norm_resid' method "
"has been deprecated and will be removed. "
"Please use 'normalized_residuals'.",
FutureWarning,
)
return self.normalized_residuals(Y)

def normalized_residuals(self, Y):
"""
Residuals, normalized to have unit length.
@@ -384,7 +472,7 @@ def norm_resid(self, Y):
See: Montgomery and Peck 3.2.1 p. 68
Davidson and MacKinnon 15.2 p 662
"""
return self.resid(Y) * positive_reciprocal(np.sqrt(self.dispersion))
return self.residuals(Y) * positive_reciprocal(np.sqrt(self.dispersion))

def predicted(self):
""" Return linear predictor values from a design matrix.
6 changes: 3 additions & 3 deletions nistats/tests/test_first_level_model.py
@@ -627,9 +627,9 @@ def test_first_level_model_residuals():
noise_model='ols')
model.fit(fmri_data, design_matrices=design_matrices)

resid = model.residuals[0]
mean_resids = model.masker_.transform(resid).mean(0)
assert_array_almost_equal(mean_resids, 0)
residuals = model.residuals[0]
mean_residuals = model.masker_.transform(residuals).mean(0)
assert_array_almost_equal(mean_residuals, 0)


def test_first_level_model_predictions_r_square():
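The tests added in this commit (per the message above) cover both the new attribute names and the FutureWarnings on the old ones. A hypothetical sketch of what such a check can look like, not the actual test code from nistats/tests:

```python
import numpy as np
import pytest

from nistats.regression import OLSModel

def test_deprecated_aliases_warn():
    X = np.column_stack([np.ones(20), np.arange(20, dtype=float)])
    results = OLSModel(X).fit(np.arange(20, dtype=float))

    with pytest.warns(FutureWarning):
        results.df_resid        # old spelling still resolves, but warns
    with pytest.warns(FutureWarning):
        results.wdesign
    assert results.df_resid == results.df_residuals
    assert np.allclose(results.wdesign, results.whitened_design)
```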

