/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.math3.stat.regression;

import org.apache.commons.math3.exception.MathIllegalArgumentException;
import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.LUDecomposition;
import org.apache.commons.math3.linear.QRDecomposition;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;
import org.apache.commons.math3.stat.StatUtils;
import org.apache.commons.math3.stat.descriptive.moment.SecondMoment;

/**
 * <p>Implements ordinary least squares (OLS) to estimate the parameters of a
 * multiple linear regression model.</p>
 *
 * <p>The regression coefficients, <code>b</code>, satisfy the normal equations:
 * <pre><code> X<sup>T</sup> X b = X<sup>T</sup> y </code></pre></p>
 *
 * <p>To solve the normal equations, this implementation uses QR decomposition
 * of the <code>X</code> matrix. (See {@link QRDecomposition} for details on the
 * decomposition algorithm.) The <code>X</code> matrix, also known as the <i>design matrix</i>,
 * has rows corresponding to sample observations and columns corresponding to independent
 * variables.  When the model is estimated using an intercept term (i.e. when
 * {@link #isNoIntercept() isNoIntercept} is false as it is by default), the <code>X</code>
 * matrix includes an initial column identically equal to 1.  We solve the normal equations
 * as follows:
 * <pre><code> X<sup>T</sup>X b = X<sup>T</sup> y
 * (QR)<sup>T</sup> (QR) b = (QR)<sup>T</sup>y
 * R<sup>T</sup> (Q<sup>T</sup>Q) R b = R<sup>T</sup> Q<sup>T</sup> y
 * R<sup>T</sup> R b = R<sup>T</sup> Q<sup>T</sup> y
 * (R<sup>T</sup>)<sup>-1</sup> R<sup>T</sup> R b = (R<sup>T</sup>)<sup>-1</sup> R<sup>T</sup> Q<sup>T</sup> y
 * R b = Q<sup>T</sup> y </code></pre></p>
 *
 * <p>Given <code>Q</code> and <code>R</code>, the last equation is solved by back-substitution.</p>
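 *
 * <p>A short usage sketch (the sample values below are purely illustrative; the
 * estimation methods shown are inherited from {@link AbstractMultipleLinearRegression}):
 * <pre><code>
 * OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression();
 * double[] y = new double[] {11.0, 12.0, 13.0, 14.0, 15.0, 16.0};
 * double[][] x = new double[6][];
 * x[0] = new double[] {0, 0, 0, 0, 0};
 * x[1] = new double[] {2.0, 0, 0, 0, 0};
 * x[2] = new double[] {0, 3.0, 0, 0, 0};
 * x[3] = new double[] {0, 0, 4.0, 0, 0};
 * x[4] = new double[] {0, 0, 0, 5.0, 0};
 * x[5] = new double[] {0, 0, 0, 0, 6.0};
 * regression.newSampleData(y, x);
 *
 * double[] beta = regression.estimateRegressionParameters();
 * double[] residuals = regression.estimateResiduals();
 * </code></pre></p>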
 *
 * @version $Id: OLSMultipleLinearRegression.java 1591624 2014-05-01 11:54:06Z tn $
 * @since 2.0
 */
public class OLSMultipleLinearRegression extends AbstractMultipleLinearRegression {

    /** Cached QR decomposition of X matrix */
    private QRDecomposition qr = null;

    /** Singularity threshold for QR decomposition */
    private final double threshold;

    /**
     * Create an empty OLSMultipleLinearRegression instance.
     */
    public OLSMultipleLinearRegression() {
        this(0d);
    }

    /**
     * Create an empty OLSMultipleLinearRegression instance, using the given
     * singularity threshold for the QR decomposition.
     *
     * @param threshold the singularity threshold
     * @since 3.3
     */
    public OLSMultipleLinearRegression(final double threshold) {
        this.threshold = threshold;
    }

    /**
     * Loads model x and y sample data, overriding any previous sample.
     *
     * Computes and caches QR decomposition of the X matrix.
     * @param y the [n,1] array representing the y sample
     * @param x the [n,k] array representing the x sample
     * @throws MathIllegalArgumentException if the x and y array data are not
     *             compatible for the regression
     */
    public void newSampleData(double[] y, double[][] x) throws MathIllegalArgumentException {
        validateSampleData(x, y);
        newYSampleData(y);
        newXSampleData(x);
    }

    /**
     * {@inheritDoc}
     * <p>This implementation computes and caches the QR decomposition of the X matrix.</p>
     */
    @Override
    public void newSampleData(double[] data, int nobs, int nvars) {
        super.newSampleData(data, nobs, nvars);
        qr = new QRDecomposition(getX(), threshold);
    }

    /**
     * <p>Compute the "hat" matrix.
     * </p>
     * <p>The hat matrix is defined in terms of the design matrix X
     *  by X(X<sup>T</sup>X)<sup>-1</sup>X<sup>T</sup>
     * </p>
     * <p>The implementation here uses the QR decomposition to compute the
     * hat matrix as Q I<sub>p</sub>Q<sup>T</sup> where I<sub>p</sub> is the
     * p-dimensional identity matrix augmented by 0's.  This computational
     * formula is from "The Hat Matrix in Regression and ANOVA",
     * David C. Hoaglin and Roy E. Welsch,
     * <i>The American Statistician</i>, Vol. 32, No. 1 (Feb., 1978), pp. 17-22.
     * </p>
     * <p>Data for the model must have been successfully loaded using one of
     * the {@code newSampleData} methods before invoking this method; otherwise
     * a {@code NullPointerException} will be thrown.</p>
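     *
     * <p>One common use of the hat matrix is reading off the leverage values
     * h<sub>ii</sub> of the individual observations, which lie on its diagonal.
     * A brief sketch, assuming {@code regression} is an instance on which sample
     * data have already been loaded:
     * <pre><code>
     * RealMatrix hat = regression.calculateHat();
     * for (int i = 0; i &lt; hat.getRowDimension(); i++) {
     *     double leverage = hat.getEntry(i, i);   // h_ii for observation i
     * }
     * </code></pre></p>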
     *
     * @return the hat matrix
     * @throws NullPointerException unless method {@code newSampleData} has been
     * called beforehand.
     */
    public RealMatrix calculateHat() {
        // Create augmented identity matrix
        RealMatrix Q = qr.getQ();
        final int p = qr.getR().getColumnDimension();
        final int n = Q.getColumnDimension();
        // No try-catch or advertised NotStrictlyPositiveException - NPE above if n < 3
        Array2DRowRealMatrix augI = new Array2DRowRealMatrix(n, n);
        double[][] augIData = augI.getDataRef();
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++) {
                if (i == j && i < p) {
                    augIData[i][j] = 1d;
                } else {
                    augIData[i][j] = 0d;
                }
            }
        }

        // Compute and return Hat matrix
        // No DME advertised - args valid if we get here
        return Q.multiply(augI).multiply(Q.transpose());
    }

    /**
     * <p>Returns the sum of squared deviations of Y from its mean.</p>
     *
     * <p>If the model has no intercept term, <code>0</code> is used for the
     * mean of Y - i.e., what is returned is the sum of the squared Y values.</p>
     *
     * <p>The value returned by this method is the SSTO value used in
     * the {@link #calculateRSquared() R-squared} computation.</p>
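     *
     * <p>A small worked example (values chosen purely for illustration): for
     * y = {1, 2, 3} with an intercept in the model, the mean of y is 2, so
     * SSTO = (1 - 2)<sup>2</sup> + (2 - 2)<sup>2</sup> + (3 - 2)<sup>2</sup> = 2;
     * with no intercept, SSTO = 1 + 4 + 9 = 14.</p>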
     *
     * @return SSTO - the total sum of squares
     * @throws MathIllegalArgumentException if the sample has not been set or does
     * not contain at least 3 observations
     * @see #isNoIntercept()
     * @since 2.2
     */
    public double calculateTotalSumOfSquares() throws MathIllegalArgumentException {
        if (isNoIntercept()) {
            return StatUtils.sumSq(getY().toArray());
        } else {
            return new SecondMoment().evaluate(getY().toArray());
        }
    }

    /**
     * Returns the sum of squared residuals.
     *
     * @return residual sum of squares
     * @since 2.2
     */
    public double calculateResidualSumOfSquares() {
        final RealVector residuals = calculateResiduals();
        // No advertised DME, args are valid
        return residuals.dotProduct(residuals);
    }

    /**
     * Returns the R-Squared statistic, defined by the formula <pre>
     * R<sup>2</sup> = 1 - SSR / SSTO
     * </pre>
     * where SSR is the {@link #calculateResidualSumOfSquares() sum of squared residuals}
     * and SSTO is the {@link #calculateTotalSumOfSquares() total sum of squares}.
     *
     * @return R-square statistic
     * @throws MathIllegalArgumentException if the sample has not been set or does
     * not contain at least 3 observations
     * @since 2.2
     */
    public double calculateRSquared() throws MathIllegalArgumentException {
        return 1 - calculateResidualSumOfSquares() / calculateTotalSumOfSquares();
    }

    /**
     * <p>Returns the adjusted R-squared statistic, defined by the formula <pre>
     * R<sup>2</sup><sub>adj</sub> = 1 - [SSR (n - 1)] / [SSTO (n - p)]
     * </pre>
     * where SSR is the {@link #calculateResidualSumOfSquares() sum of squared residuals},
     * SSTO is the {@link #calculateTotalSumOfSquares() total sum of squares}, n is the number
     * of observations and p is the number of parameters estimated (including the intercept).</p>
     *
     * <p>If the regression is estimated without an intercept term, what is returned is <pre>
     * <code> 1 - (1 - {@link #calculateRSquared()}) * (n / (n - p)) </code>
     * </pre></p>
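     *
     * <p>For illustration (with arbitrarily chosen numbers): n = 20 observations,
     * p = 3 estimated parameters, SSR = 12 and SSTO = 100 give, in the intercept case,
     * R<sup>2</sup><sub>adj</sub> = 1 - (12 * 19) / (100 * 17), which is approximately 0.866.</p>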
     *
     * @return adjusted R-Squared statistic
     * @throws MathIllegalArgumentException if the sample has not been set or does
     * not contain at least 3 observations
     * @see #isNoIntercept()
     * @since 2.2
     */
    public double calculateAdjustedRSquared() throws MathIllegalArgumentException {
        final double n = getX().getRowDimension();
        if (isNoIntercept()) {
            return 1 - (1 - calculateRSquared()) * (n / (n - getX().getColumnDimension()));
        } else {
            return 1 - (calculateResidualSumOfSquares() * (n - 1)) /
                (calculateTotalSumOfSquares() * (n - getX().getColumnDimension()));
        }
    }

    /**
     * {@inheritDoc}
     * <p>This implementation computes and caches the QR decomposition of the X matrix
     * once it is successfully loaded.</p>
     */
    @Override
    protected void newXSampleData(double[][] x) {
        super.newXSampleData(x);
        qr = new QRDecomposition(getX(), threshold);
    }

    /**
     * Calculates the regression coefficients using OLS.
     *
     * <p>Data for the model must have been successfully loaded using one of
     * the {@code newSampleData} methods before invoking this method; otherwise
     * a {@code NullPointerException} will be thrown.</p>
     *
     * @return beta
     */
    @Override
    protected RealVector calculateBeta() {
        return qr.getSolver().solve(getY());
    }

    /**
     * <p>Calculates the variance-covariance matrix of the regression parameters.
     * </p>
     * <p>Var(b) = (X<sup>T</sup>X)<sup>-1</sup>
     * </p>
     * <p>Uses QR decomposition to reduce (X<sup>T</sup>X)<sup>-1</sup>
     * to (R<sup>T</sup>R)<sup>-1</sup>, with only the top p rows of
     * R included, where p = the length of the beta vector.</p>
     *
     * <p>Data for the model must have been successfully loaded using one of
     * the {@code newSampleData} methods before invoking this method; otherwise
     * a {@code NullPointerException} will be thrown.</p>
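     *
     * <p>The matrix returned here is not scaled by the error variance. A sketch of how
     * it might be combined with the error-variance estimate inherited from the superclass
     * (via {@code calculateErrorVariance()}) to obtain a covariance estimate for the
     * parameter estimates:
     * <pre><code>
     * RealMatrix covariance = calculateBetaVariance().scalarMultiply(calculateErrorVariance());
     * </code></pre></p>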
     *
     * @return The beta variance-covariance matrix
     */
    @Override
    protected RealMatrix calculateBetaVariance() {
        int p = getX().getColumnDimension();
        RealMatrix Raug = qr.getR().getSubMatrix(0, p - 1, 0, p - 1);
        RealMatrix Rinv = new LUDecomposition(Raug).getSolver().getInverse();
        return Rinv.multiply(Rinv.transpose());
    }

}