// NOTE: This file is available at http://algs4.cs.princeton.edu/14analysis/PolynomialRegression.java.html
package com.team254.lib.util;
/******************************************************************************
* Compilation: javac -cp .:jama.jar PolynomialRegression.java
* Execution: java -cp .:jama.jar PolynomialRegression
* Dependencies: jama.jar
*
* % java -cp .:jama.jar PolynomialRegression
* 0.01 n^3 + -1.64 n^2 + 168.92 n + -2113.73 (R^2 = 0.997)
*
******************************************************************************/
import Jama.Matrix;
import Jama.QRDecomposition;
/**
* The {@code PolynomialRegression} class performs a polynomial regression on a set of <em>N</em> data points (
* <em>x<sub>i</sub></em>, <em>y<sub>i</sub></em>). That is, it fits a polynomial <em>y</em> = β<sub>0</sub> +
* β<sub>1</sub> <em>x</em> + β<sub>2</sub> <em>x</em><sup>2</sup> + ... + β<sub><em>d</em></sub>
* <em>x</em><sup><em>d</em></sup> (where <em>y</em> is the response variable, <em>x</em> is the predictor variable, and
* the β<sub><em>i</em></sub> are the regression coefficients) that minimizes the sum of squared residuals of the
* multiple regression model. It also computes the associated coefficient of determination <em>R</em><sup>2</sup>.
* <p>
* This implementation performs a QR-decomposition of the underlying Vandermonde matrix, so it is not the fastest or
* most numerically stable way to perform the polynomial regression.
*
* @author Robert Sedgewick
* @author Kevin Wayne
*/
public class PolynomialRegression {
private int degree; // degree of the polynomial regression
private Matrix beta; // the polynomial regression coefficients
private double sse; // sum of squares due to error
private double sst; // total sum of squares
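/**
 * Performs a polynomial regression on the data points {@code (xy[i][0], xy[i][1])},
 * where column 0 holds the predictor values and column 1 the responses.
 *
 * @param xy the data points, one {predictor, response} pair per row
 * @param degree the degree of the polynomial to fit
 */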
public PolynomialRegression(double[][] xy, int degree) {
double[] x = new double[xy.length];
double[] y = new double[xy.length];
for (int i = 0; i < xy.length; ++i) {
x[i] = xy[i][0];
y[i] = xy[i][1];
}
solve(x, y, degree);
}
/**
* Performs a polynomial regression on the data points {@code (x[i], y[i])}.
*
* @param x the values of the predictor variable
* @param y the corresponding values of the response variable
* @param degree the degree of the polynomial to fit
*/
public PolynomialRegression(double[] x, double[] y, int degree) {
solve(x, y, degree);
}
private void solve(double[] x, double[] y, int degree) {
this.degree = degree;
int n = x.length;
QRDecomposition qr = null;
Matrix matrixX = null;
// in case Vandermonde matrix does not have full rank, reduce degree until it does
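// (e.g., when there are fewer than degree + 1 distinct x values)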
while (true) {
// build Vandermonde matrix
double[][] vandermonde = new double[n][this.degree + 1];
for (int i = 0; i < n; i++) {
for (int j = 0; j <= this.degree; j++) {
vandermonde[i][j] = Math.pow(x[i], j);
}
}
matrixX = new Matrix(vandermonde);
// find least squares solution
qr = new QRDecomposition(matrixX);
if (qr.isFullRank())
break;
// decrease degree and try again
this.degree--;
}
// create matrix from vector
Matrix matrixY = new Matrix(y, n);
// linear regression coefficients
beta = qr.solve(matrixY);
// mean of y[] values
double sum = 0.0;
for (int i = 0; i < n; i++)
sum += y[i];
double mean = sum / n;
// total variation to be accounted for
for (int i = 0; i < n; i++) {
double dev = y[i] - mean;
sst += dev * dev;
}
// variation not accounted for
Matrix residuals = matrixX.times(beta).minus(matrixY);
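// norm2() returns the matrix 2-norm (largest singular value); for a column
// vector that is its Euclidean norm, so its square is the sum of squared residuals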
sse = residuals.norm2() * residuals.norm2();
}
/**
* Returns the {@code j}th regression coefficient.
*
* @param j the index
* @return the {@code j}th regression coefficient
*/
public double beta(int j) {
// clamp near-zero coefficients to exactly zero (also makes -0.0 print as 0.0)
if (Math.abs(beta.get(j, 0)) < 1E-4)
return 0.0;
return beta.get(j, 0);
}
/**
* Returns the degree of the polynomial to fit.
*
* @return the degree of the polynomial to fit
*/
public int degree() {
return degree;
}
/**
* Returns the coefficient of determination <em>R</em><sup>2</sup>.
*
* @return the coefficient of determination <em>R</em><sup>2</sup>, which is a real number between 0 and 1
*/
public double R2() {
if (sst == 0.0)
return 1.0; // constant function
return 1.0 - sse / sst;
}
/**
* Returns the expected response {@code y} given the value of the predictor variable {@code x}.
*
* @param x the value of the predictor variable
* @return the expected response {@code y} given the value of the predictor variable {@code x}
*/
public double predict(double x) {
// Horner's method: y = beta(0) + x*(beta(1) + x*(beta(2) + ...)), one multiply and add per degree
double y = 0.0;
for (int j = degree; j >= 0; j--)
y = beta(j) + (x * y);
return y;
}
@Override
public String toString() {
StringBuilder s = new StringBuilder();
int j = degree;
// ignoring leading zero coefficients
while (j >= 0 && Math.abs(beta(j)) < 1E-5)
j--;
// create remaining terms
while (j >= 0) {
if (j == 0)
s.append(String.format("%.2f ", beta(j)));
else if (j == 1)
s.append(String.format("%.2f x + ", beta(j)));
else
s.append(String.format("%.2f x^%d + ", beta(j), j));
j--;
}
s.append(" (R^2 = " + String.format("%.3f", R2()) + ")");
return s.toString();
}
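// Sample test client: a minimal sketch, not part of the published Team254 file
// (the algs4 original includes a similar client that prints via StdOut).
// Fitting a cubic to the data below reproduces the fit quoted in the header
// comment, though this version prints the variable as "x" rather than "n".
public static void main(String[] args) {
double[] x = { 10, 20, 40, 80, 160, 200 };
double[] y = { 100, 350, 1500, 6700, 20160, 40000 };
PolynomialRegression regression = new PolynomialRegression(x, y, 3);
// prints the fitted polynomial and its R^2, e.g. "0.01 x^3 + -1.64 x^2 + ... (R^2 = 0.997)"
System.out.println(regression);
}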
}