Skip to content

Commit 7e66494

Browse files
committed
least squares poly regression
Regression class: 1. least squares regression of nth order 2. output polynomial coefficients 3. plot result
1 parent af1925b commit 7e66494

File tree

3 files changed

+120
-0
lines changed

3 files changed

+120
-0
lines changed

matrix/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
from . import matrix_operation

regression/__init__.py

Whitespace-only changes.

regression/regr.py

Lines changed: 119 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,119 @@
1+
"""
2+
References
3+
------------
4+
https://en.wikipedia.org/wiki/Polynomial_regression
5+
"""
6+
7+
import numpy as np
8+
from scipy import linalg
9+
import matplotlib.pyplot as plt
10+
from matplotlib import style
11+
style.use('seaborn')
12+
13+
# from matrix import matrix_operation as mat_op
14+
15+
16+
class Regression:
    """
    Least-squares polynomial regression.

    Fits y = ß0 + ß1*x + ß2*x**2 + ... + ßm*x**m via the normal equations
    and can plot the fitted curve against the raw data.

    Parameters
    ----------
    x : array_like
        Independent-variable samples.
    y : array_like
        Dependent-variable samples, paired element-wise with ``x``.
    """

    def __init__(self, x, y):
        # Sort by x (needed for a sensible line plot) while keeping the
        # (x, y) pairs aligned.  The original code sorted x alone, which
        # scrambled the pairing and corrupted every fit on unsorted input.
        x = np.asarray(x)
        y = np.asarray(y)
        sort_idx = np.argsort(x)
        self.x = x[sort_idx]
        self.y = y[sort_idx]
        self.beta = None      # fitted coefficients, lowest order first
        self.coeffs = None    # design (Vandermonde) matrix from the last fit
        self.poly_eqn = None  # reserved for future use

    def the_algorithm_ls_reg(self, order=1):
        """
        Placeholder for a from-scratch least-squares implementation.

        :param order: nth order polynomial
        :type order: int
        :return: ndarray

        _____________________________________________
        Still under development
        Plan to incorporate this with the matrix operations already
        committed to the repo
        _____________________________________________
        """
        pass

    def ls_reg(self, order=1):
        """
        Fit an nth-order polynomial by least squares.

        :param order: nth order polynomial
        :type order: int
        :return: self : object;
            ``self.beta`` holds the coefficients in increasing order of x.
            Ex. Cx**0 + Bx**1 + Ax**2

        Regression model:
            y = ß0 + ß1(xi) + ß2(xi)**2 + ... + ßm(xi)**m --> (i == 1, 2, ..., n)

        Calculated according to the normal equations, written in vector form:
            ß = (X.T • X)**-1 • X.T • y

        Nomenclature:
            ß   -- coefficient (parameter) vector
            X   -- design matrix
            X.T -- transposed design matrix
            y   -- response vector
        """
        self.coeffs = self._create_coeffs(order)
        # pinv (rather than inv) tolerates a singular/ill-conditioned
        # normal matrix, e.g. duplicate x values or order >= n_samples.
        gram_inv = linalg.pinv(self.coeffs.T @ self.coeffs)
        self.beta = gram_inv @ self.coeffs.T @ self.y
        return self

    def _create_coeffs(self, order):
        """Build the (n_samples, order + 1) Vandermonde design matrix."""
        coeffs = np.zeros((self.x.shape[0], order + 1))
        for i in range(order + 1):
            coeffs[:, i] = self.x ** i
        return coeffs

    def plot_prediction(self):
        """
        Plot the raw data and the fitted polynomial curve.

        :return: None (displays a matplotlib figure)
        """
        # Following taken from https://github.com/pickus91/Polynomial-Regression-From-Scratch.git
        pred_line = self.beta[0]
        label_holder = []
        for i in range(self.beta.shape[0] - 1, 0, -1):
            pred_line += self.beta[i] * self.x ** i
            label_holder.append('%.*f' % (2, self.beta[i]) + r'$x^' + str(i) + '$')
        label_holder.append('%.*f' % (2, self.beta[0]))

        plt.figure()
        plt.scatter(self.x, self.y)
        plt.plot(self.x, pred_line, label=''.join(label_holder))
        plt.title(f'Poly Fit: Order {self.beta.shape[0]-1}')
        plt.xlabel('x')
        plt.ylabel('y')
        plt.legend(loc='best', frameon=True, fancybox=True, facecolor='white', shadow=True)
        plt.show()

    def __str__(self):
        build_string = f'{self.__class__.__name__} analysis with the following inputs: \n\n\tx : \n{self.x} \n\tx-type : {type(self.x)}'
        build_string += f'\n\n\ty : \n{self.y} \n\ty-type : {type(self.y)}'
        build_string += f'\n\n\tOutput : \n{self.beta}'
        return build_string

    def __repr__(self):
        return f'{self.__class__.__name__} analysis with inputs \n\tx shape: {self.x.shape}\n\ty shape: {self.y.shape}'
109+
110+
111+
if __name__ == '__main__':
    # Demo: fit a 4th-order polynomial to noisy cubic data and plot it.
    np.random.seed(0)
    sample_count = 30
    xs = 2 - 3 * np.random.normal(0, 1, sample_count)
    noise = np.random.normal(-3, 3, sample_count)
    ys = xs - 2 * xs ** 2 + 0.5 * xs ** 3 + noise

    model = Regression(xs, ys)
    print(model.ls_reg(order=4))
    model.plot_prediction()

0 commit comments

Comments
 (0)