Here are the M-files to implement quadratic (second-order polynomial) regression, multiple linear regression and general linear least-squares regression.
Quadratic Regression M-file
function [a,r2,syx] = polyregr(order,x,y)
% polyregr: least-squares regression curve fitting (quadratic)
% [a,r2,syx] = polyregr(order,x,y): least-squares fit of a quadratic to data by solving the normal equations
% input:
% order = polynomial order (2 for this routine; used when evaluating the fitted curve for plotting)
% x = independent variable
% y = dependent variable
% output:
% a = vector of coefficients ->> a(1): intercept, a(2): coefficient of x, a(3): coefficient of x^2
% r2 = coefficient of determination
% syx = standard error of estimate
n = length(x);
if length(y)~=n, error('x and y must be same length'); end
% error checking: x and y must contain the same number of data points
x = x(:); y = y(:); % convert to column vectors
sx = sum(x); sy = sum(y);
sx2 = sum(x.^2); sxy = sum(x.*y);
sx3 = sum(x.^3); sx4 = sum(x.^4); sx2y = sum(x.*x.*y);
matrix = [n sx sx2; sx sx2 sx3; sx2 sx3 sx4];
right = [sy sxy sx2y]';
a = matrix \ right;
% to calculate error statistics
Sr = sum((y-a(1)-(a(2)*x)-(a(3)*x.^2)).^2);
St = sum((y-(sy/n)).^2);
r2 = (St-Sr)/St;
syx = sqrt(Sr/(length(x)-length(a)));
% create plot of data and best fit line
xp = min(x):0.1:max(x);
yp = zeros(1,length(xp));
for k = 1:length(yp)
    for l = 0:order
        yp(k) = yp(k) + a(l+1)*(xp(k))^l;
    end
end
plot(x,y,'o',xp,yp), xlabel('T (K)'), ylabel('cp (J mol^{-1} K^{-1})')
grid on
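As a quick check, polyregr can be called with a small, made-up set of temperature/heat-capacity data (the values below are hypothetical and for illustration only, not real property data):

% hypothetical data for illustration only
T  = [300 350 400 450 500 550 600];          % temperature (K)
cp = [29.0 29.3 29.7 30.2 30.8 31.5 32.3];   % heat capacity (J mol^-1 K^-1)
[a, r2, syx] = polyregr(2, T, cp)            % fit cp = a(1) + a(2)*T + a(3)*T^2

The call returns the three coefficients, r2, and syx, and also produces the plot of the data points together with the fitted curve.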
Multiple Linear Regression M-file
function [a,Sr,r2,syx] = multilinear(x1,x2,y)
% multilinear: multiple linear regression curve fitting
% input:
% x1 = independent variable 1
% x2 = independent variable 2
% y = dependent variable (function of x1 and x2)
% output:
% a = vector of coefficients ->> a(1) = intercept, a(2) = slope for x1, a(3) = slope for x2
% Sr = sum of square of residuals
% r2 = coefficient of determination
% syx = standard error of estimate
n = length(y);
if length(x1)~=n, error('x1 and y must be same length'); end
if length(x2)~=n, error('x2 and y must be same length'); end
x1 = x1(:); x2 = x2(:); y = y(:); % convert to column vectors
sx1 = sum(x1);sx2 = sum(x2);sy = sum(y);sx1x2 = sum(x1.*x2);
sx1y = sum(x1.*y); sx2y = sum(x2.*y);
sx1sq = sum(x1.*x1); sx2sq = sum(x2.*x2);
%calculate for coefficients [a]
A = [n sx1 sx2;sx1 sx1sq sx1x2;sx2 sx1x2 sx2sq];
b = [sy;sx1y;sx2y];
a = A\b;
%calculate for Sr
yp = a(1)+a(2)*x1+a(3)*x2;
Sr = sum((y-yp).^2);
%calculate for syx
syx = sqrt(Sr/(n-length(a)));
%calculate for r2
r2 = 1-(Sr/sum((y-mean(y)).^2));
end
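A minimal usage sketch follows, with data constructed so that y = 5 + 4*x1 - 3*x2 holds exactly, so the regression should recover those coefficients almost perfectly:

% data constructed to satisfy y = 5 + 4*x1 - 3*x2 exactly
x1 = [0 2 2.5 1 4 7];
x2 = [0 1 2 3 6 2];
y  = [5 10 9 0 3 27];
[a, Sr, r2, syx] = multilinear(x1, x2, y)   % expect a close to [5; 4; -3], r2 close to 1

Because these points lie exactly on a plane, Sr and syx should come back as essentially zero.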
General Linear Regression M-file
function [a, r2, syx] = genlinreg(x1,x2,y)
% genlinreg: general linear least-squares regression model
% [a,r2,syx] = genlinreg(x1,x2,y): least-squares fit of a linear model to data by solving the normal equations
% input:
% x1 = independent variable 1
% x2 = independent variable 2 = zeros(size(x1)) for polynomial regression
% y = dependent variable
% output:
% a = vector of coefficients ->> a(1): intercept, a(2): slope for x1 (or x), a(3): slope for x2 (or x^2)
% r2 = coefficient of determination
% syx = standard error of estimate
n = length(y);
if length(x1)~=n, error('x1 and y must be same length'); end
if length(x2)~=n, error('x2 and y must be same length'); end
x1 = x1(:); x2 = x2(:); y = y(:); % convert to column vectors
if all(x2 == 0)   % x2 all zeros -> quadratic polynomial regression in x1
    Z = [ones(size(x1)) x1 x1.^2];
else              % otherwise -> multiple linear regression in x1 and x2
    Z = [ones(size(y)) x1 x2];
end
a = (Z'*Z)\(Z'*y); %solve for the unknown coefficients by utilizing backslash operator
% calculate for coefficient of determination
Sr = sum((y-Z*a).^2);
St = sum((y-mean(y)).^2);
r2 = 1-Sr/St;
% calculate for standard error of estimate
syx = sqrt(Sr/(n-length(a)));
end
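genlinreg can be exercised in either of its two modes. A hypothetical example of each (the data are made up purely for illustration):

% polynomial (quadratic) mode: pass a vector of zeros for x2
x = [1 2 3 4 5]';
y = [2.1 4.3 8.2 13.9 22.1]';
[a, r2, syx] = genlinreg(x, zeros(size(x)), y)   % fits y = a(1) + a(2)*x + a(3)*x^2

% two-variable mode: pass a nonzero second regressor
x1 = [0 2 2.5 1 4 7];
x2 = [0 1 2 3 6 2];
y  = [5 10 9 0 3 27];
[a, r2, syx] = genlinreg(x1, x2, y)              % fits y = a(1) + a(2)*x1 + a(3)*x2

In the second call the normal-equations matrix Z'*Z is the same matrix A that multilinear builds explicitly, so the two functions should return the same coefficients for the same data.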