## newton_regression.m
## Copyright (C) 2013 xiuleili
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Octave; see the file COPYING. If not, see
## <http://www.gnu.org/licenses/>.
## -*- texinfo -*-
## @deftypefn {Function File} {@var{theta} =} newton_regression (@var{x}, @var{y})
## Return the parameters of the linear model y = theta(2:m+1)*x' + theta(1),
## where m is the number of columns (features) of the matrix x.
## The parameters are fitted iteratively with a Newton-style gradient update.
## For example:
##
## @example
## @group
## x = [1 4; 2 5; 5 1; 4 2]; y = [19 26 19 20];
## newton_regression(x, y)
## @result{} [0.0060406 2.9990063 3.9990063]
## @end group
## @end example
## @seealso{stichastic_gradient}
## @end deftypefn
## Author: xiuleili <xiuleili@XIULEILI>
## Created: 2013-05-03
function [ theta ] = newton_regression (x, y)
[n, m] = size(x);
[my, ny] = size(y);
if (ny != n || my != 1)
  error("x must be an n-by-m matrix and y a 1-by-n row vector, where n is the number of training samples.");
end
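% Design matrix X is (m+1) x n: a leading row of ones supplies the intercept term.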
X = [ones(n,1) x]';
theta = rand(1, m+1);      % random initial guess for [intercept, slopes]
learning_rate = 0.1;
errors = 1;                % half the sum of squared residuals, recomputed each pass
threshold = 0.000001;      % convergence tolerance on errors
times = 0;                 % iteration counter
% Update rule: theta(j) += learning_rate * sum[(y-h(x))*xj] / sum[xj^2]
while errors > threshold
% gradient term: sum over samples of (y - h(x)) * xj, for each parameter j
sum_of_error = (y - theta*X) * X';
% normalizer: sum over samples of xj^2, for each parameter j
sum_of_xj2 = (diag(X*X'))';
theta += learning_rate * sum_of_error./sum_of_xj2;
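% Objective after the update: half the sum of squared residuals.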
errors = sum((y-theta*X).^2)/2;
times++;
printf("[%d] errors = %f", times, errors);
disp(theta);
if (times > 100000)   % safety cap on iterations
break;
end
end
endfunction
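
## Demo: fit the example data from the help text; the exact solution for that
## data is theta = [0 3 4], so the printed result should land close to it.
%!demo
%! x = [1 4; 2 5; 5 1; 4 2];
%! y = [19 26 19 20];
%! theta = newton_regression (x, y)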