clc;
clear;
close all;
%% generate random data
shift = 3;
n = 200;
m = 200;
d = 2;
sigma = 1;
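% Two Gaussian clusters: x holds n positive samples centred at (-shift, -shift),
% y holds m negative samples centred at (+shift, +shift) with standard deviation sigma.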
x = randn(d,n)-shift;
y = randn(d,m)*sigma+shift;
%% show the data
figure;
plot(x(1,:),x(2,:),'rs');
hold on;
plot(y(1,:),y(2,:),'go');
legend('Positive samples','Negative samples');
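% Separating-hyperplane constraints, stacked as A*[w; b] <= -1:
%   positive samples x_i:  w'*x_i + b >= 1   ->  row [-x_i', -1]
%   negative samples y_j:  w'*y_j + b <= -1  ->  row [ y_j',  1]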
A = [-x', -ones(n,1);
      y',  ones(m,1)];
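% Linear programming: zero objective, so linprog returns any feasible [w; b]
% satisfying the margin constraints (a separating hyperplane, not necessarily
% the maximum-margin one).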
c = -ones(n+m,1);
w = linprog(zeros(d+1,1),A,c);
hold on;
%% visualize the classification area
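% Decision boundary: points where w(1)*x1 + w(2)*y1 + w(3) = 0, solved for y1.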
x1 = -shift-2:0.1:shift+2*sigma;
y1 = (-w(3)-w(1)*x1)/w(2);
plot(x1,y1,'-','LineWidth',2);
legend('Positive samples','Negative samples','Linear programming');
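% Linear SVM (hard margin): minimize (1/2)*||w||^2 subject to the same
% constraints A*[w; b] <= -1. H penalizes only w; the bias b (last entry)
% is left unregularized.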
H = eye(d+1);
H(d+1,d+1) = 0;
w = quadprog(H,zeros(d+1,1),A,c);
hold on;
x1 = -shift-2:0.1:shift+2*sigma;
y1 = (-w(3)-w(1)*x1)/w(2);
plot(x1,y1,'g-','LineWidth',2);
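% Margin boundaries w'*x + b = -1 and w'*x + b = +1.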
y1 = (-1-w(3)-w(1)*x1)/w(2);
plot(x1,y1,'g-','LineWidth',2);
y1 = (1-w(3)-w(1)*x1)/w(2);
plot(x1,y1,'g-','LineWidth',2);
legend('Positive samples','Negative samples','Linear programming','Linear SVM');
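%% optional check (illustrative sketch, not part of the original script)
% Report the SVM's geometric margin width 2/||w|| and the training accuracy;
% the variable names below are assumptions added for illustration.
margin_width = 2/norm(w(1:d));
labels = [ones(1,n), -ones(1,m)];          % +1 for x, -1 for y
preds  = sign(w(1:d)'*[x, y] + w(d+1));    % sign of the decision function
fprintf('SVM margin width: %.3f, training accuracy: %.2f%%\n', ...
    margin_width, 100*mean(preds == labels));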