Permalink
Cannot retrieve contributors at this time
Name already in use
A tag already exists with the provided branch name. Many Git commands accept both tag and branch names, so creating this branch may cause unexpected behavior. Are you sure you want to create this branch?
SRGP/Main_2D_Surface.m
Go to fileThis commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
183 lines (168 sloc)
6.87 KB
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
%%______________________________________________________________________________
%% Main script: 2-D surface regression demo using single-response GP regression.
%% A known analytical surface is sampled on a regular grid to stand in for data.
%% Original author: Kai Zhou, 7/31/2019
%%____________________________________________________________________________________
clc
clear
%%% Section 1_________________________________________________________________________
%%% Build the demonstration dataset: evaluate a 2-D analytical surface on an
%%% n1-by-n2 grid. In real cases, load measured data here instead.
global numInp
numInp = 2; % number of input features
numOup = 1; % number of output features - single-response GP only allows this parameter to be 1
n1 = 40;    % grid resolution along input 1
n2 = 40;    % grid resolution along input 2
nSamp = n1*n2;
x1 = linspace(0,10,n1);
x2 = linspace(0,10,n2);
% Enumerate every grid combination with x2 varying fastest, so that row
% k = (i1-1)*n2 + i2 holds the pair (x1(i1), x2(i2)) - the same ordering the
% original nested loops produced. ndgrid gives X1g(i1,i2) = x1(i1); reshaping
% the transposes column-major yields exactly that row order.
[X1g,X2g] = ndgrid(x1,x2);
DataInp = [reshape(X1g.',[],1), reshape(X2g.',[],1)];
% Analytical test surface evaluated elementwise over all nSamp rows.
Dataoutp = sin(DataInp(:,1)) + cos(DataInp(:,2)) ...
         + (DataInp(:,1)-5).^2/10 + 0.5*abs(DataInp(:,2)).^(1/2);
% Keep un-normalized copies for reference before Section 2 rescales in place.
Dataoutp_ori = Dataoutp;
DataInp_ori = DataInp;
%%%__________________________________________________________________________________
%%% Section 2_________________________________________________________________________
%%% Normalize inputs (column by column) and the output so the GP trains on a
%%% well-scaled problem. The scaling parameters returned by DataPreProcess
%%% (semantics depend on the chosen method) are kept for the inverse map in
%%% Section 5.
dataProcessMethod = 2;
para1 = zeros(1,numInp);   % per-column scaling parameter 1 from DataPreProcess
para2 = zeros(1,numInp);   % per-column scaling parameter 2 from DataPreProcess
for col = 1:numInp
    [DataInp(:,col),para1(col),para2(col)] = DataPreProcess(DataInp(:,col),dataProcessMethod);
end
dataProcessMethod = 2;
[Dataoutp,outpMin,outpMax] = DataPreProcess(Dataoutp,dataProcessMethod);
%%% Section 3__________________________________________________________________________
%%% Split training and testing datasets.
%%% Uniform (evenly spaced) sampling is used here for reproducibility; in real
%%% cases random sampling via randperm is preferred.
%%% With trainFrac = 0.05 and nSamp = 1600 this gives 80 training points and
%%% 1520 testing points - play with this parameter to see how performance changes.
trainFrac = 0.05;   % fraction of samples used for training
% max(1,...) guarantees at least one training point; unique() guards against
% duplicate indices if floor() collides for small nSamp. For this script the
% resulting indices are identical to floor(linspace(1,nSamp,0.05*nSamp)).
train_ind = unique(floor(linspace(1,nSamp,max(1,floor(trainFrac*nSamp)))));
test_ind = setdiff(1:nSamp,train_ind);
dataInpTrain = DataInp(train_ind,:);
dataOupTrain = Dataoutp(train_ind,:);
dataInpTest = DataInp(test_ind,:);
dataOupTest = Dataoutp(test_ind,:);
%%%___________________________________________________________________________________
%%% Section 4__________________________________________________________________________
%%% Train the single-response GP on the (normalized) training set and predict
%%% at the test inputs. dataOupTest_pred holds the predictions per test point;
%%% dataOupTest_cov is indexed pairwise over test points in Section 5, i.e. a
%%% predictive covariance matrix over the test set. hyper_opt presumably holds
%%% the optimized hyperparameters - verify against SR_GPR.m.
[dataOupTest_pred, dataOupTest_cov, hyper_opt] = SR_GPR(dataInpTrain,dataOupTrain,dataInpTest);
%%%____________________________________________________________________________________
%%% Section 5__________________________________________________________________________
%%% Map datasets back to original space (invert the Section-2 normalization).
dataProcessMethod = 2;
for i1 = 1:numInp
% Undo the per-column input scaling using the parameters saved in Section 2.
dataInpTrain(:,i1) = DataPreProcessBack(dataInpTrain(:,i1),para1(i1),para2(i1), dataProcessMethod);
dataInpTest(:,i1) = DataPreProcessBack(dataInpTest(:,i1),para1(i1), para2(i1), dataProcessMethod);
end
dataProcessMethod = 2;
dataOupTrain = DataPreProcessBack(dataOupTrain,outpMin,outpMax, dataProcessMethod);
dataOupTest = DataPreProcessBack(dataOupTest,outpMin,outpMax, dataProcessMethod);
dataOupTest_pred_temp = DataPreProcessBack(dataOupTest_pred,outpMin,outpMax, dataProcessMethod);
% Rescale each covariance entry by the ratio of de-normalized to normalized
% predictions at its two test points, mirroring the upper triangle to keep
% the matrix symmetric.
% NOTE(review): this ratio-based rescaling is only exact when the inverse map
% is a pure scaling through the origin; for an affine inverse (e.g. min-max
% with a nonzero offset) the covariance should instead be multiplied by the
% constant squared scale factor - confirm against DataPreProcessBack with
% method 2. It also divides by dataOupTest_pred, which breaks down if any
% normalized prediction is (near) zero.
for i1 = 1:length(test_ind)
for i2 = i1:length(test_ind)
dataOupTest_cov(i1,i2) = dataOupTest_cov(i1,i2)*dataOupTest_pred_temp(i1)*dataOupTest_pred_temp(i2)/dataOupTest_pred(i1)/dataOupTest_pred(i2);
dataOupTest_cov(i2,i1) = dataOupTest_cov(i1,i2);
end
end
dataOupTest_pred = dataOupTest_pred_temp;
%%% Section 6__________________________________________________________________________ | |
%%% Result visualization | |
figure (1) | |
subplot(2,2,1:2) | |
[X,Y]=meshgrid(x1,x2); | |
Z = sin(X)+cos(Y)+(X-5).^2/10+0.5*(abs(Y)).^(1/2); | |
surf(X,Y,Z); | |
hold on | |
plot3(dataInpTrain(:,1),dataInpTrain(:,2),dataOupTrain,'rd','LineWidth',2,... | |
'MarkerSize',6) | |
legend('Actual Model', 'Training Data Points') | |
set(gca, "FontSize", 13) | |
xlabel('Input 1','FontSize',14) | |
ylabel('Input 2','FontSize',14) | |
zlabel('Output','FontSize',14) | |
title('Actual Model with Training Data Points','FontSize',14) | |
subplot(2,2,3) | |
for i1 = 1:n2 | |
h1 = plot(X(i1,:),Z(i1,:),'-b*'); | |
hold on | |
end | |
h2 = plot(dataInpTrain(:,1),dataOupTrain,'rd','LineWidth',2,... | |
'MarkerSize',6); | |
legend([h1 h2],{'Actual Model Projection on Input 1', 'Training Data Points'}) | |
set(gca, "FontSize", 13) | |
xlabel('Input 1','FontSize',14) | |
ylabel('Output','FontSize',14) | |
title('Outpt w.r.t.Input 1(Actual Model)','FontSize',14) | |
subplot(2,2,4) | |
for i1 = 1:n1 | |
h1 = plot(Y(:,i1),Z(:,i1),'-r+'); | |
hold on | |
end | |
h2 = plot(dataInpTrain(:,2),dataOupTrain,'bd','LineWidth',2,... | |
'MarkerSize',6); | |
legend([h1 h2],{'Actual Model Projection on Input 2', 'Training Data Points'}) | |
set(gca, "FontSize", 13) | |
xlabel('Input 2','FontSize',14) | |
ylabel('Output','FontSize',14) | |
title('Outpt w.r.t.Input 2(Actual Model)','FontSize',14) | |
%%% Figure 2: the GP-predicted surface (training entries keep their training
%%% values, test entries take the GP predictions) with the same projections.
figure (2)
subplot(2,2,1:2)
% Reassemble the full-grid response vector in the Section-1 row ordering.
Dataoutp_r = zeros(nSamp,numOup);
Dataoutp_r(train_ind) = dataOupTrain;
Dataoutp_r(test_ind) = dataOupTest_pred;
% Row k corresponds to (i1-1)*n2+i2 (x2 fastest), so reshaping the transposed
% vector to [n2,n1] matches meshgrid's (x2-row, x1-column) layout used for X,Y.
Z_1 = reshape(Dataoutp_r',[n2,n1]);
surf(X,Y,Z_1);
hold on
plot3(dataInpTrain(:,1),dataInpTrain(:,2),dataOupTrain,'rd','LineWidth',2,...
'MarkerSize',6)
legend('Predicted Surface', 'Training Data Points')
set(gca, 'FontSize', 13)   % single-quoted char vector: consistent with the rest of the file and pre-R2017a compatible
xlabel('Input 1','FontSize',14)
ylabel('Input 2','FontSize',14)
zlabel('Output','FontSize',14)
title('Predicted Surface with Training Data Points','FontSize',14)
% Projection onto input 1: one curve per fixed value of input 2.
subplot(2,2,3)
for i1 = 1:n2
h1 = plot(X(i1,:),Z_1(i1,:),'-b*');
hold on
end
h2 = plot(dataInpTrain(:,1),dataOupTrain,'rd','LineWidth',2,...
'MarkerSize',6);
legend([h1 h2],{'Predicted Surface Projection on Input 1', 'Training Data Points'})
set(gca, 'FontSize', 13)
xlabel('Input 1','FontSize',14)
ylabel('Output','FontSize',14)
title('Output w.r.t. Input 1 (Predicted Surface)','FontSize',14)   % fixed typo "Outpt" and spacing
% Projection onto input 2: one curve per fixed value of input 1.
subplot(2,2,4)
for i1 = 1:n1
h1 = plot(Y(:,i1),Z_1(:,i1),'-r+');
hold on
end
h2 = plot(dataInpTrain(:,2),dataOupTrain,'bd','LineWidth',2,...
'MarkerSize',6);
legend([h1 h2],{'Predicted Surface Projection on Input 2', 'Training Data Points'})
set(gca, 'FontSize', 13)
xlabel('Input 2','FontSize',14)
ylabel('Output','FontSize',14)
title('Output w.r.t. Input 2 (Predicted Surface)','FontSize',14)   % fixed typo "Outpt" and spacing
% figure (3) | |
% error = (Z_1-Z)./Z*100; | |
% surf(X,Y,error); | |
% xlabel('Input 1','FontSize',14) | |
% ylabel('Input 2','FontSize',14) | |
% zlabel('Error(%)','FontSize',14) | |
% title('Prediction Error','FontSize',14) | |
%%%_____________________________________________________________________________________ | |