
%Kernel ridge regression on the Boston housing dataset: five-fold
%cross-validation over a grid of (gamma, sigma) values, repeated over
%20 random (2/3, 1/3) train/test splits.

%Loading the Boston dataset
load('boston.mat');

%Gamma and sigma ranges
valuesOfGamma = 2.^(-40:-26);
valuesOfSigma = 2.^(7:0.5:13);

%Per-split training and test errors
trainingDataMeanSquaredError = zeros(1,20);
testDataMeanSquaredError = zeros(1,20);

%Looping over 20 random (2/3, 1/3) splits of the data
for i = 1:20
    %2/3 of the dataset as training data, sampled without replacement
    trainingDataset = datasample(boston, round(2/3*size(boston,1)), 1, 'Replace', false);
    %The remaining 1/3 as test data
    testDataset = setdiff(boston, trainingDataset, 'rows');
    %Separating the features (columns 1-13) from the targets (column 14)
    xiTrainingSet = trainingDataset(:,1:13);
    yiTrainingSet = trainingDataset(:,14);
    xiTestSet = testDataset(:,1:13);
    yiTestSet = testDataset(:,14);
    %Number of training points
    [dimension, d] = size(xiTrainingSet);

    kernelMeanSquaredError = zeros(size(valuesOfGamma,2), size(valuesOfSigma,2), 5);
    %Five-fold cross-validation to choose sigma and gamma
    for fold = 1:5
        %Width of one fold
        foldWidth = round(dimension/5);
        %Training x values: everything outside the held-out fold
        %(using the 13 feature columns, not the full rows with targets)
        trainingXDataset = cat(1, xiTrainingSet(1:(fold-1)*foldWidth, :), ...
                                  xiTrainingSet(fold*foldWidth+1:end, :));
        %X validation data: the held-out fold
        trainingValidationDataset = xiTrainingSet((fold-1)*foldWidth+1:fold*foldWidth, :);
        %Training y values: everything outside the held-out fold
        trainingYDataset = cat(1, yiTrainingSet(1:(fold-1)*foldWidth, :), ...
                                  yiTrainingSet(fold*foldWidth+1:end, :));
        %Y validation data
        trainingValidationYDataset = yiTrainingSet((fold-1)*foldWidth+1:fold*foldWidth, :);
        [train_dimension, d2] = size(trainingXDataset);

        %One kernel matrix per sigma value, over the [training; validation] rows
        KernelSigma = zeros(dimension, dimension, size(valuesOfSigma,2));
        for sigma = 1:size(valuesOfSigma,2)
            KernelSigma(:,:,sigma) = computeKernel(trainingXDataset, trainingValidationDataset, valuesOfSigma(sigma));
        end
        %Validation error for every (gamma, sigma) pair on this fold
        for gamma = 1:size(valuesOfGamma,2)
            for sigma2 = 1:size(valuesOfSigma,2)
                kernel = KernelSigma(:,:,sigma2);
                alpha = kridgereg(kernel(1:train_dimension,1:train_dimension), trainingYDataset, valuesOfGamma(gamma));
                kernelMeanSquaredError(gamma,sigma2,fold) = dualcost(kernel(train_dimension+1:end,1:train_dimension), trainingValidationYDataset, alpha);
            end
        end
    end
    %Cross-validation error averaged over the five folds
    averageKernelMSE = mean(kernelMeanSquaredError, 3);
    %Plotting the cross-validation error as a function of gamma and sigma
    if i == 1
        surf(valuesOfSigma', valuesOfGamma', averageKernelMSE)
        xlabel('Sigma');
        ylabel('Gamma');
        zlabel('Cross-validation error');
        title('Cross-validation error (mean over folds of validation error)')
    end
    %Indices of the optimal gamma and sigma values
    [optimalGamma, optimalSigma] = find(averageKernelMSE == min(min(averageKernelMSE)));
    %Kernel at the optimal sigma, over the [training; test] rows
    optimalKernel = computeKernel(xiTrainingSet, xiTestSet, valuesOfSigma(optimalSigma));
    %Dual coefficients at the optimal gamma, fit on the full training set
    optimalAlpha = kridgereg(optimalKernel(1:dimension,1:dimension), yiTrainingSet, valuesOfGamma(optimalGamma));
    %Training mean squared error for split i
    trainingDataMeanSquaredError(i) = dualcost(optimalKernel(1:dimension,1:dimension), yiTrainingSet, optimalAlpha);
    %Test mean squared error for split i
    testDataMeanSquaredError(i) = dualcost(optimalKernel(dimension+1:end,1:dimension), yiTestSet, optimalAlpha);
end

%Mean and standard deviation of the training MSE over the 20 splits
average_mse_TrainingData = mean(trainingDataMeanSquaredError)
standardDeviationTraining = std(trainingDataMeanSquaredError)
%Mean and standard deviation of the test MSE over the 20 splits
average_mse_TestData = mean(testDataMeanSquaredError)
standardDeviationTest = std(testDataMeanSquaredError)

%Summary table shown in a uitable
kernelRidgeTrain = sprintf('%f ± %f', average_mse_TrainingData, standardDeviationTraining);
kernelRidgeTest = sprintf('%f ± %f', average_mse_TestData, standardDeviationTest);
columnNames = {'Method', 'MSE train', 'MSE test'};
rowNames = {'Kernel Ridge Regression'};
data = {rowNames{1}, kernelRidgeTrain, kernelRidgeTest};
userInterface = figure('Position', [90 90 800 450]);
userInterfaceTable = uitable('Parent', userInterface, 'Position', [25 100 800 330], ...
    'Data', data, 'ColumnName', columnNames, 'RowName', rowNames);
userInterfaceTable.ColumnWidth = {170, 320, 320};
userInterfaceTable.RowName = [];
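The script assumes three helper functions on the path: computeKernel, kridgereg, and dualcost. None of them are shown here, so the sketches below are reconstructions under stated assumptions, not the original implementations. First, computeKernel: the way the script slices the returned matrix (a train-train block followed by a validation-train block) suggests it builds one Gram matrix over the stacked rows [Xtrain; Xtest]. A minimal sketch, assuming a Gaussian (RBF) kernel:

function K = computeKernel(Xtrain, Xtest, sigma)
%COMPUTEKERNEL Gram matrix over the stacked rows [Xtrain; Xtest].
%   Assumption: Gaussian kernel k(x,z) = exp(-||x-z||^2 / (2*sigma^2)).
%   The caller slices out K(1:n,1:n) (train-train) and
%   K(n+1:end,1:n) (test-train), where n = size(Xtrain,1).
X = [Xtrain; Xtest];
sq = sum(X.^2, 2);
%Pairwise squared Euclidean distances via implicit expansion (R2016b+)
D = sq + sq' - 2*(X*X');
%Clamp tiny negative values caused by floating-point round-off
D = max(D, 0);
K = exp(-D / (2*sigma^2));
end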

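kridgereg(K, y, gamma) presumably solves ridge regression in its dual form. A sketch assuming the standard regularised dual solution alpha = (K + gamma*l*I)^(-1) * y, where l is the number of training points:

function alpha = kridgereg(K, y, gamma)
%KRIDGEREG Dual-form kernel ridge regression (assumed formulation).
%   Solves (K + gamma*l*I) * alpha = y for the dual coefficients,
%   where l is the number of training points.
l = size(K, 1);
alpha = (K + gamma * l * eye(l)) \ y;
end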

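Finally, dualcost(K, y, alpha) appears to return the mean squared error of the dual predictor K*alpha, where K holds the kernel values between the evaluation points (rows) and the training points (columns). A sketch under that assumption:

function mse = dualcost(K, y, alpha)
%DUALCOST Mean squared error of the dual predictor (assumed).
%   K is (n_eval x n_train); predictions are yhat = K*alpha.
yhat = K * alpha;
mse = mean((y - yhat).^2);
end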