Optimizing LSSVR with the Differential Evolution (DE) Algorithm

(1) The Differential Evolution (DE) Algorithm

Evolutionary mechanism: mutation → crossover → selection. In each generation, every individual produces a mutant vector from three other randomly chosen individuals, the mutant is crossed with the current individual to form a trial vector, and the trial vector replaces the individual only if its fitness is better.
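For reference, the DE/rand/1/bin scheme implemented in the script below can be summarized as follows (F is the mutation factor, CR the crossover rate, and r1, r2, r3 are mutually distinct indices different from i):

mutation:   v_i = x_r1 + F * (x_r2 - x_r3)
crossover:  u_i(d) = v_i(d) if rand < CR, otherwise x_i(d), for each dimension d
selection:  x_i <- u_i only if f(u_i) < f(x_i)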

(2) Least Squares Support Vector Regression (LSSVR)

The model has two key hyperparameters: the regularization parameter γ (gamma in the code below) and the RBF kernel width σ² (sig2). These are the two quantities DE will search for; the alpha returned by training is the vector of support values, not a tunable hyperparameter.
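Before adding DE, a single LSSVR fit with hand-picked hyperparameters looks like the minimal sketch below; the toy data and the values gam = 10, sig2 = 0.5 are illustrative assumptions only, while trainlssvm and simlssvm are the LS-SVMlab toolbox calls used throughout this post.

% Minimal LSSVR sketch with fixed hyperparameters (values are assumptions, not tuned)
x = (0:0.1:10)';                   % toy one-dimensional input
y = sin(x) + 0.1*randn(size(x));   % noisy target
type = 'function estimation';
gam  = 10;                         % regularization parameter (assumed)
sig2 = 0.5;                        % RBF kernel width (assumed)
[alpha,b] = trainlssvm({x,y,type,gam,sig2,'RBF_kernel'});
y_hat = simlssvm({x,y,type,gam,sig2,'RBF_kernel'},{alpha,b},x);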

 

(3) DE-LSSVR

% Load the data
data = csvread('D:\matlab2016a\OA\sample.csv');

% Split the data set: random permutation, first 150 rows for training, the rest for testing
rv = rand(1,size(data,1));
[value,index] = sort(rv);      % sorting random values yields a random permutation
x_train = data(index(1:150),1:2);  y_train = data(index(1:150),end);
x_test  = data(index(151:end),1:2);  y_test  = data(index(151:end),end);

% Normalize: scale inputs and training outputs to [-1, 1] with mapminmax
[train_input,inputps] = mapminmax(x_train');
test_input = mapminmax('apply',x_test',inputps);
[train_output,outputps] = mapminmax(y_train');

type = 'function estimation';   % LS-SVMlab regression mode

% DE parameter settings
sizepop = 20;                 % population size
dim = 2;                      % number of decision variables: gamma and sig2
lb = [1];ub = [1000];         % shared lower/upper bound for both parameters
mutation = 0.5;               % mutation factor F
crossover = 0.2;              % crossover rate CR
maxgen = 50;                  % number of generations

% Expand scalar bounds to one entry per dimension
if size(lb,1) == 1
    lb = ones(dim,1).*lb;
    ub = ones(dim,1).*ub;
end

% Initialize the population and evaluate each individual
Targetfitness = inf;
position = zeros(sizepop,dim);
fit = zeros(1,sizepop);
for i = 1:sizepop
    position(i,:) = lb' + (ub'-lb').*rand(1,dim);   % random point inside the bounds
    gamma = position(i,1);
    sig2 = position(i,2);
    [alpha,b] = trainlssvm({train_input',train_output',type,gamma,sig2,'RBF_kernel'});
    predict = simlssvm({train_input',train_output',type,gamma,sig2,'RBF_kernel'},{alpha,b},train_input');
    predict = mapminmax('reverse',predict',outputps);
    fit(i) = sqrt(sum((predict'-y_train).^2)/length(y_train));   % fitness = training RMSE
    if fit(i) < Targetfitness
        Targetfitness = fit(i);
        Targetposition = position(i,:);
    end
end

% Iterative optimization
convergence = zeros(1,maxgen);    % best fitness found up to each generation
for L = 1:maxgen
    for j = 1:sizepop
        % Mutation (DE/rand/1): combine three distinct individuals, all different from j
        ri = randperm(sizepop,3);
        while any(ri == j)
            ri = randperm(sizepop,3);
        end
        mpop = position(ri(1),:) + mutation*(position(ri(2),:) - position(ri(3),:));
        % Clamp the mutant vector back inside the search bounds
        mpop = min(max(mpop, lb'), ub');
        
        % Binomial crossover: take each dimension from the mutant with probability 'crossover' (CR)
        tmp = zeros(1,dim);
        for i = 1:dim
            if rand < crossover
                tmp(i) = mpop(i);
            else
                tmp(i) = position(j,i);
            end
        end
        
        % Selection: evaluate the trial vector on the training set
        gamma = tmp(1);
        sig2 = tmp(2);
        [alpha,b] = trainlssvm({train_input',train_output',type,gamma,sig2,'RBF_kernel'});
        predict = simlssvm({train_input',train_output',type,gamma,sig2,'RBF_kernel'},{alpha,b},train_input');
        cpredict = mapminmax('reverse',predict',outputps);
        cfit = sqrt(sum((cpredict'-y_train).^2)/length(y_train));
        
        % Greedy replacement, then update the global best
        if cfit < fit(j)
            position(j,:) = tmp;
            fit(j) = cfit;
        end
        if fit(j) < Targetfitness
            Targetposition = position(j,:);
            Targetfitness = fit(j);
        end    
    end
    convergence(L) = Targetfitness;   % record the best RMSE found so far
end

% Retrain LSSVR with the best hyperparameters found by DE
gamma = Targetposition(1);
sig2 = Targetposition(2);
[alpha,b] = trainlssvm({train_input',train_output',type,gamma,sig2,'RBF_kernel'});
predict = simlssvm({train_input',train_output',type,gamma,sig2,'RBF_kernel'},{alpha,b},train_input');
predict_train = mapminmax('reverse',predict',outputps);
train_rmse = sqrt(sum((predict_train'-y_train).^2)/length(y_train));

% Evaluate the final model on the test set
predict = simlssvm({train_input',train_output',type,gamma,sig2,'RBF_kernel'},{alpha,b},test_input');
predict_test = mapminmax('reverse',predict',outputps);
test_rmse = sqrt(sum((predict_test'-y_test).^2)/length(y_test));

disp(['Train RMSE = ',num2str(train_rmse),' Test RMSE = ',num2str(test_rmse)])

Running the script prints:

Train RMSE = 0.065179 Test RMSE = 0.16031
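As a quick optional check (a sketch reusing the convergence array recorded above), the best training RMSE per generation can be plotted to verify that DE has converged before maxgen is reached:

figure;
plot(1:maxgen, convergence, '-o');   % best training RMSE at each generation
xlabel('Generation');
ylabel('Best training RMSE');
title('DE-LSSVR convergence curve');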
