当前位置:首页 > IT技术 > 系统服务 > 正文

【UWB】ELM,Extreme Learning Machine
2021-10-20 10:40:41


例子 1

【UWB】ELM,Extreme Learning Machine_python

% Task 2: ELM (Extreme Learning Machine) — minimal worked example.
% Date: 2021-10-15
% Author: Zhao-Jichao
clear
clc

%% Training
% One training sample with 4 features, mapped to a 3-dimensional target.
data  = [760, 4550, 4550, 6300];   % input sample (1 x 4)
label = [500, 500, 880];           % desired output (1 x 3)

[N, n] = size(data);               % N = number of samples, n = number of features

L = 7;                             % number of hidden-layer neurons
% NOTE(review): the original also set m = 3 ("number of classes") but never
% used it — beta's width is determined by size(label,2) — so it was removed.

% Randomly initialize input weights and biases (fixed, never trained in ELM).
W   = rand(n, L) * 2 - 1;          % rand is in (0,1); *2-1 maps it to (-1,1)
b_1 = rand(1, L);                  % hidden-layer bias row (1 x L)
b   = repmat(b_1, N, 1);           % replicate the bias row per sample (N x L)
H   = G(data * W + b);             % hidden-layer output matrix (N x L)

% Output weights: least-squares solution via the Moore-Penrose pseudoinverse.
beta = pinv(H) * label;

output = H * beta;                 % network output on the training sample

%% Validation
validataData = [4580 6000 1290 3960];
G(validataData * W + b) * beta

%% 激活函数的定义
function out = G(in)
% G  Hidden-layer activation: element-wise logistic sigmoid, 1/(1 + e^(-in)).
out = 1./(1 + exp(-in));
% out = in;   % alternative: identity (linear) activation
end



例子 2

%% I. Clear the workspace
clear
clc

%% II. Build training / test sets
% 1. Load the NIR spectra dataset (provides variables NIR and octane)
load spectra_data.mat

% 2. Shuffle sample indices so the train/test split is random
temp = randperm(size(NIR,1)); % randperm: random permutation of integers

% Training set — 50 samples
P_train = NIR(temp(1:50),:)';    % 401 x 50 (features x samples)
T_train = octane(temp(1:50),:)'; % 1 x 50

% Test set — remaining 10 samples
P_test = NIR(temp(51:end),:)';
T_test = octane(temp(51:end),:)';

% Random splitting generally improves generalization estimates.

%% III. Normalization
% 1. Inputs: fit scaling on training inputs, then apply the SAME scaling to
%    the test inputs (never re-fit on test data).
[Pn_train,inputps] = mapminmax(P_train);
Pn_test = mapminmax('apply',P_test,inputps);

% 2. Outputs: same idea for the target values
[Tn_train,outputps] = mapminmax(T_train);
Tn_test = mapminmax('apply',T_test,outputps);

%% IV. Create/train the ELM: 300 hidden neurons, sigmoid, regression mode
[IW,B,LW,TF,TYPE] = elmtrain(Pn_train,Tn_train,300,'sig',0);

%% V. Simulate the ELM on the test set
tn_sim = elmpredict(Pn_test,IW,B,LW,TF,TYPE);
% 1. Map predictions back to the original octane scale
T_sim = mapminmax('reverse',tn_sim,outputps);

%% VI. Compare results
result = [T_test' T_sim'];

% 1. Mean squared error (computed directly in base MATLAB; the original used
%    mse(), which requires the Deep Learning Toolbox)
E = mean((T_sim - T_test).^2);

% 2. Coefficient of determination R^2
N = length(T_test);
R2 = (N*sum(T_sim.*T_test) - sum(T_sim)*sum(T_test))^2 / ...
     ((N*sum(T_sim.^2) - sum(T_sim)^2) * (N*sum(T_test.^2) - sum(T_test)^2));

%% VII. Plot
figure(1)
plot(1:N,T_test,'r-*',1:N,T_sim,'b:o')
grid on
legend('真实值','预测值')
xlabel('样本编号')
ylabel('辛烷值')
string = {'测试集辛烷值含量预测结果对比(ELM)';['(mse = ' num2str(E) ' R^2 = ' num2str(R2) ')']};
title(string)


%%
function [IW,B,LW,TF,TYPE] = elmtrain(P,T,N,TF,TYPE)
% ELMTRAIN Create and train an Extreme Learning Machine.
% Syntax
% [IW,B,LW,TF,TYPE] = elmtrain(P,T,N,TF,TYPE)
% Description
% Input
% P - Input Matrix of Training Set (R*Q)
% T - Output Matrix of Training Set (S*Q)
% N - Number of Hidden Neurons (default = Q)
% TF - Transfer Function:
% 'sig' for Sigmoidal function (default)
% 'sin' for Sine function
% 'hardlim' for Hardlim function
% TYPE - Regression (0,default) or Classification (1)
% Output
% IW - Input Weight Matrix (N*R)
% B - Bias Matrix (N*1)
% LW - Layer Weight Matrix (N*S)
% Example
% Regression:
% [IW,B,LW,TF,TYPE] = elmtrain(P,T,20,'sig',0)
% Y = elmpredict(P,IW,B,LW,TF,TYPE)
% Classification:
% [IW,B,LW,TF,TYPE] = elmtrain(P,T,20,'sig',1)
% Y = elmpredict(P,IW,B,LW,TF,TYPE)
% See also ELMPREDICT
% Yu Lei,11-7-2010
% Copyright www.matlabsky.com
% $Revision:1.0 $
if nargin < 2
    error('ELM:Arguments','Not enough input arguments.');
end
if nargin < 3
    N = size(P,2);          % default: as many hidden neurons as samples
end
if nargin < 4
    TF = 'sig';
end
if nargin < 5
    TYPE = 0;
end
if size(P,2) ~= size(T,2)
    error('ELM:Arguments','The columns of P and T must be same.');
end
[R,Q] = size(P);
if TYPE == 1
    % Classification: expand class labels into one-hot target vectors
    T = ind2vec(T);
end
[S,Q] = size(T);
% Randomly generate the input weight matrix in (-1,1); fixed, never trained
IW = rand(N,R) * 2 - 1;
% Randomly generate the bias vector in (0,1)
B = rand(N,1);
BiasMatrix = repmat(B,1,Q);
% Hidden-layer output matrix H (N x Q)
tempH = IW * P + BiasMatrix;
switch TF
    case 'sig'
        H = 1 ./ (1 + exp(-tempH));
    case 'sin'
        H = sin(tempH);
    case 'hardlim'
        H = hardlim(tempH);
    otherwise
        % Previously an unknown TF left H undefined, causing a confusing
        % downstream error; fail fast with an explicit message instead.
        error('ELM:Arguments','Unknown transfer function ''%s''.',TF);
end
% Output weights: least-squares solution via the pseudoinverse
LW = pinv(H') * T';
end



本文摘自:https://blog.51cto.com/u

开通会员,享受整站包年服务立即开通 >