% subsasgn.m
break;
end
end
if (~found)
eval(['o.' subs '=v;'],'err=lasterr');
return
end
else
err = 'Attempt to reference field of non-structure array.';
return
end
eval(['o2=o.' subs ';'],'err=lasterr');
if length(err), return, end
[v,err] = nsubsasn(o2,subscripts,v);
if length(err), return, end
eval(['o.' subs '=v;'],'err=lasterr');
end
% ===========================================================
% NEW SUBOBJECTS
% ===========================================================
function structure=nninput
% NNINPUT Construct an input structure.
structure.range = [0 1];
structure.size = 1;
structure.userdata.note = 'Put your custom input information here.';
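% Illustrative example (not part of the original file): a two-element
% input covering [-1 1] could be described by taking this default and
% overriding its fields:
%
%   in = nninput;
%   in.size = 2;
%   in.range = [-1 1; -1 1];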
% ===========================================================
function structure=nnlayer
%NNLAYER Construct a layer structure.
structure.dimensions = 1;
structure.distanceFcn = '';
structure.distances = [0];
structure.initFcn = 'initwb';
structure.netInputFcn = 'netsum';
structure.positions = [0];
structure.size = 1;
structure.topologyFcn = 'hextop';
structure.transferFcn = 'purelin';
structure.userdata.note = 'Put your custom layer information here.';
% ===========================================================
function structure=nnbias(a1)
%NNBIAS Construct a bias structure.
structure.initFcn = '';
structure.learn = 1;
structure.learnFcn = '';
structure.learnParam = '';
structure.size = 0;
structure.userdata.note = 'Put your custom bias information here.';
% ===========================================================
function structure=nnweight
%NNWEIGHT Construct a weight structure.
structure.delays = [0];
structure.initFcn = '';
structure.learn = 1;
structure.learnFcn = '';
structure.learnParam = '';
structure.size = [0 0];
structure.userdata.note = 'Put your custom weight information here.';
structure.weightFcn = 'dotprod';
% ===========================================================
function structure=nnoutput
% NNOUTPUT Construct an output structure.
structure.size = 0;
structure.userdata.note = 'Put your custom output information here.';
% ===========================================================
function structure=nntarget
% NNTARGET Construct a target structure.
structure.size = 0;
structure.userdata.note = 'Put your custom target information here.';
% ===========================================================
function i=findne(a,b)
% FINDNE Find not equal.
% Get around warning for [] == [].
if length(a) == 0
i = [];
else
i = find(a ~= b);
end
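% Example: findne([],[]) returns [] without triggering the empty-comparison
% warning, while findne([1 2 3],[1 0 3]) returns 2.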
% ===========================================================
function [net,err]=hint(net)
err = '';
% INPUTS
% ======
% inputSizes(i), totalInputSize
net.hint.inputSizes = zeros(net.numInputs,1);
for i=1:net.numInputs
net.hint.inputSizes(i) = net.inputs{i}.size;
end
net.hint.totalInputSize = sum(net.hint.inputSizes);
% LAYERS
% ======
% layerSizes(i), totalLayerSize,
net.hint.layerSizes = zeros(net.numLayers,1);
for i=1:net.numLayers
net.hint.layerSizes(i) = net.layers{i}.size;
end
net.hint.totalLayerSize = sum(net.hint.layerSizes);
% OUTPUTS
% =======
% outputInd, outputSizes(i), totalOutputSize
net.hint.outputInd = find(net.outputConnect);
net.hint.outputSizes = zeros(net.numOutputs,1);
for i=1:net.numOutputs
net.hint.outputSizes(i) = net.outputs{net.hint.outputInd(i)}.size;
end
net.hint.totalOutputSize = sum(net.hint.outputSizes);
% TARGETS
% =======
% targetInd, targetSizes(i), totalTargetSize
net.hint.targetInd = find(net.targetConnect);
net.hint.targetSizes = zeros(net.numTargets,1);
for i=1:net.numTargets
net.hint.targetSizes(i) = net.targets{net.hint.targetInd(i)}.size;
end
net.hint.totalTargetSize = sum(net.hint.targetSizes);
% CONNECT
% =======
% inputConnectFrom{i}, inputConnectTo{i}
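% inputConnectFrom{i} lists the inputs connected to layer i;
% inputConnectTo{i} lists the layers that input i connects to.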
net.hint.inputConnectFrom = cell(net.numLayers,1);
for i=1:net.numLayers
net.hint.inputConnectFrom{i} = find(net.inputConnect(i,:));
end
net.hint.inputConnectTo = cell(net.numInputs,1);
for i=1:net.numInputs
net.hint.inputConnectTo{i} = find(net.inputConnect(:,i)');
end
% layerConnectFrom{i}, layerConnectTo{i}
net.hint.layerConnectFrom = cell(net.numLayers,1);
net.hint.layerConnectTo = cell(net.numLayers,1);
for i=1:net.numLayers
net.hint.layerConnectFrom{i} = find(net.layerConnect(i,:));
net.hint.layerConnectTo{i} = find(net.layerConnect(:,i)');
end
% biasConnectTo, biasConnectFrom
net.hint.biasConnectFrom = cell(net.numLayers,1);
for i=1:net.numLayers
net.hint.biasConnectFrom{i} = find(net.biasConnect(i));
end
net.hint.biasConnectTo = find(net.biasConnect)';
% LAYER ORDERS
% ============
% simLayerOrder, bpLayerOrder
[net.hint.simLayerOrder,net.hint.zeroDelay] = simlayorder(net);
[net.hint.bpLayerOrder,net.hint.zeroDelay] = bplayorder(net);
% CHECK LAYERS HAVE WEIGHTS
% =========================
net.hint.noWeights = find(~any([net.inputConnect net.layerConnect],2));
% DELAYS
% ======
% layerDelays, layerConnectOZD, layerConnectWZD
net.hint.layerDelays = cell(net.numLayers,net.numLayers);
net.hint.layerConnectOZD = zeros(net.numLayers,net.numLayers);
net.hint.layerConnectWZD = zeros(net.numLayers,net.numLayers);
for i=1:net.numLayers
for j=net.hint.layerConnectFrom{i}
net.hint.layerDelays{i,j} = net.layerWeights{i,j}.delays;
net.hint.layerConnectOZD(i,j) = all(net.hint.layerDelays{i,j} == 0);
net.hint.layerConnectWZD(i,j) = any(net.hint.layerDelays{i,j} == 0);
end
end
net.hint.layerConnectOZD = net.hint.layerConnectOZD & net.hint.layerConnectWZD;
net.hint.layerConnectWZD = net.hint.layerConnectWZD & ~net.hint.layerConnectOZD;
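% After the adjustment above, layerConnectOZD(i,j) marks connections whose
% delays are all zero ("only zero delay"), while layerConnectWZD(i,j) marks
% connections that mix zero and nonzero delays ("with zero delay").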
% layerConnectToOZD, layerConnectToWZD
net.hint.layerConnectToOZD = net.hint.layerConnectTo;
net.hint.layerConnectToWZD = net.hint.layerConnectTo;
for i=1:net.numLayers
net.hint.layerConnectToOZD{i} = find(net.hint.layerConnectOZD(:,i)');
net.hint.layerConnectToWZD{i} = find(net.hint.layerConnectWZD(:,i)');
end
% FUNCTIONS
% =========
% inputWeightFcn, layerWeightFcn
% netInputFcn, dNetInputFcn, transferFcn, dTransferFcn
net.hint.inputWeightFcn = cell(net.numLayers,net.numInputs);
net.hint.dInputWeightFcn = net.hint.inputWeightFcn;
net.hint.layerWeightFcn = cell(net.numLayers,net.numLayers);
net.hint.dLayerWeightFcn = net.hint.layerWeightFcn;
net.hint.netInputFcn = cell(net.numLayers,1);
net.hint.dNetInputFcn = net.hint.netInputFcn;
net.hint.transferFcn = cell(net.numLayers,1);
net.hint.dTransferFcn = net.hint.transferFcn;
for i=1:net.numLayers
for j=net.hint.inputConnectFrom{i}
net.hint.inputWeightFcn{i,j} = net.inputWeights{i,j}.weightFcn;
net.hint.dInputWeightFcn{i,j} = feval(net.hint.inputWeightFcn{i,j},'deriv');
if ~length(net.hint.dInputWeightFcn{i,j})
net.hint.dInputWeightFcn{i,j} = 'dnullwf';
end
end
for j=net.hint.layerConnectFrom{i}
net.hint.layerWeightFcn{i,j} = net.layerWeights{i,j}.weightFcn;
net.hint.dLayerWeightFcn{i,j} = feval(net.hint.layerWeightFcn{i,j},'deriv');
if ~length(net.hint.dLayerWeightFcn{i,j})
net.hint.dLayerWeightFcn{i,j} = 'dnullwf';
end
end
net.hint.netInputFcn{i} = net.layers{i}.netInputFcn;
net.hint.dNetInputFcn{i} = feval(net.hint.netInputFcn{i},'deriv');
net.hint.transferFcn{i} = net.layers{i}.transferFcn;
net.hint.dTransferFcn{i} = feval(net.hint.transferFcn{i},'deriv');
if ~length(net.hint.dTransferFcn{i})
net.hint.dTransferFcn{i} = 'dnulltf';
end
end
% WEIGHT & BIAS LEARNING RULES
% ============================
% net.hint.needGradient
net.hint.needGradient = 0;
for i=1:net.numLayers
for j=find(net.inputConnect(i,:))
learnFcn = net.inputWeights{i,j}.learnFcn;
if length(learnFcn)
if feval(learnFcn,'needg')
net.hint.needGradient = 1;
break;
end
end
end
if (net.hint.needGradient), break, end
for j=find(net.layerConnect(i,:))
learnFcn = net.layerWeights{i,j}.learnFcn;
if length(learnFcn)
if feval(learnFcn,'needg')
net.hint.needGradient = 1;
break;
end
end
end
if (net.hint.needGradient), break, end
if net.biasConnect(i)
learnFcn = net.biases{i}.learnFcn;
if length(learnFcn)
if feval(learnFcn,'needg')
net.hint.needGradient = 1;
end
end
end
end
% WEIGHT & BIASES COLUMNS
% =======================
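% Each weight matrix has rows equal to the destination layer size and one
% column per source element per delay tap, i.e. source size times the
% number of delays.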
net.hint.inputWeightCols = zeros(net.numLayers,net.numInputs);
net.hint.layerWeightCols = zeros(net.numLayers,net.numLayers);
for i=1:net.numLayers
for j=find(net.inputConnect(i,:))
net.hint.inputWeightCols(i,j) = ...
net.inputs{j}.size * length(net.inputWeights{i,j}.delays);
end
for j=find(net.layerConnect(i,:))
net.hint.layerWeightCols(i,j) = ...
net.layers{j}.size * length(net.layerWeights{i,j}.delays);
end
end
% WEIGHT & BIASES LEARNING
% ========================
% inputLearn, layerLearn, biasLearn
net.hint.inputLearn = net.inputConnect;
net.hint.layerLearn = net.layerConnect;
net.hint.biasLearn = net.biasConnect;
for i=1:net.numLayers
for j=find(net.inputConnect(i,:))
net.hint.inputLearn(i,j) = net.inputWeights{i,j}.learn;
end
for j=find(net.layerConnect(i,:))
net.hint.layerLearn(i,j) = net.layerWeights{i,j}.learn;
end
if (net.biasConnect(i))
net.hint.biasLearn(i) = net.biases{i}.learn;
end
end
% inputLearnFrom, layerLearnFrom
net.hint.inputLearnFrom = cell(net.numLayers,1);
for i=1:net.numLayers
net.hint.inputLearnFrom{i} = find(net.hint.inputLearn(i,:));
end
net.hint.layerLearnFrom = cell(net.numLayers,1);
for i=1:net.numLayers
net.hint.layerLearnFrom{i} = find(net.hint.layerLearn(i,:));
end
% WEIGHT & BIAS INDICES INTO X VECTOR
% ===================================
net.hint.inputWeightInd = cell(net.numLayers,net.numInputs);
net.hint.layerWeightInd = cell(net.numLayers,net.numLayers);
net.hint.biasInd = cell(1,net.numLayers);
net.hint.xLen = 0;
for i=1:net.numLayers
for j=find(net.hint.inputLearn(i,:))
cols = net.hint.inputWeightCols(i,j);
len = net.layers{i}.size * cols;
net.hint.inputWeightInd{i,j} = [net.hint.xLen + (1:len)];
net.hint.xLen = net.hint.xLen + len;
end
for j=find(net.hint.layerLearn(i,:))
cols = net.hint.layerWeightCols(i,j);
len = net.layers{i}.size * cols;
net.hint.layerWeightInd{i,j} = [net.hint.xLen + (1:len)];
net.hint.xLen = net.hint.xLen + len;
end
if (net.hint.biasLearn(i))
len = net.layers{i}.size;
net.hint.biasInd{i} = [net.hint.xLen + (1:len)];
net.hint.xLen = net.hint.xLen + len;
end
end
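% Illustrative example: a layer of size 3 with a learning input weight of
% 2 columns contributes 3*2 = 6 consecutive positions to the x vector, so
% inputWeightInd{i,j} spans those 6 indices and xLen advances by 6.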
% ===========================================================
function [order,zeroDelay]=simlayorder(net)
%SIMLAYORDER Order to simulate layers in.
% INITIALIZATION
order = zeros(1,net.numLayers);
unordered = ones(1,net.numLayers);
% FIND ZERO-DELAY CONNECTIONS BETWEEN LAYERS
dependencies = zeros(net.numLayers,net.numLayers);
for i=1:net.numLayers
for j=find(net.layerConnect(i,:))
if any(net.layerWeights{i,j}.delays == 0)
dependencies(i,j) = 1;
end
end
end
% FIND LAYER ORDER
for k=1:net.numLayers
for i=find(unordered)
if ~any(dependencies(i,:))
dependencies(:,i) = 0;
order(k) = i;
unordered(i) = 0;
break;
end
end
end
% CHECK THAT ALL LAYERS WERE ORDERED
zeroDelay = any(unordered);
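% Illustrative example: if layer 2 receives a zero-delay connection from
% layer 1, and layer 3 from layer 2, the simulation order is [1 2 3]. If a
% zero-delay cycle exists (say 1->2 and 2->1, both with delay 0), some
% layers remain unordered and zeroDelay is returned as true (1).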
% ===========================================================
function [order,zeroDelay]=bplayorder(net)
%BPLAYORDER Order to backprop through layers.
% INITIALIZE
order = zeros(1,net.numLayers);
unordered = ones(1,net.numLayers);
% FIND ZERO-DELAY CONNECTIONS BETWEEN LAYERS
dependencies = zeros(net.numLayers,net.numLayers);
for i=1:net.numLayers
for j=find(net.layerConnect(i,:))
if any(net.layerWeights{i,j}.delays == 0)
dependencies(i,j) = 1;
end
end
end
% FIND LAYER ORDER
for k=1:net.numLayers
for i=find(unordered)
if ~any(dependencies(:,i))
dependencies(i,:) = 0;
order(k) = i;
unordered(i) = 0;
break;
end
end
end
% CHECK THAT ALL LAYERS WERE ORDERED
zeroDelay = any(unordered);
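% Note: this is effectively the reverse of the simulation order. A layer is
% ordered once no remaining layer receives a zero-delay connection from it,
% so error signals can be backpropagated through zero-delay connections in
% sequence.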
% ===========================================================