subsasgn.m
for i=find(net.inputConnect(:,j)')
rows = net.layers{i}.size;
cols = net.inputs{j}.size * length(net.inputWeights{i,j}.delays);
net.inputWeights{i,j}.size = [rows cols];
net.IW{i,j} = resizem(net.IW{i,j},rows,cols);
end
% ===========================================================
% LAYER PROPERTIES
% ===========================================================
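% The subfunctions in this section are reached through subscripted
% assignment on a network object. A minimal usage sketch (illustrative
% only, not part of this file; it assumes a network created with NETWORK):
%
%   net = network;                        % empty network object
%   net.numLayers = 1;
%   net.layers{1}.size = 10;              % handled by setLayerSize
%   net.layers{1}.transferFcn = 'tansig'; % handled by setLayerTransferFcn
%   net.layers{1}.netInputFcn = 'netsum'; % handled by setLayerNetInputFcn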
function [net,err] = setLayerDimensions(net,i,newDimensions)
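% Sets layers{i}.dimensions to an integer row vector and resizes the
% layer to prod(newDimensions) neurons.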
% Check value
err = '';
if ~isa(newDimensions,'double')
err = sprintf('"layers{%g}.dimensions" must be an integer row vector.',i);
return
end
if size(newDimensions,1) ~= 1
err = sprintf('"layers{%g}.dimensions" must be an integer row vector.',i);
return
end
if any(newDimensions ~= floor(newDimensions))
err = sprintf('"layers{%g}.dimensions" must be an integer row vector.',i);
return
end
% Change layer size
[net,err] = setLayerSize(net,i,prod(newDimensions));
% Change layer dimensions
net.layers{i}.dimensions = newDimensions;
% ===========================================================
function [net,err] = setLayerDistanceFcn(net,i,distanceFcn)
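% Sets layers{i}.distanceFcn to '' or the name of an existing distance function.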
% Check value
err = '';
if ~isstr(distanceFcn)
err = sprintf('"layers{%g}.distanceFcn" must be the name of a distance function or ''''.',i);
return
end
if length(distanceFcn) ~= 0
if ~exist(distanceFcn)
err = sprintf('"layers{%g}.distanceFcn" cannot be set to non-existing function "%s".',i,distanceFcn);
return
end
end
% Change distance function
net.layers{i}.distanceFcn = distanceFcn;
% ===========================================================
function [net,err] = setLayerInitFcn(net,i,initFcn)
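% Sets layers{i}.initFcn to '' or the name of an existing layer initialization function.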
% Check value
err = '';
if ~isstr(initFcn)
err = sprintf('"layers{%g}.initFcn" must be '''' or the name of a bias initialization function.',i);
return
end
if length(initFcn) & ~exist(initFcn)
err = sprintf('"layers{%g}.initFcn" cannot be set to non-existing function "%s".',i,initFcn);
return
end
% Change initialization function
net.layers{i}.initFcn = initFcn;
% ===========================================================
function [net,err] = setLayerSize(net,ind,newSize)
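% Sets layers{ind}.size and propagates the new size to the layer's
% dimensions, bias, the weight matrices into and out of the layer, and
% the connected output and target sizes.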
% Check value
err = '';
if ~isposint(newSize)
err = sprintf('"layers{%g}.size" must be a positive integer.',ind);
return
end
% Change layer size
net.layers{ind}.size = newSize;
% Change layer dimensions
net.layers{ind}.dimensions = newSize;
% Change bias size
if net.biasConnect(ind)
net.biases{ind}.size = newSize;
net.b{ind} = resizem(net.b{ind},newSize,1);
end
% Resize input weights coming into this layer (row count changes)
rows = newSize;
for j=find(net.inputConnect(ind,:))
cols = net.inputs{j}.size * length(net.inputWeights{ind,j}.delays);
net.inputWeights{ind,j}.size = [rows cols];
net.IW{ind,j} = resizem(net.IW{ind,j},rows,cols);
end
% Resize layer weights coming into this layer (row count changes)
rows = newSize;
for j=find(net.layerConnect(ind,:))
cols = net.layers{j}.size * length(net.layerWeights{ind,j}.delays);
net.layerWeights{ind,j}.size = [rows cols];
net.LW{ind,j} = resizem(net.LW{ind,j},rows,cols);
end
% Resize layer weights going out of this layer (column count changes)
for i=find(net.layerConnect(:,ind)')
rows = net.layers{i}.size;
cols = newSize * length(net.layerWeights{i,ind}.delays);
net.layerWeights{i,ind}.size = [rows cols];
net.LW{i,ind} = resizem(net.LW{i,ind},rows,cols);
end
% Change output size
if net.outputConnect(ind)
net.outputs{ind}.size = newSize;
end
% Change target size
if net.targetConnect(ind)
net.targets{ind}.size = newSize;
end
% ===========================================================
function [net,err] = setLayerTopologyFcn(net,i,topologyFcn)
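% Sets layers{i}.topologyFcn to the name of an existing topology function.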
% Check value
err = '';
if ~isstr(topologyFcn)
err = sprintf('"layers{%g}.topologyFcn" must be the name of a topology function.',i);
return
end
if ~exist(topologyFcn)
err = sprintf('"layers{%g}.topologyFcn" cannot be set to non-existing function "%s".',i,topologyFcn);
return
end
% Change topology function
net.layers{i}.topologyFcn = topologyFcn;
% ===========================================================
function [net,err] = setLayerTransferFcn(net,i,transferFcn)
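% Sets layers{i}.transferFcn to the name of an existing transfer function.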
% Check value
err = '';
if ~isstr(transferFcn)
err = sprintf('"layers{%g}.transferFcn" must be the name of a transfer function.',i);
return
end
if ~exist(transferFcn)
err = sprintf('"layers{%g}.transferFcn" cannot be set to non-existing function "%s".',i,transferFcn);
return
end
% Change transfer function
net.layers{i}.transferFcn = transferFcn;
% ===========================================================
function [net,err] = setLayerNetInputFcn(net,i,netInputFcn)
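% Sets layers{i}.netInputFcn to the name of an existing net input function.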
% Check value
err = '';
if ~isstr(netInputFcn)
err = sprintf('"layers{%g}.netInputFcn" must be the name of a net input function.',i);
return
end
if ~exist(netInputFcn)
err = sprintf('"layers{%g}.netInputFcn" cannot be set to non-existing function "%s".',i,netInputFcn);
return
end
% Change net input function
net.layers{i}.netInputFcn = netInputFcn;
% ===========================================================
% OUTPUT PROPERTIES
% ===========================================================
function [net,err] = setOutputTarget(net,i,target)
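% Sets outputs{i}.target to 0 or 1.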
% Check value
err = '';
if ~isboolmat(target,1,1)
err = sprintf('"outputs{%g}.target" must be 0 or 1.',i);
return
end
% Change target flag
net.outputs{i}.target = target;
% ===========================================================
% BIAS PROPERTIES
% ===========================================================
function [net,err] = setBiasInitFcn(net,i,initFcn)
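% Sets biases{i}.initFcn to '' or the name of an existing bias initialization function.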
% Check value
err = '';
if ~isstr(initFcn)
err = sprintf('"biases{%g}.initFcn" must be '''' or the name of a bias initialization function.',i);
return
end
if length(initFcn) & ~exist(initFcn)
err = sprintf('"biases{%g}.initFcn" cannot be set to non-existing function "%s".',i,initFcn);
return
end
% Change init function
net.biases{i}.initFcn = initFcn;
% ===========================================================
function [net,err] = setBiasLearn(net,i,learn)
% Check value
err = '';
if ~isbool(learn,1,1)
err = sprintf('"biases{%g}.learn" must be 0 or 1.',i);
return
end
% Change learn flag
net.biases{i}.learn = learn;
% ===========================================================
function [net,err] = setBiasLearnFcn(net,i,learnFcn)
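% Sets biases{i}.learnFcn to '' or the name of an existing bias learning
% function, and resets biases{i}.learnParam to that function's defaults
% (or [] when the name is '').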
% Check value
err = '';
if ~isstr(learnFcn)
err = sprintf('"biases{%g}.learnFcn" must be '''' or the name of a bias learning function.',i);
return
end
if length(learnFcn) & ~exist(learnFcn)
err = sprintf('"biases{%g}.learnFcn" cannot be set to non-existing function "%s".',i,learnFcn);
return
end
% Change learn function
net.biases{i}.learnFcn = learnFcn;
% Default learn parameters
if length(learnFcn)
net.biases{i}.learnParam = feval(learnFcn,'pdefaults');
else
net.biases{i}.learnParam = [];
end
% ===========================================================
function [net,err] = setBiasLearnParam(net,i,learnParam)
% Check value
err = '';
% Change learn parameters
net.biases{i}.learnParam = learnParam;
% ===========================================================
% INPUT WEIGHT PROPERTIES
% ===========================================================
function [net,err] = setInputWeightDelays(net,i,j,delays)
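% Sets inputWeights{i,j}.delays and resizes IW{i,j} so its column count
% matches inputs{j}.size times the new number of delays.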
% Check value
err = '';
if ~isdelay(delays)
err = sprintf('"inputWeights{%g,%g}.delays" must be a row vector of increasing integer values.',i,j);
return
end
% Change delays
net.inputWeights{i,j}.delays = delays;
% Resize weight matrix for new delay count
rows = net.layers{i}.size;
cols = net.inputs{j}.size * length(net.inputWeights{i,j}.delays);
net.inputWeights{i,j}.size = [rows cols];
net.IW{i,j} = resizem(net.IW{i,j},rows,cols);
% ===========================================================
function [net,err] = setInputWeightInitFcn(net,i,j,initFcn)
% Check value
err = '';
if ~isstr(initFcn)
err = sprintf('"inputWeights{%g,%g}.initFcn" must be '''' or the name of a weight initialization function.',i,j);
return
end
if length(initFcn) & ~exist(initFcn)
err = sprintf('"inputWeights{%g,%g}.initFcn" cannot be set to non-existing function "%s".',i,j,initFcn);
return
end
% Change init function
net.inputWeights{i,j}.initFcn = initFcn;
% ===========================================================
function [net,err] = setInputWeightLearn(net,i,j,learn)
% Check value
err = '';
if ~isbool(learn,1,1)
err = sprintf('"inputWeights{%g,%g}.learn" must be 0 or 1.',i,j);
return
end
% Change learn flag
net.inputWeights{i,j}.learn = learn;
% ===========================================================
function [net,err] = setInputWeightLearnFcn(net,i,j,learnFcn)
% Check value
err = '';
if ~isstr(learnFcn)
err = sprintf('"inputWeights{%g,%g}.learnFcn" must be '''' or the name of a weight learning function.',i,j);
return
end
if length(learnFcn) & ~exist(learnFcn)
err = sprintf('"inputWeights{%g,%g}.learnFcn" cannot be set to non-existing function "%s".',i,j,learnFcn);
return
end
% Change learn function
net.inputWeights{i,j}.learnFcn = learnFcn;
% Default learn parameters
if length(learnFcn)
net.inputWeights{i,j}.learnParam = feval(learnFcn,'pdefaults');
else
net.inputWeights{i,j}.learnParam = [];
end
% ===========================================================
function [net,err] = setInputWeightLearnParam(net,i,j,learnParam)
% Check value
err = '';
% Change learn parameters
net.inputWeights{i,j}.learnParam = learnParam;
% ===========================================================
function [net,err] = setInputWeightWeightFcn(net,i,j,weightFcn)
% Check value
err = '';
if ~isstr(weightFcn)
err = sprintf('"inputWeights{%g,%g}.weightFcn" must be the name of a weight function.',i,j);
return
end
if ~exist(weightFcn)
err = sprintf('"inputWeights{%g,%g}.weightFcn" cannot be set to non-existing function "%s".',i,j,weightFcn);
return
end
% Change weight function
net.inputWeights{i,j}.weightFcn = weightFcn;
% ===========================================================
% LAYER WEIGHT PROPERTIES
% ===========================================================
function [net,err] = setLayerWeightDelays(net,i,j,delays)
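% Sets layerWeights{i,j}.delays and resizes LW{i,j} so its column count
% matches layers{j}.size times the new number of delays.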
% Check value
err = '';
if ~isdelay(delays)
err = sprintf('"layerWeights{%g,%g}.delays" must be a row vector of increasing integer values.',i,j);
return
end
% Change delays
net.layerWeights{i,j}.delays = delays;
% Resize weight matrix for new delay count
rows = net.layers{i}.size;
cols = net.layers{j}.size * length(net.layerWeights{i,j}.delays);
net.layerWeights{i,j}.size = [rows cols];
net.LW{i,j} = resizem(net.LW{i,j},rows,cols);
% ===========================================================
function [net,err] = setLayerWeightInitFcn(net,i,j,initFcn)
% Check value
err = '';
if ~isstr(initFcn)
err = sprintf('"layerWeights{%g,%g}.initFcn" must be '''' or the name of a weight initialization function.',i,j);
return
end
if length(initFcn) & ~exist(initFcn)
err = sprintf('"layerWeights{%g,%g}.initFcn" cannot be set to non-existing function "%s".',i,j,initFcn);
return
end
% Change init function
net.layerWeights{i,j}.initFcn = initFcn;
% ===========================================================
function [net,err] = setLayerWeightLearn(net,i,j,learn)
% Check value
err = '';
if ~isbool(learn,1,1)
err = sprintf('"layerWeights{%g,%g}.learn" must be 0 or 1.',i,j);
return
end
% Change learn flag
net.layerWeights{i,j}.learn = learn;