I have written a model in Modelica that trains a feed-forward two-layer neural network for my master's thesis. The model takes a vector with six elements (u[nin]) and produces an output vector with two elements (y[nout]). When I translate the model in Dymola, I get an error message saying that the dimensions of the terms contained in one particular line of code must be equal. As soon as I remove that line, the model translates successfully. I have put a lot of effort into trying to resolve this error, but in vain! Any help with this problem would be greatly appreciated.
The code is given below (note that the line causing the translation error is marked with // in the code; a size breakdown of the factors in that line follows the code).
model NN_block
  Modelica.Blocks.Interfaces.RealInput u[nin] "Connector of Real input signals"
    annotation (Placement(transformation(extent={{-140,-20},{-100,20}},
        rotation=0)));
  Modelica.Blocks.Interfaces.RealOutput y[nout]
    annotation (Placement(transformation(extent={{100,-10},{120,10}})));
  parameter Integer nin2=1;
  parameter Integer nin=6;
  parameter Integer nout=2;
  Real wji[10, 6] "Hidden-layer weights (10 neurons, 6 inputs)";
  Real delta_wij[6, 10] "Hidden-layer weight update, stored transposed";
  Real bj[10, 1] "Hidden-layer biases";
  Real delta_bjT[1, 10] "Hidden-layer bias update, stored transposed";
  Real wkj[2, 10] "Output-layer weights (2 outputs, 10 hidden neurons)";
  Real delta_wjk[10, 2] "Output-layer weight update, stored transposed";
  Real bk[2, 1] "Output-layer biases";
  Real delta_bkT[1, 2] "Output-layer bias update, stored transposed";
  Real E "Squared-error measure";
  Real ek[1, 2] "Output errors";
  Real yj[10, 1] "Hidden-layer outputs";
  Modelica.Blocks.Interfaces.BooleanOutput Input_trigger
    annotation (Placement(transformation(extent={{100,-46},{120,-26}}),
        iconTransformation(extent={{100,-46},{120,-26}})));
  Modelica.Blocks.Interfaces.RealInput eTau1 annotation (Placement(
        transformation(extent={{-182,36},{-142,76}}), iconTransformation(
        extent={{10,-10},{-10,10}},
        rotation=90,
        origin={-44,90})));
  Modelica.Blocks.Interfaces.RealInput eTau2 annotation (Placement(
        transformation(extent={{-148,46},{-108,86}}), iconTransformation(
        extent={{10,-10},{-10,10}},
        rotation=90,
        origin={50,90})));
  annotation (Placement(transformation(extent={{-10,-10},{10,10}},
        rotation=-90,
        origin={-50,90}), iconTransformation(
        extent={{-10,-10},{10,10}},
        rotation=-90,
        origin={0,90})));
algorithm
  E := 1;
  // initial weights and biases, and reset of the stored update terms
  wji := 0.5*ones(10, 6);
  bj := 0.25*ones(10, 1);
  wkj := 0.75*ones(2, 10);
  bk := 0.6*ones(2, 1);
  delta_wij := zeros(6, 10);
  delta_bjT := zeros(1, 10);
  delta_wjk := zeros(10, 2);
  delta_bkT := zeros(1, 2);
  while E > 0.01 loop
    Input_trigger := true;
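    // forward pass: LogSig hidden layer (wji*u + bj), then linear output layer (wkj*yj + bk)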
    y := wkj*NeuralNetwork.Utilities.LogSig(wji*u + bj[:, 1]) + bk[:, 1];
    yj[:, 1] := NeuralNetwork.Utilities.LogSig(wji*u + bj[:, 1]);
    ek := [eTau1, eTau2];
    E := 0.5*(eTau1^2 + eTau2^2);
    if E > 0.01 then
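      // update terms: 0.01 times the current correction plus 0.9 times the previous update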
      // the next line is the one that triggers the "dimensions must be equal" error:
      // delta_wij := 0.01*u*transpose(yj)*(ones(10, 1) - yj)*ek*wkj + 0.9*delta_wij;
      delta_bjT := 0.01*transpose(yj)*(ones(10, 1) - yj)*ek*wkj + 0.9*delta_bjT;
      delta_wjk := 0.01*yj*ek + 0.9*delta_wjk;
      delta_bkT := 0.01*ek + 0.9*delta_bkT;
      wji := wji + transpose(delta_wij);
      bj := bj + transpose(delta_bjT);
      wkj := wkj + transpose(delta_wjk);
      bk := bk + transpose(delta_bkT);
    else
      break;
    end if;
  end while;
  annotation (Placement(transformation(extent={{-140,44},{-100,84}})),
    uses(Modelica(version="3.2")),
    Icon(graphics={Rectangle(
          extent={{-100,-100},{100,80}},
          lineColor={0,0,255},
          lineThickness=1), Text(
          extent={{-68,20},{78,-16}},
          lineColor={0,0,255},
          lineThickness=1,
          textString="Two-Layer Neural Network")}),
    Diagram(graphics));
end NN_block;
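
For reference, here is my own size bookkeeping of the factors in the flagged line, written as a separate throw-away model (DimensionCheck is just a name I made up for this sketch; the sizes are the ones declared in NN_block, and the comments are my reading of the Modelica array rules, not Dymola output):

model DimensionCheck "Throw-away sketch: sizes of the factors in the flagged line"
  Real u[6] = ones(6) "same size as the input vector of NN_block";
  Real yj[10, 1] = ones(10, 1);
  Real ek[1, 2] = ones(1, 2);
  Real wkj[2, 10] = ones(2, 10);
  // these partial products translate without complaint:
  Real a[1, 1] = transpose(yj)*(ones(10, 1) - yj);  // [1,10]*[10,1] -> [1,1]
  Real b[1, 10] = a*ek*wkj;                         // [1,1]*[1,2]*[2,10] -> [1,10]
  // the flagged line additionally multiplies u from the left, i.e. u*transpose(yj)*...;
  // u is a vector of size 6 while transpose(yj) has only 1 row, which is the point
  // where the size check appears to fail.
end DimensionCheck;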