class ConvolutionalNetwork(nn.Module):
    def __init__(self, in_features, trial):
        super().__init__()
        self.in_features = in_features
        # we optimize the number of layers, hidden units and dropout ratio in each layer.
        n_layers = trial.suggest_int("n_layers", 1, 5)
        kernel_size = trial.suggest_int("kernel_size", 2, 7)  # one kernel size shared by all conv layers
        layers = []
        in_channels = 1  # the input is reshaped to (batch, 1, in_features) before the conv stack
        for i in range(n_layers):
            out_channels = trial.suggest_int("n_units_l{}".format(i), 16, 160, step=2)
            p = trial.suggest_uniform("dropout_l{}".format(i), 0.0, 1.0)
            layers.append(nn.Conv1d(in_channels, out_channels, kernel_size, 1))
            layers.append(nn.RReLU())
            layers.append(nn.BatchNorm1d(out_channels))
            layers.append(nn.Dropout(p))
            in_channels = out_channels
        layers.append(nn.Conv1d(in_channels, 16, kernel_size, 1))
        layers.append(nn.RReLU())
        self.conv_stack = nn.Sequential(*layers)
As you can see above, I have done some Optuna tuning of the parameters, including tuning the number of layers.
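For context, here is a minimal sketch of how the trial argument above would typically be supplied by an Optuna study. Everything specific in it (the input width of 1024, the trial count, and the placeholder return value standing in for a real validation metric) is an assumption for illustration, not part of the original code:

import optuna

def objective(trial):
    # Each suggest_* call inside __init__ registers a tuned parameter on this trial.
    model = ConvolutionalNetwork(in_features=1024, trial=trial)  # 1024 is a placeholder width
    # ... train `model` and compute a validation metric here ...
    return 0.0  # placeholder; a real objective would return that metric

study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=5)
print(study.best_params)  # e.g. n_layers, kernel_size, n_units_l0, dropout_l0, ...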
def forward(self, x):
    # forward pass of the original fixed two-conv-layer network
    # (conv1, conv2, dp, fc3 and n_conv are defined in that network's __init__, not shown here)
    # shape x for the conv1d op: (batch, in_features) -> (batch, 1, in_features)
    x = x.view(-1, 1, self.in_features)
    x = self.conv1(x)
    x = F.rrelu(x)
    x = F.max_pool1d(x, 64, 64)
    x = self.conv2(x)
    x = F.rrelu(x)
    x = F.max_pool1d(x, 64, 64)
    # flatten for the fully connected head
    x = x.view(-1, self.n_conv)
    x = self.dp(x)
    x = self.fc3(x)
    x = F.log_softmax(x, dim=1)
    return x
I now need to do the same for the forward function above. I wrote the pseudocode below, but it does not run; please advise how to fix it. The main problem is merging the for loop into the forward function (one possible restructuring is sketched after the pseudocode).
def forward(self, x):
    # shape x for conv 1d op
    x = x.view(-1, 1, self.in_features)
    for i in range(n_layers):
        layers.append(self.conv1(x))
        layers.append(F.rrelu(x))
        layers.append(F.max_pool1d(x, 64, 64))
    x = x.view(-1, self.n_conv)
    x = self.dp(x)
    x = self.fc3(x)
    # x = F.sigmoid(x)
    x = F.log_softmax(x, dim=1)
    return x
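As a possible restructuring (a sketch under my own assumptions, not necessarily the intended design): since __init__ already collects the trial-suggested layers into an nn.Sequential, forward does not need its own for loop at all; it can call that module directly and then flatten for the classifier head. The subclass name, the n_classes argument, the adaptive pooling and the fc layer below are all assumptions standing in for the original dp/fc3 head:

import torch.nn as nn
import torch.nn.functional as F

class ConvolutionalNetworkSketch(ConvolutionalNetwork):
    def __init__(self, in_features, n_classes, trial):
        super().__init__(in_features, trial)    # builds self.conv_stack from the trial
        # Adaptive pooling (an assumption) yields a fixed-width feature map no matter
        # how many conv layers the trial picked, so the linear head always fits.
        self.pool = nn.AdaptiveMaxPool1d(4)
        self.fc = nn.Linear(16 * 4, n_classes)  # 16 channels come from the last conv layer

    def forward(self, x):
        x = x.view(-1, 1, self.in_features)
        x = self.conv_stack(x)     # the per-layer "for loop" already lives inside this module
        x = self.pool(x)
        x = x.view(x.size(0), -1)  # flatten to (batch, 16 * 4)
        x = self.fc(x)
        return F.log_softmax(x, dim=1)

If per-layer control really is needed inside forward (for example to interleave pooling between conv blocks), the layers can instead be stored in an nn.ModuleList in __init__ and iterated with a for loop in forward; registering them in a ModuleList rather than a plain Python list is what makes their parameters visible to the optimizer.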