Skip to content

Commit 73b3369

Browse files
committed
Bug fix: the second lila layer was reusing conv_out1 as its input; it now correctly takes conv_out2 in all four branches (tanh/relu × same_weights).
1 parent 30cd283 commit 73b3369

File tree

1 file changed

+4
-4
lines changed

1 file changed

+4
-4
lines changed

joeynmt/encoders.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -299,14 +299,14 @@ def forward(self, embed_src: Tensor, src_length: Tensor, mask: Tensor, \
299299

300300
if not self.same_weights:
301301
if self.activation == "tanh":
302-
lila_out4 = torch.tanh(self.lila4(conv_out1))
302+
lila_out4 = torch.tanh(self.lila4(conv_out2))
303303
else:
304-
lila_out4 = torch.relu(self.lila4(conv_out1))
304+
lila_out4 = torch.relu(self.lila4(conv_out2))
305305
else:
306306
if self.activation == "tanh":
307-
lila_out4 = torch.tanh(self.lila2(conv_out1))
307+
lila_out4 = torch.tanh(self.lila2(conv_out2))
308308
else:
309-
lila_out4 = torch.relu(self.lila2(conv_out1))
309+
lila_out4 = torch.relu(self.lila2(conv_out2))
310310

311311
# apply dropout to the rnn input
312312
lila_do = self.rnn_input_dropout(lila_out4)

0 commit comments

Comments (0)