From 98a5247a7de2ef08decc0f47f0880a920a064980 Mon Sep 17 00:00:00 2001
From: Ming Ding <dm_thu@qq.com>
Date: Thu, 25 Nov 2021 16:41:00 +0000
Subject: [PATCH] fix misplaced list-comprehension brackets in encoder_decoder_model

---
 SwissArmyTransformer/model/encoder_decoder_model.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/SwissArmyTransformer/model/encoder_decoder_model.py b/SwissArmyTransformer/model/encoder_decoder_model.py
index dbc7cbc..e702453 100644
--- a/SwissArmyTransformer/model/encoder_decoder_model.py
+++ b/SwissArmyTransformer/model/encoder_decoder_model.py
@@ -29,11 +29,11 @@ class CrossAttentionMixin(BaseMixin):
                 attention_dropout_prob, output_dropout_prob,
                 init_method, enc_hidden_size=enc_hidden_size, inner_hidden_size=inner_hidden_size, 
                 output_layer_init_method=output_layer_init_method
-            )] for layer_id in range(num_layers)
+            ) for layer_id in range(num_layers)]
         ) # Just copy args
         self.cross_lns = torch.nn.ModuleList(
-            [LayerNorm(hidden_size, 1e-5)]
-            for layer_id in range(num_layers)
+            [LayerNorm(hidden_size, 1e-5)
+            for layer_id in range(num_layers)]
         )
         
 
-- 
GitLab
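
Note (not part of the patch): a minimal sketch of why the bracket placement matters. With the pre-fix bracketing, torch.nn.ModuleList receives a generator that yields one-element Python lists rather than Modules, so construction raises a TypeError; the fixed list comprehension yields the Modules themselves. The names below (nn.LayerNorm, hidden_size, num_layers) are illustrative stand-ins, using torch.nn.LayerNorm instead of the model's own LayerNorm class.

    import torch
    from torch import nn

    num_layers, hidden_size = 3, 16  # illustrative values

    # Pre-fix bracketing: a generator of one-element *lists*, so
    # nn.ModuleList sees lists instead of Modules and rejects them.
    try:
        nn.ModuleList(
            [nn.LayerNorm(hidden_size, eps=1e-5)]
            for layer_id in range(num_layers)
        )
    except TypeError as err:
        print("pre-fix form fails:", err)

    # Fixed bracketing: a list comprehension of Modules, one LayerNorm per layer.
    cross_lns = nn.ModuleList(
        [nn.LayerNorm(hidden_size, eps=1e-5) for layer_id in range(num_layers)]
    )
    print(len(cross_lns))  # 3

The same bracket move is applied to the cross-attention ModuleList in the first hunk of the diff above.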