Fix attention order in unit tests (fixes #195) (#197)

Author: Myle Ott, 2018-06-25 12:16:10 -04:00 (committed by GitHub)
Parent: bd4db8fbd0
Commit: 74efc21403


@@ -154,7 +154,7 @@ class TestIncrementalDecoder(FairseqIncrementalDecoder):
             probs[:, i, self.dictionary.eos()] = 1.0
         # random attention
-        attn = torch.rand(bbsz, src_len, tgt_len)
+        attn = torch.rand(bbsz, tgt_len, src_len)
         return Variable(probs), Variable(attn)
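
For reference, the corrected layout puts one attention distribution over the source positions for each (beam*batch, target step) pair. The following is a minimal sketch, not part of the patch: it reuses the bbsz/tgt_len/src_len names from the test, but the concrete sizes and the softmax normalization are illustrative assumptions (the unit test itself keeps the raw torch.rand values).

    import torch

    bbsz, tgt_len, src_len = 4, 5, 7  # hypothetical sizes for illustration

    # Corrected layout: attn[b, t, s] is the weight that target step t
    # places on source position s, for hypothesis b.
    attn = torch.rand(bbsz, tgt_len, src_len)

    # A real attention tensor would typically be normalized over the
    # source dimension; this step is an assumption, not part of the test.
    attn = torch.softmax(attn, dim=-1)
    assert attn.shape == (bbsz, tgt_len, src_len)

With this ordering, attn[:, t] gives the per-source weights for target step t, which is the shape consumers of the test decoder's output would presumably expect.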