Skip to content

Commit

Permalink
Move dropout to be faithful to the paper
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed Jan 4, 2021
1 parent 48954bb commit 86b8316
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
4 changes: 2 additions & 2 deletions conformer/conformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,11 +118,11 @@ def forward(self, x, context = None, mask = None, context_mask = None):
dots.masked_fill_(~mask, mask_value)

attn = dots.softmax(dim = -1)
attn = self.dropout(attn)

out = einsum('b h i j, b h j d -> b h i d', attn, v)
out = rearrange(out, 'b h n d -> b n (h d)')
return self.to_out(out)
out = self.to_out(out)
return self.dropout(out)

class FeedForward(nn.Module):
def __init__(
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
setup(
name = 'conformer',
packages = find_packages(),
version = '0.2.2',
version = '0.2.3',
license='MIT',
description = 'The convolutional module from the Conformer paper',
author = 'Phil Wang',
Expand Down

0 comments on commit 86b8316

Please sign in to comment.