rm positionwise_feed_forward.py
Mddct committed Oct 23, 2023
1 parent e987b00 · commit 7ed212b
Showing 2 changed files with 31 additions and 34 deletions.
33 changes: 31 additions & 2 deletions wenet/paraformer/ali_paraformer/model.py
@@ -9,8 +9,6 @@
MultiHeadedAttentionSANM
)
from wenet.paraformer.ali_paraformer.lfr import LFR
-from wenet.paraformer.ali_paraformer.positionwise_feed_forward import \
-    PositionwiseFeedForwardDecoderSANM
from wenet.transformer.search import DecodeResult
from wenet.transformer.encoder import BaseEncoder
from wenet.transformer.decoder import TransformerDecoder
@@ -20,6 +18,37 @@
from wenet.utils.mask import make_non_pad_mask


+class PositionwiseFeedForwardDecoderSANM(torch.nn.Module):
+    """Positionwise feed forward layer for the SANM decoder.
+
+    Args:
+        idim (int): Input dimension.
+        hidden_units (int): The number of hidden units.
+        dropout_rate (float): Dropout rate.
+        adim (int, optional): Output dimension; defaults to ``idim``.
+        activation (torch.nn.Module): Activation function.
+    """
+
+    def __init__(self,
+                 idim,
+                 hidden_units,
+                 dropout_rate,
+                 adim=None,
+                 activation=torch.nn.ReLU()):
+        """Construct a PositionwiseFeedForwardDecoderSANM object."""
+        super(PositionwiseFeedForwardDecoderSANM, self).__init__()
+        self.w_1 = torch.nn.Linear(idim, hidden_units)
+        self.w_2 = torch.nn.Linear(hidden_units,
+                                   idim if adim is None else adim,
+                                   bias=False)
+        self.dropout = torch.nn.Dropout(dropout_rate)
+        self.activation = activation
+        self.norm = torch.nn.LayerNorm(hidden_units)
+
+    def forward(self, x):
+        """Forward function: w_2(norm(dropout(activation(w_1(x)))))."""
+        return self.w_2(self.norm(self.dropout(self.activation(self.w_1(x)))))


class SinusoidalPositionEncoder(torch.nn.Module):
"""https://github.com/alibaba-damo-academy/FunASR/blob/main/funasr/modules/embedding.py#L387
"""
32 changes: 0 additions & 32 deletions wenet/paraformer/ali_paraformer/positionwise_feed_forward.py

This file was deleted.
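
For reference, the relocated class can be exercised on its own. Below is a minimal usage sketch; the dimensions are hypothetical, chosen only for illustration, and it assumes the class is importable from its new home in wenet/paraformer/ali_paraformer/model.py after this commit:

import torch

from wenet.paraformer.ali_paraformer.model import \
    PositionwiseFeedForwardDecoderSANM

# Hypothetical dimensions for illustration only.
ffn = PositionwiseFeedForwardDecoderSANM(idim=256,
                                         hidden_units=2048,
                                         dropout_rate=0.1)
x = torch.randn(2, 10, 256)   # (batch, time, idim)
y = ffn(x)                    # w_2(norm(dropout(relu(w_1(x)))))
assert y.shape == x.shape     # adim is None, so output dim == idim

Compared with a standard position-wise feed-forward block, this variant applies LayerNorm before the bias-free w_2 projection, and adim lets the output dimension differ from the input.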
