Transformer
NLP attention mechanisms
Context attention for LSTM-based models
Encoder–decoder architecture
class Attention(nn.Module):
def __init__(self, method, hidden_size):
super(Attention, self).__init__()
self.method = method
self.hidden_size = hidden_size
if self.method == 'general':
self.attention = nn.Linear(self.hidden_size, self.hidden_size)
elif self.method …