diff options
author | Marcin Chrzanowski <m@m-chrzan.xyz> | 2021-05-23 13:18:52 +0200 |
---|---|---|
committer | Marcin Chrzanowski <m@m-chrzan.xyz> | 2021-05-23 13:18:52 +0200 |
commit | 49f264647b6073c304936a95fea1704a8c0965dc (patch) | |
tree | 44b19e5f881076a6b15fa1a755d2dc1e6d9307e9 /model/attention.py | |
parent | 8ff8739b236a00169339b0b78e1f39357fdfff17 (diff) |
Add model skeleton
Diffstat (limited to 'model/attention.py')
-rw-r--r-- | model/attention.py | 15 |
1 file changed, 15 insertions, 0 deletions
from torch import nn

class Attention(nn.Module):
    """Multi-head self-attention over a (seqlen, batch, hidden_dim) sequence.

    Wraps ``nn.MultiheadAttention`` and returns both the attended output and
    the attention weights (the latter for visualization), fulfilling the
    contract the original skeleton's TODO comments described.
    """

    def __init__(self, hidden_dim, num_heads):
        """
        Args:
            hidden_dim: embedding size of each sequence element; must be
                divisible by ``num_heads``.
            num_heads: number of parallel attention heads.
        """
        super().__init__()
        # batch_first defaults to False, which matches the documented
        # (seqlen, batch, hiddendim) input layout of forward().
        self.attention = nn.MultiheadAttention(hidden_dim, num_heads)

    def forward(self, x):
        """Apply self-attention to ``x``.

        Args:
            x: tensor of shape (seqlen, batch, hiddendim).

        Returns:
            Tuple of the attended output, shape (seqlen, batch, hiddendim),
            and the attention weights averaged over heads, shape
            (batch, seqlen, seqlen) — kept for visualization.
        """
        # Self-attention: query, key and value are all the input sequence.
        result, att_weights = self.attention(x, x, x)
        return result, att_weights