m-chrzan.xyz
aboutsummaryrefslogtreecommitdiff
path: root/model/attention.py
diff options
context:
space:
mode:
Diffstat (limited to 'model/attention.py')
-rw-r--r--model/attention.py15
1 file changed, 15 insertions, 0 deletions
diff --git a/model/attention.py b/model/attention.py
new file mode 100644
index 0000000..d82ca17
--- /dev/null
+++ b/model/attention.py
@@ -0,0 +1,15 @@
+from torch import nn
+
class Attention(nn.Module):
    """Multi-head self-attention over a sequence.

    Wraps ``nn.MultiheadAttention`` so the module both transforms the
    sequence and exposes the attention weights for visualization.

    Args:
        hidden_dim: embedding dimension of the input (must be divisible
            by ``num_heads``, as required by ``nn.MultiheadAttention``).
        num_heads: number of parallel attention heads.
    """

    def __init__(self, hidden_dim, num_heads):
        super(Attention, self).__init__()
        # Projections (Q, K, V, output) and head splitting are all
        # handled internally by nn.MultiheadAttention.
        self.attention = nn.MultiheadAttention(hidden_dim, num_heads)

    def forward(self, x):
        """Apply self-attention to ``x``.

        Args:
            x: tensor of shape (seqlen, batch, hidden_dim) — the
               sequence-first layout nn.MultiheadAttention expects by
               default.

        Returns:
            A tuple ``(result, att_weights)`` where ``result`` has the
            same shape as ``x`` and ``att_weights`` has shape
            (batch, seqlen, seqlen), averaged over heads (the
            nn.MultiheadAttention default) — suitable for visualization.
        """
        # Self-attention: the same tensor serves as query, key and value.
        result, att_weights = self.attention(x, x, x)
        return result, att_weights