SASRec(
  (embedding): SASEmbedding(
    (token): TokenEmbedding(11, 4, padding_idx=0)
    (position): PositionalEmbedding(
      (pe): Embedding(9, 4)
    )
    (dropout): Dropout(p=0.2, inplace=False)
  )
  (model): SASModel(
    (transformer_blocks): ModuleList(
      (0): SASTransformerBlock(
        (layer_norm): LayerNorm()
        (attention): SASMultiHeadedAttention(
          (linear_layers): ModuleList(
            (0): Linear(in_features=4, out_features=8, bias=True)
            (1): Linear(in_features=4, out_features=8, bias=True)
            (2): Linear(in_features=4, out_features=8, bias=True)
          )
          (attention): Attention()
          (dropout): Dropout(p=0.2, inplace=False)
          (layer_norm): LayerNorm()
        )
        (feed_forward): SASPositionwiseFeedForward(
          (conv1): Conv1d(4, 16, kernel_size=(1,), stride=(1,))
          (activation): ReLU()
          (dropout): Dropout(p=0.2, inplace=False)
          (conv2): Conv1d(16, 4, kernel_size=(1,), stride=(1,))
          (layer_norm): LayerNorm()
        )
      )
      (1): SASTransformerBlock(
        (layer_norm): LayerNorm()
        (attention): SASMultiHeadedAttention(
          (linear_layers): ModuleList(
            (0): Linear(in_features=4, out_features=8, bias=True)
            (1): Linear(in_features=4, out_features=8, bias=True)
            (2): Linear(in_features=4, out_features=8, bias=True)
          )
          (attention): Attention()
          (dropout): Dropout(p=0.2, inplace=False)
          (layer_norm): LayerNorm()
        )
        (feed_forward): SASPositionwiseFeedForward(
          (conv1): Conv1d(4, 16, kernel_size=(1,), stride=(1,))
          (activation): ReLU()
          (dropout): Dropout(p=0.2, inplace=False)
          (conv2): Conv1d(16, 4, kernel_size=(1,), stride=(1,))
          (layer_norm): LayerNorm()
        )
      )
      (2): SASTransformerBlock(
        (layer_norm): LayerNorm()
        (attention): SASMultiHeadedAttention(
          (linear_layers): ModuleList(
            (0): Linear(in_features=4, out_features=8, bias=True)
            (1): Linear(in_features=4, out_features=8, bias=True)
            (2): Linear(in_features=4, out_features=8, bias=True)
          )
          (attention): Attention()
          (dropout): Dropout(p=0.2, inplace=False)
          (layer_norm): LayerNorm()
        )
        (feed_forward): SASPositionwiseFeedForward(
          (conv1): Conv1d(4, 16, kernel_size=(1,), stride=(1,))
          (activation): ReLU()
          (dropout): Dropout(p=0.2, inplace=False)
          (conv2): Conv1d(16, 4, kernel_size=(1,), stride=(1,))
          (layer_norm): LayerNorm()
        )
      )
      (3): SASTransformerBlock(
        (layer_norm): LayerNorm()
        (attention): SASMultiHeadedAttention(
          (linear_layers): ModuleList(
            (0): Linear(in_features=4, out_features=8, bias=True)
            (1): Linear(in_features=4, out_features=8, bias=True)
            (2): Linear(in_features=4, out_features=8, bias=True)
          )
          (attention): Attention()
          (dropout): Dropout(p=0.2, inplace=False)
          (layer_norm): LayerNorm()
        )
        (feed_forward): SASPositionwiseFeedForward(
          (conv1): Conv1d(4, 16, kernel_size=(1,), stride=(1,))
          (activation): ReLU()
          (dropout): Dropout(p=0.2, inplace=False)
          (conv2): Conv1d(16, 4, kernel_size=(1,), stride=(1,))
          (layer_norm): LayerNorm()
        )
      )
    )
  )
)
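
This is the module tree of a SASRec (Self-Attentive Sequential Recommendation) model, and the hyperparameters can be read directly off the printout: the item embedding table has 11 rows (10 items plus a padding index at 0) with a hidden size of 4, the learned positional embedding covers sequences of up to 9 positions, dropout is 0.2 throughout, and the stack contains four SASTransformerBlock modules. Within each block, the three Linear layers in SASMultiHeadedAttention appear to be the query, key, and value projections (4 input features projected to 8, i.e. heads times per-head size), and the position-wise feed-forward network uses two kernel-size-1 Conv1d layers, which act as per-position Linear layers as in the original SASRec implementation, expanding the hidden size from 4 to 16 and back.

A minimal sketch of how a model with these settings might be constructed and size-checked; the import path and constructor argument names below are assumptions for illustration, not the project's confirmed API:

    # Hypothetical import path and constructor signature, inferred from
    # the printout above; adjust to the project's actual API.
    from sasrec.model import SASRec  # assumption: module path

    model = SASRec(
        num_items=10,    # TokenEmbedding(11, 4, padding_idx=0): 10 items + padding
        hidden_size=4,   # embedding / model dimension
        max_len=9,       # PositionalEmbedding table holds 9 positions
        num_blocks=4,    # four SASTransformerBlock modules in the ModuleList
        dropout=0.2,     # matches every Dropout(p=0.2) in the tree
    )

    # Sanity check on model size: count the trainable parameters.
    n_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    print(f"trainable parameters: {n_params:,}")

Note that print(model) reproduces the module tree shown above, while iterating model.parameters() yields the underlying weight tensors used for the count.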