
Let’s see it in code, starting with the "layer" and all its wrapped "sub-layers":

Encoder "Layer"

import torch.nn as nn

# MultiHeadedAttention is the class we built earlier in the chapter
class EncoderLayer(nn.Module):
    def __init__(self, n_heads, d_model, ff_units, dropout=0.1):
        super().__init__()
        self.n_heads = n_heads
        self.d_model = d_model
        self.ff_units = ff_units
        self.self_attn_heads = \
            MultiHeadedAttention(n_heads, d_model, dropout)
        self.ffn = nn.Sequential(
            nn.Linear(d_model, ff_units),
            nn.ReLU(),
            nn.Dropout(dropout),
            nn.Linear(ff_units, d_model),
        )

        self.norm1 = nn.LayerNorm(d_model)  # one layer norm per sub-layer
        self.norm2 = nn.LayerNorm(d_model)
        self.drop1 = nn.Dropout(dropout)
        self.drop2 = nn.Dropout(dropout)

    def forward(self, query, mask=None):
        # Sublayer #0
        # Norm
        norm_query = self.norm1(query)
        # Multi-headed Attention
        self.self_attn_heads.init_keys(norm_query)
        states = self.self_attn_heads(norm_query, mask)
        # Add
        att = query + self.drop1(states)

        # Sublayer #1
        # Norm
        norm_att = self.norm2(att)
        # Feed Forward
        out = self.ffn(norm_att)
        # Add
        out = att + self.drop2(out)
        return out
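
As a quick sanity check, here is a minimal sketch of a forward pass on a toy batch. The sizes (n_heads=3, d_model=6, ff_units=10) and the input tensor are made up for illustration, and it assumes the MultiHeadedAttention class from the previous section is in scope:

import torch

torch.manual_seed(42)
# hypothetical toy batch: 16 sequences, 4 data points each, d_model = 6
points = torch.randn(16, 4, 6)
enc_layer = EncoderLayer(n_heads=3, d_model=6, ff_units=10)
out = enc_layer(points)
print(out.shape)  # torch.Size([16, 4, 6])

Thanks to the residual connections, the output keeps the input’s shape, and that is exactly what allows us to stack several of these "layers" on top of one another.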
