This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import numpy as np | |
def get_len_mask(batch_size, seq_len):
    """Build an attention mask for ViT-style inputs.

    Vision Transformers attend to every patch (there is no padding), so the
    mask is uniformly False: nothing is masked out.

    Args:
        batch_size: number of sequences in the batch.
        seq_len: number of patches/tokens per sequence.

    Returns:
        Boolean array of shape (batch_size, seq_len, seq_len), all False.
    """
    mask_shape = (batch_size, seq_len, seq_len)
    return np.full(mask_shape, False, dtype=bool)
| def pos_sinusoid_embedding(seq_len, d_model): |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import numpy as np | |
def get_len_mask(batch_size, max_len, feat_lens):
    """Build a key-padding attention mask.

    For each batch element i, key positions ``>= feat_lens[i]`` are padding
    and are masked (True); valid key positions are False. The mask is
    broadcast across all query positions, matching the original row-wise
    assignment ``attn_mask[i, :, :feat_lens[i]] = 0``.

    Args:
        batch_size: number of sequences in the batch.
        max_len: padded sequence length.
        feat_lens: per-sequence valid lengths, length ``batch_size``
            (any array-like of ints).

    Returns:
        Boolean array of shape (batch_size, max_len, max_len); True marks
        masked (padded) key positions.
    """
    # Vectorized replacement for the per-batch Python loop: compare each
    # key column index against the sequence's valid length via broadcasting.
    lens = np.asarray(feat_lens).reshape(batch_size, 1)
    key_is_pad = np.arange(max_len) >= lens  # (batch_size, max_len)
    # Repeat the key mask over the query axis; copy() keeps the result writable.
    return np.broadcast_to(
        key_is_pad[:, None, :], (batch_size, max_len, max_len)
    ).copy()