import copy

import torch
from torch import nn

device = "cuda" if torch.cuda.is_available() else "cpu"
def ut_mask(seq_len):
    """ Upper Triangular Mask: True strictly above the diagonal.

    True entries mark future positions, which an attention layer
    should not be allowed to attend to.
    """
    return torch.triu(torch.ones(seq_len, seq_len, device=device), diagonal=1).bool()
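# A minimal usage sketch (an assumption, not part of the original module):
# a True entry in ``attn_mask`` tells nn.MultiheadAttention to block that
# position, so ut_mask yields causal self-attention.
#
#     >>> ut_mask(3)
#     tensor([[False,  True,  True],
#             [False, False,  True],
#             [False, False, False]])
#     >>> attn = nn.MultiheadAttention(embed_dim=64, num_heads=4, batch_first=True).to(device)
#     >>> x = torch.randn(2, 10, 64, device=device)      # (batch, seq_len, d_model)
#     >>> out, _ = attn(x, x, x, attn_mask=ut_mask(10))  # future positions blocked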
def lt_mask(seq_len):
    """ Lower Triangular Mask: True strictly below the diagonal.
    """
    return torch.tril(torch.ones(seq_len, seq_len, device=device), diagonal=-1).bool()
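# A small check (assumed, for illustration): the lower mask is the mirror
# image of ut_mask, marking strictly-past positions instead of future ones.
#
#     >>> lt_mask(3)
#     tensor([[False, False, False],
#             [ True, False, False],
#             [ True,  True, False]])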
def pos_encode(seq_len):
    """ Position Encoding: indices 0..seq_len-1, shaped (1, seq_len) so they
    broadcast over the batch dimension.
    """
    return torch.arange(seq_len).unsqueeze(0).to(device)
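# A minimal sketch (assumed usage; ``d_model``, ``token_emb``, and the max
# length of 200 are hypothetical): the indices are typically fed to a learned
# position embedding and added to the token embeddings.
#
#     >>> pos_emb = nn.Embedding(200, d_model).to(device)   # 200 = max seq_len
#     >>> x = token_emb + pos_emb(pos_encode(token_emb.size(1)))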
def get_clones(module, N):
    """ Cloning nn modules: returns a ModuleList of N independent
    deep copies of ``module``.
    """
    return nn.ModuleList([copy.deepcopy(module) for _ in range(N)])
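# A minimal sketch (assumed usage, not from the original module): cloning one
# encoder layer N times gives a stack with independent weights, the usual way
# to build a Transformer encoder by hand.
#
#     >>> layer = nn.TransformerEncoderLayer(d_model=64, nhead=4, batch_first=True)
#     >>> stack = get_clones(layer, 4)
#     >>> x = torch.randn(2, 10, 64)
#     >>> for blk in stack:
#     ...     x = blk(x)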