import torch
from torch import nn


class LayerScale(nn.Module):
    """LayerScale for tensors with channels in the last dimension (e.g. (B, N, C) token layouts).
    """
    def __init__(
            self,
            dim: int,
            init_values: float = 1e-5,
            inplace: bool = False,
    ) -> None:
        super().__init__()
        self.inplace = inplace
        # Learnable per-channel scale, initialized to a small value.
        self.gamma = nn.Parameter(init_values * torch.ones(dim))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # gamma broadcasts over the trailing channel dimension.
        return x.mul_(self.gamma) if self.inplace else x * self.gamma


class LayerScale2d(nn.Module):
    """LayerScale for 2D tensors in NCHW layout.
    """
    def __init__(
            self,
            dim: int,
            init_values: float = 1e-5,
            inplace: bool = False,
    ) -> None:
        super().__init__()
        self.inplace = inplace
        # Learnable per-channel scale, initialized to a small value.
        self.gamma = nn.Parameter(init_values * torch.ones(dim))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Reshape gamma to (1, C, 1, 1) so it broadcasts over batch and spatial dims.
        gamma = self.gamma.view(1, -1, 1, 1)
        return x.mul_(gamma) if self.inplace else x * gamma
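

# Minimal usage sketch (illustrative only, not part of the module above):
# shapes and init values here are example assumptions, chosen to show how
# each variant broadcasts its per-channel gamma.
if __name__ == "__main__":
    tokens = torch.randn(2, 196, 384)          # (B, N, C) token tensor
    ls = LayerScale(dim=384, init_values=1e-5)
    print(ls(tokens).shape)                    # torch.Size([2, 196, 384])

    feature_map = torch.randn(2, 64, 56, 56)   # (B, C, H, W) feature map
    ls2d = LayerScale2d(dim=64)
    print(ls2d(feature_map).shape)             # torch.Size([2, 64, 56, 56])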