layers.py

from fastai.layers import *
from fastai.torch_core import *
from torch.nn.parameter import Parameter
from torch.autograd import Variable


# The code below is meant to be merged into fastai v1 ideally.
def custom_conv_layer(ni:int, nf:int, ks:int=3, stride:int=1, padding:int=None, bias:bool=None,
                      is_1d:bool=False, norm_type:Optional[NormType]=NormType.Batch, use_activ:bool=True,
                      leaky:float=None, transpose:bool=False, init:Callable=nn.init.kaiming_normal_,
                      self_attention:bool=False, extra_bn:bool=False):
    "Create a sequence of convolutional (`ni` to `nf`), ReLU (if `use_activ`) and batchnorm (if `bn`) layers."
    if padding is None: padding = (ks-1)//2 if not transpose else 0
    # Batchnorm is applied for NormType.Batch/BatchZero, or whenever `extra_bn` is requested.
    bn = norm_type in (NormType.Batch, NormType.BatchZero) or extra_bn
    if bias is None: bias = not bn
    conv_func = nn.ConvTranspose2d if transpose else nn.Conv1d if is_1d else nn.Conv2d
    conv = init_default(conv_func(ni, nf, kernel_size=ks, bias=bias, stride=stride, padding=padding), init)
    # Weight or spectral normalization wraps the conv module itself.
    if norm_type==NormType.Weight: conv = weight_norm(conv)
    elif norm_type==NormType.Spectral: conv = spectral_norm(conv)
    layers = [conv]
    if use_activ: layers.append(relu(True, leaky=leaky))
    if bn: layers.append((nn.BatchNorm1d if is_1d else nn.BatchNorm2d)(nf))
    if self_attention: layers.append(SelfAttention(nf))
    return nn.Sequential(*layers)
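

# --- Usage sketch (not part of the original file) ---------------------------
# A minimal, hedged example of how `custom_conv_layer` might be called,
# assuming fastai v1 is installed so that `NormType`, `SelfAttention`,
# `relu`, `init_default`, `weight_norm` and `spectral_norm` resolve through
# the star imports above. It builds a spectrally normalized conv block with
# self-attention and checks that the spatial size is preserved (ks=3,
# stride=1, padding defaults to 1).
if __name__ == "__main__":
    import torch

    block = custom_conv_layer(64, 128, ks=3, stride=1,
                              norm_type=NormType.Spectral, self_attention=True)
    x = torch.randn(1, 64, 32, 32)
    out = block(x)
    print(out.shape)  # expected: torch.Size([1, 128, 32, 32])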