layers.py

from fastai.layers import *
from fastai.torch_core import *
from torch.nn.parameter import Parameter
from torch.autograd import Variable

# The code below is meant to be merged into fastai v1 ideally.


def custom_conv_layer(
    ni: int,
    nf: int,
    ks: int = 3,
    stride: int = 1,
    padding: int = None,
    bias: bool = None,
    is_1d: bool = False,
    norm_type: Optional[NormType] = NormType.Batch,
    use_activ: bool = True,
    leaky: float = None,
    transpose: bool = False,
    init: Callable = nn.init.kaiming_normal_,
    self_attention: bool = False,
    extra_bn: bool = False,
):
    "Create a sequence of convolutional (`ni` to `nf`), ReLU (if `use_activ`), batchnorm (if `norm_type` is batch-based or `extra_bn`) and self-attention (if `self_attention`) layers."
    if padding is None:
        # Default to "same" padding for odd kernel sizes; transposed convs get 0.
        padding = (ks - 1) // 2 if not transpose else 0
    # Use batchnorm for batch-based norm types, or when explicitly requested.
    bn = norm_type in (NormType.Batch, NormType.BatchZero) or extra_bn
    if bias is None:
        # A conv bias is redundant when the output is batch-normalized.
        bias = not bn
    conv_func = nn.ConvTranspose2d if transpose else nn.Conv1d if is_1d else nn.Conv2d
    conv = init_default(
        conv_func(ni, nf, kernel_size=ks, bias=bias, stride=stride, padding=padding),
        init,
    )
    # weight_norm/spectral_norm come from torch.nn.utils (re-exported by fastai).
    if norm_type == NormType.Weight:
        conv = weight_norm(conv)
    elif norm_type == NormType.Spectral:
        conv = spectral_norm(conv)
    # Assemble in order: conv -> activation -> batchnorm -> self-attention.
    layers = [conv]
    if use_activ:
        layers.append(relu(True, leaky=leaky))
    if bn:
        layers.append((nn.BatchNorm1d if is_1d else nn.BatchNorm2d)(nf))
    if self_attention:
        layers.append(SelfAttention(nf))
    return nn.Sequential(*layers)
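

# A minimal usage sketch (illustrative, not part of the original file),
# assuming fastai v1 is installed so that `relu` and `SelfAttention` resolve
# through the star imports above. It builds a spectral-normalized conv
# followed by ReLU, an extra BatchNorm and self-attention, then runs a
# dummy batch through it. With ks=3, stride=1 the spatial size is preserved.
if __name__ == "__main__":
    import torch

    block = custom_conv_layer(
        ni=64,
        nf=128,
        norm_type=NormType.Spectral,
        extra_bn=True,
        self_attention=True,
    )
    x = torch.randn(2, 64, 32, 32)  # (batch, channels, height, width)
    print(block(x).shape)  # torch.Size([2, 128, 32, 32])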