| author | aktersnurra <gustaf.rydholm@gmail.com> | 2020-08-20 22:18:35 +0200 |
|---|---|---|
| committer | aktersnurra <gustaf.rydholm@gmail.com> | 2020-08-20 22:18:35 +0200 |
| commit | 1f459ba19422593de325983040e176f97cf4ffc0 (patch) | |
| tree | 89fef442d5dbe0c83253e9566d1762f0704f64e2 /src/text_recognizer/networks/lenet.py | |
| parent | 95cbdf5bc1cc9639febda23c28d8f464c998b214 (diff) | |
A lot of stuff working :D. ResNet implemented!
Diffstat (limited to 'src/text_recognizer/networks/lenet.py')
-rw-r--r-- | src/text_recognizer/networks/lenet.py | 17 |
1 file changed, 6 insertions(+), 11 deletions(-)
```diff
diff --git a/src/text_recognizer/networks/lenet.py b/src/text_recognizer/networks/lenet.py
index cbc58fc..91d3f2c 100644
--- a/src/text_recognizer/networks/lenet.py
+++ b/src/text_recognizer/networks/lenet.py
@@ -5,6 +5,8 @@ from einops.layers.torch import Rearrange
 import torch
 from torch import nn
 
+from text_recognizer.networks.misc import activation_function
+
 
 class LeNet(nn.Module):
     """LeNet network."""
@@ -16,8 +18,7 @@ class LeNet(nn.Module):
         hidden_size: Tuple[int, ...] = (9216, 128),
         dropout_rate: float = 0.2,
         output_size: int = 10,
-        activation_fn: Optional[Callable] = None,
-        activation_fn_args: Optional[Dict] = None,
+        activation_fn: Optional[str] = "relu",
     ) -> None:
         """The LeNet network.
 
@@ -28,18 +29,12 @@ class LeNet(nn.Module):
             Defaults to (9216, 128).
             dropout_rate (float): The dropout rate. Defaults to 0.2.
             output_size (int): Number of classes. Defaults to 10.
-            activation_fn (Optional[Callable]): The non-linear activation function. Defaults to
-                nn.ReLU(inplace).
-            activation_fn_args (Optional[Dict]): The arguments for the activation function. Defaults to None.
+            activation_fn (Optional[str]): The name of non-linear activation function. Defaults to relu.
 
         """
         super().__init__()
 
-        if activation_fn is not None:
-            activation_fn_args = activation_fn_args or {}
-            activation_fn = getattr(nn, activation_fn)(**activation_fn_args)
-        else:
-            activation_fn = nn.ReLU(inplace=True)
+        activation_fn = activation_function(activation_fn)
 
         self.layers = [
             nn.Conv2d(
@@ -66,7 +61,7 @@ class LeNet(nn.Module):
         self.layers = nn.Sequential(*self.layers)
 
     def forward(self, x: torch.Tensor) -> torch.Tensor:
-        """The feedforward."""
+        """The feedforward pass."""
         # If batch dimenstion is missing, it needs to be added.
         if len(x.shape) == 3:
             x = x.unsqueeze(0)
```
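The commit replaces per-network `getattr(nn, ...)` activation construction with a shared `activation_function` helper imported from `text_recognizer.networks.misc`. The helper's implementation is not part of this diff; the sketch below is a hypothetical reconstruction based only on its call site, which passes a string name (defaulting to `"relu"`) and expects an `nn.Module` instance back. The lookup-table contents and the `.lower()` normalization are assumptions.

```python
from torch import nn


def activation_function(activation: str = "relu") -> nn.Module:
    """Resolve an activation name to a torch.nn module.

    Hypothetical sketch: the real misc.py is not shown in this diff.
    The default "relu" mirrors the new LeNet signature above.
    """
    activation_fns = {
        "elu": nn.ELU(inplace=True),
        "gelu": nn.GELU(),
        "leaky_relu": nn.LeakyReLU(inplace=True),
        "none": nn.Identity(),
        "relu": nn.ReLU(inplace=True),
        "selu": nn.SELU(inplace=True),
    }
    # Unknown names raise a KeyError, surfacing config typos early.
    return activation_fns[activation.lower()]
```

With a helper like this, the call site in `LeNet.__init__` reduces to a single line and callers select the non-linearity by name, e.g. `LeNet(activation_fn="relu")`, which also makes the choice serializable in plain-text configs.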