| author | aktersnurra <gustaf.rydholm@gmail.com> | 2020-10-22 22:45:58 +0200 |
|---|---|---|
| committer | aktersnurra <gustaf.rydholm@gmail.com> | 2020-10-22 22:45:58 +0200 |
| commit | 4d7713746eb936832e84852e90292936b933e87d | (patch) |
| tree | 2b2519d1d2ce53d4e1390590f52018d55dadbc7c | /src/text_recognizer/networks/mlp.py |
| parent | 1b3b8073a19f939d18a0bb85247eb0d99284f7cc | (diff) |
Transformer added, many other changes.
Diffstat (limited to 'src/text_recognizer/networks/mlp.py')
| -rw-r--r-- | src/text_recognizer/networks/mlp.py | 6 | 
1 file changed, 3 insertions, 3 deletions
```diff
diff --git a/src/text_recognizer/networks/mlp.py b/src/text_recognizer/networks/mlp.py
index d66af28..1101912 100644
--- a/src/text_recognizer/networks/mlp.py
+++ b/src/text_recognizer/networks/mlp.py
@@ -5,7 +5,7 @@ from einops.layers.torch import Rearrange
 import torch
 from torch import nn
 
-from text_recognizer.networks.misc import activation_function
+from text_recognizer.networks.util import activation_function
 
 
 class MLP(nn.Module):
@@ -63,8 +63,8 @@ class MLP(nn.Module):
     def forward(self, x: torch.Tensor) -> torch.Tensor:
         """The feedforward pass."""
         # If batch dimenstion is missing, it needs to be added.
-        if len(x.shape) == 3:
-            x = x.unsqueeze(0)
+        if len(x.shape) < 4:
+            x = x[(None,) * (4 - len(x.shape))]
         return self.layers(x)
 
     @property
```
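The change in `forward` generalizes the old `unsqueeze(0)`: indexing a tensor with a tuple of `None` values prepends one singleton dimension per `None`, so any input with fewer than four dimensions is padded up to a 4D batch. A minimal sketch of that behavior (the `pad_to_4d` helper is hypothetical and not part of the repository):

```python
import torch


def pad_to_4d(x: torch.Tensor) -> torch.Tensor:
    """Prepend singleton dimensions until the tensor is 4D (hypothetical helper)."""
    if len(x.shape) < 4:
        # Indexing with a tuple of None values inserts one leading singleton
        # dimension per None, e.g. x[(None, None)] turns (H, W) into (1, 1, H, W).
        x = x[(None,) * (4 - len(x.shape))]
    return x


print(pad_to_4d(torch.rand(28, 28)).shape)        # torch.Size([1, 1, 28, 28])
print(pad_to_4d(torch.rand(1, 28, 28)).shape)     # torch.Size([1, 1, 28, 28])
print(pad_to_4d(torch.rand(8, 1, 28, 28)).shape)  # unchanged: torch.Size([8, 1, 28, 28])
```

Unlike the previous `if len(x.shape) == 3: x = x.unsqueeze(0)`, this also covers 2D inputs such as a single unbatched, single-channel image.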