path: root/text_recognizer/networks/util.py
author    Gustaf Rydholm <gustaf.rydholm@gmail.com>    2021-06-06 23:19:35 +0200
committer Gustaf Rydholm <gustaf.rydholm@gmail.com>    2021-06-06 23:19:35 +0200
commit    01d6e5fc066969283df99c759609df441151e9c5 (patch)
tree      ecd1459e142356d0c7f50a61307b760aca813248 /text_recognizer/networks/util.py
parent    f4688482b4898c0b342d6ae59839dc27fbf856c6 (diff)
Working on fixing decoder transformer
Diffstat (limited to 'text_recognizer/networks/util.py')
-rw-r--r--  text_recognizer/networks/util.py  39
1 file changed, 0 insertions(+), 39 deletions(-)
diff --git a/text_recognizer/networks/util.py b/text_recognizer/networks/util.py
index 9c6b151..05b10a8 100644
--- a/text_recognizer/networks/util.py
+++ b/text_recognizer/networks/util.py
@@ -22,42 +22,3 @@ def activation_function(activation: str) -> Type[nn.Module]:
]
)
return activation_fns[activation.lower()]
-
-
-# def configure_backbone(backbone: Union[OmegaConf, NamedTuple]) -> Type[nn.Module]:
-# """Loads a backbone network."""
-# network_module = importlib.import_module("text_recognizer.networks")
-# backbone_class = getattr(network_module, backbone.type)
-#
-# if "pretrained" in backbone.args:
-# logger.info("Loading pretrained backbone.")
-# checkpoint_file = Path(__file__).resolve().parents[2] / backbone.args.pop(
-# "pretrained"
-# )
-#
-# # Loading state directory.
-# state_dict = torch.load(checkpoint_file)
-# network_args = state_dict["network_args"]
-# weights = state_dict["model_state"]
-#
-# freeze = False
-# if "freeze" in backbone.args and backbone.args["freeze"] is True:
-# backbone.args.pop("freeze")
-# freeze = True
-#
-# # Initializes the network with trained weights.
-# backbone_ = backbone_(**backbone.args)
-# backbone_.load_state_dict(weights)
-# if freeze:
-# for params in backbone_.parameters():
-# params.requires_grad = False
-# else:
-# backbone_ = getattr(network_module, backbone.type)
-# backbone_ = backbone_(**backbone.args)
-#
-# if "remove_layers" in backbone_args and backbone_args["remove_layers"] is not None:
-# backbone = nn.Sequential(
-# *list(backbone.children())[:][: -backbone_args["remove_layers"]]
-# )
-#
-# return backbone
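
For context, below is a hypothetical, corrected sketch of the configure_backbone helper whose commented-out remains are deleted by this commit. The config layout (backbone.type, backbone.args) and the checkpoint key "model_state" are taken from the deleted comments; popping "pretrained", "freeze", and "remove_layers" out of the constructor arguments and using a single backbone_ variable throughout are assumed fixes for the bugs visible in the old code (backbone_ referenced before assignment, the undefined backbone_args name). This helper is not part of the repository after this commit.

"""Hypothetical, corrected sketch of the removed configure_backbone helper."""
import importlib
from pathlib import Path

import torch
from torch import nn


def configure_backbone(backbone) -> nn.Module:
    """Loads a backbone network, optionally from a pretrained checkpoint."""
    network_module = importlib.import_module("text_recognizer.networks")
    backbone_class = getattr(network_module, backbone.type)
    args = dict(backbone.args)

    # Pop options that are not constructor arguments.
    pretrained = args.pop("pretrained", None)
    freeze = args.pop("freeze", False)
    remove_layers = args.pop("remove_layers", None)

    backbone_ = backbone_class(**args)

    if pretrained is not None:
        # Resolve the checkpoint path relative to the repository root.
        checkpoint_file = Path(__file__).resolve().parents[2] / pretrained
        state_dict = torch.load(checkpoint_file)
        backbone_.load_state_dict(state_dict["model_state"])
        if freeze:
            for params in backbone_.parameters():
                params.requires_grad = False

    if remove_layers is not None:
        # Strip the last `remove_layers` children, e.g. a classification head.
        backbone_ = nn.Sequential(*list(backbone_.children())[:-remove_layers])

    return backbone_

The sketch constructs the backbone once and applies pretrained-weight loading, parameter freezing, and layer removal as separate, optional steps, which is what the deleted comment block appeared to intend.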