diff options
author | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2022-01-26 23:17:33 +0100 |
---|---|---|
committer | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2022-01-26 23:17:33 +0100 |
commit | 53a4af4ca22ced165fde14cd0de46e29aab7d80d (patch) | |
tree | 592dbbf8826e7110950c5165fd5e965880a0b9a5 | |
parent | 9b6c99172f80b45030279e7268aaa80e5a1ed574 (diff) |
fix: change Set to Sequence
-rw-r--r-- | text_recognizer/data/mappings/emnist.py | 4 | ||||
-rw-r--r-- | text_recognizer/data/transforms/word_piece.py | 9 |
2 files changed, 5 insertions, 8 deletions
diff --git a/text_recognizer/data/mappings/emnist.py b/text_recognizer/data/mappings/emnist.py
index 655169e..51e4677 100644
--- a/text_recognizer/data/mappings/emnist.py
+++ b/text_recognizer/data/mappings/emnist.py
@@ -1,5 +1,5 @@
 """Emnist mapping."""
-from typing import List, Optional, Set, Union
+from typing import List, Optional, Sequence, Union

 import torch
 from torch import Tensor
@@ -12,7 +12,7 @@ class EmnistMapping(AbstractMapping):
     """Mapping for EMNIST labels."""

     def __init__(
-        self, extra_symbols: Optional[Set[str]] = None, lower: bool = True
+        self, extra_symbols: Optional[Sequence[str]] = None, lower: bool = True
     ) -> None:
         self.extra_symbols = set(extra_symbols) if extra_symbols is not None else None
         self.mapping, self.inverse_mapping, self.input_size = emnist_mapping(
diff --git a/text_recognizer/data/transforms/word_piece.py b/text_recognizer/data/transforms/word_piece.py
index a15615d..d805c7e 100644
--- a/text_recognizer/data/transforms/word_piece.py
+++ b/text_recognizer/data/transforms/word_piece.py
@@ -1,6 +1,5 @@
 """Target transform for word pieces."""
-from pathlib import Path
-from typing import Optional, Union, Set
+from typing import Optional, Sequence

 import torch
 from torch import Tensor
@@ -18,10 +17,8 @@ class WordPiece:
         lexicon: str = "iamdb_1kwp_lex_1000.txt",
         use_words: bool = False,
         prepend_wordsep: bool = False,
-        special_tokens: Set[str] = {"<s>", "<e>", "<p>"},
-        extra_symbols: Optional[Set[str]] = {
-            "\n",
-        },
+        special_tokens: Sequence[str] = ("<s>", "<e>", "<p>"),
+        extra_symbols: Optional[Sequence[str]] = ("\n",),
         max_len: int = 451,
     ) -> None:
         self.mapping = WordPieceMapping( |