author    aktersnurra <gustaf.rydholm@gmail.com>  2020-09-09 23:31:31 +0200
committer aktersnurra <gustaf.rydholm@gmail.com>  2020-09-09 23:31:31 +0200
commit    2b63fd952bdc9c7c72edd501cbcdbf3231e98f00 (patch)
tree      1c0e0898cb8b66faff9e5d410aa1f82d13542f68 /src
parent    e1b504bca41a9793ed7e88ef14f2e2cbd85724f2 (diff)
Created an abstract Dataset class for common methods.
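
This commit moves the setup logic that each dataset class previously duplicated (building the EmnistMapper, input/output shape bookkeeping, subsample-fraction validation, and default transforms) into a single abstract Dataset base class, relocates EmnistMapper, DATA_DIRNAME, and ESSENTIALS_FILENAME into datasets/util.py, and makes the private _load_or_generate_data hooks public as load_or_generate_data (the notebook diffs below reflect the renamed call). A minimal sketch of the subclass pattern the new base class supports; SampleDataset and its body are hypothetical illustrations inferred from the diff, not code from this commit:

    import numpy as np
    import torch

    from text_recognizer.datasets.dataset import Dataset


    class SampleDataset(Dataset):
        """Hypothetical concrete dataset built on the new abstract base class."""

        def __init__(self, train: bool = False) -> None:
            # The base class sets self.split, validates the subsample
            # fraction, builds the EmnistMapper, and assigns transforms.
            super().__init__(train=train)
            self.data = None
            self.targets = None

        def load_or_generate_data(self) -> None:
            # A real subclass downloads or reads cached data here; dummy
            # arrays stand in for EMNIST-sized images and labels.
            self.data = np.zeros((10, 28, 28), dtype=np.uint8)
            self.targets = torch.zeros(10, dtype=torch.uint8)

        def __len__(self) -> int:
            return len(self.data)

        def __getitem__(self, index: int) -> tuple:
            data, target = self.data[index], self.targets[index]
            if self.transform is not None:
                data = self.transform(data)
            return data, target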
Diffstat (limited to 'src')
-rw-r--r--  src/notebooks/01-look-at-emnist.ipynb                     134
-rw-r--r--  src/notebooks/01b-dataset_normalization.ipynb             148
-rw-r--r--  src/notebooks/02b-emnist-lines-dataset.ipynb              124
-rw-r--r--  src/notebooks/03a-line-prediction.ipynb                    31
-rw-r--r--  src/notebooks/04a-look-at-iam-lines.ipynb                 101
-rw-r--r--  src/notebooks/04b-look-at-iam-paragraphs.ipynb (renamed from src/notebooks/04-look-at-iam-paragraphs.ipynb)  26
-rw-r--r--  src/text_recognizer/datasets/__init__.py                   16
-rw-r--r--  src/text_recognizer/datasets/dataset.py                   124
-rw-r--r--  src/text_recognizer/datasets/emnist_dataset.py            228
-rw-r--r--  src/text_recognizer/datasets/emnist_lines_dataset.py       56
-rw-r--r--  src/text_recognizer/datasets/iam_dataset.py                 6
-rw-r--r--  src/text_recognizer/datasets/iam_lines_dataset.py          68
-rw-r--r--  src/text_recognizer/datasets/iam_paragraphs_dataset.py     70
-rw-r--r--  src/text_recognizer/datasets/sentence_generator.py          2
-rw-r--r--  src/text_recognizer/datasets/util.py                      125
-rw-r--r--  src/text_recognizer/models/base.py                          2
-rw-r--r--  src/text_recognizer/networks/ctc.py                         2
17 files changed, 582 insertions, 681 deletions
diff --git a/src/notebooks/01-look-at-emnist.ipynb b/src/notebooks/01-look-at-emnist.ipynb
index 93083a5..564d14e 100644
--- a/src/notebooks/01-look-at-emnist.ipynb
+++ b/src/notebooks/01-look-at-emnist.ipynb
@@ -2,9 +2,18 @@
"cells": [
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": 18,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "The autoreload extension is already loaded. To reload it, use:\n",
+ " %reload_ext autoreload\n"
+ ]
+ }
+ ],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
@@ -22,7 +31,7 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": 22,
"metadata": {},
"outputs": [],
"source": [
@@ -31,7 +40,7 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": 35,
"metadata": {},
"outputs": [],
"source": [
@@ -40,7 +49,16 @@
},
{
"cell_type": "code",
- "execution_count": 15,
+ "execution_count": 36,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "dataset.load_or_generate_data()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 37,
"metadata": {},
"outputs": [],
"source": [
@@ -49,7 +67,7 @@
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": 38,
"metadata": {},
"outputs": [],
"source": [
@@ -58,7 +76,7 @@
},
{
"cell_type": "code",
- "execution_count": 10,
+ "execution_count": 39,
"metadata": {},
"outputs": [
{
@@ -67,7 +85,7 @@
"55898"
]
},
- "execution_count": 10,
+ "execution_count": 39,
"metadata": {},
"output_type": "execute_result"
}
@@ -78,7 +96,7 @@
},
{
"cell_type": "code",
- "execution_count": 18,
+ "execution_count": 40,
"metadata": {},
"outputs": [],
"source": [
@@ -87,7 +105,7 @@
},
{
"cell_type": "code",
- "execution_count": 19,
+ "execution_count": 41,
"metadata": {},
"outputs": [
{
@@ -96,7 +114,7 @@
"3494"
]
},
- "execution_count": 19,
+ "execution_count": 41,
"metadata": {},
"output_type": "execute_result"
}
@@ -107,19 +125,74 @@
},
{
"cell_type": "code",
- "execution_count": 14,
+ "execution_count": 42,
"metadata": {},
"outputs": [
{
- "ename": "ValueError",
- "evalue": "only one element tensors can be converted to Python scalars",
- "output_type": "error",
- "traceback": [
- "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
- "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
- "\u001b[0;32m<ipython-input-14-69c3b5027f10>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0md1\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdataset\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mitem\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
- "\u001b[0;31mValueError\u001b[0m: only one element tensors can be converted to Python scalars"
- ]
+ "data": {
+ "text/plain": [
+ "tensor([[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
+ " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
+ " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
+ " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
+ " 0, 0, 0, 0, 4, 4, 4, 4, 4, 2, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
+ " 0, 2, 4, 9, 32, 37, 37, 37, 32, 20, 1, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
+ " 3, 65, 109, 140, 204, 215, 217, 217, 201, 154, 22, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3,\n",
+ " 12, 122, 190, 222, 245, 249, 250, 250, 242, 206, 46, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 8, 79,\n",
+ " 127, 222, 247, 253, 235, 228, 249, 254, 254, 245, 114, 4, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 35, 91, 219,\n",
+ " 244, 252, 247, 207, 100, 84, 223, 251, 254, 250, 127, 4, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 91, 163, 246,\n",
+ " 252, 244, 220, 127, 39, 48, 218, 250, 255, 250, 127, 4, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 5, 20, 95, 219, 246, 246,\n",
+ " 221, 127, 79, 10, 5, 37, 217, 250, 254, 249, 125, 4, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 20, 67, 175, 246, 252, 219,\n",
+ " 164, 47, 22, 1, 5, 39, 218, 250, 254, 245, 114, 4, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 1, 9, 95, 175, 250, 246, 219, 91,\n",
+ " 35, 1, 0, 0, 22, 84, 234, 252, 250, 220, 50, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 9, 35, 164, 221, 252, 219, 163, 35,\n",
+ " 9, 0, 0, 0, 46, 127, 246, 254, 245, 204, 34, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 7, 91, 163, 246, 252, 219, 91, 35, 1,\n",
+ " 0, 0, 0, 10, 128, 209, 254, 254, 220, 139, 9, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 3, 22, 159, 219, 252, 247, 164, 35, 9, 0,\n",
+ " 0, 0, 1, 36, 175, 233, 254, 254, 204, 115, 4, 0, 0, 0],\n",
+ " [ 0, 0, 0, 1, 36, 95, 232, 251, 232, 195, 47, 1, 0, 0,\n",
+ " 0, 9, 35, 163, 246, 253, 249, 232, 122, 45, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 7, 91, 164, 247, 251, 187, 127, 20, 0, 0, 0,\n",
+ " 1, 35, 91, 219, 253, 254, 234, 187, 67, 20, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 46, 207, 244, 247, 220, 80, 24, 1, 3, 8, 34,\n",
+ " 52, 164, 219, 253, 249, 234, 155, 79, 4, 0, 0, 0, 0, 0],\n",
+ " [ 0, 0, 2, 81, 232, 251, 235, 179, 39, 12, 5, 22, 46, 115,\n",
+ " 139, 221, 246, 254, 234, 188, 79, 32, 0, 0, 0, 0, 0, 0],\n",
+ " [ 0, 0, 3, 112, 244, 254, 236, 193, 130, 127, 129, 173, 209, 245,\n",
+ " 250, 254, 253, 232, 154, 79, 4, 0, 0, 0, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 46, 206, 242, 249, 248, 249, 250, 250, 250, 250, 250,\n",
+ " 250, 243, 219, 95, 22, 7, 0, 0, 0, 0, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 22, 154, 201, 217, 222, 245, 249, 249, 233, 222, 217,\n",
+ " 217, 202, 158, 36, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 1, 20, 32, 39, 51, 114, 125, 125, 82, 51, 37,\n",
+ " 37, 32, 20, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 2, 4, 5, 9, 32, 37, 37, 21, 9, 4,\n",
+ " 4, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
+ " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
+ " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n",
+ " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
+ " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],\n",
+ " dtype=torch.uint8)"
+ ]
+ },
+ "execution_count": 42,
+ "metadata": {},
+ "output_type": "execute_result"
}
],
"source": [
@@ -128,7 +201,7 @@
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 43,
"metadata": {},
"outputs": [
{
@@ -137,7 +210,7 @@
"torch.Tensor"
]
},
- "execution_count": 4,
+ "execution_count": 43,
"metadata": {},
"output_type": "execute_result"
}
@@ -148,7 +221,7 @@
},
{
"cell_type": "code",
- "execution_count": 16,
+ "execution_count": 44,
"metadata": {},
"outputs": [
{
@@ -169,7 +242,7 @@
},
{
"cell_type": "code",
- "execution_count": 17,
+ "execution_count": 45,
"metadata": {},
"outputs": [],
"source": [
@@ -187,7 +260,7 @@
},
{
"cell_type": "code",
- "execution_count": 18,
+ "execution_count": 46,
"metadata": {},
"outputs": [
{
@@ -207,7 +280,7 @@
},
{
"cell_type": "code",
- "execution_count": 19,
+ "execution_count": 47,
"metadata": {},
"outputs": [
{
@@ -238,6 +311,13 @@
"metadata": {},
"outputs": [],
"source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
}
],
"metadata": {
diff --git a/src/notebooks/01b-dataset_normalization.ipynb b/src/notebooks/01b-dataset_normalization.ipynb
deleted file mode 100644
index 9421816..0000000
--- a/src/notebooks/01b-dataset_normalization.ipynb
+++ /dev/null
@@ -1,148 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [],
- "source": [
- "%load_ext autoreload\n",
- "%autoreload 2\n",
- "\n",
- "%matplotlib inline\n",
- "import matplotlib.pyplot as plt\n",
- "import numpy as np\n",
- "from PIL import Image\n",
- "import torch\n",
- "from importlib.util import find_spec\n",
- "if find_spec(\"text_recognizer\") is None:\n",
- " import sys\n",
- " sys.path.append('..')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "metadata": {},
- "outputs": [],
- "source": [
- "from text_recognizer.datasets import EmnistDataLoader"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 8,
- "metadata": {},
- "outputs": [],
- "source": [
- "data_loaders = EmnistDataLoader(splits=[\"train\"], sample_to_balance=True,\n",
- " subsample_fraction = None,\n",
- " transform = None,\n",
- " target_transform = None,\n",
- " batch_size = 512,\n",
- " shuffle = True,\n",
- " num_workers = 0,\n",
- " cuda = False,\n",
- " seed = 4711)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 9,
- "metadata": {},
- "outputs": [],
- "source": [
- "loader = data_loaders(\"train\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 11,
- "metadata": {},
- "outputs": [],
- "source": [
- "mean = 0.\n",
- "std = 0.\n",
- "nb_samples = 0.\n",
- "for data in loader:\n",
- " data, _ = data\n",
- " batch_samples = data.size(0)\n",
- " data = data.view(batch_samples, data.size(1), -1)\n",
- " mean += data.mean(2).sum(0)\n",
- " std += data.std(2).sum(0)\n",
- " nb_samples += batch_samples\n",
- "\n",
- "mean /= nb_samples\n",
- "std /= nb_samples"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 12,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "tensor([0.1731])"
- ]
- },
- "execution_count": 12,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "mean"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 13,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "tensor([0.3247])"
- ]
- },
- "execution_count": 13,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "std"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.8.2"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 4
-}
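
For reference: the deleted 01b notebook above derived the EMNIST normalization constants (mean approximately 0.1731, std approximately 0.3247) by streaming batches through a data loader. A standalone sketch of the same recipe, assuming any PyTorch DataLoader that yields (data, target) batches; like the notebook, it averages per-sample standard deviations, which only approximates the true dataset std:

    import torch
    from torch.utils.data import DataLoader


    def compute_mean_std(loader: DataLoader) -> tuple:
        """Accumulate per-channel mean and (approximate) std over a loader."""
        mean = 0.0
        std = 0.0
        nb_samples = 0
        for data, _ in loader:
            batch_samples = data.size(0)
            # Flatten the spatial dimensions so statistics are per channel.
            data = data.view(batch_samples, data.size(1), -1)
            mean += data.mean(2).sum(0)
            std += data.std(2).sum(0)
            nb_samples += batch_samples
        return mean / nb_samples, std / nb_samples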
diff --git a/src/notebooks/02b-emnist-lines-dataset.ipynb b/src/notebooks/02b-emnist-lines-dataset.ipynb
index a7aabeb..2ef7da7 100644
--- a/src/notebooks/02b-emnist-lines-dataset.ipynb
+++ b/src/notebooks/02b-emnist-lines-dataset.ipynb
@@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
@@ -31,61 +31,43 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
- "emnist_train = EmnistDataset(train=True, sample_to_balance=True)\n",
- "emnist_val = EmnistDataset(train=False, sample_to_balance=True)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 14,
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "2020-08-23 22:01:45.373 | DEBUG | text_recognizer.datasets.emnist_lines_dataset:_load_data:162 - EmnistLinesDataset loading data from HDF5...\n"
- ]
- }
- ],
- "source": [
"emnist_lines = EmnistLinesDataset(train=False)"
]
},
{
"cell_type": "code",
- "execution_count": 15,
+ "execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "2020-08-23 22:01:46.598 | DEBUG | text_recognizer.datasets.emnist_lines_dataset:_load_data:162 - EmnistLinesDataset loading data from HDF5...\n"
+ "2020-09-09 23:07:57.716 | DEBUG | text_recognizer.datasets.emnist_lines_dataset:_load_data:134 - EmnistLinesDataset loading data from HDF5...\n"
]
}
],
"source": [
- "emnist_lines._load_or_generate_data()"
+ "emnist_lines.load_or_generate_data()"
]
},
{
"cell_type": "code",
- "execution_count": 16,
+ "execution_count": 15,
"metadata": {},
"outputs": [],
"source": [
"def convert_y_label_to_string(y, emnist_lines=emnist_lines):\n",
- " return ''.join([emnist_lines.mapping[i] for i in y])"
+ " return ''.join([emnist_lines.mapper(i) for i in y])"
]
},
{
"cell_type": "code",
- "execution_count": 17,
+ "execution_count": 16,
"metadata": {
"scrolled": false
},
@@ -230,7 +212,7 @@
},
{
"cell_type": "code",
- "execution_count": 18,
+ "execution_count": 17,
"metadata": {},
"outputs": [
{
@@ -255,94 +237,6 @@
},
{
"cell_type": "code",
- "execution_count": 14,
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "2020-08-05 00:40:26.070 | DEBUG | text_recognizer.datasets.emnist_lines_dataset:_load_data:153 - EmnistLinesDataset loading data from HDF5...\n"
- ]
- }
- ],
- "source": [
- "dl = EmnistLinesDataLoaders(\"train\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 16,
- "metadata": {},
- "outputs": [],
- "source": [
- "ddl = dl(\"train\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 17,
- "metadata": {},
- "outputs": [],
- "source": [
- "batch = next(iter(ddl))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 21,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "torch.Size([1, 28, 952])"
- ]
- },
- "execution_count": 21,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "batch[0][0].shape"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 24,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "<matplotlib.image.AxesImage at 0x7f139b1cf1c0>"
- ]
- },
- "execution_count": 24,
- "metadata": {},
- "output_type": "execute_result"
- },
- {
- "data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAABH4AAABDCAYAAADqHsJ5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy86wFpkAAAACXBIWXMAAAsTAAALEwEAmpwYAABmZ0lEQVR4nO29d3Rcx53n+72d0Tkih0bOgSABEgQJZommIi1akiWPg+zRO/bMsSfI7+yb2T07s89h54w94/Xq7diWLY+zZEmUFSiRFAmSYgABECCJnHMDjdBodDfQufu+P8gqowmQRCIBSvU5R0cEutF9761bdau+9ft9fxzP82AwGAwGg8FgMBgMBoPBYHzyEKz3ATAYDAaDwWAwGAwGg8FgMO4NTPhhMBgMBoPBYDAYDAaDwfiEwoQfBoPBYDAYDAaDwWAwGIxPKEz4YTAYDAaDwWAwGAwGg8H4hMKEHwaDwWAwGAwGg8FgMBiMTyhM+GEwGAwGg8FgMBgMBoPB+ISyKuGH47iDHMd1chzXw3Hcf1mrg2IwGAwGg8FgMBgMBoPBYKwejuf5lf0hxwkBdAE4AGAEQD2Az/M837Z2h8dgMBgMBoPBYDAYDAaDwVgpq4n4KQfQw/N8H8/zfgCvAXhibQ6LwWAwGAwGg8FgMBgMBoOxWkSr+NsEAMPzfh4BsPVOf8Bx3MrCixgMBoPBYDAYDAaDwWAwGLdjiud502Iv3HNzZ47jXuQ47grHcVfu9XetJxzHQSAQgOO49T6UTwQcx0EoFEIoFN7zayoUChEVFQW1Wn1Pv+dBhbSDQMC84BlLQyAQQCgUrusxcBwHqVQKrVa7rsexUZnfr1czxpKxmsFgLB2JRAKNRgOxWLzeh7Js2JyAwWAwNjSDt3thNRE/FgBJ835OvPm7CHie/xmAnwGf3IgfkUiEoqIiGI1GDA4OYnBwEF6vd12ORSgUQqlUIhAIwOfzIRQKrenncxxHFwnhcHhNPxsAlEoltFotzGYz4uPj4fV6MTExgbGxMYyPj8Pn82GlvlS3o6ysDLt374ZWq8V3v/tduFyuNfnc+deKwPP8mh//vUAsFkOhUMBoNGLz5s3w+/2Ynp7G1NQUpqamYLPZEAwG1/sw1535bfygtO29hOM4xMbGorKyEuXl5fjOd74Dl8u1LtclJSUFjz32GAoKCvDtb397zY5jsX4NPBjtLxAIoFQqYTQasWnTJoTDYbhcLkxNTWFychITExMIBAJL/jydToeqqirs3LkT3//+92G32+/Jc+FWyILzQbjmt4PjOIjFYgiFQoTDYfh8vtu+R6vVQiAQYG5uDm63e82f6yuFjX8r5y/+4i9QUlKCt956C5cvX171nPFet4VYLIZSqYTBYKBzApvNRucE09PTbE7AYDAYG5zVCD/1ADI5jkvFDcHnWQDPrclRPUBwHAe1Wo1HH30UKSkpuHTpEkKhEPr7+9dlcqZSqVBVVYWZmRl0d3djbGxsTT5XLBZDpVJBr9cjOjoawWAQ/f39cLvd8Hq9a3KuCoUCO3fuRF5eHnJychAbGwu/34+JiQkMDg6ivr4e165dg81mW4MzugHHcTCbzdi1axdMJhP+9//+36sWfgQCAUQiEcRiMZ3YAzeEsmAwCL/fD7/fv+qJGcdxEIlEEAgE4HkewWBwTRZdEokEiYmJKC8vR25uLsrKyugkb3x8HP39/bh69SquXPlkBPGRCbNQKIRYLIZEIoHP54PH41nwXqFQCIlEAqlUCrFYDL1eTyNKRkdHMTk5eU9E33vV1msNGQ+Li4vx2c9+Fi+//DJmZ2fXZUFoMBhQWVmJyspKfOc731n1cZA2kEgkC/p1KBRCIBCA1+vdsItfoVAIvV6PHTt2oKioCKWlpQAAp9OJyclJDA0Nob6+HpcuXVrSOXAcB4VCgcLCQhw5cgQ/+clP4HQ61/y+nD+eSiQSSCQSJCUlQSKRwGazwWazwel0wu/3r+n3AjfOUSKRAABCoRBCodCata9UKoVer4dMJoPH41nwrBYIBJBIJNDpdCgoKIBEIsHAwAAGBgYwNze3JsewXG4d/3Q6HXQ6HYA/j3/3YnNmKZCoa3J8fr//nmx+rQUcx6G8vBz79+/H9evXce3atRU/N0hko8lkQnx8PADQfuHxeNbkeSSRSJCcnIyysjLk5uZiy5YtdE5gtVrR19eHa9euoaGhYdXfxWAwGIx7x4qFH57ngxzH/TWAEwCEAF7leb51zY5sjSELO57nVz0RIBMMMvFXKpXYuXMncnJyoFQq4XK5YLPZYLfb1+LQl4VGo8GhQ4dgtVoRDAYxPj6+6ok4x3GQy+WIj49HYWEhysvL4fV6cfLkSQwNDcFqta5qIioUCqHValFUVIQvfOEL2LRpE+Li4iCXyxEKheDxeDA9PY2MjAx6XddqccFxHLRaLWJiYqDRaBAVFbXqz4uKioJKpYJarYZKpaKh3MFgELOzs5iZmcH09DQCgcCKJ8hkJ1ij0UAikSAQCGBubg4ej2dV10atViM5ORmVlZU4fPgwCgsLYTKZEA6H4fV64XK5MDw8jOTkZFy/fn1Z0QH3G9LnRaLIYY4sDsixk2sol8upkDM2NobW1taIRYxQKITJZEJCQgLi4+Mhl8thNpuRmJiIYDCIc+fOoa6uDuPj42u62CBtrVKpIJPJEAqF4HK5Vt3W9wKBQICoqCjExMQgISEBJpMJY2Nj9/04ST+Mj4+HwWBAVFTUqtOZJBIJVCoVtFptRIpGMBiE1+uF0+nE+Pj4moi6a41cLkdsbCyKi4vx5S9/GaWlpTCZbqR/+/1+uN1uTE5OIjU1FVeuXFk0+uRWBAIBZDIZoqOjkZCQgOjoaFgsljUfE9RqNYxGI6KjoxEdHY2oqCiUl5dDKpWiubkZV69eRU9PD6amptb0ewUCAcRiMYxGI3ieh9vthsfjWZP2FYvFSEtLQ1paGmQyGUZGRhY8qxUKBWJiYpCRkYGHH34YUVFRuHjxIhwOx7r0faFQCKPRSMc/hUKB5ORkJCUlIRwO4+zZs6ivr8f4+PiaRn6IRKIFKd9E7CD3WlRUFORyOSQSCYxGI0wmE8bHx9HX14fp6ekNOU6SsWn+HGElCIVCqNVqFBQUYPfu3QCAjo4ONDc3Y2xsDBbLgkD8ZaFWq5GamoodO3bg8ccfR0FBwYI5wcDAAE6dOoWmpqYNPSdgMBiMTzurifgBz/MfAPhgjY7lrohEIohEInAcB7/fTxdXi+UZz3/QCwQCSKVSqFQqhEKhVUeMSCQSKJVKREVFIRgMIhgMwuPx0IgVr9eLvr6+dYmIEIvFSEhIgEqlQnNz85r443AcB5lMhpiYGBQWFuLhhx+Gz+dDIBBAQ0MDPB4P3G73iibDHMdBqVRi//79+Md//EdkZGRAKpXSHfRQKASJRILU1FQoFAocO3YMHR0daxZVIRKJEB0djdjY2BWfA3DjHiP3WUpKCpKTk5GQkEAFLADweDyYmppCT08P2tvbYbfbVzSBJ55EOp0OGRkZUCqVmJ2dhcViwejoKGZnZ1d
0DiKRCGVlZfj85z+P/fv3Iz4+HiKRiLY1ET70ej3EYjF+8IMfwOFwrOi77hUkeoeIOyaTie5IA39erJpMJthsNgiFQpSWltJINrPZjKSkJNTU1OCf/umfMDY2RhcxcrkcFRUVeOSRR7B9+3Yq8MnlcrhcLoRCIVitVtjt9jWL/CBtrdFoYDabodfr4fV6MTw8DIvFsm7RNLeDCAEpKSkQCoXIz89Hd3c3gsHgfT1OIianpKSA5/kVL/xIvyb3UmpqKl3syuVycBwHn88Hh8OBwcFBNDQ0YHp6Gh6PZ8NEGgiFQmRlZeHxxx/Hc889h7S0NNqvg8EgjdLS6XRQq9X453/+5yWJGxKJBHq9HqmpqRAIBMjLy0N3d/eaRj1xHIecnBzs2LED27ZtQ3FxMcRiMaKjowEAp06dAs/zmJ2dhd1uX5NrTiKMyBhbVFSEUChE041J+670fGQyGYxGIz7/+c+jpKQEbrcbp0+fRn19PX2fSCRCeno6qqqqsHv3bhw4cABCoRAKhQJdXV2Ympq67+nkMpkM27dvxyOPPIKtW7dCqVRCqVRCoVDQ9DOr1YqZmZlVR0aRcZz0O41GQwV80icTExNht9vB8zxSUlKQlpYGjUaDjIwMZGVloaGhAa+88grq6+vXLULqdojFYqSnp0MsFq9KlCKbG0qlEhkZGdi3bx/EYjGSkpKgVqtx9erVVQnvIpEIW7duxRe+8AXs3r0b8fHxEAqFC+YEJBXx3//935nww2AwGBuYVQk/95v8/Hzk5eVBoVDg8uXL6O3tBQBER0dH7JgEAgFYLBYEg0GaH5+fn49du3ZhcHAQv/vd71b8cOI4Dvn5+aisrERmZiba29vx0UcfQafTQSwWIxQKgeO4dfP4ufVY1+pzhEIh5HI5oqOjkZmZSXf2FQoFBgcHYbVaVzTRk0gkSEhIwNNPP43c3FwIhUIEAgF0dHTg2rVrGBwchFKpxGOPPUajWtYypJ+kEIRCIdjt9mULGUTsiYuLg06nQ2ZmJp588kls2rQJOp0OSqUyIj3H5/Ohr68P58+fx5kzZ1BXV7fk3HhyzWNjY1FeXo6qqiqUl5dDo9FgcnISV69exYcffogPP/xwRTuuWq0WDz/8MHbu3InExERwHAePx4Pjx4+jo6MDYrEYBQUFyMjIgMvl2nCTaQA00io6Ohp5eXl0h5JE55F7WSaTwev1guM4GAwGyGQyuqAAbngk6HQ6TExM0MWxVCpFeno6CgsLYTabI3aiQ6EQMjIyUFhYiJGREdhsNvh8vhVPuEnESlxcHLZs2YLt27ejrKwMMTExmJmZQWNjI95991189NFHG2KsIRCBODMzE2KxGN/85jfR1NSEzs7OFS+WVwJJ2xMIBJienobT6VzW+EQWnXFxcdRzrKqqCgcOHIDJZIJKpaIRpOFwGH6/H5OTkzh9+jQ+/PBDXLlyBTabbUNE/6jValRUVODgwYPIyMiAQCCA1+vF6dOn0dXVBb/fj8TERGzbtg1er3fJAjhZlOfm5kIgEOAb3/gGuru70dDQsGLx+VYEAgFiY2ORnZ2NvLw8pKSkRETvxsXFIScnB5OTkxgbG4PT6VzVwlMikUCr1SIpKQlFRUXYtWsX9uzZA7fbjfb2dly+fBkff/wxamtrl92uZJNj79692LRpE5588kmIRCK0tLQseKbFxMSgqqoKTz31FPLy8uh4NTY2BofDcd+9VEh/MJvNKCgogNlshlQqpWMmz/PIyMhAUVERRkdHqWfRSsY/Ysiu1WpRWFiIJ554gs77iCBEopBJZJpSqYRKpaLpsOQe+fDDD9Hc3LyhnlUkglMikcDpdNIIrtV+nk6nQ3p6OtRqNWJjYxETEwOBQIBLly6t+Dmk0+lw6NAhVFZWIjExEQAwNzeHkydPoq2tDVKpFMXFxUhNTYXL5YLb7V7xeTAYDAbj3vPACD9CoRBHjhzBF7/4RZhMJng8HlitVgA3hB+Shw/cCF2vr6/H//gf/wPDw8M0xH3z5s1LCmG/EwKBADExMcjNzUVWVhbGx8cxNzeHgYEB5OTkQCqVIjU1FTt37kRra+u6T/rvFWQhrFQqIZPJqEHlctHr9aioqEBFRQWEQiEsFgs++ugjvPvuu6ipqcHs7CwkEgnOnz+P+Ph49PX1rek1jY+PpzuHv//975eULjA/1U8ulyMmJgaf+cxnEBMTg9TUVJSWliIhIYHuUBJRSSwWQyaTITExEZs2bcLU1BQGBgYwOzu7pB1SmUyGpKQkbNq0CQ899BBKS0uRnJwMqVQKhUIBv9+Pzs7OFVfaKCsrQ0VFBeLj4+HxeNDf348//elPeOWVVzA9PQ2RSISMjAzk5ORAIpFsqPB5iUSCtLQ0HD58GNu3b0daWhq0Wi0MBsOi9ybHcdQA0+v1wu/3QyqVAgDcbjeuXLkSEUFAdtvj4+NhNBohkUgirrNcLse+fftQWlqKxMREnDlzBm1tbZiYmFjx+cTHx2PLli3Yv38/SkpKkJKSAoVCAbVaDZ/Ph9bWVpw5c2aFV+zew/M89Ho9DAbDgnS7e43BYEBKSgokEgl+8pOfYHJy8q79a77Xk1QqhdFoxGOPPYbo6GgkJSUhLy8PZrM5wvPF6XRGeGyUlpbCYrFgZGQEbrcbwWBw3SN/8vPzsX37duTk5CAcDmNoaAivvfYa/vM//5OmF8XExKC0tBQxMTErFk6I34tUKl0z4UcqldKITK1WG3EfcRyHwsJCpKamYvv27UhOTqYL0pU+IwwGAzIzM1FSUoKKigps2rQJsbGxNDpqZmYGvb29EAgEy2pX4oeUmZmJxx57jKbQdnV1oaOjA8PDw/SYZTIZ8vLyUFJSgrS0NMjlcszMzGB0dBQtLS1wOBz3/Z4SCASQy+VITEyEyWSiz3yCTCajglZSUhLOnj2LlpYWjI+PL/k7xGIxYmNjsXXrVjz++OMoLi5eMH7Mb1cyhgOgvnnAjXvG5/Ph+vXrsFgsG0oYB25cy/T0dERHR+O3v/0trl69umb9hSAWi2lEqkgkWrFQWF5ejsrKSsTFxcHtdqO3txfvvPMOXnnlFdjtdohEImRnZyMrK2vFc0AGg8Fg3D8eGOHHaDRSHxaZTEYXu8CNFBry4JTL5dBoNNi6dSuee+45nDhxAmq1Gjk5OYiPj4dMJlv1schkMqhUKqhUKrqj/Oqrr0Kr1WLLli3IzMzEQw89hD/84Q+YmZlZ9fdtVEj0iVarhUQiWVE6h0ajQWFhIXQ6Hfx+P06cOIHf/va3aGpqgsPhQDgchsfjQXV1Nd0hW0vhh4gEVqsV1dXVd30/SYuQy+WQy+UwGAzIysrCoUOHYDAYoNFooNPpEAwGqefO7Owsurq6qBeGXC5HXFwcsrKykJSUhImJCfj9/jtOzoRCIXQ6HYqLi7Fz505s2rQJiYmJdBdUJpNBqVTS9JOVUFxcjPj4eIjFYvT29uLtt9/GL3/5S4yNjdGFRltbG/r6+iAQCDbMJE8mk2HPnj20SgqJACSGqb29vVSgvfUa8zyPiYkJqFQqpKenIzk5GT09PfjpT39Ko3
1IJaSkpCQkJydH7DyTzyA70CKRCHl5eRgeHobVal2S4HArAoEAKpUKxcXFqKqqoospMokPhUI0xWKtovrWGrIrvH37dlRUVKCnp2fNquUthdzcXGzevBlutxvvvffekkQfhUJB+7VGo0FmZmZEv9ZoNABAd7aJtwXpz+QZlZWVhZaWFthstjUzvl8N2dnZMJvNiIqKgt1uxzvvvIOf//znGBkZoSKPz+fD1NQUJBLJso/X7/fj+PHjqKiowNatW9Hd3b0mBvwikQjx8fFISkqiYsP8+53neUgkEqjVasTFxdG0wq6urhWJVyT9pry8HFu2bEFhYSHi4uJoNDHpc8v1gVMoFDAYDMjNzcVXv/pV7Ny5EwqFAq2trXj77bdx+vRpWCwWiMVipKSkIDMzE88//zwqKipgMBjgcrnw5ptv4uOPP8aFCxdgs9nu64bSSsY/i8WCsbExTExMLDl6rLy8HI888gh27NiBzMxMmkI2MTGB4eFhTExMYGZmZsHmHc/zcLlc8Pv9SEhIQHl5OXp6evCLX/wCLS0t9zXScClIJBI88sgjEAqFOHv2LEZGRtb8O4j/H0k1X6nhdlFREWJiYiAUCtHT04OjR4/i17/+NaxWKx0nmpub0dPTA47jNsycgMFgMBiL80AIPxKJBGVlZUhNTaWTLp7n4ff7YbfbcebMGfT29iIYDMJsNqO8vByFhYWoqqqiu/qJiYmQSCSrXiiRNBG5XE4nhKFQCC0tLbhw4QJiYmLoRDsxMXFdhJ+VRnwsF4VCgZSUFMTFxUGtVsPv9y9rws1xHIxGI0pLSyEWizE+Po7Tp0+jvb0dMzMzdBLB8zwcDkfEDt9aIBaLsWXLFuj1ely7dg1DQ0N3fD/xW8nMzERMTAz0ej2MRiOysrKQm5sLhUIBoVAIt9uN8fFxWiLZZrNhaGgIubm5VJzRarVISEhAQkICPd87XSeSVrFlyxYUFxcjISGBfh/P89TEeKUmkQKBAGVlZdDpdPB4POjp6cGZM2cwMjISIZZ4PB54PJ4NITiQxcamTZvwxS9+EZWVlfB6vWhra6NRF4ODg7BYLNRPabFFrdvtpilK0dHRGB8fp+kXRBAoKCjA008/jZKSEqjV6kXL5pJ2ysvLowbv4+PjcDqdS15Mk7B9vV5PF5+JiYkRBqDEg2T+jvtGIxQKwWKxUOPR+xnxIxKJkJWVhezsbNhsNgwMDNzx/SSFMiUlhRquGo1GZGZm0hQTkUiEQCAAq9WKiYkJjI+Pw2azYXR0FHFxcRCJRNBqtVCr1UhMTERcXBx6enrWtALhShAIBCgoKEBiYiLC4TBGR0dx4sQJDA8PR4zVJGJiJf2atDURCFZjVDv/uHU6HY4cOYJdu3bBbDZTrzTgzx5+pNKaVqtFcXEx7HY7Ghsbl51mR1Ir8/LyUFpaSqtKkvkGifAkqURL/UziC7Zlyxbk5+ejoqKCCnAXLlxAbW0tBgYGIBAIsGXLFhw5cgRmsxmbNm2CUqmE1WpFa2sr3n33XbS2tmJiYuK+eqiQc8jNzcWzzz6LTZs2Qa1WR6TE3jr+5ebmYnZ2FtPT0zT97k7jn1gsxqZNm/D0009jx44dMBgMGB0dxeDgIPr7+2kqud1ux+zs7KKp3n6/H+FwGFqtFpcuXYLVakVDQ8OaFoJYC8j13LNnD5xOJ434XWskEgkMBgNSU1NhNBpve93uhEAgwNatW6HRaOB2u9HZ2UmFqo06J2AwGAzGndnQwg9ZdOXk5GDfvn3UV4Pkure1taG3txcnTpxAb28vwuEwUlJS4HK5UFRUhLS0NLobJJVKEQwGVzVpIhMbjUZDvVvIw5SYezocDrpjr9fr73tkhFarXRCGfa8gJrmxsbHQ6XRwOBzLFn6USiUSEhIgEAjgdrsxPDyMubm5Ra/ZWoo+HMchISEBubm5mJubQ0tLyx0jEkioe1paGh5++GEkJydDr9dDo9EgOjoaJpOJ+hOR8qYTExNwOByYm5ujopFEIoFQKKReUEuZLBET1czMTGzZsgVpaWlUCCB/T9LPSJlp4rtAuNu1EwgESEpKgkwmo0apt07w5rMRUhhFIhEt2U0WDHV1dbh48SKuX7+OoaEhDA4OYm5ujkZe3O64iVeTTCaDz+ejk3GRSASlUkmNVpOTkyNS+IjYSSb0AoEACQkJKC4uRnd3N9ra2uDz+ZbsfUA8yVJTU1FWVob09HRaQYoIn/PbmghA8/vLerYNEaXC4TAmJibg8/nu64KA4zjExsYiKysLCoUC58+fv6NvF2m3lJQUVFVVIS8vD0ajERqNBjExMTCZTDTKampqCh0dHRgbG8P09DTcbjdN85LJZPS+IO2zEeA4DnFxcdBoNAiHw3C5XBgaGrrtOL2ce2d+Zcvx8fE1TakRiUTQ6XSoqqpCbm4udDodHdPC4TD1bCFlxWUyGcxmM8rKypCcnEz7+1JTXGQyGeLj41FSUoL8/HwkJCRAqVQuSC0TiUR0DL9bnxMKhdBoNNi8eTMOHjyItLQ0mEwmDA4Oore3F/X19bQtYmJisHPnTjz++OPQ6XRQKBSYmJhAV1cXamtrce3aNVoJ8n5CUlxTU1Oxa9euiPGP+PjcOv7Fx8ejqKgI/f39aGpquuP4x3EcVCoVKioqsHPnTqSmplK/unPnzqG3txeDg4OYnZ2l33Wn+ZRYLEZnZye8Xu9t5xHrCblP8/PzUVtbe8+EPJFIBIVCgdjYWERHR9Mqr8u5HgKBgKaSO51O2O12jI6Obug5AYPBYDDuzIYVfoRCIVQqFXJycvD000/joYceQmxsLLxeb4QPTEdHBy2hKxQK4XQ6aalJq9UKiUSC6OhouiBfTZg02VmMjY2FRqOB1+vF1NRUhHEveYiTUFuxWLxqX6GlIhAIkJaWBr1ef18ijYgYYjKZYDQa6eR/uZMtsjAkvhpyufyeV8WRSCTYtWsXEhMTUVNTg/Pnz9811UqlUtHIj+TkZERFRdEqc2RRQkTJgYEBuN1uek45OTlIS0uD0WgEx3FwOBy0Cpfb7b7tNSPXOCkpCYWFhSgsLKQVNG5FKpVSUZKEXZOF0lJKx5N2IJNGg8GAoaGhNTXTXisEAgHUajWys7Oxfft2mhLR1taGmpoaXL9+HS6Xa8n+SeFwmFbnm/9eIvRGR0dHeE0Qo3EyppDyzGq1GjKZDAkJCcjMzERqaiqcTid8Pt9d7+f5hs4FBQUoLi6mC95b30dMaJVKJa1+R85jPQ2FSdQZz/M03WO+KHKvkUgk2LJlC3JycjA9PY133nnnjgsrItLn5ubiiSeewObNm+mCf75xLfF36e/vx8zMDGQyGQwGA9LS0pCSkoLY2FhIpVJMT09jZGQEo6OjcLlc992EdzFIvyaRTSaTCRKJZEljwp0gXkgCgQCjo6MYGxuDRCKhXlmrOV7imRQdHU0FBRLl63K50NnZCQA0QoukuqampiIzMxPT09Pw+/2Ym5tbkuitVquRm5uL4uJi6qtzq2ApFoshl8uhUqmgVCpp9TKe5yP6IIH05fT0dKSmpsJgMCAQCOD69euor69HW
1sbPB4PNeXdvn07TS0Lh8MYGxvD9evXaeTKelRLIvOY6OhoGI3GBeMfiWwlVcjI+BcfH0/P+07jn1gsRkZGBj33cDiM4eFh1NXV4eOPP4bD4aBj+FLmFT6fj87xNpoQQSpgVVVVQafT4Y033oDNZrsn4hSJHFUqlUhMTERPT88d5xl3+hwieCqVSuj1ekgkkg05J2AwGAzG3dmQwo9QKITBYEBFRQW++tWvYvfu3VAoFAgEAujq6sKxY8fw05/+FENDQ/RBplQqYTQakZubi8OHD0MoFOLQoUPgeR7f/OY3UVlZidOnT991gX8ndDodSkpKaBWTK1eu4PTp03RC09TUhNbWVuTm5kKr1eKJJ57A5cuXI8SmezkZIaaBer3+nuSN3wqJyEpMTERCQgIGBgZWJTiZTCbs27cPLpcLzc3NsNls90T84TgOiYmJeOGFF2AwGNDf34/u7u47/g1ZULtcLrrrS6Iw5kfdSKVSmM1m6j+l0WgQFRUFrVZLPRBmZmYwNjaGzs5Oupt5u/MkYoDRaERsbCwUCgUVAuYvTMgkz2w2IzMzk5bUDQaD8Hq9mJycXLIgoFQqkZWVhb1798LhcKC3t3fDlWiVyWTIz8/HX/7lX+Khhx4CAHz88cd4++23UV9fv+L78NaJsV6vR25uLgoLC2E0GunvR0ZG0NTUhOPHj6OhoQEqlQpf/vKXUV5eThd5eXl52Lp1KwKBAILBICYnJ+/43aTUvE6nQ3x8/G3bmuzCm81mZGRkYHJyku7m+nw+WK3Wda8mRc7lgw8+wLZt22A0GtHd3X3Pd+Dj4uLw9NNPIz8/Hw0NDWhqarrj+4mYMDs7C57nIRaLaTUwAllExcTEoKSkBKFQiPZn4vclFovh9/sxPj6Orq4uDA0NLTsC8l5B7gOpVIrExEQ8/PDDGB8fx8DAwIq9P+ZDPMZOnjyJzZs3Iz4+flWRrkKhEDExMaioqEBCQgIVkrxeLwYGBlBfX4//+I//QCgUolUIc3JyaIrd9u3b4fV6IRAIYLFY7uotRfpTYmIitFotpFJpxJjO8zy9n00mE9LS0pCRkUH95oLBIGw2G2ZnZ+ncguM4pKenY+/evSgpKUFsbCzC4TD6+vrwm9/8hprHk8IT+/fvx759+6gHod1ux5UrV3Ds2LFFq37dL3Q6HXJzc1FUVBQx/lksFjQ3N+PkyZOora2FQqHACy+8gLKyMrr5lJubi23bttFI61vHP5FIBJPJhG984xs4ePAgxGIxrly5gvfeew/vv/8+LBbLio55o0X5EIhv25e+9CVagOReGk+T6NGMjAw0NjbSOcFy+zvP83QTds+ePXA4HOjr69sQYxuDwWAwlseGE344jsPevXvx4osvYvfu3TAYDPS1s2fP4je/+Q2OHj0aYdinVqvxmc98Bp/73Oewd+9eaLVaeL1evPXWW9Dr9ZBKpWhqakJ7e/uKq+wAN6qH5eTkQC6Xw2q1or29PaJqxcjICC5cuACz2YyHHnoImzdvRkFBAUZHR+mOldvtxszMDH1okqiMtYKUMr1frDatIRAI0LQaqVSK559/Hrt378aJEydw9OhRXLlyZc0nvUKhEDt27EBKSgqam5vR0tICu91+x78hbWexWBaEkJP0G57nIRKJkJSUhLi4OLpgJClgXq8XHo8Hg4ODaGxsRHNzMzV2vt09IJVKoVarYTKZoNPpbhs5QXZmCwoKcOTIERptQISAM2fOUEHgdrhcLoRCIchkMmRnZ+Ob3/wmnnjiCXzve9/DmTNn6OJ4IxAVFYWkpCSUlpZCKpVicHAQv/jFL1Yl+twKx3HIzMxEWVkZsrKyaPSew+HA73//e1RXV9PKXSKRCMPDw9i6dSv+7u/+DqmpqcjJyUFycjKefPJJvPrqq/jpT396x+svkUigUqlgMplgMBhu65VCfHPy8/Px2c9+lo4ngUAANpsNJ06cgNVqvW+RhvPx+/00Wk+hUGB2dhYGgwFqtRoSieSeV9ipqKhARkYGrFYr6urq7iq2kX5NvEgW2xQgHlomkwlKpRIAaLpPIBCgwtHU1BQaGxtx/fp1jI2NwePxbIhFqNvtht/vh0AggMlkomLpD3/4Q5w9exYTExMrEtgDgQCtXKZUKjE7O0vLcEul0hWb6pIS0Vu3boVSqUQoFMLs7Cx6enrw+uuv4+TJk2hubgbP8xgeHsa5c+dQWVmJF198EbGxsThy5AgeeughnDx5EkePHkV1dfVtz4+keOp0OkRHRyMqKoqmr80Xf4AbYw4xD/b5fNQsfnZ2FnV1dejq6sLMzAyCwSAMBgO+/vWvY9u2bTCbzRAIBBgbG8Nrr72G6upqBAIBlJSU4JFHHkFVVRUyMjIQFRWFcDiMmZkZ/OlPf8Kf/vQnXLt27b6aos+H4zhkZGRg8+bNyMnJiRj/Xn/9dVRXV6O5uRnj4+O0ImdZWRn+9m//Funp6cjMzMSLL76Iw4cP45VXXsHPfvaziDGJbPBt3boVcrkc4+PjOH78ON55550Viz4bmcTERGzZsgXR0dE4ffr0HdOm1orVzs9cLhfC4TCkUiny8vLwrW99C08++SS+//3v4/Tp03C73RtmTsBgMBiMu7OhhB+hUEgnVjt37oRer494fWZmhpo2zketVqOkpASbN2+GRqNBKBTC8ePH4XA4sGfPHgQCAVgsFrS2tt51gX+nY9u9ezcOHDgAu92O2tpanD9/PmJyGwgEMDU1BavVCp7nkZWVhR/96Ed0suPz+TA2NobW1lbMzs5ieHgYk5OT1CyUTBofJILBINxuN7xe77KPPRwOo62tDf/6r/+K5557DlVVVVCr1VCr1YiNjUV+fj5+8YtfoLq6GtPT02tyvCKRCGazGd///vdhMBjwP//n/0RdXd1dj52kXBHvB7I4IJFAJOoCiDS8JAvL6upqdHd30/QXq9WKmZmZO4Zf6/V6FBcXY/fu3SgvL0deXh6EQuGiwh6JDEpOTsYLL7xAjyEYDGJkZAR6vR4ffvjhbXf5g8Eg/vVf/xVPP/00KisrkZycTNObfvjDH+IHP/gBTp8+jZGRkQ1VHpdMasl9GA6HaduQFI6oqChahWk5u5QCgQCxsbFISkqC0WhEKBSC1WpFS0sLGhoaMDg4CJfLRa/z2NgYrly5goaGBnAcR32gRCIRjVy4nc+CTqdDYWEhtm/fjq1bt6KoqGjBApRAIsuSk5PxpS99iYrHoVAINpsNarUaH3zwAQYGBlaUerlURCIREhMTcfjwYYyNjdFKVsPDw7h27RptG7FYDKlUek99x0gU33//7/8dSUlJePXVV/HOO+/cVTQm/Zr44JACACR9Z74Hx/x+zfM83G43ampq0NXVRdO7RkZGqP/PelfzAm74UL366quYnJzE3r17kZ+fD71eD61Wi+9+97v45S9/iQ8//BAdHR13NJkl3lVPPfUUxsfH0dnZCavVSu/5+W0tkUhWnNpHxPLk5GQkJSXRCMmhoSE0NjaisbERY2Nj9J6emZlBd3c3pFIpysrKUFpaSisnJiQkUAF1sbaQSCQwGo0oLi7Go48+
isrKSphMptt6rxHTdVIllNwPPp8P77//Po4fP47GxkaMjo5CqVQiOzsbcXFxkEqlCAQCsNvt6OzsBM/z0Gg0KC4uRllZGfLy8qBWqwHcmCOMj4+jvr4eXV1d67qwJuNfcnIyTZ8fHx9Ha2srrly5gv7+fjr+hUIhjI2NobGxEQ0NDdQfRq/XQywW0/FvMY8eslkVCoXg9Xpp2j7w5yqaMpmMCo3E3+lBwmg04rHHHsM3vvENAMAPf/jDe962ZAwjFc+W+13BYBD/8i//gmeffRYVFRVITExETEwMjEYjfvjDH+Jf/uVfcObMGVgslg01J2AwGAzG7dlQwk9iYiKOHDmCgwcPLvC2mJycxLVr19DR0RExiROJRMjOzkZqaio0Gg14nofNZsMrr7yC7du3U98dh8Oxohxn4M+eGjExMdBqtRgdHaWlRW+FVP4Ih8OQyWTIzMykD9xQKITs7Gxs3rwZwWAQdrsdc3NzcDgcqK+vx5kzZ9DW1gan07n8i7cOEE8bm82G6enpFaUN2Gw2nD59GlNTUxgdHcWePXuoh9LmzZshEomgUqnwzjvvRFT6WilCoRBarRYGgwFzc3MYGRm5o/nr/L+Ty+WIj4+nHiDhcJie//nz5xfcX2RC7PF4aMleu90Ol8sFj8eDYDB4x+tFUrdIlTidTnfX4yQL2flVpwKBALKzs9HW1kYFqsWErtraWuqfUVVVhc2bN8NgMCApKQkvvPACTCYTTp06hZaWlntSiWQ1qFQqHDhwANnZ2dRQmIh8Wq0Wra2tuHDhAnp7e5cUiUCMx7OyspCWlgaNRoO5uTmcO3cOx44dQ21tLex2OxUWeJ6Hx+PB0NAQfvvb36K5uRn79u3D9u3bIZPJUFVVhePHj6OtrW1Rvw6lUonk5GRkZ2dTL6i7QTxb5re1QCBATk4OmpubMTU1dVcz1NUgkUhoOotUKkU4HIbH44FCoUBaWhqNtomJicEzzzyDjIyMiPLOPM9jZGQEk5OTmJubo8dKUjyXK5zodDro9Xrq5zY/GvN2EMGUjDnEn4ik75w/fx5zc3MLjoUYDNfU1GBwcBA2mw1Op5Oex0baBW9ubsbs7Cy6u7uxf/9+VFVVITo6GvHx8fjc5z4Hk8mEEydO4MKFC7cdC4VCIdLT07Fnzx7IZDIqeAgEAuTl5dFI2oSEBDz66KPQ6XQRAg3P8xgbG8Pk5CScTie9RlNTUxFtTXz0SCl1juPQ0tKCc+fO4dKlS7h69WpEBIzf78f09DSam5vx85//HGVlZfja174GvV5P/b96enrQ3d294NxEIhHUajXS0tKQm5tLRZpbmd+WRJiaH1lL0i7T09NhtVrh8XhQUlKCxMRE6rc2NzeHyclJTE1NoaqqCiUlJdi/fz/y8/Oh0+mo4X9DQwPOnj1LjX/XayOIjH+ZmZlIS0uDVquF2+2m419NTQ3sdjvd1Jo//v3ud79DS0sL9uzZg507d0Iul2PXrl04ceIE2tvbqf/SrcjlcpSWliIQCNC5lUAgoHM7q9WKa9eu4dKlSw/MHImgUCig1+uhUCioB+C9jgYMBoPUh87n860ouvzy5ctwOBzo7OzEzp07UVJSQucEX/va12AymfDRRx+hvb39gRTkGAwG49PGhhF+tFotqqqqsH//fuTm5kakOZDIEJJWQVAoFMjMzMQjjzyC/Px8iEQijI2Noba2Fo2NjSgtLaULQGLCu1xIiVqz2Qyz2QyHw4H29nZ0d3cvEH7IRHZkZAROpxNarRY8z9PdVJJfPT8ag5hFm0wmZGVloampCadPn0ZbW9uGiv4hu6Dzr2EwGKQmxWTCu9zJDPHFuHz5Mvx+P2ZmZlBeXo6srCzo9XqUlpbSXcATJ07AbrevaiddpVJh9+7d8Pv9+Oijj2gEDJnIi0SiBSky5LzFYjFNWeE4job59/f34/Tp04t69RBxjAh6JDLqbudATGdJxTCNRnPHRQm5p0iEEREghUIhxGIxTCYTTXu8Xei33W7H9evXqQeR1WrFtm3bkJ2djby8PASDQcjlckRFReHy5csrTuW4F6jVahw6dIiGpgM3FqtGoxFSqRTZ2dnQarWorq5GU1PTXXco50d4mUwmyGQyWnWus7OTiirzIcJHa2sr5ubmYDQakZqaiqSkJJjNZuTm5kaIHATilUVM0nU63bLaen41N5KSZDQaaXW/uwmMKyUYDMJqteLs2bPQ6/V0jExMTKTpagBoSev4+HhMT09DJBJBKpVSI1abzUavSTAYxLVr1+D1etHX10ej4u4WqSUSibBnzx7wPI9Lly5RcZLc67dLMyOivkqloteLXNehoSGcOXMGDodjwVhMhI+uri7Y7XZ4PB74/f4NEeVzK06nk6YhTU5OYnp6Gjt37kR2djYyMjKoR49YLMbJkycXrcBEiiWQtk5OTkZKSgqSk5MRFxdHo3OVSiVKSkqg1+tpCiRpa7vdTr1wSPntxsZGeL1eDA0N0dLbSqUSKSkpUKvV4DgOU1NT6OvrQ19fH+x2+4JqhSSa5tq1a/B4PNi3bx8kEgl0Oh0yMjKQmZmJqampBcLPfONik8kEuVy+ICrt1u8iJvA+nw9CoZA+C0g6rslkgtvtxsGDB2E0GiGRSBAMBuH3+yESiVBRUYG8vDwq8KrVaohEIvA8j9nZWZw5cwZnzpzB8PDwuo6vRFSOj4+nKXA+nw8jIyPo6OjA1NTUAvGGjH/t7e2YnZ2lptUpKSkwm83Iy8vD5OTkbUuLKxQKlJeXIzk5OaK/m0wmSKVS2Gw2mM1mSKVSfPTRR0uulLjecByHoqIipKamYmJiAidPnsT09DQdm8gm0krne4vNzeZvSvX399/RR/BOTE9P00IJJFq5vLwc2dnZyM/PRyAQQFRUFBQKBerq6jbUnIDBYDAYC9kQwg/HccjKysKhQ4dQUFAArVYbsYs9NzeHq1ev0kpJZFFWXFyMAwcO4DOf+QxiY2MxOTmJxsZGHD16lE4iRSIR5HI5NdZdLmKxGEajESUlJTCZTOjr68P169fR29u7IOqB7Gr29fVhamoKycnJ8Hg8uHTpEjo7OyMWpcCNyZXZbKY7jgcPHsT27duhUCgwNTWF8fHxDbGQIGV7yeKSQFKcLBYLpqamVhzuGwqFYLfb6Y7z6OgoDh06hPLycmg0GuzYsQOBQADNzc2Ym5tb8eRCLBYjNjYWjz32GGZmZvDuu+/C4XAgNTUV0dHRtCzwlStXFq22QSb65BqQncm+vj7U1dVhdnZ2wd+QqJ/p6ekll1MlvhPR0dGIiYmBRqOhi1Lymbd+Poni8Xq9GB0dhUwmg0KhQFRUFL1epFIR8GfT0luP1eVy0Yn96OgobDYbjXIqKiqCTCZDMBhER0fHui9MiJhLFq5kIkrOi/ye53kq3hCz6rvdq7dWziKRIMSr6U7t6HQ6MTQ0hObmZjoGJCQkoKCggKZ3kt3R+dGE0dHR0Gq1tFocYb7gQxYI5DhIJSXS1mTxIBQK77nfl9/vR1dXF6ampqBSqZCRkYFNmzbRVLX5ERFEgJqdnYVcLodMJoswSSZiaCg
UotW0VCoV+vr6aKTc7RAIBNBqtTh8+DCNruvt7YXJZEJKSgp9Bly/fn3RBSu5l8j9RI5zYGAAdXV1cDgci47DRPgmIsZGivKZD0lLGxoawvT0NI1OMhqNMBgMSE9Pp942V65cWXRBHQqF0Nvbi9/97ndQqVQwm80oKipCRUUFtmzZEuGDQxay89s6HA7TtEtSAYtcL4FAgPb2dvT398Nms0Emk1GRmrSHz+e7Y7oKiaDt6enBlStXEAwGkZiYCJVKhby8PFgsFgwODlI/NlKliIyxKpWKijjkms2/fsRDi1SzstlskMvlNEqM3LdSqRR6vR4VFRVQKpW0D0okEsTFxeHw4cPIyMiAQqGgxQHmp2leunQJra2tNIVqvSDppGq1mh4r2YAh0SO3w+l0YmRkBC0tLUhNTYXP50NCQgLy8/MxMjKCqakpzM7O0nYgzySpVIqUlBTEx8fT+4NcUwA0dY/nedTW1j4wwo9SqURlZSUyMjLQ39+PY8eOQSQSoby8HBKJBDKZDNPT0+jt7V2RFcGt4xcQGe1zt8qhd4LMCVpbWzE5OYmxsTHYbDYkJCRAoVCgpKSEpvB1dnYy4YfBYDA2OBtC+JFIJNi9ezd27NiB2NjYiAVuOBxGf38/rl69iqmpKQA3KvpkZWXhr//6r7Fr1y7IZDKMjIygpqYGR48exdmzZ+lDjuyGkwX7chZCHMdBLpcjJSUF+fn5NCJiYGDgtkagfr8fNpsNIyMjKC0thUAgwPT0NKqrq9Hf3x+x6OA4DiqVCgaDAY899hieffZZFBUV4fnnn8eVK1fw8ccfb4jJDZkkk0kguYZut5t61Sw3zWsx4cHn86GxsREjIyMIhUJQq9XIyMiAWq1GaWkpcnJyqGnqStBoNMjPz0dZWRm6u7sxOTmJ7OxsPProo9i5cyckEgnUajX+4R/+Ae++++6iZsZkkjo/f350dBRDQ0O0vO9iLFXAI9VjiPcEqdI2v9LMfNxuN2ZnZ+F0OuF0OuFwONDU1ASNRgOj0Qi1Wg2fz4fOzs4Iw9lb0xfm/0wiOex2O/r7+1FaWorCwkJoNBqkpqaiuLgYqampGBsbW9I53QuioqKgUqmoQBIKhTA+Pk7TCMjCJTY2FnK5HBKJBLGxsbTs9lIgQt/8xUk4HL7rBJp4fdXX12N2dhY5OTlUyCTRC2Qsk0qlMBgMKCoqQkZGBqKjoyGTyRZta6/XS9va4XBgZmYGra2tkMvlMBgMERGGY2Nj1GfmXi4gA4EAxsfHMT4+jp6eHnz88ccoKyvDl770JVr16Mtf/jIuX76MX/3qV/j4448hkUigVCrh8XgQFRUV8XkikQiZmZk0vZEY+95pQUQiurZu3Yqenh5MTk7CZDLhwIEDeP7552n0x3e+8x188MEHsFgsi4q6AGgkptvtpv36VjP3+WwEYf523Nqvw+EwnE4nampqMDIyguzsbJSWlsJkMiEmJgabNm1CZmYmLBbLovcM8TyamJhAb28vLl68iDNnzuBb3/oWHA4HeJ7HM888g+vXr+PXv/413n//fRpN5fV6F4xhAoEA2dnZyMzMRHx8fIQoM1/oJuPVne5jEoE1NjaGo0ePoqWlBdu3b0d2djYqKiqoqBIMBiEWi6FSqZCUlEQjMYhIs9jnulwu2O12OJ1OzM7OYnJyEn19fdDr9YiPj0dUVBS6u7sxMDBAU9fmp9uKxWIYDAYaFXWraTS5thaLBZ2dnYtGmK0HQqEQMpkMEokk4rl3t7YgJvNERMzJycGjjz6KsrIyGoEyMTEBoVAY4atFjK2npqao8EUEM4VCQdMAU1JSaPWzB4H09HRs3rwZ0dHR6O3txczMDCoqKvBP//RP9DnW2NiIV199FadOnVq2D51EIoFGo4FaraZCI7E3GB0dxdzc3LKfAYvNCYjo09fXh02bNqGwsJCmShYVFcFsNsNqtS7rexgMBoNxf1l34UcoFCIpKQnbtm2DRqOJiCjheR7T09N4+eWXceLECUxNTUEsFiMxMRFf/vKXcfDgQYhEIpw+fRpvvfUWzp8/D4vFgkAgELFjTiobLHf3W6PRoKSkBH/1V3+FwsJC/Od//idqamowOjp624ez2+1GZ2cnTp8+jUcffRRKpRJ6vR7hcJjudM7HbrfDbrejpqYGGRkZyMvLg8lkQlxcXMSO4Hqi1+tRVFSE7du3o7CwkE4CnU4nent76SJzqccpFotpdILP54u4JiSl4JVXXsH58+fx7LPP4oUXXkBSUhL+8R//EZOTkzh37tyKdq/MZjMOHjxIK0L97Gc/g06nw8zMDNra2tDV1YXnn38ef/d3f4f29na0t7cvSWSaH3WzGgQCARQKBVJSUnDgwAF8+9vfhlarjZh4E0KhEPx+Py5fvozLly/j+vXr6Ovrw/T0NKanp+k1Jv3A5XJRc+P5O+1kx3F+KWKCz+dDT08Pnn32WXzlK1/Bc889h8zMTOzcuRMCgQBPPvnkuix8hUIhkpOTaYlhkmL5ox/9CDU1NZiamqKLhMrKSpSVlUGv16OtrQ11dXVLqpJDBACPx7PgHl0KPM9jYmKCRmCRSn+kMhQRohMSEnDw4EG89NJLMBgMERFlBGIg3tjYiMuXL6OxsRFdXV2w2Wyw2+00nUYikQC40dYkneJ+jx1yuZy2zXvvvQen00krIM3NzdGoOCJ8LUZPTw8+/PDDJX+nWq3GkSNHIJPJkJSUhO9+97u0QhL5rK9//ev4+te/jomJibsKSYT5/Xq9x+DlQNJS5XI5rTw1//hDoRAGBgbwta99DYcPH8aRI0doCtL3v/997Nq1a0kV4aRSKRISErB582a89dZbcDqd8Hg8cLvd9P8ulws2m+22n9HX10fbmkTyZmdn0/HodpXtbkc4HEZHRwdGR0ep4JeSkoLo6GgqPJlMJpSWluLQoUP47Gc/S6N2bp0fELPh9957DxcuXEBnZycmJyfhcDjgdDohlUppJDE5Z+DGc2axZwERN0gky3zvv9nZ2YiU8PWGbGy43e4Vj39TU1O4du0aLBYL0tLScODAgYjxT6PRYNu2bdTPjFQze++999DZ2UmjxCorK1FcXIyYmBjMzc3h/PnzS/Ll2yh85jOfgdlshl6vR1VVFbZs2QKTyYTu7m5UV1dj27Zt2LJlC3ieR3t7OwYGBpb82VKpFPHx8aioqEBlZSUVxLxeLyYmJtDT0wOHw7Hk+dKd5gTz01ufeeYZfPWrX8Xzzz9Pfd6EQiE+97nPbYj7l8FgMBiLs67Cj0AgQHR0NP7whz+guLg4IlSVeKN8/PHHOH78eIThZ39/P773ve/hJz/5CfWIWayKCplYkZ0rEs69lAW6RCJBcnIytm3bhsLCQgCgobh3qhTD8zwmJyepCaVSqYTBYKD+Abf77p6eHpw6dYpWJ9u5cyfOnTu3JH+Lew0REZRK5YId+uUKMAKBAHv27KHV1q5evYozZ84s8Etyu93o6+tDdXU1jhw5Ap1OR9tvJSiVShQWFuKRRx4Bz/MQi8Vob2/H+++/j/Pnz6Ovrw+xsbH4zGc+g7S0NOzatYv6Ft
3uPEgqUHJyMtRqNfUoWSnEV0Gv1yMhIWFRrxciArhcLkxMTODixYs4f/48enp6MDU1RSfpHMdhZmaG9qdbI1VIFaTKykpUVVWhs7MTr7/+ekR1MoLNZkN1dTV27tyJlJQU6nW0nsjlcrpLPzk5iZ///Oe0DDAxdwaAxsZGeh1dLhfm5ubuWukJuLEYm5ycRF1dHYxGIwoKCqiATCKA7jbB9Xg88Hq9cLlcuHTpEp544gns2rWLVutxOBzQ6XS0rW+N9CFiscvlosdSXV2N9vZ2TE5ORrT1fI+HpUQl3SvI/enxeMDzPM6fP48vfOELS7rmK0EqlcJsNuOZZ56h/bq/vx/V1dU4c+YMGhsbwfM8Pve5zyEpKQkVFRXo7++/rfBDRBNi2KvRaOD3+9dFRFsJxG9m165dOHToELq7u3Hs2DFa4W0+drsdly5dQn5+PkpKShAVFbWsfs3zPG1rAKipqcHjjz++4ihV4q1ksVhw/vx52jdIqjGZH9ytHaanpzEzM4PGxkZkZmZiy5YtePjhh3H8+HH09PRAp9MhJiYGsbGx0Gq1EeISEXx9Ph9mZ2cxPj6Ojz76CA0NDbBarREm5PMFRBIFIxKJYLPZUFtbi7i4OCpAEhPqlpYWaLVaZGdnQ6fTwePxYHh4GDU1NfjhD38Im822Ie4zMv5duXIFMTExKCoqouIYaY+7jX9erxc+nw9OpxOXLl3CY489hqqqKvT29tKIabVaDbFYjOnpabzxxhv44x//iIaGBmreTMy9iQdXKBSCw+F4YFKKjEYjnnrqKSQmJkIgEMDr9aKurg5vvPEGTp48CZ/Ph+9973s4cuQIcnNzUVVVtSzhh6TCqVSqiGhswnIiPkklysrKSlRWVqKrqwuvvfYaJicnF7S1zWbDqVOnsGfPHiQnJ9MoOgaDwWBsbNZV+BGLxYiJiUFmZuYC0cfpdKKhoQG/+93vFixGA4EAJicn6SRpsYcb2e212WyIiYlBfn4+nn/+eZw4cQLV1dWLLoxIWlhSUhKtYLJ9+3Y0NTXh4sWLOHfu3KIVeW6FhKsTkYKY+t5pokT8X+RyOQQCAVwu111z6e8HYrEYBQUFKCoqQkpKCpRK5ao+T6vV4tFHH8XDDz8MlUqFpqYmCAQC/OlPf4q4PlFRUcjMzMThw4fpjuDdQszv9r2xsbGIiorC8PAwfv/73+PYsWPUpDsUClFzXFKa+m6Vq4jwYzabkZKSQtMCllrVh0yiSY482aFOSEigpYznEwgE4HQ6abpQV1cXampq0NHRQauqza+icyfkcjlyc3Nx+PBh7Nmzh4oUb7/9dkTlJeBGtaTDhw8jOzsbcrl83RfBBoMBqampSExMhM/nQ11dHX73u99R0Qf48/kTHyqyYFxOf/L5fGhubqb+QHl5eSgrK4NQKMTly5cxNjYGp9MJt9tN07Bu9QYh/h4kVWF+SXe5XI7ExEQkJydH+IsAoAucyclJ9Pf3o7e3FxcuXKCV2eb7DG2EhSKB7DQPDAxALpcvKTVkNZBS7FqtFhaLBX/84x9x6tQptLe304grUoVvYmKC/v52cBwHoVAItVpN+zVJkVrqeEz6NdlkWCzS815BSmcfPnwYjz76KDweD+RyOd5++220tbVFHL9Wq8WBAwewZcsWaDQaAMu7l4g5d09PDxQKxZq0MxFUiHBCDJoPHDiA+Ph4NDU1YWxsDA6HA16vd4FROjkHEvEkk8nos5iY3ptMJiQlJcFoNC6Irpubm4Pdbsfk5CSGh4fR1dWFhoYGjIyMwO12R0RPLTaekHS4l19+GYODg1CpVNR/aGJiAjabDS+++CLC4TCNvLp48SJOnTpFPYg2Cn6/Hy0tLTCZTIiKikJhYSHKy8shEAhQX1+P0dFROBwOzM3NLTr+ESQSCY2MIpFEpFR8dnY2gBsC/VtvvUWNhAlEXLTb7RHC9oNCfHw8DAYDXC4Xrl69ipMnT+LcuXPo6uqCy+WCVCqFUCikAmBra+uSP5sULyguLkZeXl6ENyaw/OeCXC5HQUEBPvvZz6KqqopG7R09enSBIKnT6fDUU08hIyMDUVFRd/W9YzAYDMbGYN2EH4FAAI1Gg9zcXMjl8ojX/H4/hoeH8f7776O2tnbRsPO7lfsNh8PUSNXn80Gr1aKgoAC9vb2LpgoJhUKoVCpotVoa9l5YWAiO41BbW4vLly9jenp6SdE3ZNcwGAzSsFlSAvZO14PjOPh8PgSDQboAuZfmrHeDeBwVFhYiJyeHCifzF9HL9fVRKpXIyclBTEwMlEolEhIS6MKXRGoQo96HH34YFRUVNC2MVKdY7oQmNjYWTz75JA4ePAin04lXX30Vr7/+Os1/D4fDSE9Px9NPPw29Xo8f//jHaG9vX7BzTVJ/iLBDygFnZmbixRdfRG9vL6xWK10gLOV6kOiClpYWuFwumEwmpKWlwWw2R9wv4XAY4+PjOHv2LBobG2mK3fDwMGZmZpZdtpv435DqaeFwGGlpaVCpVJienqZmpUqlEocOHcKePXsQExMDgUAAp9OJ/v7+dVmkcByH2NhYpKWlIS4uDqFQCMPDw7BarbeNKlnphJSMIc3NzYiLi0N2djZycnKg0Wig0WioUanNZqMLxfnfRQRpg8GAgoICKpqRMYR4JqWmpkYsQMPhMOx2O06dOoX6+nr09vZiYmICw8PDsNls1Ex4I0IWaTabDfn5+Whvb4der192ys5S0Ov12LFjB774xS8iGAzil7/8Jd566y0MDw/D5XLRiIIvfvGLMBqNeOWVV3D16tUF0T7z+3UoFIJIJEJUVBRSUlLwta99Df39/RgfH4fT6VxS5NL8VKvBwUF0d3djfHx8xeb3y0EsFkOn0yE3N5f2a5JmIhaLEQgEaBW5/fv3Y+/evcjIyIBIJILH40FPT8+S761gMAin0wmr1Urb2mQy3TGNbyn4/X50d3ejsbER27ZtQ2xsLDVAj42NxfDwMMbHxzE5OYnR0dEFzwSfz4eoqChaUYqkv/j9fprmazab6Xg2/3zq6+vR2NiI7u5u+j2Dg4OLeqMtBhEqenp6cPLkSRqlQu6vpKQk+hzleR7j4+Po6upa4P+3ESDjX0tLC+Li4pCXl4esrCyoVCrodDoMDw/T8Y+MT4uNfxqNBoWFhZDL5bRfSiQSpKSkIDU1FQAwPDyM0dHR25pab9Tx7nYIhUIkJibib/7mb6BWq3Hy5Em8++67qKmpoeb+HMfh8OHDKC4uhtVqxXvvvYe+vr4lfT7ZUIiOjsamTZuQnp4eYSYOLF/4iYqKQlxcHDIyMujYQfyviMG9UCiMmBOYTCYaXbzRhEsGg8FgLGTdhJ+oqCgkJyejqqoqQuAgk6ErV67g4sWLsFqtK36YjI6OoqOjAwkJCYiJiUFKSgpycnJgMpnoREwoFCIqKgparRZFRUVISkpCaWkpoqOjEQgEcOXKFVy4cAG9vb3LTuOZL9rcmo5x6znFxcUhJycHCoUCc3NzdCK4nHMnEUtrJRYRz5mcnBykpqbSxQOJsiKT7zuZGs+HTIr7+/uRkZEBqVQKo9GIHTt2Y
GZmhk58SSWWsrIymM1mOpE+fvw4RkdHl3VNVCoVDh8+jKeeegppaWlob2/H22+/jY6ODvoehUKB3NxcPProowgEAjh58mRElBkRuXw+HzWeJKaUpOrb/v37kZ+fj4mJiSULP/PPw+VyYXh4GEqlElqtFhqNZoH558TEBGpqanD+/Hm6A73U6KJbCQaDmJ6exuDgIMxmM1QqFTZv3ozDhw/TanJCoRBarRYHDx5EdnY2RCIRrFYrrl69ilOnTq2b8JOQkICUlBQYDAZaEW6p9+ByGR8fR2trK2JjY7Fp0yakpKRAq9VCpVJRw1diSDoyMhKxQCH+QDqdDhUVFZBIJAgEAlS4IT4Xt7Y1qQJXU1ODM2fOwGKx0Lbe6Asg0h59fX04ePAg8vLyoFKp7ih6rwSFQoGqqio8/fTTNH2LmPqSDQGJRIKkpCQ8/fTTCIfDOHv2LF3EE8iYND09TaNISFquVqvF3r17MT4+TisRLcX7Zv59WFNTg4mJCUxPT98X4SccDmNubg59fX3IycmBUqlEfn4+Dh06hLS0NCr8aLVa7Nu3D6WlpVCr1XA4HOjq6sKxY8eWHJ1EIqE6Ojrw2GOPIT8/f4FP30oIBoMYGRlBfX09uru7kZ+fj+TkZBiNRsTGxmJ6eppG5Vit1gXCj91uh1Qqxc6dO5GVlQWhUEirgpFqcVqtFgqFIuJ7vV4vWlpa8PHHH9PIMDJPWA48f6M0e3t7e8S1IAJwQkICrTDY09OD9vZ2jI2NbchF89TUFNra2hAdHY0tW7bAbDZDq9VCrVYvGP9uFX68Xi+8Xi80Gg0qKipo9Sq/3w+lUonU1FTEx8cDuNFmi6XrP4gIhUJER0fjK1/5Cg4dOkRTpc+dO4fR0VH6PoPBgMcffxzJycm4cOECLl68uCDl/XaQ4gUxMTHIy8tDfHw8TQsPhUJwOp0YHx+/o7/WrZCiBENDQzCbzVAqlSgrK6ORamROoNPp8PDDDyM7O5tGSF+5cgXV1dUb8h5mMBgMxp9ZF+GH4zgYDAZs2rQJ+/btWxDZQHbLiMHfShkfH8fly5eh1WqxefNmxMXFYfPmzdizZw9cLhecTickEgkMBgPMZjP27t2LhIQE+Hw+TE9P49q1a/jggw/Q2Ni4rN24+aHgwJ/LlAKgJbctFgu8Xi+EQiHkcjlKS0uxf/9+xMbGYnR0FDU1NXA6ncuO4iAGsWux0CLRNwkJCTAajbQsM/FiuH79Oq1CstQHvs1mw5tvvgm5XI6ysjLExsbi4MGDKC8vp+caFRUFhUIBiUQCr9eL3t5e/OlPf8Ibb7wRMXFaCnq9Hi+88AIKCwvR3d2NU6dOoaWlJeI9ZrMZ5eXlyMzMxOXLl9HW1ragvcPhMDweD4aGhmCxWGgpX6lUSr2qVCoVkpOT71q2nQhnXq+XpgCQMt0kkm3+389fnPb19cFqtcLlcq1qkjw3N4empia89dZb0Gq1yM/Px/bt25Gfn08FRyL8aTQaBAIBjI6Oor6+Hh988AHef//9dZnkicViJCcn03KyZBF4rxYMDocDfX190Ol0tJqZTqeDXq+n7UhKHM9PDSRpfyScv6CggPr1eDweeo+Q/0haGGlr8r3j4+N0l/xBgJi6NjY2QiaTITc3d03FaIJGo8Hjjz+OJ598Ei6XCydOnEBTU1NEvzEYDNi2bRtKS0tx8eJFWiZ7PqRfWywWjIyMIDk5GQaDgaYImUwmWgHqbilbRCD2eDx0MQzgruPBWuLz+TA4OIjXXnuNRmkUFhYiMzMzoqQzqdIYDodhs9nQ1taG48eP44033ljysfI8D4fDgfr6elpZjUSErgZi/t3U1ERTvtLT06HRaBAfH0/HSpJetFhxAOJXotPpaCU8v98PsVhM06zmnydpt+HhYRopTIzIVwIR1oE/m1ZrNBrs2LEDBoMBHMfBYrHg+vXraG5uXnWU1L3C4XAgEAhArVZj06ZNSEtLWzD+kYi5W8c/ct0lEgkKCwsRCASo6TyJdtTpdAgGg3Tj55MgHBDB+e///u8hlUpx7Ngx1NXVLaiCuXXrVmzZsgUzMzO4du0a+vr6luXHIxaLodFokJiYCK1WC5FIFHEft7S0oK+vb8n38OzsLK5fv46jR49Co9EgLy8PlZWVKCgouO2cwGKx4PLly/jggw/w4YcffiLaj8FgMD7JrIvwI5VKkZmZia1btyImJiZiF9Xv96OjowPnzp1bdeUGv9+Puro66j3w+OOPo6ysDJmZmfD7/ZiYmIBAIIBWq6UVUGZmZvCrX/2KVkmyWq3L3vHz+/00BFqpVOLgwYOoqKigE6Senh5897vfRUdHBzQaDYqKivDQQw+htLQUgUAAly5dQl9f37KiGIiYVlZWBqVSuWYLrVAoRCt7kBQ0n8+HiYkJ1NXVob29nRoxLoVAIEDL1Hd3d2Pv3r3YvHkz9Ho9FazI4pfsPr399tv45S9/SUvlLgexWAypVAqv14v6+nr85je/iZgISaVSPPLII/jc5z4Hi8WCb3/724sav4ZCIczNzaGnp4caxsbHx0Ov10Mul9PUjvm+JrebcPE8Tz11RkdH0dzcTA1nJRIJnTAHg0EqtBExwGKxrEnlF5IidezYMTgcDnz+85+nvksikYhG4RG/rdHRUXz00Ud0ErteVVVUKhXMZjOMRiMt/9vd3X3PhB/SVsPDw6itrUV5eTkVnQQCAY2yi4qKivBJIfdwMBik77HZbBgeHqYRSkQscjqd1KSZLGpdLhcsFssDJfoQSD+pr6/HQw89tCZiwK2IxWKIxWJq9v/yyy9H9DexWIzi4mJ885vfxNzcHF566SWMjIwsuJY8z9MImevXr0OtViM1NZVWHyJtS1Jz7uYTRURAq9WKjo4OWl5+KZFCawHpE6dOnUIoFMIzzzyD3bt3Q6fTUZ8VMsbOzs5iYmICtbW1OHHiBD788MMlVTubj8/nQ29vL2pra3HgwIGIMuarPY/Z2VnU1dWhoKAAWq2WPqeJF5tEIqGec+Q5GQ6HodPp6KZFIBCA3W7H0NAQ3G43ZDIZrS5HIkwEAgHdzCDRXSS1ay0gKcGJiYnIycmBQCBAMBhEc3MzjSxa7wIOt4MU2bBYLKitrUVFRcWi459UKr3r+Dc9PY3h4WF4PB7odDokJydTP8Oenh5qCP+gQ/y9pFIp7HY7XnnlFXR3d0ecm1wux3/9r/8VKpUK/+f//B+8/vrrd/UUvBUyL3C73VTUDAQCmJmZQUdHBxoaGjAwMLDkaxoKhTA4OIj3338fMzMzeOaZZ3Dw4MHbzglGRkZw8uRJvP/++2hsbHygKq0xGAzGp5V1EX4SEhKgVqvR09ODf/u3f4t4bXZ2FvX19Qt2R1aKzWZDe3s7tFotnXglJCQAuFFxgexYTU5O4oMPPoDVasXZs2cxMjJCjXqXSzgcppEqpJw7mRSFw2EkJiaC4zg0NTUhOzsbRUVFiIuLg8vlQnV1NX7wgx9gcnJy2ZOg+SayazGBIpPvrq4umEwm6vnicrnQ1taG+vp6jIyMLLvChs/nQ01NDTo7O3Hu3Dns3r2bVqJR
KpXwer24evUqPv74YzQ2NmJ4eJjuni4HoVBIvVVmZmYwMjICq9VKXxcIBPj85z+PgwcPYnJyEj/60Y/uaK7o9/thtVpx8uRJDA8PIz09HWazGfHx8XRyJJFI6D11OzHC7/djZmYGnZ2dOH/+PBV+wuEwRCIRenp60NLSArPZTCfYpHrOUtPIlgLZ7T927Biam5tx9epV6PV65OTkwGw2QyKRwOVy4fXXX8eFCxcwMjKC6enpda2oolarkZ6eDpPJhGAwiKmpKbS2tt7TiAoyIX7zzTfhcrmwb98+7Nu3D1qtloqV89M4iRGwVCqFUqmEUCiE2+3GqVOncPz4cbS1tdEy7D09PWhubqaV4ea39UYp7bwSvF4v2tra8NnPfjbi2qwFAoEAmZmZMJlM8Hq9GB4exsjISMTru3fvxnPPPQeNRoOXXnoJzc3Ntx0TiQH4hQsXMDMzg8zMTGRkZNA2EYlENMWVeLctBkl9am1txcWLF9He3o6+vj7MzMzcV/GO53m43W4cP34cra2tOHjwIJKSkpCYmIjCwkIoFAq4XC68/fbbuHz5Mvr7+zE5ObkgGmqp+P1+tLa24oknnrirl91yP/fkyZOYnZ3F9u3bsXXrVmzbtg0KhYJ6RpHvItfe6/XCaDRCLBYjGAxiYGAAp0+fxqlTp2Cz2aBWqzE0NIT29nYYDAbodDqa8kWE2LsVYlguJKU5KSmJmmAHg0HYbLZVR27eD8gmwVtvvYW5uTk6/ul0ukXHP5fLBa/XC4lEApVKFTH+nTx5Ev39/di2bRvMZjM4joPdbl/UU+9BhFTVy83NhUAgwMjICCwWS8QzU6lU4lvf+hby8vLw/e9/H++8886yI5lJ6vnk5CQ6OjqgUqkQHx9Pq6Fev34dLS0tmJycXNbnkmpu7733Hq5fv47r169Dq9UiLy+PejE6nU788Y9/pCnn9yuNlcFgMBir567CD8dxSQB+DSAGAA/gZzzP/y+O4/4JwF8CIE+Wf+B5/oOlfOnAwAAsFgtOnjy5YEFAShiv1c6P0+lEW1sb9QzIzs7Gpk2b6K6n1+uFw+FAf38/zp49S/PSl2tcPB+e5zE6Ooq//du/xc6dO/GNb3wD6enpiIqKohVjDh48iH379tFdlGvXruHkyZM4e/YshoaGVvTd5JjJwnG1D2NiMvuTn/wEFy5cQElJCXJzc6FSqahfxkq9VYjocOnSJTQ0NEAikeDVV1+lE3a73U79AFayqBeJRNi/fz/+23/7b4iLi8Mf/vAHfPDBBxH+H4cOHcJf//VfIxQK4a233sLx48fv+Jk8z8Pn8+HcuXOoqalBVFQU5HI5FAoFpFIp5HI5tFotPB4PLRu+2LGTnbrZ2VnMzMxELDTcbjfGxsZw+fJlnD59GikpKYiKiqI+R3a7fc1FDrJAevnllyEWi6HVamklJL/fT9t5NX1irQgEAnTH3m63o7Gxccm+CKuBVNmqq6uD1+uFTCZDSkoKTfcjE36RSISrV69iYmICsbGxyMjIgEKhwMjICM6ePYvm5mbqZ0HMoRsaGlBdXU0XhmShRUy2H0Tm5ubw+uuv4y/+4i+QlJS0pp+9d+9evPTSS9iyZQva2trw61//OqJf79y5E3/5l3+J4uJivPvuuzh69Ohd71ufz4eGhgY0NzdDJpNBqVRCqVRCKpVCJpNBrVbT9L3bVbAhFYtcLldEv14vX6ZQKIShoSH86le/opsCer0eIpEIgUCARl+s9hgDgQBee+01PPvss0hLS1vDM7gh/nR2dlKxnOd5JCUlQaVSgeM4REVFQSaTwe1249q1a7DZbKisrIROp4PD4UBTUxOthkeif2pra9HW1oaTJ0+ioKAAiYmJ1HOnq6srIiVutYhEIuTn52Pnzp3YvXs3lEol3G43reZ1qy/ORoUIa6TYhlQqpX4/t45/165dw/j4OIxGI7KysqBUKmGxWHDmzBk0NTXB6XTC4/HQVNi6ujrqIfOgk56ejkceeQRf//rXwfM8fvzjH2N6eho8z1Pvn4MHD+Kll17CRx99hDfffBNDQ0PL/h4SqdjY2Igf/OAHKC0txa5duxAdHY2GhgY0NTXBarWuWHAmc4If//jHEXMCoVCIQCCwoeYEDAaDwVg6S4n4CQL4e57nGzmOUwFo4Djuo5uv/TvP8z9Y7pferSLXWkJ215xOJ3p7e2n1ASI4Ea+AmZkZWtZ7LQgEAujp6cHMzAwkEgmeeuopWhFIIBBAKBRCIBBgamoK165dw7Fjx3D69GmMjo6uOOzb5XLh2rVrcLlcaGhoWJMUGOKZQFIDRkdHodfr0dHRsWqBjiyUgsEg3G43rXRB2ux2x67T6ZCamor09HR0dXWhqalp0eMQi8WQSCTw+/0YGBjA8PAwgD9Xhvryl7+MpKQk/OEPf8CxY8eWvONIjELdbjccDgdtS5IWQio3kaomi533/OiB+e8hf+fz+VBbW4v29nZIJBKawrGWi5L5EFNYjuMwOztLBUkiUi0Gx3FIT0+nVUlIVbN7yfT0NN5991309vZSEe5+hZiTCa/NZkN/fz/UanVEuWjiKUIqt5AKKSqVCk1NTairq6OCAGl/Uo768uXLaG9vp5EMs7OztOLcgwhZOFRXV+PIkSMR1Q1Xm/ZEUkvI2NTe3k5fi46OxpEjR1BaWor+/n785je/wdzc3JI+l4xFZFFK0qJEIhFkMhkVfm/ts/Mh7bqWmxergXh+ED8cm80W4SV1u2M0m83Izc1FMBhEX18fent77/gdg4ODOHv2LC0AEBUVRVPxVgPP8zQaaXBwEFeuXIkQW2NjY2EwGBAMBnHlyhVMT0+jtrYW0dHRsFgsaG9vR3d3Nx0jSDQuMVl3OBxQq9X0vhwbG4PP51uztiPPIOKJNzIyQlMLH7T0mGAwSFNV+/r6oNFo6PgnEolgNpuh0+lQW1uLyclJmEwmZGVlQa1WR4x/PM+jvr4er776KuLi4vDee++tKI17I0JSEMViMTweD65du0afn3K5HPn5+fjSl74EiUSCn/zkJxgbG1uxOEPEn4GBATrmxMfHU2+4tZj7kTnB3Nwcjey625wgMzMTRUVFGB0dRU9PDyYmJlZ1HAwGg8FYO+4q/PA8PwZg7Oa/XRzHtQNIuNcHttYEg0FqLDg9PR1RXSsUCt3VuHO5kGgiq9WKEydOYG5uDsXFxUhMTIyovtDX14fLly+jsbERAwMDK14UkbzrN954A0ajkVYKWQs8Hg+sVivm5uYwNDSEqKioNY1GIELIcs6dVNO63TmGw2F0dnbi/fffR2JiIpqbmyMiQ0QiEVQqFaamptDd3b3sKCty35CFOQl1J8IM2Qm702fe7jUysbJarbDZbDSdwe/33/OSv3dLZ7n1vcANA20SBXOv8Xg8qKurQ1dXF0KhECwWy33zxyD3qN/vx9zcHE1r4TiOVjuRy+U0/XFgYACdnZ0Qi8WwWq2LCstkAT4xMQG73U7bmkzkH1SIN9Kbb76JcDgMo9GIvXv3QiqVoqamBuPj4yv+7O7ubpw4cQL9/f1obm6OSGcQCoVQKBRwu93o7+9
f4K2xlOMmbUIqYJF+Tfx9Vtqv1xMyXi1nzNZoNOB5fknPEa/Xi6NHjyIYDCIuLg7bt29HOBzGuXPnVi0GBwIBBAIBuN1u2Gy2iH6nUqmor5rVaqURvEqlEk6nE3a7fYGASsa42dlZWsmI+PystdhKql9yHIe+vj4IhUJ4vV5MTExsaG+fxbh1/Jvv+yIUCtHc3Ay5XE5Tm6KiotDZ2QmZTAar1UpN+AUCAQYGBvDBBx9ALpejr69vTcW29YQY27/77ruIj4+HxWKhfY5sDpFnRGtr66qjskOhEBVF5+bmoFKp4HQ6MTU1tWbXk2zQLZVwOAyDwYC5uTk612UwGAzGxmBZHj8cx5kBbAJQC6ASwF9zHPdFAFdwIypogTMkx3EvAnhx9Ye6ekKhEDwez331KAkGgzTyp6WlBXFxcZBIJABuPFCHhoZoCs9qhRSPx4MzZ85AJpOt6XkSY+O5ubmIBdt6TdS8Xi/Gx8fR29t7W++fcDiM/v5+vPHGG4iOjkZHRwe9HiRlo7q6GgqFAi0tLSv2FyDXgPx/LcWwW9P17rbgXA/I7i+JRrrXhMNhjI6OUk+E9bgeRNS9ddJOBGWycPR4PLSc7t2EggehrZcLz/O4ePEiXC4XqqqqEBUVRaMrVsPg4CDee+89xMbGwm63R5iizs3NoaamBsPDw+jo6Fixbw05/rXu1w8KDocDAwMD1Hx3KVy+fBlOpxMVFRUwmUzQaDT0WbcWLPb8ttvtEZs4wI37Y/7Pt4NElc5PN78XabSjo6NU1OU4DsFgED6f74E1Mybj363cOv653W76fJ5/niR6lUQ7PYjX4HbY7XZcvXoVs7OzMJlMEZVZ/X4/xsbGcOLECQiFwjWJMCeijMPhiCiysV7XlOd52O12OjdbTz9ABoPBYCyEW+oDguM4JYBzAL7L8/xRjuNiAEzhhu/P/wsgjuf5F+7yGZ+cJ/wK4DhugfEly5FeOcSnaaXXTyKR0In4p21ht5bMN1hlMBaDLHqBez/mkYgQYrLOWBkk2mm5Y+z9bGsG40GC4zgqht6vSn/rwWrnZgwGg8FYFQ08z29Z7IUlCT8cx4kBvA/gBM/z/7bI62YA7/M8X3CXz2FPAQaDwWAwGAwGg8FgMBiMteW2ws9SqnpxAH4BoH2+6MNxXNxN/x8AOAygZQkHMgVg7ub/GQzGxsUI1k8ZjI0O66cMxoMB66sMxsaH9VPGJ4GU271w14gfjuN2ADgPoBkAyeX4BwCfB1CCG6leAwD+r3lC0J0+78rtVCgGg7ExYP2Uwdj4sH7KYDwYsL7KYGx8WD9lfNJZSlWvCwC4RV76YO0Ph8FgMBgMBoPBYDAYDAaDsVYI7v4WBoPBYDAYDAaDwWAwGAzGg8h6CD8/W4fvZDAYy4P1UwZj48P6KYPxYMD6KoOx8WH9lPGJZsnl3BkMBoPBYDAYDAaDwWAwGA8WLNWLwWAwGAwGg8FgMBgMBuMTyn0TfjiOO8hxXCfHcT0cx/2X+/W9DAYjEo7jkjiOO8NxXBvHca0cx33r5u/1HMd9xHFc983/627+nuM47sc3+24Tx3Gl63sGDManC47jhBzHXeU47v2bP6dyHFd7s0++znGc5ObvpTd/7rn5unldD5zB+JTAcZyW47g3OY7r4DiuneO4CvZMZTA2HhzH/e3NuW8Lx3F/4DhOxp6pjE8L90X44ThOCOD/A/AZAHkAPs9xXN79+G4Gg7GAIIC/53k+D8A2AH91sz/+FwCneZ7PBHD65s/AjX6befO/FwH8x/0/ZAbjU823ALTP+/lfAPw7z/MZAOwAvnrz918FYL/5+3+/+T4Gg3Hv+V8AjvM8nwOgGDf6K3umMhgbCI7jEgB8E8AWnucLAAgBPAv2TGV8SrhfET/lAHp4nu/jed4P4DUAT9yn72YwGPPgeX6M5/nGm/924cYENQE3+uSvbr7tVwCevPnvJwD8mr/BZQBajuPi7u9RMxifTjiOSwTwCICf3/yZA7AXwJs333JrXyV9+E0A+26+n8Fg3CM4jtMAqALwCwDged7P8/wM2DOVwdiIiABEcRwnAiAHMAb2TGV8Srhfwk8CgOF5P4/c/B2DwVhHboatbgJQCyCG5/mxmy9ZAcTc/DfrvwzG+vEjAP83gPDNnw0AZnieD978eX5/pH315uuOm+9nMBj3jlQAkwB+eTMl8+ccxynAnqkMxoaC53kLgB8AGMINwccBoAHsmcr4lMDMnRmMTykcxykBvAXgb3ied85/jb9R7o+V/GMw1hGO4x4FMMHzfMN6HwuDwbgtIgClAP6D5/lNAObw57QuAOyZymBsBG76bD2BG2JtPAAFgIPrelAMxn3kfgk/FgBJ835OvPk7BoOxDnAcJ8YN0ed3PM8fvfnrcRJufvP/Ezd/z/ovg7E+VAJ4nOO4AdxIkd6LG14i2pth6kBkf6R99ebrGgC2+3nADMankBEAIzzP1978+U3cEILYM5XB2FjsB9DP8/wkz/MBAEdx4znLnqmMTwX3S/ipB5B50zVdghtGWu/ep+9mMBjzuJmf/AsA7TzP/9u8l94F8KWb//4SgHfm/f6LNyuRbAPgmBe+zmAw7hE8z/8/PM8n8jxvxo3nZjXP888DOAPgyM233dpXSR8+cvP9LMqAwbiH8DxvBTDMcVz2zV/tA9AG9kxlMDYaQwC2cRwnvzkXJn2VPVMZnwq4+3X/chx3CDe8CoQAXuV5/rv35YsZDEYEHMftAHAeQDP+7BvyD7jh8/NHAMkABgE8zfP89M2H48u4EQ7rBvAVnuev3PcDZzA+xXActxvASzzPP8pxXBpuRADpAVwF8AWe530cx8kA/AY3fLumATzL83zfOh0yg/GpgeO4EtwwYJcA6APwFdzYXGXPVAZjA8Fx3D8DeAY3KtxeBfA13PDyYc9Uxiee+yb8MBgMBoPBYDAYDAaDwWAw7i/M3JnBYDAYDAaDwWAwGAwG4xMKE34YDAaDwWAwGAwGg8FgMD6hMOGHwWAwGAwGg8FgMBgMBuMTChN+GAwGg8FgMBgMBoPBYDA+oTDhh8FgMBgMBoPBYDAYDAbjEwoTfhgMBoPBYDAYDAaDwWAwPqEw4YfBYDAYDAaDwWAwGAwG4xMKE34YDAaDwWAwGAwGg8FgMD6h/P9i0Oh7ln4PdQAAAABJRU5ErkJggg==\n",
- "text/plain": [
- "<Figure size 1440x1440 with 1 Axes>"
- ]
- },
- "metadata": {
- "needs_background": "light"
- },
- "output_type": "display_data"
- }
- ],
- "source": [
- "plt.figure(figsize=(20, 20))\n",
- "plt.imshow(batch[0][-1].squeeze(0), cmap='gray')"
- ]
- },
- {
- "cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
diff --git a/src/notebooks/03a-line-prediction.ipynb b/src/notebooks/03a-line-prediction.ipynb
index 65c6dd6..336614f 100644
--- a/src/notebooks/03a-line-prediction.ipynb
+++ b/src/notebooks/03a-line-prediction.ipynb
@@ -49,7 +49,7 @@
"name": "stderr",
"output_type": "stream",
"text": [
- "2020-09-01 23:37:29.664 | DEBUG | text_recognizer.datasets.emnist_lines_dataset:_load_data:164 - EmnistLinesDataset loading data from HDF5...\n"
+ "2020-09-09 20:38:27.854 | DEBUG | text_recognizer.datasets.emnist_lines_dataset:_load_data:164 - EmnistLinesDataset loading data from HDF5...\n"
]
}
],
@@ -71,6 +71,35 @@
"cell_type": "code",
"execution_count": 6,
"metadata": {},
+ "outputs": [],
+ "source": [
+ "data, target = emnist_lines[0]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "torch.Size([34])"
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "target.shape"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
"outputs": [
{
"name": "stderr",
diff --git a/src/notebooks/04a-look-at-iam-lines.ipynb b/src/notebooks/04a-look-at-iam-lines.ipynb
index aa62d19..0f9fefb 100644
--- a/src/notebooks/04a-look-at-iam-lines.ipynb
+++ b/src/notebooks/04a-look-at-iam-lines.ipynb
@@ -2,18 +2,9 @@
"cells": [
{
"cell_type": "code",
- "execution_count": 12,
+ "execution_count": 1,
"metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "The autoreload extension is already loaded. To reload it, use:\n",
- " %reload_ext autoreload\n"
- ]
- }
- ],
+ "outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
@@ -32,7 +23,7 @@
},
{
"cell_type": "code",
- "execution_count": 13,
+ "execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
@@ -41,7 +32,7 @@
},
{
"cell_type": "code",
- "execution_count": 15,
+ "execution_count": 4,
"metadata": {},
"outputs": [
{
@@ -65,7 +56,7 @@
},
{
"cell_type": "code",
- "execution_count": 16,
+ "execution_count": 5,
"metadata": {},
"outputs": [
{
@@ -74,7 +65,7 @@
"(97, 80)"
]
},
- "execution_count": 16,
+ "execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
@@ -85,7 +76,7 @@
},
{
"cell_type": "code",
- "execution_count": 17,
+ "execution_count": 6,
"metadata": {},
"outputs": [
{
@@ -94,7 +85,7 @@
"'A MOVE to stop Mr. Gaitskell from________________________________________________________________'"
]
},
- "execution_count": 17,
+ "execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
@@ -108,7 +99,7 @@
},
{
"cell_type": "code",
- "execution_count": 24,
+ "execution_count": 7,
"metadata": {},
"outputs": [
{
@@ -260,6 +251,80 @@
},
{
"cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "data, target = dataset[0]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "torch.Size([1, 28, 952])"
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "data.shape"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "torch.Size([97])"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "target.shape\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "tensor([10, 62, 22, 24, 31, 14, 62, 55, 50, 62, 54, 55, 50, 51, 62, 22, 53, 74,\n",
+ " 62, 16, 36, 44, 55, 54, 46, 40, 47, 47, 62, 41, 53, 50, 48, 79, 79, 79,\n",
+ " 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79,\n",
+ " 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79,\n",
+ " 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79,\n",
+ " 79, 79, 79, 79, 79, 79, 79], dtype=torch.uint8)"
+ ]
+ },
+ "execution_count": 11,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "target"
+ ]
+ },
+ {
+ "cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
diff --git a/src/notebooks/04-look-at-iam-paragraphs.ipynb b/src/notebooks/04b-look-at-iam-paragraphs.ipynb
index da420b0..a442420 100644
--- a/src/notebooks/04-look-at-iam-paragraphs.ipynb
+++ b/src/notebooks/04b-look-at-iam-paragraphs.ipynb
@@ -2,9 +2,18 @@
"cells": [
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": 4,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "The autoreload extension is already loaded. To reload it, use:\n",
+ " %reload_ext autoreload\n"
+ ]
+ }
+ ],
"source": [
"\n",
"\n",
@@ -28,7 +37,7 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": 5,
"metadata": {},
"outputs": [
{
@@ -48,19 +57,14 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "2020-09-08 23:04:48.663 | INFO | text_recognizer.datasets.iam_paragraphs_dataset:_decide_on_crop_dims:190 - Max crop width and height were found to be 1240x1156.\n",
- "2020-09-08 23:04:48.664 | INFO | text_recognizer.datasets.iam_paragraphs_dataset:_decide_on_crop_dims:193 - Setting them to 1240x1240\n",
- "2020-09-08 23:04:48.665 | INFO | text_recognizer.datasets.iam_paragraphs_dataset:_process_iam_paragraphs:161 - Cropping paragraphs, generating ground truth, and saving debugging images to /home/akternurra/Documents/projects/quest-for-general-artifical-intelligence/projects/text-recognizer/data/interim/iam_paragraphs/debug_crops\n",
- "2020-09-08 23:05:10.585 | ERROR | text_recognizer.datasets.iam_paragraphs_dataset:_crop_paragraph_image:240 - Rescued /home/akternurra/Documents/projects/quest-for-general-artifical-intelligence/projects/text-recognizer/data/raw/iam/iamdb/forms/e01-086.jpg: could not broadcast input array from shape (687,1236) into shape (687,1240)\n",
- "2020-09-08 23:05:14.430 | ERROR | text_recognizer.datasets.iam_paragraphs_dataset:_crop_paragraph_image:240 - Rescued /home/akternurra/Documents/projects/quest-for-general-artifical-intelligence/projects/text-recognizer/data/raw/iam/iamdb/forms/e01-081.jpg: could not broadcast input array from shape (587,1236) into shape (587,1240)\n",
- "2020-09-08 23:05:29.910 | INFO | text_recognizer.datasets.iam_paragraphs_dataset:_load_iam_paragraphs:278 - Loading IAM paragraph crops and ground truth from image files...\n"
+ "2020-09-09 23:24:01.352 | INFO | text_recognizer.datasets.iam_paragraphs_dataset:_load_iam_paragraphs:244 - Loading IAM paragraph crops and ground truth from image files...\n"
]
},
{
@@ -83,7 +87,7 @@
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 7,
"metadata": {},
"outputs": [
{
diff --git a/src/text_recognizer/datasets/__init__.py b/src/text_recognizer/datasets/__init__.py
index ede4541..a3af9b1 100644
--- a/src/text_recognizer/datasets/__init__.py
+++ b/src/text_recognizer/datasets/__init__.py
@@ -1,10 +1,5 @@
"""Dataset modules."""
-from .emnist_dataset import (
- DATA_DIRNAME,
- EmnistDataset,
- EmnistMapper,
- ESSENTIALS_FILENAME,
-)
+from .emnist_dataset import EmnistDataset, Transpose
from .emnist_lines_dataset import (
construct_image_from_string,
EmnistLinesDataset,
@@ -13,7 +8,14 @@ from .emnist_lines_dataset import (
from .iam_dataset import IamDataset
from .iam_lines_dataset import IamLinesDataset
from .iam_paragraphs_dataset import IamParagraphsDataset
-from .util import _download_raw_dataset, compute_sha256, download_url, Transpose
+from .util import (
+ _download_raw_dataset,
+ compute_sha256,
+ DATA_DIRNAME,
+ download_url,
+ EmnistMapper,
+ ESSENTIALS_FILENAME,
+)
__all__ = [
"_download_raw_dataset",
diff --git a/src/text_recognizer/datasets/dataset.py b/src/text_recognizer/datasets/dataset.py
new file mode 100644
index 0000000..f328a0f
--- /dev/null
+++ b/src/text_recognizer/datasets/dataset.py
@@ -0,0 +1,124 @@
+"""Abstract dataset class."""
+from typing import Callable, Dict, Optional, Tuple, Union
+
+import torch
+from torch import Tensor
+from torch.utils import data
+from torchvision.transforms import ToTensor
+
+from text_recognizer.datasets.util import EmnistMapper
+
+
+class Dataset(data.Dataset):
+ """Abstract class for with common methods for all datasets."""
+
+ def __init__(
+ self,
+ train: bool,
+        subsample_fraction: Optional[float] = None,
+ transform: Optional[Callable] = None,
+ target_transform: Optional[Callable] = None,
+ ) -> None:
+ """Initialization of Dataset class.
+
+ Args:
+            train (bool): If True, loads the training set; otherwise the test set is loaded.
+            subsample_fraction (Optional[float]): Fraction of the dataset to load. Defaults to None.
+ transform (Optional[Callable]): Transform(s) for input data. Defaults to None.
+ target_transform (Optional[Callable]): Transform(s) for output data. Defaults to None.
+
+ Raises:
+ ValueError: If subsample_fraction is not None and outside the range (0, 1).
+
+ """
+ self.train = train
+ self.split = "train" if self.train else "test"
+
+ if subsample_fraction is not None:
+ if not 0.0 < subsample_fraction < 1.0:
+ raise ValueError("The subsample fraction must be in (0, 1).")
+ self.subsample_fraction = subsample_fraction
+
+ self._mapper = EmnistMapper()
+ self._input_shape = self._mapper.input_shape
+        self._output_shape = self._mapper.num_classes
+ self.num_classes = self.mapper.num_classes
+
+ # Set transforms.
+ self.transform = transform
+ if self.transform is None:
+ self.transform = ToTensor()
+
+ self.target_transform = target_transform
+ if self.target_transform is None:
+ self.target_transform = torch.tensor
+
+ self._data = None
+ self._targets = None
+
+ @property
+ def data(self) -> Tensor:
+ """The input data."""
+ return self._data
+
+ @property
+ def targets(self) -> Tensor:
+ """The target data."""
+ return self._targets
+
+ @property
+ def input_shape(self) -> Tuple:
+ """Input shape of the data."""
+ return self._input_shape
+
+ @property
+ def output_shape(self) -> Tuple:
+ """Output shape of the data."""
+ return self._output_shape
+
+ @property
+ def mapper(self) -> EmnistMapper:
+ """Returns the EmnistMapper."""
+ return self._mapper
+
+ @property
+ def mapping(self) -> Dict:
+ """Return EMNIST mapping from index to character."""
+ return self._mapper.mapping
+
+ @property
+ def inverse_mapping(self) -> Dict:
+ """Returns the inverse mapping from character to index."""
+ return self.mapper.inverse_mapping
+
+ def _subsample(self) -> None:
+ """Only this fraction of the data will be loaded."""
+ if self.subsample_fraction is None:
+ return
+ num_subsample = int(self.data.shape[0] * self.subsample_fraction)
+        self._data = self.data[:num_subsample]
+        self._targets = self.targets[:num_subsample]
+
+ def __len__(self) -> int:
+ """Returns the length of the dataset."""
+ return len(self.data)
+
+ def load_or_generate_data(self) -> None:
+ """Load or generate dataset data."""
+ raise NotImplementedError
+
+ def __getitem__(self, index: Union[int, Tensor]) -> Tuple[Tensor, Tensor]:
+ """Fetches samples from the dataset.
+
+ Args:
+ index (Union[int, torch.Tensor]): The indices of the samples to fetch.
+
+ Raises:
+ NotImplementedError: If the method is not implemented in child class.
+
+ """
+ raise NotImplementedError
+
+ def __repr__(self) -> str:
+ """Returns information about the dataset."""
+ raise NotImplementedError
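A minimal sketch, not part of this commit, of how a concrete dataset is expected to subclass the new abstract Dataset: implement load_or_generate_data, fill the private _data/_targets fields, and inherit length, properties, and subsampling from the base class. ToyDataset and its tensor shapes are hypothetical; note that the base constructor also builds an EmnistMapper, so emnist_essentials.json must already exist.

from typing import Tuple, Union

import torch
from torch import Tensor

from text_recognizer.datasets.dataset import Dataset


class ToyDataset(Dataset):
    """Hypothetical child class, shown only to illustrate the contract."""

    def load_or_generate_data(self) -> None:
        # Populate the private fields; the read-only data/targets
        # properties of the base class expose them.
        self._data = torch.rand(10, 28, 28)
        self._targets = torch.zeros(10, dtype=torch.uint8)
        self._subsample()  # No-op unless subsample_fraction was given.

    def __getitem__(self, index: Union[int, Tensor]) -> Tuple[Tensor, Tensor]:
        return self.data[index], self.targets[index]

    def __repr__(self) -> str:
        return f"ToyDataset(num_samples={len(self)})"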
diff --git a/src/text_recognizer/datasets/emnist_dataset.py b/src/text_recognizer/datasets/emnist_dataset.py
index 0715aae..81268fb 100644
--- a/src/text_recognizer/datasets/emnist_dataset.py
+++ b/src/text_recognizer/datasets/emnist_dataset.py
@@ -2,139 +2,26 @@
import json
from pathlib import Path
-from typing import Callable, Dict, List, Optional, Tuple, Type, Union
+from typing import Callable, Optional, Tuple, Union
from loguru import logger
import numpy as np
from PIL import Image
import torch
from torch import Tensor
-from torch.utils.data import DataLoader, Dataset
from torchvision.datasets import EMNIST
-from torchvision.transforms import Compose, Normalize, ToTensor
+from torchvision.transforms import Compose, ToTensor
-from text_recognizer.datasets.util import Transpose
+from text_recognizer.datasets.dataset import Dataset
+from text_recognizer.datasets.util import DATA_DIRNAME
-DATA_DIRNAME = Path(__file__).resolve().parents[3] / "data"
-ESSENTIALS_FILENAME = Path(__file__).resolve().parents[0] / "emnist_essentials.json"
+class Transpose:
+ """Transposes the EMNIST image to the correct orientation."""
-def save_emnist_essentials(emnsit_dataset: type = EMNIST) -> None:
- """Extract and saves EMNIST essentials."""
- labels = emnsit_dataset.classes
- labels.sort()
- mapping = [(i, str(label)) for i, label in enumerate(labels)]
- essentials = {
- "mapping": mapping,
- "input_shape": tuple(emnsit_dataset[0][0].shape[:]),
- }
- logger.info("Saving emnist essentials...")
- with open(ESSENTIALS_FILENAME, "w") as f:
- json.dump(essentials, f)
-
-
-def download_emnist() -> None:
- """Download the EMNIST dataset via the PyTorch class."""
- logger.info(f"Data directory is: {DATA_DIRNAME}")
- dataset = EMNIST(root=DATA_DIRNAME, split="byclass", download=True)
- save_emnist_essentials(dataset)
-
-
-class EmnistMapper:
- """Mapper between network output to Emnist character."""
-
- def __init__(self) -> None:
- """Loads the emnist essentials file with the mapping and input shape."""
- self.essentials = self._load_emnist_essentials()
- # Load dataset infromation.
- self._mapping = self._augment_emnist_mapping(dict(self.essentials["mapping"]))
- self._inverse_mapping = {v: k for k, v in self.mapping.items()}
- self._num_classes = len(self.mapping)
- self._input_shape = self.essentials["input_shape"]
-
- def __call__(self, token: Union[str, int, np.uint8]) -> Union[str, int]:
- """Maps the token to emnist character or character index.
-
- If the token is an integer (index), the method will return the Emnist character corresponding to that index.
- If the token is a str (Emnist character), the method will return the corresponding index for that character.
-
- Args:
- token (Union[str, int, np.uint8]): Eihter a string or index (integer).
-
- Returns:
- Union[str, int]: The mapping result.
-
- Raises:
- KeyError: If the index or string does not exist in the mapping.
-
- """
- if (isinstance(token, np.uint8) or isinstance(token, int)) and int(
- token
- ) in self.mapping:
- return self.mapping[int(token)]
- elif isinstance(token, str) and token in self._inverse_mapping:
- return self._inverse_mapping[token]
- else:
- raise KeyError(f"Token {token} does not exist in the mappings.")
-
- @property
- def mapping(self) -> Dict:
- """Returns the mapping between index and character."""
- return self._mapping
-
- @property
- def inverse_mapping(self) -> Dict:
- """Returns the mapping between character and index."""
- return self._inverse_mapping
-
- @property
- def num_classes(self) -> int:
- """Returns the number of classes in the dataset."""
- return self._num_classes
-
- @property
- def input_shape(self) -> List[int]:
- """Returns the input shape of the Emnist characters."""
- return self._input_shape
-
- def _load_emnist_essentials(self) -> Dict:
- """Load the EMNIST mapping."""
- with open(str(ESSENTIALS_FILENAME)) as f:
- essentials = json.load(f)
- return essentials
-
- def _augment_emnist_mapping(self, mapping: Dict) -> Dict:
- """Augment the mapping with extra symbols."""
- # Extra symbols in IAM dataset
- extra_symbols = [
- " ",
- "!",
- '"',
- "#",
- "&",
- "'",
- "(",
- ")",
- "*",
- "+",
- ",",
- "-",
- ".",
- "/",
- ":",
- ";",
- "?",
- ]
-
- # padding symbol
- extra_symbols.append("_")
-
- max_key = max(mapping.keys())
- extra_mapping = {}
- for i, symbol in enumerate(extra_symbols):
- extra_mapping[max_key + 1 + i] = symbol
-
- return {**mapping, **extra_mapping}
+ def __call__(self, image: Image) -> np.ndarray:
+ """Swaps axis."""
+ return np.array(image).swapaxes(0, 1)
class EmnistDataset(Dataset):
@@ -159,70 +46,33 @@ class EmnistDataset(Dataset):
target_transform (Optional[Callable]): Transform(s) for output data. Defaults to None.
seed (int): Seed number. Defaults to 4711.
- Raises:
- ValueError: If subsample_fraction is not None and outside the range (0, 1).
-
"""
+ super().__init__(
+ train=train,
+ subsample_fraction=subsample_fraction,
+ transform=transform,
+ target_transform=target_transform,
+ )
- self.train = train
self.sample_to_balance = sample_to_balance
- if subsample_fraction is not None:
- if not 0.0 < subsample_fraction < 1.0:
- raise ValueError("The subsample fraction must be in (0, 1).")
- self.subsample_fraction = subsample_fraction
-
- self.transform = transform
- if self.transform is None:
+        # The EMNIST characters have to be transposed; ToTensor also normalizes the input to [0, 1].
+ if transform is None:
self.transform = Compose([Transpose(), ToTensor()])
+        # The EMNIST targets are already tensors, so no default target transform is needed.
self.target_transform = target_transform
- self.seed = seed
-
- self._mapper = EmnistMapper()
- self._input_shape = self._mapper.input_shape
- self.num_classes = self._mapper.num_classes
-
- # Load dataset.
- self._data, self._targets = self.load_emnist_dataset()
-
- @property
- def data(self) -> Tensor:
- """The input data."""
- return self._data
- @property
- def targets(self) -> Tensor:
- """The target data."""
- return self._targets
-
- @property
- def input_shape(self) -> Tuple:
- """Input shape of the data."""
- return self._input_shape
-
- @property
- def mapper(self) -> EmnistMapper:
- """Returns the EmnistMapper."""
- return self._mapper
-
- @property
- def inverse_mapping(self) -> Dict:
- """Returns the inverse mapping from character to index."""
- return self.mapper.inverse_mapping
-
- def __len__(self) -> int:
- """Returns the length of the dataset."""
- return len(self.data)
+ self.seed = seed
def __getitem__(self, index: Union[int, Tensor]) -> Tuple[Tensor, Tensor]:
"""Fetches samples from the dataset.
Args:
- index (Union[int, torch.Tensor]): The indices of the samples to fetch.
+ index (Union[int, Tensor]): The indices of the samples to fetch.
Returns:
- Tuple[torch.Tensor, torch.Tensor]: Data target tuple.
+ Tuple[Tensor, Tensor]: Data target tuple.
"""
if torch.is_tensor(index):
@@ -248,13 +98,11 @@ class EmnistDataset(Dataset):
f"Mapping: {self.mapper.mapping}\n"
)
- def _sample_to_balance(
- self, data: Tensor, targets: Tensor
- ) -> Tuple[np.ndarray, np.ndarray]:
+ def _sample_to_balance(self) -> None:
"""Because the dataset is not balanced, we take at most the mean number of instances per class."""
np.random.seed(self.seed)
- x = data
- y = targets
+ x = self._data
+ y = self._targets
num_to_sample = int(np.bincount(y.flatten()).mean())
all_sampled_indices = []
for label in np.unique(y.flatten()):
@@ -264,22 +112,10 @@ class EmnistDataset(Dataset):
indices = np.concatenate(all_sampled_indices)
x_sampled = x[indices]
y_sampled = y[indices]
- data = x_sampled
- targets = y_sampled
- return data, targets
-
- def _subsample(self, data: Tensor, targets: Tensor) -> Tuple[Tensor, Tensor]:
- """Subsamples the dataset to the specified fraction."""
- x = data
- y = targets
- num_samples = int(x.shape[0] * self.subsample_fraction)
- x_sampled = x[:num_samples]
- y_sampled = y[:num_samples]
- self.data = x_sampled
- self.targets = y_sampled
- return data, targets
+ self._data = x_sampled
+ self._targets = y_sampled
- def load_emnist_dataset(self) -> Tuple[Tensor, Tensor]:
+ def load_or_generate_data(self) -> None:
"""Fetch the EMNIST dataset."""
dataset = EMNIST(
root=DATA_DIRNAME,
@@ -290,13 +126,11 @@ class EmnistDataset(Dataset):
target_transform=None,
)
- data = dataset.data
- targets = dataset.targets
+ self._data = dataset.data
+ self._targets = dataset.targets
if self.sample_to_balance:
- data, targets = self._sample_to_balance(data, targets)
+ self._sample_to_balance()
if self.subsample_fraction is not None:
- data, targets = self._subsample(data, targets)
-
- return data, targets
+ self._subsample()
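After this refactor nothing is loaded at construction time, so usage becomes a two-step flow. A short sketch; the argument values are illustrative:

from text_recognizer.datasets import EmnistDataset

dataset = EmnistDataset(train=True, sample_to_balance=True)
dataset.load_or_generate_data()  # Fetch EMNIST, then balance and subsample.
data, target = dataset[0]        # Transpose + ToTensor applied to the image.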
diff --git a/src/text_recognizer/datasets/emnist_lines_dataset.py b/src/text_recognizer/datasets/emnist_lines_dataset.py
index 656131a..8fa77cd 100644
--- a/src/text_recognizer/datasets/emnist_lines_dataset.py
+++ b/src/text_recognizer/datasets/emnist_lines_dataset.py
@@ -9,17 +9,16 @@ from loguru import logger
import numpy as np
import torch
from torch import Tensor
-from torch.utils.data import Dataset
from torchvision.transforms import ToTensor
-from text_recognizer.datasets import (
+from text_recognizer.datasets.dataset import Dataset
+from text_recognizer.datasets.emnist_dataset import EmnistDataset, Transpose
+from text_recognizer.datasets.sentence_generator import SentenceGenerator
+from text_recognizer.datasets.util import (
DATA_DIRNAME,
- EmnistDataset,
EmnistMapper,
ESSENTIALS_FILENAME,
)
-from text_recognizer.datasets.sentence_generator import SentenceGenerator
-from text_recognizer.datasets.util import Transpose
from text_recognizer.networks import sliding_window
DATA_DIRNAME = DATA_DIRNAME / "processed" / "emnist_lines"
@@ -52,18 +51,11 @@ class EmnistLinesDataset(Dataset):
seed (int): Seed number. Defaults to 4711.
"""
- self.train = train
-
- self.transform = transform
- if self.transform is None:
- self.transform = ToTensor()
-
- self.target_transform = target_transform
- if self.target_transform is None:
- self.target_transform = torch.tensor
+ super().__init__(
+ train=train, transform=transform, target_transform=target_transform,
+ )
# Extract dataset information.
- self._mapper = EmnistMapper()
self._input_shape = self._mapper.input_shape
self.num_classes = self._mapper.num_classes
@@ -75,24 +67,12 @@ class EmnistLinesDataset(Dataset):
self.input_shape[0],
self.input_shape[1] * self.max_length,
)
- self.output_shape = (self.max_length, self.num_classes)
+ self._output_shape = (self.max_length, self.num_classes)
self.seed = seed
# Placeholders for the dataset.
- self.data = None
- self.target = None
-
- # Load dataset.
- self._load_or_generate_data()
-
- @property
- def input_shape(self) -> Tuple:
- """Input shape of the data."""
- return self._input_shape
-
- def __len__(self) -> int:
- """Returns the length of the dataset."""
- return len(self.data)
+ self._data = None
+        self._targets = None
def __getitem__(self, index: Union[int, Tensor]) -> Tuple[Tensor, Tensor]:
"""Fetches data, target pair of the dataset for a given and index or indices.
@@ -132,16 +112,6 @@ class EmnistLinesDataset(Dataset):
)
@property
- def mapper(self) -> EmnistMapper:
- """Returns the EmnistMapper."""
- return self._mapper
-
- @property
- def mapping(self) -> Dict:
- """Return EMNIST mapping from index to character."""
- return self._mapper.mapping
-
- @property
def data_filename(self) -> Path:
"""Path to the h5 file."""
filename = f"ml_{self.max_length}_o{self.min_overlap}_{self.max_overlap}_n{self.num_samples}.pt"
@@ -151,7 +121,7 @@ class EmnistLinesDataset(Dataset):
filename = "test_" + filename
return DATA_DIRNAME / filename
- def _load_or_generate_data(self) -> None:
+ def load_or_generate_data(self) -> None:
"""Loads the dataset, if it does not exist a new dataset is generated before loading it."""
np.random.seed(self.seed)
@@ -163,8 +133,8 @@ class EmnistLinesDataset(Dataset):
"""Loads the dataset from the h5 file."""
logger.debug("EmnistLinesDataset loading data from HDF5...")
with h5py.File(self.data_filename, "r") as f:
- self.data = f["data"][:]
- self.targets = f["targets"][:]
+ self._data = f["data"][:]
+ self._targets = f["targets"][:]
def _generate_data(self) -> str:
"""Generates a dataset with the Brown corpus and Emnist characters."""
diff --git a/src/text_recognizer/datasets/iam_dataset.py b/src/text_recognizer/datasets/iam_dataset.py
index 5e47350..f4a869d 100644
--- a/src/text_recognizer/datasets/iam_dataset.py
+++ b/src/text_recognizer/datasets/iam_dataset.py
@@ -7,10 +7,8 @@ from boltons.cacheutils import cachedproperty
import defusedxml.ElementTree as ET
from loguru import logger
import toml
-from torch.utils.data import Dataset
-from text_recognizer.datasets import DATA_DIRNAME
-from text_recognizer.datasets.util import _download_raw_dataset
+from text_recognizer.datasets.util import _download_raw_dataset, DATA_DIRNAME
RAW_DATA_DIRNAME = DATA_DIRNAME / "raw" / "iam"
METADATA_FILENAME = RAW_DATA_DIRNAME / "metadata.toml"
@@ -20,7 +18,7 @@ DOWNSAMPLE_FACTOR = 2 # If images were downsampled, the regions must also be.
LINE_REGION_PADDING = 0 # Add this many pixels around the exact coordinates.
-class IamDataset(Dataset):
+class IamDataset:
"""IAM dataset.
"The IAM Lines dataset, first published at the ICDAR 1999, contains forms of unconstrained handwritten text,
diff --git a/src/text_recognizer/datasets/iam_lines_dataset.py b/src/text_recognizer/datasets/iam_lines_dataset.py
index 477f500..4a74b2b 100644
--- a/src/text_recognizer/datasets/iam_lines_dataset.py
+++ b/src/text_recognizer/datasets/iam_lines_dataset.py
@@ -5,11 +5,15 @@ import h5py
from loguru import logger
import torch
from torch import Tensor
-from torch.utils.data import Dataset
from torchvision.transforms import ToTensor
-from text_recognizer.datasets.emnist_dataset import DATA_DIRNAME, EmnistMapper
-from text_recognizer.datasets.util import compute_sha256, download_url
+from text_recognizer.datasets.dataset import Dataset
+from text_recognizer.datasets.util import (
+ compute_sha256,
+ DATA_DIRNAME,
+ download_url,
+ EmnistMapper,
+)
PROCESSED_DATA_DIRNAME = DATA_DIRNAME / "processed" / "iam_lines"
@@ -29,47 +33,26 @@ class IamLinesDataset(Dataset):
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
) -> None:
- self.train = train
- self.split = "train" if self.train else "test"
- self._mapper = EmnistMapper()
- self.num_classes = self.mapper.num_classes
-
- # Set transforms.
- self.transform = transform
- if self.transform is None:
- self.transform = ToTensor()
-
- self.target_transform = target_transform
- if self.target_transform is None:
- self.target_transform = torch.tensor
-
- self.subsample_fraction = subsample_fraction
- self.data = None
- self.targets = None
-
- @property
- def mapper(self) -> EmnistMapper:
- """Returns the EmnistMapper."""
- return self._mapper
-
- @property
- def mapping(self) -> Dict:
- """Return EMNIST mapping from index to character."""
- return self._mapper.mapping
+ super().__init__(
+ train=train,
+ subsample_fraction=subsample_fraction,
+ transform=transform,
+ target_transform=target_transform,
+ )
@property
def input_shape(self) -> Tuple:
"""Input shape of the data."""
- return self.data.shape[1:]
+ return self.data.shape[1:] if self.data is not None else None
@property
def output_shape(self) -> Tuple:
"""Output shape of the data."""
- return self.targets.shape[1:] + (self.num_classes,)
-
- def __len__(self) -> int:
- """Returns the length of the dataset."""
- return len(self.data)
+ return (
+ self.targets.shape[1:] + (self.num_classes,)
+ if self.targets is not None
+ else None
+ )
def load_or_generate_data(self) -> None:
"""Load or generate dataset data."""
@@ -78,19 +61,10 @@ class IamLinesDataset(Dataset):
logger.info("Downloading IAM lines...")
download_url(PROCESSED_DATA_URL, PROCESSED_DATA_FILENAME)
with h5py.File(PROCESSED_DATA_FILENAME, "r") as f:
- self.data = f[f"x_{self.split}"][:]
- self.targets = f[f"y_{self.split}"][:]
+ self._data = f[f"x_{self.split}"][:]
+ self._targets = f[f"y_{self.split}"][:]
self._subsample()
- def _subsample(self) -> None:
- """Only a fraction of the data will be loaded."""
- if self.subsample_fraction is None:
- return
-
- num_samples = int(self.data.shape[0] * self.subsample_fraction)
- self.data = self.data[:num_samples]
- self.targets = self.targets[:num_samples]
-
def __repr__(self) -> str:
"""Print info about the dataset."""
return (
diff --git a/src/text_recognizer/datasets/iam_paragraphs_dataset.py b/src/text_recognizer/datasets/iam_paragraphs_dataset.py
index d65b346..4b34bd1 100644
--- a/src/text_recognizer/datasets/iam_paragraphs_dataset.py
+++ b/src/text_recognizer/datasets/iam_paragraphs_dataset.py
@@ -8,13 +8,17 @@ from loguru import logger
import numpy as np
import torch
from torch import Tensor
-from torch.utils.data import Dataset
from torchvision.transforms import ToTensor
from text_recognizer import util
-from text_recognizer.datasets.emnist_dataset import DATA_DIRNAME, EmnistMapper
+from text_recognizer.datasets.dataset import Dataset
from text_recognizer.datasets.iam_dataset import IamDataset
-from text_recognizer.datasets.util import compute_sha256, download_url
+from text_recognizer.datasets.util import (
+ compute_sha256,
+ DATA_DIRNAME,
+ download_url,
+ EmnistMapper,
+)
INTERIM_DATA_DIRNAME = DATA_DIRNAME / "interim" / "iam_paragraphs"
DEBUG_CROPS_DIRNAME = INTERIM_DATA_DIRNAME / "debug_crops"
@@ -28,11 +32,7 @@ SEED = 4711
class IamParagraphsDataset(Dataset):
- """IAM Paragraphs dataset for paragraphs of handwritten text.
-
- TODO: __getitem__, __len__, get_data_target_from_id
-
- """
+ """IAM Paragraphs dataset for paragraphs of handwritten text."""
def __init__(
self,
@@ -41,34 +41,20 @@ class IamParagraphsDataset(Dataset):
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
) -> None:
-
+ super().__init__(
+ train=train,
+ subsample_fraction=subsample_fraction,
+ transform=transform,
+ target_transform=target_transform,
+ )
# Load Iam dataset.
self.iam_dataset = IamDataset()
- self.train = train
- self.split = "train" if self.train else "test"
self.num_classes = 3
self._input_shape = (256, 256)
self._output_shape = self._input_shape + (self.num_classes,)
- self.subsample_fraction = subsample_fraction
-
- # Set transforms.
- self.transform = transform
- if self.transform is None:
- self.transform = ToTensor()
-
- self.target_transform = target_transform
- if self.target_transform is None:
- self.target_transform = torch.tensor
-
- self._data = None
- self._targets = None
self._ids = None
- def __len__(self) -> int:
- """Returns the length of the dataset."""
- return len(self.data)
-
def __getitem__(self, index: Union[Tensor, int]) -> Tuple[Tensor, Tensor]:
"""Fetches data, target pair of the dataset for a given and index or indices.
@@ -94,26 +80,6 @@ class IamParagraphsDataset(Dataset):
return data, targets
@property
- def input_shape(self) -> Tuple:
- """Input shape of the data."""
- return self._input_shape
-
- @property
- def output_shape(self) -> Tuple:
- """Output shape of the data."""
- return self._output_shape
-
- @property
- def data(self) -> Tensor:
- """The input data."""
- return self._data
-
- @property
- def targets(self) -> Tensor:
- """The target data."""
- return self._targets
-
- @property
def ids(self) -> Tensor:
"""Ids of the dataset."""
return self._ids
@@ -201,14 +167,6 @@ class IamParagraphsDataset(Dataset):
logger.info(f"Setting them to {max_crop_width}x{max_crop_width}")
return crop_dims
- def _subsample(self) -> None:
- """Only this fraction of the data will be loaded."""
- if self.subsample_fraction is None:
- return
- num_subsample = int(self.data.shape[0] * self.subsample_fraction)
- self.data = self.data[:num_subsample]
- self.targets = self.targets[:num_subsample]
-
def __repr__(self) -> str:
"""Return info about the dataset."""
return (
diff --git a/src/text_recognizer/datasets/sentence_generator.py b/src/text_recognizer/datasets/sentence_generator.py
index ee86bd4..dd76652 100644
--- a/src/text_recognizer/datasets/sentence_generator.py
+++ b/src/text_recognizer/datasets/sentence_generator.py
@@ -9,7 +9,7 @@ import nltk
from nltk.corpus.reader.util import ConcatenatedCorpusView
import numpy as np
-from text_recognizer.datasets import DATA_DIRNAME
+from text_recognizer.datasets.util import DATA_DIRNAME
NLTK_DATA_DIRNAME = DATA_DIRNAME / "raw" / "nltk"
diff --git a/src/text_recognizer/datasets/util.py b/src/text_recognizer/datasets/util.py
index dd16bed..3acf5db 100644
--- a/src/text_recognizer/datasets/util.py
+++ b/src/text_recognizer/datasets/util.py
@@ -1,6 +1,7 @@
"""Util functions for datasets."""
import hashlib
import importlib
+import json
import os
from pathlib import Path
from typing import Callable, Dict, List, Optional, Type, Union
@@ -11,15 +12,129 @@ from loguru import logger
import numpy as np
from PIL import Image
from torch.utils.data import DataLoader, Dataset
+from torchvision.datasets import EMNIST
from tqdm import tqdm
+DATA_DIRNAME = Path(__file__).resolve().parents[3] / "data"
+ESSENTIALS_FILENAME = Path(__file__).resolve().parents[0] / "emnist_essentials.json"
-class Transpose:
- """Transposes the EMNIST image to the correct orientation."""
- def __call__(self, image: Image) -> np.ndarray:
- """Swaps axis."""
- return np.array(image).swapaxes(0, 1)
+def save_emnist_essentials(emnist_dataset: type = EMNIST) -> None:
+    """Extracts and saves the EMNIST essentials."""
+    labels = emnist_dataset.classes
+    labels.sort()
+    mapping = [(i, str(label)) for i, label in enumerate(labels)]
+    essentials = {
+        "mapping": mapping,
+        "input_shape": tuple(emnist_dataset[0][0].shape[:]),
+ }
+ logger.info("Saving emnist essentials...")
+ with open(ESSENTIALS_FILENAME, "w") as f:
+ json.dump(essentials, f)
+
+
+def download_emnist() -> None:
+ """Download the EMNIST dataset via the PyTorch class."""
+ logger.info(f"Data directory is: {DATA_DIRNAME}")
+ dataset = EMNIST(root=DATA_DIRNAME, split="byclass", download=True)
+ save_emnist_essentials(dataset)
+
+
+class EmnistMapper:
+ """Mapper between network output to Emnist character."""
+
+ def __init__(self) -> None:
+ """Loads the emnist essentials file with the mapping and input shape."""
+ self.essentials = self._load_emnist_essentials()
+        # Load dataset information.
+ self._mapping = self._augment_emnist_mapping(dict(self.essentials["mapping"]))
+ self._inverse_mapping = {v: k for k, v in self.mapping.items()}
+ self._num_classes = len(self.mapping)
+ self._input_shape = self.essentials["input_shape"]
+
+ def __call__(self, token: Union[str, int, np.uint8]) -> Union[str, int]:
+ """Maps the token to emnist character or character index.
+
+ If the token is an integer (index), the method will return the Emnist character corresponding to that index.
+ If the token is a str (Emnist character), the method will return the corresponding index for that character.
+
+ Args:
+            token (Union[str, int, np.uint8]): Either a string (character) or an index (integer).
+
+ Returns:
+ Union[str, int]: The mapping result.
+
+ Raises:
+ KeyError: If the index or string does not exist in the mapping.
+
+ """
+ if (isinstance(token, np.uint8) or isinstance(token, int)) and int(
+ token
+ ) in self.mapping:
+ return self.mapping[int(token)]
+ elif isinstance(token, str) and token in self._inverse_mapping:
+ return self._inverse_mapping[token]
+ else:
+ raise KeyError(f"Token {token} does not exist in the mappings.")
+
+ @property
+ def mapping(self) -> Dict:
+ """Returns the mapping between index and character."""
+ return self._mapping
+
+ @property
+ def inverse_mapping(self) -> Dict:
+ """Returns the mapping between character and index."""
+ return self._inverse_mapping
+
+ @property
+ def num_classes(self) -> int:
+ """Returns the number of classes in the dataset."""
+ return self._num_classes
+
+ @property
+ def input_shape(self) -> List[int]:
+ """Returns the input shape of the Emnist characters."""
+ return self._input_shape
+
+ def _load_emnist_essentials(self) -> Dict:
+ """Load the EMNIST mapping."""
+ with open(str(ESSENTIALS_FILENAME)) as f:
+ essentials = json.load(f)
+ return essentials
+
+ def _augment_emnist_mapping(self, mapping: Dict) -> Dict:
+ """Augment the mapping with extra symbols."""
+ # Extra symbols in IAM dataset
+ extra_symbols = [
+ " ",
+ "!",
+ '"',
+ "#",
+ "&",
+ "'",
+ "(",
+ ")",
+ "*",
+ "+",
+ ",",
+ "-",
+ ".",
+ "/",
+ ":",
+ ";",
+ "?",
+ ]
+
+ # padding symbol
+ extra_symbols.append("_")
+
+ max_key = max(mapping.keys())
+ extra_mapping = {}
+ for i, symbol in enumerate(extra_symbols):
+ extra_mapping[max_key + 1 + i] = symbol
+
+ return {**mapping, **extra_mapping}
def compute_sha256(filename: Union[Path, str]) -> str:
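With EmnistMapper now living in util.py, a quick illustration of its two-way mapping; this assumes emnist_essentials.json has been generated, for example by download_emnist():

from text_recognizer.datasets.util import EmnistMapper

mapper = EmnistMapper()
char = mapper(10)     # Index to character.
index = mapper(char)  # Character back to index, i.e. 10 again.
print(mapper.num_classes)  # 62 EMNIST classes plus the extra IAM symbols.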
diff --git a/src/text_recognizer/models/base.py b/src/text_recognizer/models/base.py
index 153e19a..d23fe56 100644
--- a/src/text_recognizer/models/base.py
+++ b/src/text_recognizer/models/base.py
@@ -140,6 +140,7 @@ class Model(ABC):
if not self.data_prepared:
# Load train dataset.
train_dataset = self.dataset(train=True, **self.dataset_args["args"])
+ train_dataset.load_or_generate_data()
# Set input shape.
self._input_shape = train_dataset.input_shape
@@ -156,6 +157,7 @@ class Model(ABC):
# Load test dataset.
self.test_dataset = self.dataset(train=False, **self.dataset_args["args"])
+ self.test_dataset.load_or_generate_data()
         # Set the flag to true to disable the ability to load data again.
self.data_prepared = True
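The two added load_or_generate_data calls are required because dataset construction no longer loads anything; without them len(dataset) would fail on empty data. A sketch of the contract from the caller's side, using one of the concrete datasets:

from torch.utils.data import DataLoader

from text_recognizer.datasets import IamLinesDataset

dataset = IamLinesDataset(train=False)
assert dataset.data is None      # Nothing is loaded at construction time.
dataset.load_or_generate_data()  # Download if needed, load, then subsample.
loader = DataLoader(dataset, batch_size=32, shuffle=False)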
diff --git a/src/text_recognizer/networks/ctc.py b/src/text_recognizer/networks/ctc.py
index fc0d21d..72f18b8 100644
--- a/src/text_recognizer/networks/ctc.py
+++ b/src/text_recognizer/networks/ctc.py
@@ -5,7 +5,7 @@ from einops import rearrange
import torch
from torch import Tensor
-from text_recognizer.datasets import EmnistMapper
+from text_recognizer.datasets.util import EmnistMapper
def greedy_decoder(