{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "%load_ext autoreload\n", "%autoreload 2\n", "\n", "%matplotlib inline\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", "from PIL import Image\n", "import torch\n", "from importlib.util import find_spec\n", "if find_spec(\"text_recognizer\") is None:\n", " import sys\n", " sys.path.append('..')" ] }, { "cell_type": "code", "execution_count": 68, "metadata": {}, "outputs": [], "source": [ "from text_recognizer.networks.residual_network import IdentityBlock, ResidualBlock, BasicBlock, BottleNeckBlock, ResidualLayer, Encoder, ResidualNetwork" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "IdentityBlock(32, 64)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "ResidualBlock(32, 64)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "dummy = torch.ones((1, 32, 224, 224))\n", "\n", "block = BasicBlock(32, 64)\n", "block(dummy).shape\n", "print(block)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "dummy = torch.ones((1, 32, 10, 10))\n", "\n", "block = BottleNeckBlock(32, 64)\n", "block(dummy).shape\n", "print(block)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "dummy = torch.ones((1, 64, 48, 48))\n", "\n", "layer = ResidualLayer(64, 128, block=BasicBlock, num_blocks=3)\n", "layer(dummy).shape" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "blocks_sizes=[64, 128, 256, 512]\n", "list(zip(blocks_sizes, blocks_sizes[1:]))" ] }, { "cell_type": "code", "execution_count": 69, "metadata": {}, "outputs": [], "source": [ "e = Encoder(depths=[2, 1], block_sizes= [96, 128])" ] }, { "cell_type": "code", "execution_count": 75, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "Encoder(\n", " (gate): Sequential(\n", " (0): Conv2d(1, 96, kernel_size=(3, 3), stride=(2, 2), padding=(3, 3), bias=False)\n", " (1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " (2): ReLU(inplace=True)\n", " (3): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n", " )\n", " (blocks): Sequential(\n", " (0): ResidualLayer(\n", " (blocks): Sequential(\n", " (0): BasicBlock(\n", " (blocks): Sequential(\n", " (0): Sequential(\n", " (0): Conv2dAuto(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " )\n", " (1): ReLU(inplace=True)\n", " (2): Sequential(\n", " (0): Conv2dAuto(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " )\n", " )\n", " (activation_fn): ReLU(inplace=True)\n", " (shortcut): None\n", " )\n", " (1): BasicBlock(\n", " (blocks): Sequential(\n", " (0): Sequential(\n", " (0): Conv2dAuto(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " )\n", " (1): ReLU(inplace=True)\n", " (2): Sequential(\n", " (0): Conv2dAuto(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " )\n", " )\n", " 
(activation_fn): ReLU(inplace=True)\n", " (shortcut): None\n", " )\n", " )\n", " )\n", " (1): ResidualLayer(\n", " (blocks): Sequential(\n", " (0): BasicBlock(\n", " (blocks): Sequential(\n", " (0): Sequential(\n", " (0): Conv2dAuto(96, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", " (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " )\n", " (1): ReLU(inplace=True)\n", " (2): Sequential(\n", " (0): Conv2dAuto(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", " (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " )\n", " )\n", " (activation_fn): ReLU(inplace=True)\n", " (shortcut): Sequential(\n", " (0): Conv2d(96, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", " (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", " )\n", " )\n", " )\n", " )\n", " )\n", ")" ] }, "execution_count": 75, "metadata": {}, "output_type": "execute_result" } ], "source": [ "Encoder(**{\"depths\": [2, 1], \"block_sizes\": [96, 128]})" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "from text_recognizer.networks import WideResidualNetwork" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [], "source": [ "wr = WideResidualNetwork(\n", " in_channels= 1,\n", " num_classes= 80,\n", " depth= 16,\n", " num_layers= 4,\n", " width_factor= 2,\n", " dropout_rate= 0.2,\n", " activation= \"SELU\",\n", " use_decoder= False,\n", ")" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [], "source": [ "from torchsummary import summary" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "===============================================================================================\n", "Layer (type:depth-idx) Output Shape Param #\n", "===============================================================================================\n", "├─Sequential: 1-1 [-1, 256, 4, 2] --\n", "| └─Conv2d: 2-1 [-1, 16, 28, 14] 144\n", "| └─Sequential: 2-2 [-1, 32, 28, 14] --\n", "| | └─WideBlock: 3-1 [-1, 32, 28, 14] --\n", "| | | └─Sequential: 4-1 [-1, 32, 28, 14] --\n", "| | | | └─Conv2d: 5-1 [-1, 32, 28, 14] 512\n", "| | | └─Sequential: 4-2 [-1, 32, 28, 14] --\n", "| | | | └─BatchNorm2d: 5-2 [-1, 16, 28, 14] 32\n", "| | | └─SELU: 4-3 [-1, 16, 28, 14] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-3 [-1, 16, 28, 14] --\n", "| | | | └─Conv2d: 5-4 [-1, 32, 28, 14] 4,608\n", "| | | | └─Dropout: 5-5 [-1, 32, 28, 14] --\n", "| | | | └─BatchNorm2d: 5-6 [-1, 32, 28, 14] 64\n", "| | | └─SELU: 4-4 [-1, 32, 28, 14] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-7 [-1, 32, 28, 14] --\n", "| | | | └─Conv2d: 5-8 [-1, 32, 28, 14] 9,216\n", "| | └─WideBlock: 3-2 [-1, 32, 28, 14] --\n", "| | | └─Sequential: 4-5 [-1, 32, 28, 14] --\n", "| | | | └─BatchNorm2d: 5-9 [-1, 32, 28, 14] 64\n", "| | | └─SELU: 4-6 [-1, 32, 28, 14] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-10 [-1, 32, 28, 14] --\n", "| | | | └─Conv2d: 5-11 [-1, 32, 28, 14] 9,216\n", "| | | | └─Dropout: 5-12 [-1, 32, 28, 14] --\n", "| | | | └─BatchNorm2d: 5-13 [-1, 32, 28, 14] 64\n", "| | | └─SELU: 4-7 [-1, 32, 28, 14] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-14 [-1, 32, 28, 14] --\n", "| | | | └─Conv2d: 5-15 [-1, 32, 28, 14] 9,216\n", "| └─Sequential: 2-3 [-1, 64, 14, 7] --\n", "| | └─WideBlock: 
3-3 [-1, 64, 14, 7] --\n", "| | | └─Sequential: 4-8 [-1, 64, 14, 7] --\n", "| | | | └─Conv2d: 5-16 [-1, 64, 14, 7] 2,048\n", "| | | └─Sequential: 4-9 [-1, 64, 14, 7] --\n", "| | | | └─BatchNorm2d: 5-17 [-1, 32, 28, 14] 64\n", "| | | └─SELU: 4-10 [-1, 32, 28, 14] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-18 [-1, 32, 28, 14] --\n", "| | | | └─Conv2d: 5-19 [-1, 64, 28, 14] 18,432\n", "| | | | └─Dropout: 5-20 [-1, 64, 28, 14] --\n", "| | | | └─BatchNorm2d: 5-21 [-1, 64, 28, 14] 128\n", "| | | └─SELU: 4-11 [-1, 64, 28, 14] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-22 [-1, 64, 28, 14] --\n", "| | | | └─Conv2d: 5-23 [-1, 64, 14, 7] 36,864\n", "| | └─WideBlock: 3-4 [-1, 64, 14, 7] --\n", "| | | └─Sequential: 4-12 [-1, 64, 14, 7] --\n", "| | | | └─BatchNorm2d: 5-24 [-1, 64, 14, 7] 128\n", "| | | └─SELU: 4-13 [-1, 64, 14, 7] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-25 [-1, 64, 14, 7] --\n", "| | | | └─Conv2d: 5-26 [-1, 64, 14, 7] 36,864\n", "| | | | └─Dropout: 5-27 [-1, 64, 14, 7] --\n", "| | | | └─BatchNorm2d: 5-28 [-1, 64, 14, 7] 128\n", "| | | └─SELU: 4-14 [-1, 64, 14, 7] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-29 [-1, 64, 14, 7] --\n", "| | | | └─Conv2d: 5-30 [-1, 64, 14, 7] 36,864\n", "| └─Sequential: 2-4 [-1, 128, 7, 4] --\n", "| | └─WideBlock: 3-5 [-1, 128, 7, 4] --\n", "| | | └─Sequential: 4-15 [-1, 128, 7, 4] --\n", "| | | | └─Conv2d: 5-31 [-1, 128, 7, 4] 8,192\n", "| | | └─Sequential: 4-16 [-1, 128, 7, 4] --\n", "| | | | └─BatchNorm2d: 5-32 [-1, 64, 14, 7] 128\n", "| | | └─SELU: 4-17 [-1, 64, 14, 7] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-33 [-1, 64, 14, 7] --\n", "| | | | └─Conv2d: 5-34 [-1, 128, 14, 7] 73,728\n", "| | | | └─Dropout: 5-35 [-1, 128, 14, 7] --\n", "| | | | └─BatchNorm2d: 5-36 [-1, 128, 14, 7] 256\n", "| | | └─SELU: 4-18 [-1, 128, 14, 7] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-37 [-1, 128, 14, 7] --\n", "| | | | └─Conv2d: 5-38 [-1, 128, 7, 4] 147,456\n", "| | └─WideBlock: 3-6 [-1, 128, 7, 4] --\n", "| | | └─Sequential: 4-19 [-1, 128, 7, 4] --\n", "| | | | └─BatchNorm2d: 5-39 [-1, 128, 7, 4] 256\n", "| | | └─SELU: 4-20 [-1, 128, 7, 4] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-40 [-1, 128, 7, 4] --\n", "| | | | └─Conv2d: 5-41 [-1, 128, 7, 4] 147,456\n", "| | | | └─Dropout: 5-42 [-1, 128, 7, 4] --\n", "| | | | └─BatchNorm2d: 5-43 [-1, 128, 7, 4] 256\n", "| | | └─SELU: 4-21 [-1, 128, 7, 4] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-44 [-1, 128, 7, 4] --\n", "| | | | └─Conv2d: 5-45 [-1, 128, 7, 4] 147,456\n", "| └─Sequential: 2-5 [-1, 256, 4, 2] --\n", "| | └─WideBlock: 3-7 [-1, 256, 4, 2] --\n", "| | | └─Sequential: 4-22 [-1, 256, 4, 2] --\n", "| | | | └─Conv2d: 5-46 [-1, 256, 4, 2] 32,768\n", "| | | └─Sequential: 4-23 [-1, 256, 4, 2] --\n", "| | | | └─BatchNorm2d: 5-47 [-1, 128, 7, 4] 256\n", "| | | └─SELU: 4-24 [-1, 128, 7, 4] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-48 [-1, 128, 7, 4] --\n", "| | | | └─Conv2d: 5-49 [-1, 256, 7, 4] 294,912\n", "| | | | └─Dropout: 5-50 [-1, 256, 7, 4] --\n", "| | | | └─BatchNorm2d: 5-51 [-1, 256, 7, 4] 512\n", "| | | └─SELU: 4-25 [-1, 256, 7, 4] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-52 [-1, 256, 7, 4] --\n", "| | | | └─Conv2d: 5-53 [-1, 256, 4, 2] 589,824\n", "| | └─WideBlock: 3-8 [-1, 256, 4, 2] --\n", "| | | └─Sequential: 4-26 [-1, 256, 4, 2] --\n", "| | | | └─BatchNorm2d: 5-54 [-1, 256, 4, 2] 512\n", "| | | └─SELU: 4-27 [-1, 256, 4, 2] --\n", "| | | └─Sequential: 
4 [] --\n", "| | | | └─SELU: 5-55 [-1, 256, 4, 2] --\n", "| | | | └─Conv2d: 5-56 [-1, 256, 4, 2] 589,824\n", "| | | | └─Dropout: 5-57 [-1, 256, 4, 2] --\n", "| | | | └─BatchNorm2d: 5-58 [-1, 256, 4, 2] 512\n", "| | | └─SELU: 4-28 [-1, 256, 4, 2] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-59 [-1, 256, 4, 2] --\n", "| | | | └─Conv2d: 5-60 [-1, 256, 4, 2] 589,824\n", "===============================================================================================\n", "Total params: 2,788,784\n", "Trainable params: 2,788,784\n", "Non-trainable params: 0\n", "Total mult-adds (M): 84.83\n", "===============================================================================================\n", "Input size (MB): 0.00\n", "Forward/backward pass size (MB): 2.26\n", "Params size (MB): 10.64\n", "Estimated Total Size (MB): 12.90\n", "===============================================================================================\n" ] }, { "data": { "text/plain": [ "===============================================================================================\n", "Layer (type:depth-idx) Output Shape Param #\n", "===============================================================================================\n", "├─Sequential: 1-1 [-1, 256, 4, 2] --\n", "| └─Conv2d: 2-1 [-1, 16, 28, 14] 144\n", "| └─Sequential: 2-2 [-1, 32, 28, 14] --\n", "| | └─WideBlock: 3-1 [-1, 32, 28, 14] --\n", "| | | └─Sequential: 4-1 [-1, 32, 28, 14] --\n", "| | | | └─Conv2d: 5-1 [-1, 32, 28, 14] 512\n", "| | | └─Sequential: 4-2 [-1, 32, 28, 14] --\n", "| | | | └─BatchNorm2d: 5-2 [-1, 16, 28, 14] 32\n", "| | | └─SELU: 4-3 [-1, 16, 28, 14] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-3 [-1, 16, 28, 14] --\n", "| | | | └─Conv2d: 5-4 [-1, 32, 28, 14] 4,608\n", "| | | | └─Dropout: 5-5 [-1, 32, 28, 14] --\n", "| | | | └─BatchNorm2d: 5-6 [-1, 32, 28, 14] 64\n", "| | | └─SELU: 4-4 [-1, 32, 28, 14] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-7 [-1, 32, 28, 14] --\n", "| | | | └─Conv2d: 5-8 [-1, 32, 28, 14] 9,216\n", "| | └─WideBlock: 3-2 [-1, 32, 28, 14] --\n", "| | | └─Sequential: 4-5 [-1, 32, 28, 14] --\n", "| | | | └─BatchNorm2d: 5-9 [-1, 32, 28, 14] 64\n", "| | | └─SELU: 4-6 [-1, 32, 28, 14] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-10 [-1, 32, 28, 14] --\n", "| | | | └─Conv2d: 5-11 [-1, 32, 28, 14] 9,216\n", "| | | | └─Dropout: 5-12 [-1, 32, 28, 14] --\n", "| | | | └─BatchNorm2d: 5-13 [-1, 32, 28, 14] 64\n", "| | | └─SELU: 4-7 [-1, 32, 28, 14] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-14 [-1, 32, 28, 14] --\n", "| | | | └─Conv2d: 5-15 [-1, 32, 28, 14] 9,216\n", "| └─Sequential: 2-3 [-1, 64, 14, 7] --\n", "| | └─WideBlock: 3-3 [-1, 64, 14, 7] --\n", "| | | └─Sequential: 4-8 [-1, 64, 14, 7] --\n", "| | | | └─Conv2d: 5-16 [-1, 64, 14, 7] 2,048\n", "| | | └─Sequential: 4-9 [-1, 64, 14, 7] --\n", "| | | | └─BatchNorm2d: 5-17 [-1, 32, 28, 14] 64\n", "| | | └─SELU: 4-10 [-1, 32, 28, 14] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-18 [-1, 32, 28, 14] --\n", "| | | | └─Conv2d: 5-19 [-1, 64, 28, 14] 18,432\n", "| | | | └─Dropout: 5-20 [-1, 64, 28, 14] --\n", "| | | | └─BatchNorm2d: 5-21 [-1, 64, 28, 14] 128\n", "| | | └─SELU: 4-11 [-1, 64, 28, 14] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-22 [-1, 64, 28, 14] --\n", "| | | | └─Conv2d: 5-23 [-1, 64, 14, 7] 36,864\n", "| | └─WideBlock: 3-4 [-1, 64, 14, 7] --\n", "| | | └─Sequential: 4-12 [-1, 64, 14, 7] --\n", "| | | | └─BatchNorm2d: 5-24 [-1, 64, 14, 7] 128\n", "| | | └─SELU: 4-13 [-1, 64, 
14, 7] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-25 [-1, 64, 14, 7] --\n", "| | | | └─Conv2d: 5-26 [-1, 64, 14, 7] 36,864\n", "| | | | └─Dropout: 5-27 [-1, 64, 14, 7] --\n", "| | | | └─BatchNorm2d: 5-28 [-1, 64, 14, 7] 128\n", "| | | └─SELU: 4-14 [-1, 64, 14, 7] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-29 [-1, 64, 14, 7] --\n", "| | | | └─Conv2d: 5-30 [-1, 64, 14, 7] 36,864\n", "| └─Sequential: 2-4 [-1, 128, 7, 4] --\n", "| | └─WideBlock: 3-5 [-1, 128, 7, 4] --\n", "| | | └─Sequential: 4-15 [-1, 128, 7, 4] --\n", "| | | | └─Conv2d: 5-31 [-1, 128, 7, 4] 8,192\n", "| | | └─Sequential: 4-16 [-1, 128, 7, 4] --\n", "| | | | └─BatchNorm2d: 5-32 [-1, 64, 14, 7] 128\n", "| | | └─SELU: 4-17 [-1, 64, 14, 7] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-33 [-1, 64, 14, 7] --\n", "| | | | └─Conv2d: 5-34 [-1, 128, 14, 7] 73,728\n", "| | | | └─Dropout: 5-35 [-1, 128, 14, 7] --\n", "| | | | └─BatchNorm2d: 5-36 [-1, 128, 14, 7] 256\n", "| | | └─SELU: 4-18 [-1, 128, 14, 7] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-37 [-1, 128, 14, 7] --\n", "| | | | └─Conv2d: 5-38 [-1, 128, 7, 4] 147,456\n", "| | └─WideBlock: 3-6 [-1, 128, 7, 4] --\n", "| | | └─Sequential: 4-19 [-1, 128, 7, 4] --\n", "| | | | └─BatchNorm2d: 5-39 [-1, 128, 7, 4] 256\n", "| | | └─SELU: 4-20 [-1, 128, 7, 4] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-40 [-1, 128, 7, 4] --\n", "| | | | └─Conv2d: 5-41 [-1, 128, 7, 4] 147,456\n", "| | | | └─Dropout: 5-42 [-1, 128, 7, 4] --\n", "| | | | └─BatchNorm2d: 5-43 [-1, 128, 7, 4] 256\n", "| | | └─SELU: 4-21 [-1, 128, 7, 4] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-44 [-1, 128, 7, 4] --\n", "| | | | └─Conv2d: 5-45 [-1, 128, 7, 4] 147,456\n", "| └─Sequential: 2-5 [-1, 256, 4, 2] --\n", "| | └─WideBlock: 3-7 [-1, 256, 4, 2] --\n", "| | | └─Sequential: 4-22 [-1, 256, 4, 2] --\n", "| | | | └─Conv2d: 5-46 [-1, 256, 4, 2] 32,768\n", "| | | └─Sequential: 4-23 [-1, 256, 4, 2] --\n", "| | | | └─BatchNorm2d: 5-47 [-1, 128, 7, 4] 256\n", "| | | └─SELU: 4-24 [-1, 128, 7, 4] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-48 [-1, 128, 7, 4] --\n", "| | | | └─Conv2d: 5-49 [-1, 256, 7, 4] 294,912\n", "| | | | └─Dropout: 5-50 [-1, 256, 7, 4] --\n", "| | | | └─BatchNorm2d: 5-51 [-1, 256, 7, 4] 512\n", "| | | └─SELU: 4-25 [-1, 256, 7, 4] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-52 [-1, 256, 7, 4] --\n", "| | | | └─Conv2d: 5-53 [-1, 256, 4, 2] 589,824\n", "| | └─WideBlock: 3-8 [-1, 256, 4, 2] --\n", "| | | └─Sequential: 4-26 [-1, 256, 4, 2] --\n", "| | | | └─BatchNorm2d: 5-54 [-1, 256, 4, 2] 512\n", "| | | └─SELU: 4-27 [-1, 256, 4, 2] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-55 [-1, 256, 4, 2] --\n", "| | | | └─Conv2d: 5-56 [-1, 256, 4, 2] 589,824\n", "| | | | └─Dropout: 5-57 [-1, 256, 4, 2] --\n", "| | | | └─BatchNorm2d: 5-58 [-1, 256, 4, 2] 512\n", "| | | └─SELU: 4-28 [-1, 256, 4, 2] --\n", "| | | └─Sequential: 4 [] --\n", "| | | | └─SELU: 5-59 [-1, 256, 4, 2] --\n", "| | | | └─Conv2d: 5-60 [-1, 256, 4, 2] 589,824\n", "===============================================================================================\n", "Total params: 2,788,784\n", "Trainable params: 2,788,784\n", "Non-trainable params: 0\n", "Total mult-adds (M): 84.83\n", "===============================================================================================\n", "Input size (MB): 0.00\n", "Forward/backward pass size (MB): 2.26\n", "Params size (MB): 10.64\n", "Estimated Total Size (MB): 12.90\n", 
"===============================================================================================" ] }, "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], "source": [ "summary(wr, (1, 28, 14), device=\"cpu\", depth=10)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.2" } }, "nbformat": 4, "nbformat_minor": 4 }