resnet50


Find this model in the ResNet model summary
resnet50 Model Summary Plots





resnet50 Model Selected Details
  layer_type N M Q alpha D_ks alpha-hat log_SN % Rand num_traps num_fingers rank_loss
layer_id                        
1 CONV2D 64 3 21.33 2.13 0.10 3.05 1.43 88.37 90 0 0
7 CONV2D 64 64 1.00 2.52 0.21 1.51 0.60 90.51 4 7 0
9 CONV2D 64 64 1.00 1.88 0.06 1.54 0.82 82.27 22 0 4
11 CONV2D 256 64 4.00 3.09 0.11 3.34 1.08 80.94 1 0 0
15 CONV2D 256 64 4.00 3.68 0.09 5.35 1.45 80.62 7 0 0
18 CONV2D 256 64 4.00 3.60 0.14 2.04 0.57 86.08 0 0 1
20 CONV2D 64 64 1.00 1.89 0.08 0.96 0.51 85.48 21 0 9
22 CONV2D 256 64 4.00 4.42 0.12 2.21 0.50 90.36 1 0 0
26 CONV2D 256 64 4.00 3.84 0.14 2.00 0.52 85.82 0 0 0
28 CONV2D 64 64 1.00 1.97 0.11 0.58 0.29 85.32 17 5 4
30 CONV2D 256 64 4.00 6.00 0.07 2.73 0.45 90.86 0 0 0
35 CONV2D 256 128 2.00 1.90 0.15 1.43 0.75 86.41 0 1 0
37 CONV2D 128 128 1.00 3.10 0.08 0.23 0.07 90.81 16 0 5
39 CONV2D 512 128 4.00 10.76 0.15 5.53 0.51 89.65 0 1 0
43 CONV2D 512 256 2.00 2.20 0.06 2.12 0.97 72.98 2 0 0
46 CONV2D 512 128 4.00 2.48 0.11 1.21 0.49 81.13 0 0 0
48 CONV2D 128 128 1.00 1.94 0.05 0.92 0.48 76.15 26 0 6
50 CONV2D 512 128 4.00 5.95 0.08 4.19 0.70 85.21 0 0 0
54 CONV2D 512 128 4.00 3.39 0.09 1.63 0.48 85.28 0 0 0
56 CONV2D 128 128 1.00 2.03 0.07 0.53 0.26 82.88 18 0 3
58 CONV2D 512 128 4.00 4.43 0.09 2.44 0.55 86.67 0 0 0
62 CONV2D 512 128 4.00 2.44 0.14 1.46 0.60 85.43 0 3 0
64 CONV2D 128 128 1.00 2.52 0.09 0.54 0.22 85.33 16 0 2
66 CONV2D 512 128 4.00 3.50 0.08 2.21 0.63 86.21 0 0 0
71 CONV2D 512 256 2.00 2.90 0.11 2.76 0.95 82.46 0 1 0
73 CONV2D 256 256 1.00 4.60 0.08 -0.12 -0.03 95.56 34 0 9
75 CONV2D 1024 256 4.00 4.35 0.11 3.10 0.71 88.04 0 1 0
79 CONV2D 1024 512 2.00 2.72 0.06 2.20 0.81 78.49 4 0 0
82 CONV2D 1024 256 4.00 3.26 0.13 1.60 0.49 83.10 0 5 0
84 CONV2D 256 256 1.00 2.54 0.07 1.12 0.44 82.38 37 0 8
86 CONV2D 1024 256 4.00 3.60 0.04 2.94 0.82 84.86 2 0 0
90 CONV2D 1024 256 4.00 3.14 0.13 1.49 0.47 81.90 0 2 0
92 CONV2D 256 256 1.00 2.30 0.08 0.49 0.21 82.44 16 1 13
94 CONV2D 1024 256 4.00 3.45 0.10 2.17 0.63 83.93 0 1 0
98 CONV2D 1024 256 4.00 3.67 0.11 2.10 0.57 87.36 0 1 0
100 CONV2D 256 256 1.00 2.14 0.07 0.43 0.20 84.64 25 0 8
102 CONV2D 1024 256 4.00 4.11 0.07 2.83 0.69 84.56 1 0 0
106 CONV2D 1024 256 4.00 3.50 0.09 1.91 0.55 86.61 0 0 0
108 CONV2D 256 256 1.00 2.85 0.06 0.43 0.15 85.47 16 0 9
110 CONV2D 1024 256 4.00 4.79 0.06 3.50 0.73 83.97 1 0 0
114 CONV2D 1024 256 4.00 4.59 0.09 2.97 0.65 89.60 0 1 0
116 CONV2D 256 256 1.00 3.15 0.04 0.70 0.22 88.43 29 0 8
118 CONV2D 1024 256 4.00 4.12 0.03 3.94 0.95 87.04 1 0 0
123 CONV2D 1024 512 2.00 2.89 0.06 3.19 1.10 84.46 0 0 0
125 CONV2D 512 512 1.00 5.80 0.04 2.13 0.37 96.71 49 0 10
127 CONV2D 2048 512 4.00 6.17 0.05 7.97 1.29 93.84 1 0 0
131 CONV2D 2048 1024 2.00 3.85 0.02 3.41 0.89 87.35 1 0 0
134 CONV2D 2048 512 4.00 6.29 0.05 5.96 0.95 92.36 1 0 0
136 CONV2D 512 512 1.00 3.57 0.02 3.00 0.84 88.31 28 0 13
138 CONV2D 2048 512 4.00 4.16 0.05 5.37 1.29 89.02 1 0 0
142 CONV2D 2048 512 4.00 4.65 0.07 6.32 1.36 89.96 1 0 0
144 CONV2D 512 512 1.00 3.27 0.03 2.66 0.82 86.26 55 0 12
146 CONV2D 2048 512 4.00 2.97 0.02 5.56 1.87 78.28 1 0 0
150 DENSE 2048 1000 2.05 4.41 0.03 6.78 1.54 94.41 20 0 0

resnet50 Layer Plots
Layer 1
   Layer=1  |  N=64  |  M=3  |  Q=21.33  |  alpha=2.13  |  D_ks=0.10  |  alpha-hat=3.05  |  num traps=90









Layer 7
   Layer=7  |  N=64  |  M=64  |  Q=1.00  |  alpha=2.52  |  D_ks=0.21  |  alpha-hat=1.51  |  num traps=4









Layer 9
   Layer=9  |  N=64  |  M=64  |  Q=1.00  |  alpha=1.88  |  D_ks=0.06  |  alpha-hat=1.54  |  num traps=22









Layer 11
   Layer=11  |  N=256  |  M=64  |  Q=4.00  |  alpha=3.09  |  D_ks=0.11  |  alpha-hat=3.34  |  num traps=1









Layer 15
   Layer=15  |  N=256  |  M=64  |  Q=4.00  |  alpha=3.68  |  D_ks=0.09  |  alpha-hat=5.35  |  num traps=7









Layer 18
   Layer=18  |  N=256  |  M=64  |  Q=4.00  |  alpha=3.60  |  D_ks=0.14  |  alpha-hat=2.04  |  num traps=0









Layer 20
   Layer=20  |  N=64  |  M=64  |  Q=1.00  |  alpha=1.89  |  D_ks=0.08  |  alpha-hat=0.96  |  num traps=21









Layer 22
   Layer=22  |  N=256  |  M=64  |  Q=4.00  |  alpha=4.42  |  D_ks=0.12  |  alpha-hat=2.21  |  num traps=1









Layer 26
   Layer=26  |  N=256  |  M=64  |  Q=4.00  |  alpha=3.84  |  D_ks=0.14  |  alpha-hat=2.00  |  num traps=0









Layer 28
   Layer=28  |  N=64  |  M=64  |  Q=1.00  |  alpha=1.97  |  D_ks=0.11  |  alpha-hat=0.58  |  num traps=17









Layer 30
   Layer=30  |  N=256  |  M=64  |  Q=4.00  |  alpha=6.00  |  D_ks=0.07  |  alpha-hat=2.73  |  num traps=0









Layer 35
   Layer=35  |  N=256  |  M=128  |  Q=2.00  |  alpha=1.90  |  D_ks=0.15  |  alpha-hat=1.43  |  num traps=0









Layer 37
   Layer=37  |  N=128  |  M=128  |  Q=1.00  |  alpha=3.10  |  D_ks=0.08  |  alpha-hat=0.23  |  num traps=16









Layer 39
   Layer=39  |  N=512  |  M=128  |  Q=4.00  |  alpha=10.76  |  D_ks=0.15  |  alpha-hat=5.53  |  num traps=0









Layer 43
   Layer=43  |  N=512  |  M=256  |  Q=2.00  |  alpha=2.20  |  D_ks=0.06  |  alpha-hat=2.12  |  num traps=2









Layer 46
   Layer=46  |  N=512  |  M=128  |  Q=4.00  |  alpha=2.48  |  D_ks=0.11  |  alpha-hat=1.21  |  num traps=0









Layer 48
   Layer=48  |  N=128  |  M=128  |  Q=1.00  |  alpha=1.94  |  D_ks=0.05  |  alpha-hat=0.92  |  num traps=26









Layer 50
   Layer=50  |  N=512  |  M=128  |  Q=4.00  |  alpha=5.95  |  D_ks=0.08  |  alpha-hat=4.19  |  num traps=0









Layer 54
   Layer=54  |  N=512  |  M=128  |  Q=4.00  |  alpha=3.39  |  D_ks=0.09  |  alpha-hat=1.63  |  num traps=0









Layer 56
   Layer=56  |  N=128  |  M=128  |  Q=1.00  |  alpha=2.03  |  D_ks=0.07  |  alpha-hat=0.53  |  num traps=18









Layer 58
   Layer=58  |  N=512  |  M=128  |  Q=4.00  |  alpha=4.43  |  D_ks=0.09  |  alpha-hat=2.44  |  num traps=0









Layer 62
   Layer=62  |  N=512  |  M=128  |  Q=4.00  |  alpha=2.44  |  D_ks=0.14  |  alpha-hat=1.46  |  num traps=0









Layer 64
   Layer=64  |  N=128  |  M=128  |  Q=1.00  |  alpha=2.52  |  D_ks=0.09  |  alpha-hat=0.54  |  num traps=16









Layer 66
   Layer=66  |  N=512  |  M=128  |  Q=4.00  |  alpha=3.50  |  D_ks=0.08  |  alpha-hat=2.21  |  num traps=0









Layer 71
   Layer=71  |  N=512  |  M=256  |  Q=2.00  |  alpha=2.90  |  D_ks=0.11  |  alpha-hat=2.76  |  num traps=0









Layer 73
   Layer=73  |  N=256  |  M=256  |  Q=1.00  |  alpha=4.60  |  D_ks=0.08  |  alpha-hat=-0.12  |  num traps=34









Layer 75
   Layer=75  |  N=1024  |  M=256  |  Q=4.00  |  alpha=4.35  |  D_ks=0.11  |  alpha-hat=3.10  |  num traps=0









Layer 79
   Layer=79  |  N=1024  |  M=512  |  Q=2.00  |  alpha=2.72  |  D_ks=0.06  |  alpha-hat=2.20  |  num traps=4









Layer 82
   Layer=82  |  N=1024  |  M=256  |  Q=4.00  |  alpha=3.26  |  D_ks=0.13  |  alpha-hat=1.60  |  num traps=0









Layer 84
   Layer=84  |  N=256  |  M=256  |  Q=1.00  |  alpha=2.54  |  D_ks=0.07  |  alpha-hat=1.12  |  num traps=37









Layer 86
   Layer=86  |  N=1024  |  M=256  |  Q=4.00  |  alpha=3.60  |  D_ks=0.04  |  alpha-hat=2.94  |  num traps=2









Layer 90
   Layer=90  |  N=1024  |  M=256  |  Q=4.00  |  alpha=3.14  |  D_ks=0.13  |  alpha-hat=1.49  |  num traps=0









Layer 92
   Layer=92  |  N=256  |  M=256  |  Q=1.00  |  alpha=2.30  |  D_ks=0.08  |  alpha-hat=0.49  |  num traps=16









Layer 94
   Layer=94  |  N=1024  |  M=256  |  Q=4.00  |  alpha=3.45  |  D_ks=0.10  |  alpha-hat=2.17  |  num traps=0









Layer 98
   Layer=98  |  N=1024  |  M=256  |  Q=4.00  |  alpha=3.67  |  D_ks=0.11  |  alpha-hat=2.10  |  num traps=0









Layer 100
   Layer=100  |  N=256  |  M=256  |  Q=1.00  |  alpha=2.14  |  D_ks=0.07  |  alpha-hat=0.43  |  num traps=25









Layer 102
   Layer=102  |  N=1024  |  M=256  |  Q=4.00  |  alpha=4.11  |  D_ks=0.07  |  alpha-hat=2.83  |  num traps=1









Layer 106
   Layer=106  |  N=1024  |  M=256  |  Q=4.00  |  alpha=3.50  |  D_ks=0.09  |  alpha-hat=1.91  |  num traps=0









Layer 108
   Layer=108  |  N=256  |  M=256  |  Q=1.00  |  alpha=2.85  |  D_ks=0.06  |  alpha-hat=0.43  |  num traps=16









Layer 110
   Layer=110  |  N=1024  |  M=256  |  Q=4.00  |  alpha=4.79  |  D_ks=0.06  |  alpha-hat=3.50  |  num traps=1









Layer 114
   Layer=114  |  N=1024  |  M=256  |  Q=4.00  |  alpha=4.59  |  D_ks=0.09  |  alpha-hat=2.97  |  num traps=0









Layer 116
   Layer=116  |  N=256  |  M=256  |  Q=1.00  |  alpha=3.15  |  D_ks=0.04  |  alpha-hat=0.70  |  num traps=29









Layer 118
   Layer=118  |  N=1024  |  M=256  |  Q=4.00  |  alpha=4.12  |  D_ks=0.03  |  alpha-hat=3.94  |  num traps=1









Layer 123
   Layer=123  |  N=1024  |  M=512  |  Q=2.00  |  alpha=2.89  |  D_ks=0.06  |  alpha-hat=3.19  |  num traps=0









Layer 125
   Layer=125  |  N=512  |  M=512  |  Q=1.00  |  alpha=5.80  |  D_ks=0.04  |  alpha-hat=2.13  |  num traps=49









Layer 127
   Layer=127  |  N=2048  |  M=512  |  Q=4.00  |  alpha=6.17  |  D_ks=0.05  |  alpha-hat=7.97  |  num traps=1









Layer 131
   Layer=131  |  N=2048  |  M=1024  |  Q=2.00  |  alpha=3.85  |  D_ks=0.02  |  alpha-hat=3.41  |  num traps=1









Layer 134
   Layer=134  |  N=2048  |  M=512  |  Q=4.00  |  alpha=6.29  |  D_ks=0.05  |  alpha-hat=5.96  |  num traps=1









Layer 136
   Layer=136  |  N=512  |  M=512  |  Q=1.00  |  alpha=3.57  |  D_ks=0.02  |  alpha-hat=3.00  |  num traps=28









Layer 138
   Layer=138  |  N=2048  |  M=512  |  Q=4.00  |  alpha=4.16  |  D_ks=0.05  |  alpha-hat=5.37  |  num traps=1









Layer 142
   Layer=142  |  N=2048  |  M=512  |  Q=4.00  |  alpha=4.65  |  D_ks=0.07  |  alpha-hat=6.32  |  num traps=1









Layer 144
   Layer=144  |  N=512  |  M=512  |  Q=1.00  |  alpha=3.27  |  D_ks=0.03  |  alpha-hat=2.66  |  num traps=55









Layer 146
   Layer=146  |  N=2048  |  M=512  |  Q=4.00  |  alpha=2.97  |  D_ks=0.02  |  alpha-hat=5.56  |  num traps=1









Layer 150
   Layer=150  |  N=2048  |  M=1000  |  Q=2.05  |  alpha=4.41  |  D_ks=0.03  |  alpha-hat=6.78  |  num traps=20