-
Notifications
You must be signed in to change notification settings - Fork 118
/
Copy pathres2net101_26w_4s.py
1278 lines (1267 loc) · 91.7 KB
/
res2net101_26w_4s.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import torch
import torch.nn
import torch.functional
import torch.nn.functional
class res2net101_26w_4s(torch.nn.Module):
def __init__(self):
super().__init__()
self.conv1 = torch.nn.modules.conv.Conv2d(3, 64, (7, 7), stride=(2, 2), padding=(3, 3), dilation=(1, 1), bias=False)
self.bn1 = torch.nn.modules.batchnorm.BatchNorm2d(64)
self.relu = torch.nn.modules.activation.ReLU(inplace=True)
self.maxpool = torch.nn.modules.pooling.MaxPool2d(3, stride=2, padding=1)
self.layer1_0_conv1 = torch.nn.modules.conv.Conv2d(64, 104, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_0_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer1_0_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_0_convs_0 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_0_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_0_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_0_convs_1 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_0_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_0_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_0_convs_2 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_0_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_0_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_0_pool = torch.nn.modules.pooling.AvgPool2d(3, stride=1, padding=1)
self.layer1_0_conv3 = torch.nn.modules.conv.Conv2d(104, 256, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_0_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(256)
self.layer1_0_downsample_0 = torch.nn.modules.conv.Conv2d(64, 256, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_0_downsample_1 = torch.nn.modules.batchnorm.BatchNorm2d(256)
self.layer1_0_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_conv1 = torch.nn.modules.conv.Conv2d(256, 104, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_1_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer1_1_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_convs_0 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_1_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_1_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_convs_1 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_1_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_1_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_convs_2 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_1_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_1_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_1_conv3 = torch.nn.modules.conv.Conv2d(104, 256, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_1_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(256)
self.layer1_1_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_conv1 = torch.nn.modules.conv.Conv2d(256, 104, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_2_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer1_2_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_convs_0 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_2_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_2_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_convs_1 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_2_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_2_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_convs_2 = torch.nn.modules.conv.Conv2d(26, 26, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer1_2_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(26)
self.layer1_2_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer1_2_conv3 = torch.nn.modules.conv.Conv2d(104, 256, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer1_2_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(256)
self.layer1_2_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_conv1 = torch.nn.modules.conv.Conv2d(256, 208, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_0_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer2_0_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_convs_0 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_0_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_0_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_convs_1 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_0_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_0_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_convs_2 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_0_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_0_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_0_pool = torch.nn.modules.pooling.AvgPool2d(3, stride=2, padding=1)
self.layer2_0_conv3 = torch.nn.modules.conv.Conv2d(208, 512, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_0_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_0_downsample_0 = torch.nn.modules.conv.Conv2d(256, 512, (1, 1), stride=(2, 2), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_0_downsample_1 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_0_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_conv1 = torch.nn.modules.conv.Conv2d(512, 208, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_1_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer2_1_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_convs_0 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_1_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_1_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_convs_1 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_1_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_1_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_convs_2 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_1_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_1_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_1_conv3 = torch.nn.modules.conv.Conv2d(208, 512, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_1_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_1_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_conv1 = torch.nn.modules.conv.Conv2d(512, 208, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_2_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer2_2_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_convs_0 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_2_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_2_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_convs_1 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_2_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_2_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_convs_2 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_2_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_2_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_2_conv3 = torch.nn.modules.conv.Conv2d(208, 512, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_2_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_2_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_conv1 = torch.nn.modules.conv.Conv2d(512, 208, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_3_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer2_3_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_convs_0 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_3_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_3_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_convs_1 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_3_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_3_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_convs_2 = torch.nn.modules.conv.Conv2d(52, 52, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer2_3_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(52)
self.layer2_3_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer2_3_conv3 = torch.nn.modules.conv.Conv2d(208, 512, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer2_3_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(512)
self.layer2_3_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_conv1 = torch.nn.modules.conv.Conv2d(512, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_0_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_0_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_0_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_0_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_0_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_0_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_0_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_0_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_0_pool = torch.nn.modules.pooling.AvgPool2d(3, stride=2, padding=1)
self.layer3_0_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_0_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_0_downsample_0 = torch.nn.modules.conv.Conv2d(512, 1024, (1, 1), stride=(2, 2), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_0_downsample_1 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_0_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_1_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_1_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_1_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_1_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_1_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_1_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_1_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_1_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_1_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_1_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_1_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_2_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_2_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_2_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_2_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_2_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_2_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_2_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_2_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_2_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_2_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_2_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_3_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_3_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_3_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_3_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_3_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_3_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_3_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_3_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_3_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_3_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_3_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_4_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_4_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_4_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_4_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_4_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_4_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_4_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_4_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_4_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_4_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_4_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_5_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_5_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_5_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_5_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_5_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_5_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_5_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_5_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_5_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_5_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_5_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_6_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_6_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_6_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_6_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_6_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_6_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_6_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_6_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_6_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_6_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_6_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_7_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_7_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_7_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_7_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_7_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_7_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_7_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_7_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_7_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_7_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_7_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_8_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_8_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_8_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_8_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_8_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_8_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_8_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_8_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_8_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_8_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_8_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_9_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_9_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_9_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_9_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_9_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_9_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_9_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_9_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_9_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_9_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_9_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_10_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_10_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_10_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_10_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_10_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_10_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_10_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_10_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_10_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_10_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_10_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_11_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_11_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_11_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_11_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_11_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_11_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_11_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_11_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_11_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_11_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_11_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_12_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_12_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_12_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_12_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_12_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_12_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_12_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_12_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_12_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_12_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_12_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_13_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_13_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_13_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_13_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_13_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_13_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_13_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_13_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_13_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_13_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_13_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_14_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_14_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_14_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_14_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_14_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_14_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_14_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_14_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_14_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_14_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_14_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_15_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_15_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_15_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_15_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_15_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_15_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_15_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_15_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_15_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_15_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_15_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_16_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_16_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_16_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_16_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_16_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_16_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_16_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_16_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_16_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_16_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_16_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_17_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_17_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_17_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_17_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_17_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_17_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_17_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_17_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_17_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_17_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_17_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_18_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_18_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_18_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_18_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_18_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_18_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_18_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_18_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_18_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_18_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_18_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_19_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_19_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_19_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_19_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_19_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_19_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_19_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_19_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_19_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_19_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_19_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_20_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_20_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_20_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_20_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_20_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_20_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_20_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_20_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_20_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_20_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_20_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_21_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_21_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_21_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_21_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_21_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_21_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_21_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_21_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_21_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_21_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_21_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_conv1 = torch.nn.modules.conv.Conv2d(1024, 416, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_22_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(416)
self.layer3_22_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_convs_0 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_22_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_22_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_convs_1 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_22_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_22_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_convs_2 = torch.nn.modules.conv.Conv2d(104, 104, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer3_22_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(104)
self.layer3_22_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer3_22_conv3 = torch.nn.modules.conv.Conv2d(416, 1024, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer3_22_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(1024)
self.layer3_22_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_conv1 = torch.nn.modules.conv.Conv2d(1024, 832, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_0_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(832)
self.layer4_0_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_convs_0 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_0_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_0_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_convs_1 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_0_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_0_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_convs_2 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(2, 2), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_0_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_0_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_0_pool = torch.nn.modules.pooling.AvgPool2d(3, stride=2, padding=1)
self.layer4_0_conv3 = torch.nn.modules.conv.Conv2d(832, 2048, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_0_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(2048)
self.layer4_0_downsample_0 = torch.nn.modules.conv.Conv2d(1024, 2048, (1, 1), stride=(2, 2), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_0_downsample_1 = torch.nn.modules.batchnorm.BatchNorm2d(2048)
self.layer4_0_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_conv1 = torch.nn.modules.conv.Conv2d(2048, 832, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_1_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(832)
self.layer4_1_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_convs_0 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_1_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_1_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_convs_1 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_1_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_1_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_convs_2 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_1_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_1_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_1_conv3 = torch.nn.modules.conv.Conv2d(832, 2048, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_1_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(2048)
self.layer4_1_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_conv1 = torch.nn.modules.conv.Conv2d(2048, 832, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_2_bn1 = torch.nn.modules.batchnorm.BatchNorm2d(832)
self.layer4_2_relu = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_convs_0 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_2_bns_0 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_2_relu_1 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_convs_1 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_2_bns_1 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_2_relu_2 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_convs_2 = torch.nn.modules.conv.Conv2d(208, 208, (3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), bias=False)
self.layer4_2_bns_2 = torch.nn.modules.batchnorm.BatchNorm2d(208)
self.layer4_2_relu_3 = torch.nn.modules.activation.ReLU(inplace=True)
self.layer4_2_conv3 = torch.nn.modules.conv.Conv2d(832, 2048, (1, 1), stride=(1, 1), padding=(0, 0), dilation=(1, 1), bias=False)
self.layer4_2_bn3 = torch.nn.modules.batchnorm.BatchNorm2d(2048)
self.layer4_2_relu_4 = torch.nn.modules.activation.ReLU(inplace=True)
self.avgpool = torch.nn.modules.pooling.AdaptiveAvgPool2d(1)
self.fc = torch.nn.modules.linear.Linear(2048, 1000)
def forward(self, input_1):
conv1 = self.conv1(input_1)
bn1 = self.bn1(conv1)
relu = self.relu(bn1)
maxpool = self.maxpool(relu)
layer1_0_conv1 = self.layer1_0_conv1(maxpool)
layer1_0_bn1 = self.layer1_0_bn1(layer1_0_conv1)
layer1_0_relu = self.layer1_0_relu(layer1_0_bn1)
split_1 = torch.split(layer1_0_relu, 26, 1)
layer1_0_convs_0 = self.layer1_0_convs_0(split_1[0])
layer1_0_bns_0 = self.layer1_0_bns_0(layer1_0_convs_0)
layer1_0_relu_1 = self.layer1_0_relu_1(layer1_0_bns_0)
layer1_0_convs_1 = self.layer1_0_convs_1(split_1[1])
layer1_0_bns_1 = self.layer1_0_bns_1(layer1_0_convs_1)
layer1_0_relu_2 = self.layer1_0_relu_2(layer1_0_bns_1)
cat_1 = torch.cat([layer1_0_relu_1, layer1_0_relu_2], 1)
layer1_0_convs_2 = self.layer1_0_convs_2(split_1[2])
layer1_0_bns_2 = self.layer1_0_bns_2(layer1_0_convs_2)
layer1_0_relu_3 = self.layer1_0_relu_3(layer1_0_bns_2)
cat_2 = torch.cat([cat_1, layer1_0_relu_3], 1)
layer1_0_pool = self.layer1_0_pool(split_1[3])
cat_3 = torch.cat([cat_2, layer1_0_pool], 1)
layer1_0_conv3 = self.layer1_0_conv3(cat_3)
layer1_0_bn3 = self.layer1_0_bn3(layer1_0_conv3)
layer1_0_downsample_0 = self.layer1_0_downsample_0(maxpool)
layer1_0_downsample_1 = self.layer1_0_downsample_1(layer1_0_downsample_0)
add_1 = layer1_0_bn3.__iadd__(layer1_0_downsample_1)
layer1_0_relu_4 = self.layer1_0_relu_4(add_1)
layer1_1_conv1 = self.layer1_1_conv1(layer1_0_relu_4)
layer1_1_bn1 = self.layer1_1_bn1(layer1_1_conv1)
layer1_1_relu = self.layer1_1_relu(layer1_1_bn1)
split_2 = torch.split(layer1_1_relu, 26, 1)
layer1_1_convs_0 = self.layer1_1_convs_0(split_2[0])
layer1_1_bns_0 = self.layer1_1_bns_0(layer1_1_convs_0)
layer1_1_relu_1 = self.layer1_1_relu_1(layer1_1_bns_0)
add_2 = layer1_1_relu_1.__add__(split_2[1])
layer1_1_convs_1 = self.layer1_1_convs_1(add_2)
layer1_1_bns_1 = self.layer1_1_bns_1(layer1_1_convs_1)
layer1_1_relu_2 = self.layer1_1_relu_2(layer1_1_bns_1)
cat_4 = torch.cat([layer1_1_relu_1, layer1_1_relu_2], 1)
add_3 = layer1_1_relu_2.__add__(split_2[2])
layer1_1_convs_2 = self.layer1_1_convs_2(add_3)
layer1_1_bns_2 = self.layer1_1_bns_2(layer1_1_convs_2)
layer1_1_relu_3 = self.layer1_1_relu_3(layer1_1_bns_2)
cat_5 = torch.cat([cat_4, layer1_1_relu_3], 1)
cat_6 = torch.cat([cat_5, split_2[3]], 1)
layer1_1_conv3 = self.layer1_1_conv3(cat_6)
layer1_1_bn3 = self.layer1_1_bn3(layer1_1_conv3)
add_4 = layer1_1_bn3.__iadd__(layer1_0_relu_4)
layer1_1_relu_4 = self.layer1_1_relu_4(add_4)
layer1_2_conv1 = self.layer1_2_conv1(layer1_1_relu_4)
layer1_2_bn1 = self.layer1_2_bn1(layer1_2_conv1)
layer1_2_relu = self.layer1_2_relu(layer1_2_bn1)
split_3 = torch.split(layer1_2_relu, 26, 1)
layer1_2_convs_0 = self.layer1_2_convs_0(split_3[0])
layer1_2_bns_0 = self.layer1_2_bns_0(layer1_2_convs_0)
layer1_2_relu_1 = self.layer1_2_relu_1(layer1_2_bns_0)
add_5 = layer1_2_relu_1.__add__(split_3[1])
layer1_2_convs_1 = self.layer1_2_convs_1(add_5)
layer1_2_bns_1 = self.layer1_2_bns_1(layer1_2_convs_1)
layer1_2_relu_2 = self.layer1_2_relu_2(layer1_2_bns_1)
cat_7 = torch.cat([layer1_2_relu_1, layer1_2_relu_2], 1)
add_6 = layer1_2_relu_2.__add__(split_3[2])
layer1_2_convs_2 = self.layer1_2_convs_2(add_6)
layer1_2_bns_2 = self.layer1_2_bns_2(layer1_2_convs_2)
layer1_2_relu_3 = self.layer1_2_relu_3(layer1_2_bns_2)
cat_8 = torch.cat([cat_7, layer1_2_relu_3], 1)
cat_9 = torch.cat([cat_8, split_3[3]], 1)
layer1_2_conv3 = self.layer1_2_conv3(cat_9)
layer1_2_bn3 = self.layer1_2_bn3(layer1_2_conv3)
add_7 = layer1_2_bn3.__iadd__(layer1_1_relu_4)
layer1_2_relu_4 = self.layer1_2_relu_4(add_7)
layer2_0_conv1 = self.layer2_0_conv1(layer1_2_relu_4)
layer2_0_bn1 = self.layer2_0_bn1(layer2_0_conv1)
layer2_0_relu = self.layer2_0_relu(layer2_0_bn1)
split_4 = torch.split(layer2_0_relu, 52, 1)
layer2_0_convs_0 = self.layer2_0_convs_0(split_4[0])
layer2_0_bns_0 = self.layer2_0_bns_0(layer2_0_convs_0)
layer2_0_relu_1 = self.layer2_0_relu_1(layer2_0_bns_0)
layer2_0_convs_1 = self.layer2_0_convs_1(split_4[1])
layer2_0_bns_1 = self.layer2_0_bns_1(layer2_0_convs_1)
layer2_0_relu_2 = self.layer2_0_relu_2(layer2_0_bns_1)
cat_10 = torch.cat([layer2_0_relu_1, layer2_0_relu_2], 1)
layer2_0_convs_2 = self.layer2_0_convs_2(split_4[2])
layer2_0_bns_2 = self.layer2_0_bns_2(layer2_0_convs_2)
layer2_0_relu_3 = self.layer2_0_relu_3(layer2_0_bns_2)
cat_11 = torch.cat([cat_10, layer2_0_relu_3], 1)
layer2_0_pool = self.layer2_0_pool(split_4[3])
cat_12 = torch.cat([cat_11, layer2_0_pool], 1)
layer2_0_conv3 = self.layer2_0_conv3(cat_12)
layer2_0_bn3 = self.layer2_0_bn3(layer2_0_conv3)
layer2_0_downsample_0 = self.layer2_0_downsample_0(layer1_2_relu_4)
layer2_0_downsample_1 = self.layer2_0_downsample_1(layer2_0_downsample_0)
add_8 = layer2_0_bn3.__iadd__(layer2_0_downsample_1)
layer2_0_relu_4 = self.layer2_0_relu_4(add_8)
layer2_1_conv1 = self.layer2_1_conv1(layer2_0_relu_4)
layer2_1_bn1 = self.layer2_1_bn1(layer2_1_conv1)
layer2_1_relu = self.layer2_1_relu(layer2_1_bn1)
split_5 = torch.split(layer2_1_relu, 52, 1)
layer2_1_convs_0 = self.layer2_1_convs_0(split_5[0])
layer2_1_bns_0 = self.layer2_1_bns_0(layer2_1_convs_0)
layer2_1_relu_1 = self.layer2_1_relu_1(layer2_1_bns_0)
add_9 = layer2_1_relu_1.__add__(split_5[1])
layer2_1_convs_1 = self.layer2_1_convs_1(add_9)
layer2_1_bns_1 = self.layer2_1_bns_1(layer2_1_convs_1)
layer2_1_relu_2 = self.layer2_1_relu_2(layer2_1_bns_1)
cat_13 = torch.cat([layer2_1_relu_1, layer2_1_relu_2], 1)
add_10 = layer2_1_relu_2.__add__(split_5[2])
layer2_1_convs_2 = self.layer2_1_convs_2(add_10)
layer2_1_bns_2 = self.layer2_1_bns_2(layer2_1_convs_2)
layer2_1_relu_3 = self.layer2_1_relu_3(layer2_1_bns_2)
cat_14 = torch.cat([cat_13, layer2_1_relu_3], 1)
cat_15 = torch.cat([cat_14, split_5[3]], 1)
layer2_1_conv3 = self.layer2_1_conv3(cat_15)
layer2_1_bn3 = self.layer2_1_bn3(layer2_1_conv3)
add_11 = layer2_1_bn3.__iadd__(layer2_0_relu_4)
layer2_1_relu_4 = self.layer2_1_relu_4(add_11)
layer2_2_conv1 = self.layer2_2_conv1(layer2_1_relu_4)
layer2_2_bn1 = self.layer2_2_bn1(layer2_2_conv1)
layer2_2_relu = self.layer2_2_relu(layer2_2_bn1)
split_6 = torch.split(layer2_2_relu, 52, 1)
layer2_2_convs_0 = self.layer2_2_convs_0(split_6[0])
layer2_2_bns_0 = self.layer2_2_bns_0(layer2_2_convs_0)
layer2_2_relu_1 = self.layer2_2_relu_1(layer2_2_bns_0)
add_12 = layer2_2_relu_1.__add__(split_6[1])
layer2_2_convs_1 = self.layer2_2_convs_1(add_12)
layer2_2_bns_1 = self.layer2_2_bns_1(layer2_2_convs_1)
layer2_2_relu_2 = self.layer2_2_relu_2(layer2_2_bns_1)
cat_16 = torch.cat([layer2_2_relu_1, layer2_2_relu_2], 1)
add_13 = layer2_2_relu_2.__add__(split_6[2])
layer2_2_convs_2 = self.layer2_2_convs_2(add_13)
layer2_2_bns_2 = self.layer2_2_bns_2(layer2_2_convs_2)
layer2_2_relu_3 = self.layer2_2_relu_3(layer2_2_bns_2)
cat_17 = torch.cat([cat_16, layer2_2_relu_3], 1)
cat_18 = torch.cat([cat_17, split_6[3]], 1)
layer2_2_conv3 = self.layer2_2_conv3(cat_18)
layer2_2_bn3 = self.layer2_2_bn3(layer2_2_conv3)
add_14 = layer2_2_bn3.__iadd__(layer2_1_relu_4)
layer2_2_relu_4 = self.layer2_2_relu_4(add_14)
layer2_3_conv1 = self.layer2_3_conv1(layer2_2_relu_4)
layer2_3_bn1 = self.layer2_3_bn1(layer2_3_conv1)
layer2_3_relu = self.layer2_3_relu(layer2_3_bn1)
split_7 = torch.split(layer2_3_relu, 52, 1)
layer2_3_convs_0 = self.layer2_3_convs_0(split_7[0])
layer2_3_bns_0 = self.layer2_3_bns_0(layer2_3_convs_0)
layer2_3_relu_1 = self.layer2_3_relu_1(layer2_3_bns_0)
add_15 = layer2_3_relu_1.__add__(split_7[1])
layer2_3_convs_1 = self.layer2_3_convs_1(add_15)
layer2_3_bns_1 = self.layer2_3_bns_1(layer2_3_convs_1)
layer2_3_relu_2 = self.layer2_3_relu_2(layer2_3_bns_1)
cat_19 = torch.cat([layer2_3_relu_1, layer2_3_relu_2], 1)
add_16 = layer2_3_relu_2.__add__(split_7[2])
layer2_3_convs_2 = self.layer2_3_convs_2(add_16)
layer2_3_bns_2 = self.layer2_3_bns_2(layer2_3_convs_2)
layer2_3_relu_3 = self.layer2_3_relu_3(layer2_3_bns_2)
cat_20 = torch.cat([cat_19, layer2_3_relu_3], 1)
cat_21 = torch.cat([cat_20, split_7[3]], 1)
layer2_3_conv3 = self.layer2_3_conv3(cat_21)
layer2_3_bn3 = self.layer2_3_bn3(layer2_3_conv3)
add_17 = layer2_3_bn3.__iadd__(layer2_2_relu_4)
layer2_3_relu_4 = self.layer2_3_relu_4(add_17)
layer3_0_conv1 = self.layer3_0_conv1(layer2_3_relu_4)
layer3_0_bn1 = self.layer3_0_bn1(layer3_0_conv1)
layer3_0_relu = self.layer3_0_relu(layer3_0_bn1)
split_8 = torch.split(layer3_0_relu, 104, 1)
layer3_0_convs_0 = self.layer3_0_convs_0(split_8[0])
layer3_0_bns_0 = self.layer3_0_bns_0(layer3_0_convs_0)
layer3_0_relu_1 = self.layer3_0_relu_1(layer3_0_bns_0)
layer3_0_convs_1 = self.layer3_0_convs_1(split_8[1])
layer3_0_bns_1 = self.layer3_0_bns_1(layer3_0_convs_1)
layer3_0_relu_2 = self.layer3_0_relu_2(layer3_0_bns_1)
cat_22 = torch.cat([layer3_0_relu_1, layer3_0_relu_2], 1)
layer3_0_convs_2 = self.layer3_0_convs_2(split_8[2])
layer3_0_bns_2 = self.layer3_0_bns_2(layer3_0_convs_2)
layer3_0_relu_3 = self.layer3_0_relu_3(layer3_0_bns_2)
cat_23 = torch.cat([cat_22, layer3_0_relu_3], 1)
layer3_0_pool = self.layer3_0_pool(split_8[3])
cat_24 = torch.cat([cat_23, layer3_0_pool], 1)
layer3_0_conv3 = self.layer3_0_conv3(cat_24)
layer3_0_bn3 = self.layer3_0_bn3(layer3_0_conv3)
layer3_0_downsample_0 = self.layer3_0_downsample_0(layer2_3_relu_4)
layer3_0_downsample_1 = self.layer3_0_downsample_1(layer3_0_downsample_0)
add_18 = layer3_0_bn3.__iadd__(layer3_0_downsample_1)
layer3_0_relu_4 = self.layer3_0_relu_4(add_18)
layer3_1_conv1 = self.layer3_1_conv1(layer3_0_relu_4)
layer3_1_bn1 = self.layer3_1_bn1(layer3_1_conv1)
layer3_1_relu = self.layer3_1_relu(layer3_1_bn1)
split_9 = torch.split(layer3_1_relu, 104, 1)
layer3_1_convs_0 = self.layer3_1_convs_0(split_9[0])
layer3_1_bns_0 = self.layer3_1_bns_0(layer3_1_convs_0)
layer3_1_relu_1 = self.layer3_1_relu_1(layer3_1_bns_0)
add_19 = layer3_1_relu_1.__add__(split_9[1])
layer3_1_convs_1 = self.layer3_1_convs_1(add_19)
layer3_1_bns_1 = self.layer3_1_bns_1(layer3_1_convs_1)
layer3_1_relu_2 = self.layer3_1_relu_2(layer3_1_bns_1)
cat_25 = torch.cat([layer3_1_relu_1, layer3_1_relu_2], 1)
add_20 = layer3_1_relu_2.__add__(split_9[2])
layer3_1_convs_2 = self.layer3_1_convs_2(add_20)
layer3_1_bns_2 = self.layer3_1_bns_2(layer3_1_convs_2)
layer3_1_relu_3 = self.layer3_1_relu_3(layer3_1_bns_2)
cat_26 = torch.cat([cat_25, layer3_1_relu_3], 1)
cat_27 = torch.cat([cat_26, split_9[3]], 1)
layer3_1_conv3 = self.layer3_1_conv3(cat_27)
layer3_1_bn3 = self.layer3_1_bn3(layer3_1_conv3)
add_21 = layer3_1_bn3.__iadd__(layer3_0_relu_4)
layer3_1_relu_4 = self.layer3_1_relu_4(add_21)
layer3_2_conv1 = self.layer3_2_conv1(layer3_1_relu_4)
layer3_2_bn1 = self.layer3_2_bn1(layer3_2_conv1)
layer3_2_relu = self.layer3_2_relu(layer3_2_bn1)
split_10 = torch.split(layer3_2_relu, 104, 1)
layer3_2_convs_0 = self.layer3_2_convs_0(split_10[0])
layer3_2_bns_0 = self.layer3_2_bns_0(layer3_2_convs_0)
layer3_2_relu_1 = self.layer3_2_relu_1(layer3_2_bns_0)
add_22 = layer3_2_relu_1.__add__(split_10[1])
layer3_2_convs_1 = self.layer3_2_convs_1(add_22)
layer3_2_bns_1 = self.layer3_2_bns_1(layer3_2_convs_1)
layer3_2_relu_2 = self.layer3_2_relu_2(layer3_2_bns_1)
cat_28 = torch.cat([layer3_2_relu_1, layer3_2_relu_2], 1)
add_23 = layer3_2_relu_2.__add__(split_10[2])
layer3_2_convs_2 = self.layer3_2_convs_2(add_23)
layer3_2_bns_2 = self.layer3_2_bns_2(layer3_2_convs_2)
layer3_2_relu_3 = self.layer3_2_relu_3(layer3_2_bns_2)
cat_29 = torch.cat([cat_28, layer3_2_relu_3], 1)
cat_30 = torch.cat([cat_29, split_10[3]], 1)
layer3_2_conv3 = self.layer3_2_conv3(cat_30)
layer3_2_bn3 = self.layer3_2_bn3(layer3_2_conv3)
add_24 = layer3_2_bn3.__iadd__(layer3_1_relu_4)
layer3_2_relu_4 = self.layer3_2_relu_4(add_24)
layer3_3_conv1 = self.layer3_3_conv1(layer3_2_relu_4)
layer3_3_bn1 = self.layer3_3_bn1(layer3_3_conv1)
layer3_3_relu = self.layer3_3_relu(layer3_3_bn1)
split_11 = torch.split(layer3_3_relu, 104, 1)
layer3_3_convs_0 = self.layer3_3_convs_0(split_11[0])
layer3_3_bns_0 = self.layer3_3_bns_0(layer3_3_convs_0)
layer3_3_relu_1 = self.layer3_3_relu_1(layer3_3_bns_0)
add_25 = layer3_3_relu_1.__add__(split_11[1])
layer3_3_convs_1 = self.layer3_3_convs_1(add_25)
layer3_3_bns_1 = self.layer3_3_bns_1(layer3_3_convs_1)
layer3_3_relu_2 = self.layer3_3_relu_2(layer3_3_bns_1)
cat_31 = torch.cat([layer3_3_relu_1, layer3_3_relu_2], 1)
add_26 = layer3_3_relu_2.__add__(split_11[2])
layer3_3_convs_2 = self.layer3_3_convs_2(add_26)
layer3_3_bns_2 = self.layer3_3_bns_2(layer3_3_convs_2)
layer3_3_relu_3 = self.layer3_3_relu_3(layer3_3_bns_2)
# NOTE(review): machine-generated, fully unrolled forward pass (looks like
# torch.fx / trace export) of Res2Net layer3 bottleneck blocks.  Every block
# below repeats the same Res2Net "multi-scale" pattern:
#   conv1 -> bn1 -> relu -> torch.split into 4 groups of 104 channels
#   -> hierarchical 3x3 convs, where scale i consumes (relu of scale i-1
#      + split[i]) via out-of-place Tensor.__add__
#   -> torch.cat of all processed scales plus the untouched split[3]
#   -> conv3 -> bn3 -> residual add with the previous block's output via
#      Tensor.__iadd__ (in-place on the bn3 output; safe here since that
#      tensor is not read again in this span) -> final relu.
# 104 presumably equals the per-scale width for this stage
# (26w base width scaled up; "26w_4s" config) — TODO confirm against __init__.
# Tail of bottleneck layer3_3: scale concat, 1x1 expand, residual add.
cat_32 = torch.cat([cat_31, layer3_3_relu_3], 1)
cat_33 = torch.cat([cat_32, split_11[3]], 1)
layer3_3_conv3 = self.layer3_3_conv3(cat_33)
layer3_3_bn3 = self.layer3_3_bn3(layer3_3_conv3)
add_27 = layer3_3_bn3.__iadd__(layer3_2_relu_4)
layer3_3_relu_4 = self.layer3_3_relu_4(add_27)
# Bottleneck layer3_4: 1x1 reduce, 4-way split, hierarchical 3x3 convs,
# 1x1 expand, in-place residual add with layer3_3 output.
layer3_4_conv1 = self.layer3_4_conv1(layer3_3_relu_4)
layer3_4_bn1 = self.layer3_4_bn1(layer3_4_conv1)
layer3_4_relu = self.layer3_4_relu(layer3_4_bn1)
split_12 = torch.split(layer3_4_relu, 104, 1)
layer3_4_convs_0 = self.layer3_4_convs_0(split_12[0])
layer3_4_bns_0 = self.layer3_4_bns_0(layer3_4_convs_0)
layer3_4_relu_1 = self.layer3_4_relu_1(layer3_4_bns_0)
add_28 = layer3_4_relu_1.__add__(split_12[1])
layer3_4_convs_1 = self.layer3_4_convs_1(add_28)
layer3_4_bns_1 = self.layer3_4_bns_1(layer3_4_convs_1)
layer3_4_relu_2 = self.layer3_4_relu_2(layer3_4_bns_1)
cat_34 = torch.cat([layer3_4_relu_1, layer3_4_relu_2], 1)
add_29 = layer3_4_relu_2.__add__(split_12[2])
layer3_4_convs_2 = self.layer3_4_convs_2(add_29)
layer3_4_bns_2 = self.layer3_4_bns_2(layer3_4_convs_2)
layer3_4_relu_3 = self.layer3_4_relu_3(layer3_4_bns_2)
cat_35 = torch.cat([cat_34, layer3_4_relu_3], 1)
cat_36 = torch.cat([cat_35, split_12[3]], 1)
layer3_4_conv3 = self.layer3_4_conv3(cat_36)
layer3_4_bn3 = self.layer3_4_bn3(layer3_4_conv3)
add_30 = layer3_4_bn3.__iadd__(layer3_3_relu_4)
layer3_4_relu_4 = self.layer3_4_relu_4(add_30)
# Bottleneck layer3_5: same multi-scale pattern, residual from layer3_4.
layer3_5_conv1 = self.layer3_5_conv1(layer3_4_relu_4)
layer3_5_bn1 = self.layer3_5_bn1(layer3_5_conv1)
layer3_5_relu = self.layer3_5_relu(layer3_5_bn1)
split_13 = torch.split(layer3_5_relu, 104, 1)
layer3_5_convs_0 = self.layer3_5_convs_0(split_13[0])
layer3_5_bns_0 = self.layer3_5_bns_0(layer3_5_convs_0)
layer3_5_relu_1 = self.layer3_5_relu_1(layer3_5_bns_0)
add_31 = layer3_5_relu_1.__add__(split_13[1])
layer3_5_convs_1 = self.layer3_5_convs_1(add_31)
layer3_5_bns_1 = self.layer3_5_bns_1(layer3_5_convs_1)
layer3_5_relu_2 = self.layer3_5_relu_2(layer3_5_bns_1)
cat_37 = torch.cat([layer3_5_relu_1, layer3_5_relu_2], 1)
add_32 = layer3_5_relu_2.__add__(split_13[2])
layer3_5_convs_2 = self.layer3_5_convs_2(add_32)
layer3_5_bns_2 = self.layer3_5_bns_2(layer3_5_convs_2)
layer3_5_relu_3 = self.layer3_5_relu_3(layer3_5_bns_2)
cat_38 = torch.cat([cat_37, layer3_5_relu_3], 1)
cat_39 = torch.cat([cat_38, split_13[3]], 1)
layer3_5_conv3 = self.layer3_5_conv3(cat_39)
layer3_5_bn3 = self.layer3_5_bn3(layer3_5_conv3)
add_33 = layer3_5_bn3.__iadd__(layer3_4_relu_4)
layer3_5_relu_4 = self.layer3_5_relu_4(add_33)
# Bottleneck layer3_6: same multi-scale pattern, residual from layer3_5.
layer3_6_conv1 = self.layer3_6_conv1(layer3_5_relu_4)
layer3_6_bn1 = self.layer3_6_bn1(layer3_6_conv1)
layer3_6_relu = self.layer3_6_relu(layer3_6_bn1)
split_14 = torch.split(layer3_6_relu, 104, 1)
layer3_6_convs_0 = self.layer3_6_convs_0(split_14[0])
layer3_6_bns_0 = self.layer3_6_bns_0(layer3_6_convs_0)
layer3_6_relu_1 = self.layer3_6_relu_1(layer3_6_bns_0)
add_34 = layer3_6_relu_1.__add__(split_14[1])
layer3_6_convs_1 = self.layer3_6_convs_1(add_34)
layer3_6_bns_1 = self.layer3_6_bns_1(layer3_6_convs_1)
layer3_6_relu_2 = self.layer3_6_relu_2(layer3_6_bns_1)
cat_40 = torch.cat([layer3_6_relu_1, layer3_6_relu_2], 1)
add_35 = layer3_6_relu_2.__add__(split_14[2])
layer3_6_convs_2 = self.layer3_6_convs_2(add_35)
layer3_6_bns_2 = self.layer3_6_bns_2(layer3_6_convs_2)
layer3_6_relu_3 = self.layer3_6_relu_3(layer3_6_bns_2)
cat_41 = torch.cat([cat_40, layer3_6_relu_3], 1)
cat_42 = torch.cat([cat_41, split_14[3]], 1)
layer3_6_conv3 = self.layer3_6_conv3(cat_42)
layer3_6_bn3 = self.layer3_6_bn3(layer3_6_conv3)
add_36 = layer3_6_bn3.__iadd__(layer3_5_relu_4)
layer3_6_relu_4 = self.layer3_6_relu_4(add_36)
# Bottleneck layer3_7: same multi-scale pattern, residual from layer3_6.
layer3_7_conv1 = self.layer3_7_conv1(layer3_6_relu_4)
layer3_7_bn1 = self.layer3_7_bn1(layer3_7_conv1)
layer3_7_relu = self.layer3_7_relu(layer3_7_bn1)
split_15 = torch.split(layer3_7_relu, 104, 1)
layer3_7_convs_0 = self.layer3_7_convs_0(split_15[0])
layer3_7_bns_0 = self.layer3_7_bns_0(layer3_7_convs_0)
layer3_7_relu_1 = self.layer3_7_relu_1(layer3_7_bns_0)
add_37 = layer3_7_relu_1.__add__(split_15[1])
layer3_7_convs_1 = self.layer3_7_convs_1(add_37)
layer3_7_bns_1 = self.layer3_7_bns_1(layer3_7_convs_1)
layer3_7_relu_2 = self.layer3_7_relu_2(layer3_7_bns_1)
cat_43 = torch.cat([layer3_7_relu_1, layer3_7_relu_2], 1)
add_38 = layer3_7_relu_2.__add__(split_15[2])
layer3_7_convs_2 = self.layer3_7_convs_2(add_38)
layer3_7_bns_2 = self.layer3_7_bns_2(layer3_7_convs_2)
layer3_7_relu_3 = self.layer3_7_relu_3(layer3_7_bns_2)
cat_44 = torch.cat([cat_43, layer3_7_relu_3], 1)
cat_45 = torch.cat([cat_44, split_15[3]], 1)
layer3_7_conv3 = self.layer3_7_conv3(cat_45)
layer3_7_bn3 = self.layer3_7_bn3(layer3_7_conv3)
add_39 = layer3_7_bn3.__iadd__(layer3_6_relu_4)
layer3_7_relu_4 = self.layer3_7_relu_4(add_39)
# Bottleneck layer3_8: same multi-scale pattern, residual from layer3_7.
layer3_8_conv1 = self.layer3_8_conv1(layer3_7_relu_4)
layer3_8_bn1 = self.layer3_8_bn1(layer3_8_conv1)
layer3_8_relu = self.layer3_8_relu(layer3_8_bn1)
split_16 = torch.split(layer3_8_relu, 104, 1)
layer3_8_convs_0 = self.layer3_8_convs_0(split_16[0])
layer3_8_bns_0 = self.layer3_8_bns_0(layer3_8_convs_0)
layer3_8_relu_1 = self.layer3_8_relu_1(layer3_8_bns_0)
add_40 = layer3_8_relu_1.__add__(split_16[1])
layer3_8_convs_1 = self.layer3_8_convs_1(add_40)
layer3_8_bns_1 = self.layer3_8_bns_1(layer3_8_convs_1)
layer3_8_relu_2 = self.layer3_8_relu_2(layer3_8_bns_1)
cat_46 = torch.cat([layer3_8_relu_1, layer3_8_relu_2], 1)
add_41 = layer3_8_relu_2.__add__(split_16[2])
layer3_8_convs_2 = self.layer3_8_convs_2(add_41)
layer3_8_bns_2 = self.layer3_8_bns_2(layer3_8_convs_2)
layer3_8_relu_3 = self.layer3_8_relu_3(layer3_8_bns_2)
cat_47 = torch.cat([cat_46, layer3_8_relu_3], 1)
cat_48 = torch.cat([cat_47, split_16[3]], 1)
layer3_8_conv3 = self.layer3_8_conv3(cat_48)
layer3_8_bn3 = self.layer3_8_bn3(layer3_8_conv3)
add_42 = layer3_8_bn3.__iadd__(layer3_7_relu_4)
layer3_8_relu_4 = self.layer3_8_relu_4(add_42)
# Bottleneck layer3_9: same multi-scale pattern, residual from layer3_8.
layer3_9_conv1 = self.layer3_9_conv1(layer3_8_relu_4)
layer3_9_bn1 = self.layer3_9_bn1(layer3_9_conv1)
layer3_9_relu = self.layer3_9_relu(layer3_9_bn1)
split_17 = torch.split(layer3_9_relu, 104, 1)
layer3_9_convs_0 = self.layer3_9_convs_0(split_17[0])
layer3_9_bns_0 = self.layer3_9_bns_0(layer3_9_convs_0)
layer3_9_relu_1 = self.layer3_9_relu_1(layer3_9_bns_0)
add_43 = layer3_9_relu_1.__add__(split_17[1])
layer3_9_convs_1 = self.layer3_9_convs_1(add_43)
layer3_9_bns_1 = self.layer3_9_bns_1(layer3_9_convs_1)
layer3_9_relu_2 = self.layer3_9_relu_2(layer3_9_bns_1)
cat_49 = torch.cat([layer3_9_relu_1, layer3_9_relu_2], 1)
add_44 = layer3_9_relu_2.__add__(split_17[2])
layer3_9_convs_2 = self.layer3_9_convs_2(add_44)
layer3_9_bns_2 = self.layer3_9_bns_2(layer3_9_convs_2)
layer3_9_relu_3 = self.layer3_9_relu_3(layer3_9_bns_2)
cat_50 = torch.cat([cat_49, layer3_9_relu_3], 1)
cat_51 = torch.cat([cat_50, split_17[3]], 1)
layer3_9_conv3 = self.layer3_9_conv3(cat_51)
layer3_9_bn3 = self.layer3_9_bn3(layer3_9_conv3)
add_45 = layer3_9_bn3.__iadd__(layer3_8_relu_4)
layer3_9_relu_4 = self.layer3_9_relu_4(add_45)
# Bottleneck layer3_10: same multi-scale pattern, residual from layer3_9.
layer3_10_conv1 = self.layer3_10_conv1(layer3_9_relu_4)
layer3_10_bn1 = self.layer3_10_bn1(layer3_10_conv1)
layer3_10_relu = self.layer3_10_relu(layer3_10_bn1)
split_18 = torch.split(layer3_10_relu, 104, 1)
layer3_10_convs_0 = self.layer3_10_convs_0(split_18[0])
layer3_10_bns_0 = self.layer3_10_bns_0(layer3_10_convs_0)
layer3_10_relu_1 = self.layer3_10_relu_1(layer3_10_bns_0)
add_46 = layer3_10_relu_1.__add__(split_18[1])
layer3_10_convs_1 = self.layer3_10_convs_1(add_46)
layer3_10_bns_1 = self.layer3_10_bns_1(layer3_10_convs_1)
layer3_10_relu_2 = self.layer3_10_relu_2(layer3_10_bns_1)
cat_52 = torch.cat([layer3_10_relu_1, layer3_10_relu_2], 1)
add_47 = layer3_10_relu_2.__add__(split_18[2])
layer3_10_convs_2 = self.layer3_10_convs_2(add_47)
layer3_10_bns_2 = self.layer3_10_bns_2(layer3_10_convs_2)
layer3_10_relu_3 = self.layer3_10_relu_3(layer3_10_bns_2)
cat_53 = torch.cat([cat_52, layer3_10_relu_3], 1)
cat_54 = torch.cat([cat_53, split_18[3]], 1)
layer3_10_conv3 = self.layer3_10_conv3(cat_54)
layer3_10_bn3 = self.layer3_10_bn3(layer3_10_conv3)
add_48 = layer3_10_bn3.__iadd__(layer3_9_relu_4)
layer3_10_relu_4 = self.layer3_10_relu_4(add_48)
# Bottleneck layer3_11: same multi-scale pattern, residual from layer3_10.
layer3_11_conv1 = self.layer3_11_conv1(layer3_10_relu_4)
layer3_11_bn1 = self.layer3_11_bn1(layer3_11_conv1)
layer3_11_relu = self.layer3_11_relu(layer3_11_bn1)
split_19 = torch.split(layer3_11_relu, 104, 1)
layer3_11_convs_0 = self.layer3_11_convs_0(split_19[0])
layer3_11_bns_0 = self.layer3_11_bns_0(layer3_11_convs_0)
layer3_11_relu_1 = self.layer3_11_relu_1(layer3_11_bns_0)
add_49 = layer3_11_relu_1.__add__(split_19[1])
layer3_11_convs_1 = self.layer3_11_convs_1(add_49)
layer3_11_bns_1 = self.layer3_11_bns_1(layer3_11_convs_1)
layer3_11_relu_2 = self.layer3_11_relu_2(layer3_11_bns_1)
cat_55 = torch.cat([layer3_11_relu_1, layer3_11_relu_2], 1)
add_50 = layer3_11_relu_2.__add__(split_19[2])
layer3_11_convs_2 = self.layer3_11_convs_2(add_50)
layer3_11_bns_2 = self.layer3_11_bns_2(layer3_11_convs_2)
layer3_11_relu_3 = self.layer3_11_relu_3(layer3_11_bns_2)
cat_56 = torch.cat([cat_55, layer3_11_relu_3], 1)
cat_57 = torch.cat([cat_56, split_19[3]], 1)
layer3_11_conv3 = self.layer3_11_conv3(cat_57)
layer3_11_bn3 = self.layer3_11_bn3(layer3_11_conv3)
add_51 = layer3_11_bn3.__iadd__(layer3_10_relu_4)
layer3_11_relu_4 = self.layer3_11_relu_4(add_51)
# Bottleneck layer3_12: same multi-scale pattern, residual from layer3_11.
layer3_12_conv1 = self.layer3_12_conv1(layer3_11_relu_4)
layer3_12_bn1 = self.layer3_12_bn1(layer3_12_conv1)
layer3_12_relu = self.layer3_12_relu(layer3_12_bn1)
split_20 = torch.split(layer3_12_relu, 104, 1)
layer3_12_convs_0 = self.layer3_12_convs_0(split_20[0])
layer3_12_bns_0 = self.layer3_12_bns_0(layer3_12_convs_0)
layer3_12_relu_1 = self.layer3_12_relu_1(layer3_12_bns_0)
add_52 = layer3_12_relu_1.__add__(split_20[1])
layer3_12_convs_1 = self.layer3_12_convs_1(add_52)
layer3_12_bns_1 = self.layer3_12_bns_1(layer3_12_convs_1)
layer3_12_relu_2 = self.layer3_12_relu_2(layer3_12_bns_1)
cat_58 = torch.cat([layer3_12_relu_1, layer3_12_relu_2], 1)
add_53 = layer3_12_relu_2.__add__(split_20[2])
layer3_12_convs_2 = self.layer3_12_convs_2(add_53)
layer3_12_bns_2 = self.layer3_12_bns_2(layer3_12_convs_2)
layer3_12_relu_3 = self.layer3_12_relu_3(layer3_12_bns_2)
cat_59 = torch.cat([cat_58, layer3_12_relu_3], 1)
cat_60 = torch.cat([cat_59, split_20[3]], 1)
layer3_12_conv3 = self.layer3_12_conv3(cat_60)
layer3_12_bn3 = self.layer3_12_bn3(layer3_12_conv3)
add_54 = layer3_12_bn3.__iadd__(layer3_11_relu_4)
layer3_12_relu_4 = self.layer3_12_relu_4(add_54)
# Bottleneck layer3_13: same multi-scale pattern, residual from layer3_12.
layer3_13_conv1 = self.layer3_13_conv1(layer3_12_relu_4)
layer3_13_bn1 = self.layer3_13_bn1(layer3_13_conv1)
layer3_13_relu = self.layer3_13_relu(layer3_13_bn1)
split_21 = torch.split(layer3_13_relu, 104, 1)
layer3_13_convs_0 = self.layer3_13_convs_0(split_21[0])
layer3_13_bns_0 = self.layer3_13_bns_0(layer3_13_convs_0)
layer3_13_relu_1 = self.layer3_13_relu_1(layer3_13_bns_0)
add_55 = layer3_13_relu_1.__add__(split_21[1])
layer3_13_convs_1 = self.layer3_13_convs_1(add_55)
layer3_13_bns_1 = self.layer3_13_bns_1(layer3_13_convs_1)
layer3_13_relu_2 = self.layer3_13_relu_2(layer3_13_bns_1)
cat_61 = torch.cat([layer3_13_relu_1, layer3_13_relu_2], 1)
add_56 = layer3_13_relu_2.__add__(split_21[2])
layer3_13_convs_2 = self.layer3_13_convs_2(add_56)
layer3_13_bns_2 = self.layer3_13_bns_2(layer3_13_convs_2)
layer3_13_relu_3 = self.layer3_13_relu_3(layer3_13_bns_2)
cat_62 = torch.cat([cat_61, layer3_13_relu_3], 1)
cat_63 = torch.cat([cat_62, split_21[3]], 1)
layer3_13_conv3 = self.layer3_13_conv3(cat_63)
layer3_13_bn3 = self.layer3_13_bn3(layer3_13_conv3)
add_57 = layer3_13_bn3.__iadd__(layer3_12_relu_4)
layer3_13_relu_4 = self.layer3_13_relu_4(add_57)
# Head of bottleneck layer3_14: 1x1 reduce, split, first scale conv
# (block continues past the end of this span).
layer3_14_conv1 = self.layer3_14_conv1(layer3_13_relu_4)
layer3_14_bn1 = self.layer3_14_bn1(layer3_14_conv1)
layer3_14_relu = self.layer3_14_relu(layer3_14_bn1)
split_22 = torch.split(layer3_14_relu, 104, 1)
layer3_14_convs_0 = self.layer3_14_convs_0(split_22[0])