
Error in Convnext training with Swin transformer framework - The testing results of the whole dataset is empty. #133

anshudaur opened this issue Nov 8, 2022 · 0 comments


Hi,

I am trying to fine-tune the ConvNeXt-Large model on my own dataset in COCO format, and after resolving a number of errors I am able to run training with utils/train.py.

But I then get `mmdet - ERROR - The testing results of the whole dataset is empty.`
When I print the output, it is an empty array.
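
For context, this is roughly how I inspect the raw predictions on a single image (the paths below are placeholders, not my actual setup):

```python
# Rough sanity check: run the trained model on one image and print the
# per-class box arrays returned by mmdet. Paths are placeholders.
from mmdet.apis import init_detector, inference_detector

config_file = 'configs/convnext/my_custom_convnext_large.py'       # placeholder
checkpoint_file = 'work_dirs/my_custom_convnext_large/latest.pth'  # placeholder

model = init_detector(config_file, checkpoint_file, device='cuda:0')
result = inference_detector(model, 'demo/sample.jpg')  # placeholder image

# For a detection-only model, `result` is a list with one (N, 5) array per
# class: [x1, y1, x2, y2, score]. In my case every array is empty.
for cls_id, dets in enumerate(result):
    if len(dets) > 0:
        print(cls_id, dets.shape, dets[:, 4].max())
```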

Since the dataset is strictly detection-based, I removed the mask settings from the config, which now looks like this:



_base_ = [
    '../_base_/models/cascade_mask_rcnn_convnext_fpn.py',
    '../_base_/datasets/custom_dataset_detection.py',
    '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
]

model = dict(
    backbone=dict(
        in_chans=3,
        depths=[3, 3, 27, 3], 
        dims=[192, 384, 768, 1536], 
        drop_path_rate=0.7,
        layer_scale_init_value=1.0,
        out_indices=[0, 1, 2, 3],
    ),
    neck=dict(in_channels=[192, 384, 768, 1536]),
    roi_head=dict(
        bbox_head=[
            dict(
                type='ConvFCBBoxHead',
                num_shared_convs=4,
                num_shared_fcs=1,
                in_channels=256,
                conv_out_channels=256,
                fc_out_channels=1024,
                roi_feat_size=7,
                num_classes=97,
                bbox_coder=dict(
                    type='DeltaXYWHBBoxCoder',
                    target_means=[0., 0., 0., 0.],
                    target_stds=[0.1, 0.1, 0.2, 0.2]),
                reg_class_agnostic=False,
                reg_decoded_bbox=True,
                norm_cfg=dict(type='SyncBN', requires_grad=True),
                loss_cls=dict(
                    type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
                loss_bbox=dict(type='GIoULoss', loss_weight=10.0)),
            dict(
                type='ConvFCBBoxHead',
                num_shared_convs=4,
                num_shared_fcs=1,
                in_channels=256,
                conv_out_channels=256,
                fc_out_channels=1024,
                roi_feat_size=7,
                num_classes=97,
                bbox_coder=dict(
                    type='DeltaXYWHBBoxCoder',
                    target_means=[0., 0., 0., 0.],
                    target_stds=[0.05, 0.05, 0.1, 0.1]),
                reg_class_agnostic=False,
                reg_decoded_bbox=True,
                norm_cfg=dict(type='SyncBN', requires_grad=True),
                loss_cls=dict(
                    type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
                loss_bbox=dict(type='GIoULoss', loss_weight=10.0)),
            dict(
                type='ConvFCBBoxHead',
                num_shared_convs=4,
                num_shared_fcs=1,
                in_channels=256,
                conv_out_channels=256,
                fc_out_channels=1024,
                roi_feat_size=7,
                num_classes=97,
                bbox_coder=dict(
                    type='DeltaXYWHBBoxCoder',
                    target_means=[0., 0., 0., 0.],
                    target_stds=[0.033, 0.033, 0.067, 0.067]),
                reg_class_agnostic=False,
                reg_decoded_bbox=True,
                norm_cfg=dict(type='SyncBN', requires_grad=True),
                loss_cls=dict(
                    type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
                loss_bbox=dict(type='GIoULoss', loss_weight=10.0))
        ]))

img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)

# augmentation strategy originates from DETR / Sparse RCNN
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(type='AutoAugment',
         policies=[
             [
                 dict(type='Resize',
                      img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333),
                                 (608, 1333), (640, 1333), (672, 1333), (704, 1333),
                                 (736, 1333), (768, 1333), (800, 1333)],
                      multiscale_mode='value',
                      keep_ratio=True)
             ],
             [
                 dict(type='Resize',
                      img_scale=[(400, 1333), (500, 1333), (600, 1333)],
                      multiscale_mode='value',
                      keep_ratio=True),
                 dict(type='RandomCrop',
                      crop_type='absolute_range',
                      crop_size=(384, 600),
                      allow_negative_crop=True),
                 dict(type='Resize',
                      img_scale=[(480, 1333), (512, 1333), (544, 1333),
                                 (576, 1333), (608, 1333), (640, 1333),
                                 (672, 1333), (704, 1333), (736, 1333),
                                 (768, 1333), (800, 1333)],
                      multiscale_mode='value',
                      override=True,
                      keep_ratio=True)
             ]
         ]),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size_divisor=32),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
data = dict(train=dict(pipeline=train_pipeline))

optimizer = dict(constructor='LearningRateDecayOptimizerConstructor', _delete_=True, type='AdamW', 
                 lr=0.0000002, betas=(0.9, 0.999), weight_decay=0.05,
                 paramwise_cfg={'decay_rate': 0.7,
                                'decay_type': 'layer_wise',
                                'num_layers': 12})
lr_config = dict(step=[3, 9, 27, 33])
runner = dict(type='EpochBasedRunnerAmp', max_epochs=36)

# do not use mmdet version fp16
fp16 = None
optimizer_config = dict(
    type="DistOptimizerHook",
    update_interval=1,
    grad_clip=None,
    coalesce=True,
    bucket_size_mb=-1,
    use_fp16=False,
)

And the corresponding _base_/models/cascade_mask_rcnn_convnext_fpn.py config looks like:

# Copyright (c) Meta Platforms, Inc. and affiliates.

# All rights reserved.

# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.


# model settings
model = dict(
    type='CascadeRCNN',
    pretrained='checkpoint/cascade_mask_rcnn_convnext_large_22k_3x.pth',
    backbone=dict(
        type='ConvNeXt',
        in_chans=3,
        depths=[3, 3, 9, 3], 
        dims=[96, 192, 384, 768], 
        drop_path_rate=0.2,
        layer_scale_init_value=1e-6,
        out_indices=[0, 1, 2, 3],
    ),
    neck=dict(
        type='FPN',
        in_channels=[128, 256, 512, 1024],
        out_channels=256,
        num_outs=5),
    rpn_head=dict(
        type='RPNHead',
        in_channels=256,
        feat_channels=256,
        anchor_generator=dict(
            type='AnchorGenerator',
            scales=[8],
            ratios=[0.5, 1.0, 2.0],
            strides=[4, 8, 16, 32, 64]),
        bbox_coder=dict(
            type='DeltaXYWHBBoxCoder',
            target_means=[.0, .0, .0, .0],
            target_stds=[1.0, 1.0, 1.0, 1.0]),
        loss_cls=dict(
            type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
        loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
    roi_head=dict(
        type='CascadeRoIHead',
        num_stages=3,
        stage_loss_weights=[1, 0.5, 0.25],
        bbox_roi_extractor=dict(
            type='SingleRoIExtractor',
            roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0),
            out_channels=256,
            featmap_strides=[4, 8, 16, 32]),
        bbox_head=[
            dict(
                type='Shared2FCBBoxHead',
                in_channels=256,
                fc_out_channels=1024,
                roi_feat_size=7,
                num_classes=97,
                bbox_coder=dict(
                    type='DeltaXYWHBBoxCoder',
                    target_means=[0., 0., 0., 0.],
                    target_stds=[0.1, 0.1, 0.2, 0.2]),
                reg_class_agnostic=True,
                loss_cls=dict(
                    type='CrossEntropyLoss',
                    use_sigmoid=False,
                    loss_weight=1.0),
                loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
                               loss_weight=1.0)),
            dict(
                type='Shared2FCBBoxHead',
                in_channels=256,
                fc_out_channels=1024,
                roi_feat_size=7,
                num_classes=97,
                bbox_coder=dict(
                    type='DeltaXYWHBBoxCoder',
                    target_means=[0., 0., 0., 0.],
                    target_stds=[0.05, 0.05, 0.1, 0.1]),
                reg_class_agnostic=True,
                loss_cls=dict(
                    type='CrossEntropyLoss',
                    use_sigmoid=False,
                    loss_weight=1.0),
                loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
                               loss_weight=1.0)),
            dict(
                type='Shared2FCBBoxHead',
                in_channels=256,
                fc_out_channels=1024,
                roi_feat_size=7,
                num_classes=97,
                bbox_coder=dict(
                    type='DeltaXYWHBBoxCoder',
                    target_means=[0., 0., 0., 0.],
                    target_stds=[0.033, 0.033, 0.067, 0.067]),
                reg_class_agnostic=True,
                loss_cls=dict(
                    type='CrossEntropyLoss',
                    use_sigmoid=False,
                    loss_weight=1.0),
                loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
        ]),
    # model training and testing settings
    train_cfg = dict(
        rpn=dict(
            assigner=dict(
                type='MaxIoUAssigner',
                pos_iou_thr=0.7,
                neg_iou_thr=0.3,
                min_pos_iou=0.3,
                match_low_quality=True,
                ignore_iof_thr=-1),
            sampler=dict(
                type='RandomSampler',
                num=256,
                pos_fraction=0.5,
                neg_pos_ub=-1,
                add_gt_as_proposals=False),
            allowed_border=0,
            pos_weight=-1,
            debug=False),
        rpn_proposal=dict(
            nms_across_levels=False,
            nms_pre=2000,
            nms_post=2000,
            max_per_img=2000,
            nms=dict(type='nms', iou_threshold=0.7),
            min_bbox_size=0),
        rcnn=[
            dict(
                assigner=dict(
                    type='MaxIoUAssigner',
                    pos_iou_thr=0.5,
                    neg_iou_thr=0.5,
                    min_pos_iou=0.5,
                    match_low_quality=False,
                    ignore_iof_thr=-1),
                sampler=dict(
                    type='RandomSampler',
                    num=512,
                    pos_fraction=0.25,
                    neg_pos_ub=-1,
                    add_gt_as_proposals=True),
                #mask_size=28,
                pos_weight=-1,
                debug=False),
            dict(
                assigner=dict(
                    type='MaxIoUAssigner',
                    pos_iou_thr=0.6,
                    neg_iou_thr=0.6,
                    min_pos_iou=0.6,
                    match_low_quality=False,
                    ignore_iof_thr=-1),
                sampler=dict(
                    type='RandomSampler',
                    num=512,
                    pos_fraction=0.25,
                    neg_pos_ub=-1,
                    add_gt_as_proposals=True),
                #mask_size=28,
                pos_weight=-1,
                debug=False),
            dict(
                assigner=dict(
                    type='MaxIoUAssigner',
                    pos_iou_thr=0.7,
                    neg_iou_thr=0.7,
                    min_pos_iou=0.7,
                    match_low_quality=False,
                    ignore_iof_thr=-1),
                sampler=dict(
                    type='RandomSampler',
                    num=512,
                    pos_fraction=0.25,
                    neg_pos_ub=-1,
                    add_gt_as_proposals=True),
                #mask_size=28,
                pos_weight=-1,
                debug=False)
        ]),
    test_cfg = dict(
        rpn=dict(
            nms_across_levels=False,
            nms_pre=1000,
            nms_post=1000,
            max_per_img=1000,
            nms=dict(type='nms', iou_threshold=0.7),
            min_bbox_size=0),
        rcnn=dict(
            score_thr=0.05,
            nms=dict(type='nms', iou_threshold=0.5),
            max_per_img=100)))

Do I need to prune or remap some layers of the checkpoint before loading it? When the pretrained weights are loaded I get the warnings below; a rough remapping sketch of what I am considering follows after the log:

unexpected key in source state_dict: backbone.downsample_layers.0.0.weight, backbone.downsample_layers.0.0.bias, backbone.downsample_layers.0.1.weight, backbone.downsample_layers.0.1.bias, backbone.downsample_layers.1.0.weight, backbone.downsample_layers.1.0.bias, backbone.downsample_layers.1.1.weight, backbone.downsample_layers.1.1.bias, backbone.downsample_layers.2.0.weight, backbone.downsample_layers.2.0.bias, backbone.downsample_layers.2.1.weight, backbone.downsample_layers.2.1.bias, backbone.downsample_layers.3.0.weight, backbone.downsample_layers.3.0.bias, backbone.downsample_layers.3.1.weight, backbone.downsample_layers.3.1.bias, backbone.stages.0.0.gamma, backbone.stages.0.0.dwconv.weight, backbone.stages.0.0.dwconv.bias, backbone.stages.0.0.norm.weight, backbone.stages.0.0.norm.bias, backbone.stages.0.0.pwconv1.weight, backbone.stages.0.0.pwconv1.bias, backbone.stages.0.0.pwconv2.weight, backbone.stages.0.0.pwconv2.bias, backbone.stages.0.1.gamma, backbone.stages.0.1.dwconv.weight, backbone.stages.0.1.dwconv.bias, backbone.stages.0.1.norm.weight, backbone.stages.0.1.norm.bias, backbone.stages.0.1.pwconv1.weight, backbone.stages.0.1.pwconv1.bias, backbone.stages.0.1.pwconv2.weight, backbone.stages.0.1.pwconv2.bias, backbone.stages.0.2.gamma, backbone.stages.0.2.dwconv.weight, backbone.stages.0.2.dwconv.bias, backbone.stages.0.2.norm.weight, backbone.stages.0.2.norm.bias, backbone.stages.0.2.pwconv1.weight, backbone.stages.0.2.pwconv1.bias, backbone.stages.0.2.pwconv2.weight, backbone.stages.0.2.pwconv2.bias, backbone.stages.1.0.gamma, backbone.stages.1.0.dwconv.weight, backbone.stages.1.0.dwconv.bias, backbone.stages.1.0.norm.weight, backbone.stages.1.0.norm.bias, backbone.stages.1.0.pwconv1.weight, backbone.stages.1.0.pwconv1.bias, backbone.stages.1.0.pwconv2.weight, backbone.stages.1.0.pwconv2.bias, backbone.stages.1.1.gamma, backbone.stages.1.1.dwconv.weight, backbone.stages.1.1.dwconv.bias, backbone.stages.1.1.norm.weight, backbone.stages.1.1.norm.bias, backbone.stages.1.1.pwconv1.weight, backbone.stages.1.1.pwconv1.bias, backbone.stages.1.1.pwconv2.weight, backbone.stages.1.1.pwconv2.bias, backbone.stages.1.2.gamma, backbone.stages.1.2.dwconv.weight, backbone.stages.1.2.dwconv.bias, backbone.stages.1.2.norm.weight, backbone.stages.1.2.norm.bias, backbone.stages.1.2.pwconv1.weight, backbone.stages.1.2.pwconv1.bias, backbone.stages.1.2.pwconv2.weight, backbone.stages.1.2.pwconv2.bias, backbone.stages.2.0.gamma, backbone.stages.2.0.dwconv.weight, backbone.stages.2.0.dwconv.bias, backbone.stages.2.0.norm.weight, backbone.stages.2.0.norm.bias, backbone.stages.2.0.pwconv1.weight, backbone.stages.2.0.pwconv1.bias, backbone.stages.2.0.pwconv2.weight, backbone.stages.2.0.pwconv2.bias, backbone.stages.2.1.gamma, backbone.stages.2.1.dwconv.weight, backbone.stages.2.1.dwconv.bias, backbone.stages.2.1.norm.weight, backbone.stages.2.1.norm.bias, backbone.stages.2.1.pwconv1.weight, backbone.stages.2.1.pwconv1.bias, backbone.stages.2.1.pwconv2.weight, backbone.stages.2.1.pwconv2.bias, backbone.stages.2.2.gamma, backbone.stages.2.2.dwconv.weight, backbone.stages.2.2.dwconv.bias, backbone.stages.2.2.norm.weight, backbone.stages.2.2.norm.bias, backbone.stages.2.2.pwconv1.weight, backbone.stages.2.2.pwconv1.bias, backbone.stages.2.2.pwconv2.weight, backbone.stages.2.2.pwconv2.bias, backbone.stages.2.3.gamma, backbone.stages.2.3.dwconv.weight, backbone.stages.2.3.dwconv.bias, backbone.stages.2.3.norm.weight, backbone.stages.2.3.norm.bias, backbone.stages.2.3.pwconv1.weight, 
backbone.stages.2.3.pwconv1.bias, backbone.stages.2.3.pwconv2.weight, backbone.stages.2.3.pwconv2.bias, backbone.stages.2.4.gamma, backbone.stages.2.4.dwconv.weight, backbone.stages.2.4.dwconv.bias, backbone.stages.2.4.norm.weight, backbone.stages.2.4.norm.bias, backbone.stages.2.4.pwconv1.weight, backbone.stages.2.4.pwconv1.bias, backbone.stages.2.4.pwconv2.weight, backbone.stages.2.4.pwconv2.bias, backbone.stages.2.5.gamma, backbone.stages.2.5.dwconv.weight, backbone.stages.2.5.dwconv.bias, backbone.stages.2.5.norm.weight, backbone.stages.2.5.norm.bias, backbone.stages.2.5.pwconv1.weight, backbone.stages.2.5.pwconv1.bias, backbone.stages.2.5.pwconv2.weight, backbone.stages.2.5.pwconv2.bias, backbone.stages.2.6.gamma, backbone.stages.2.6.dwconv.weight, backbone.stages.2.6.dwconv.bias, backbone.stages.2.6.norm.weight, backbone.stages.2.6.norm.bias, backbone.stages.2.6.pwconv1.weight, backbone.stages.2.6.pwconv1.bias, backbone.stages.2.6.pwconv2.weight, backbone.stages.2.6.pwconv2.bias, backbone.stages.2.7.gamma, backbone.stages.2.7.dwconv.weight, backbone.stages.2.7.dwconv.bias, backbone.stages.2.7.norm.weight, backbone.stages.2.7.norm.bias, backbone.stages.2.7.pwconv1.weight, backbone.stages.2.7.pwconv1.bias, backbone.stages.2.7.pwconv2.weight, backbone.stages.2.7.pwconv2.bias, backbone.stages.2.8.gamma, backbone.stages.2.8.dwconv.weight, backbone.stages.2.8.dwconv.bias, backbone.stages.2.8.norm.weight, backbone.stages.2.8.norm.bias, backbone.stages.2.8.pwconv1.weight, backbone.stages.2.8.pwconv1.bias, backbone.stages.2.8.pwconv2.weight, backbone.stages.2.8.pwconv2.bias, backbone.stages.2.9.gamma, backbone.stages.2.9.dwconv.weight, backbone.stages.2.9.dwconv.bias, backbone.stages.2.9.norm.weight, backbone.stages.2.9.norm.bias, backbone.stages.2.9.pwconv1.weight, backbone.stages.2.9.pwconv1.bias, backbone.stages.2.9.pwconv2.weight, backbone.stages.2.9.pwconv2.bias, backbone.stages.2.10.gamma, backbone.stages.2.10.dwconv.weight, backbone.stages.2.10.dwconv.bias, backbone.stages.2.10.norm.weight, backbone.stages.2.10.norm.bias, backbone.stages.2.10.pwconv1.weight, backbone.stages.2.10.pwconv1.bias, backbone.stages.2.10.pwconv2.weight, backbone.stages.2.10.pwconv2.bias, backbone.stages.2.11.gamma, backbone.stages.2.11.dwconv.weight, backbone.stages.2.11.dwconv.bias, backbone.stages.2.11.norm.weight, backbone.stages.2.11.norm.bias, backbone.stages.2.11.pwconv1.weight, backbone.stages.2.11.pwconv1.bias, backbone.stages.2.11.pwconv2.weight, backbone.stages.2.11.pwconv2.bias, backbone.stages.2.12.gamma, backbone.stages.2.12.dwconv.weight, backbone.stages.2.12.dwconv.bias, backbone.stages.2.12.norm.weight, backbone.stages.2.12.norm.bias, backbone.stages.2.12.pwconv1.weight, backbone.stages.2.12.pwconv1.bias, backbone.stages.2.12.pwconv2.weight, backbone.stages.2.12.pwconv2.bias, backbone.stages.2.13.gamma, backbone.stages.2.13.dwconv.weight, backbone.stages.2.13.dwconv.bias, backbone.stages.2.13.norm.weight, backbone.stages.2.13.norm.bias, backbone.stages.2.13.pwconv1.weight, backbone.stages.2.13.pwconv1.bias, backbone.stages.2.13.pwconv2.weight, backbone.stages.2.13.pwconv2.bias, backbone.stages.2.14.gamma, backbone.stages.2.14.dwconv.weight, backbone.stages.2.14.dwconv.bias, backbone.stages.2.14.norm.weight, backbone.stages.2.14.norm.bias, backbone.stages.2.14.pwconv1.weight, backbone.stages.2.14.pwconv1.bias, backbone.stages.2.14.pwconv2.weight, backbone.stages.2.14.pwconv2.bias, backbone.stages.2.15.gamma, backbone.stages.2.15.dwconv.weight, backbone.stages.2.15.dwconv.bias, 
backbone.stages.2.15.norm.weight, backbone.stages.2.15.norm.bias, backbone.stages.2.15.pwconv1.weight, backbone.stages.2.15.pwconv1.bias, backbone.stages.2.15.pwconv2.weight, backbone.stages.2.15.pwconv2.bias, backbone.stages.2.16.gamma, backbone.stages.2.16.dwconv.weight, backbone.stages.2.16.dwconv.bias, backbone.stages.2.16.norm.weight, backbone.stages.2.16.norm.bias, backbone.stages.2.16.pwconv1.weight, backbone.stages.2.16.pwconv1.bias, backbone.stages.2.16.pwconv2.weight, backbone.stages.2.16.pwconv2.bias, backbone.stages.2.17.gamma, backbone.stages.2.17.dwconv.weight, backbone.stages.2.17.dwconv.bias, backbone.stages.2.17.norm.weight, backbone.stages.2.17.norm.bias, backbone.stages.2.17.pwconv1.weight, backbone.stages.2.17.pwconv1.bias, backbone.stages.2.17.pwconv2.weight, backbone.stages.2.17.pwconv2.bias, backbone.stages.2.18.gamma, backbone.stages.2.18.dwconv.weight, backbone.stages.2.18.dwconv.bias, backbone.stages.2.18.norm.weight, backbone.stages.2.18.norm.bias, backbone.stages.2.18.pwconv1.weight, backbone.stages.2.18.pwconv1.bias, backbone.stages.2.18.pwconv2.weight, backbone.stages.2.18.pwconv2.bias, backbone.stages.2.19.gamma, backbone.stages.2.19.dwconv.weight, backbone.stages.2.19.dwconv.bias, backbone.stages.2.19.norm.weight, backbone.stages.2.19.norm.bias, backbone.stages.2.19.pwconv1.weight, backbone.stages.2.19.pwconv1.bias, backbone.stages.2.19.pwconv2.weight, backbone.stages.2.19.pwconv2.bias, backbone.stages.2.20.gamma, backbone.stages.2.20.dwconv.weight, backbone.stages.2.20.dwconv.bias, backbone.stages.2.20.norm.weight, backbone.stages.2.20.norm.bias, backbone.stages.2.20.pwconv1.weight, backbone.stages.2.20.pwconv1.bias, backbone.stages.2.20.pwconv2.weight, backbone.stages.2.20.pwconv2.bias, backbone.stages.2.21.gamma, backbone.stages.2.21.dwconv.weight, backbone.stages.2.21.dwconv.bias, backbone.stages.2.21.norm.weight, backbone.stages.2.21.norm.bias, backbone.stages.2.21.pwconv1.weight, backbone.stages.2.21.pwconv1.bias, backbone.stages.2.21.pwconv2.weight, backbone.stages.2.21.pwconv2.bias, backbone.stages.2.22.gamma, backbone.stages.2.22.dwconv.weight, backbone.stages.2.22.dwconv.bias, backbone.stages.2.22.norm.weight, backbone.stages.2.22.norm.bias, backbone.stages.2.22.pwconv1.weight, backbone.stages.2.22.pwconv1.bias, backbone.stages.2.22.pwconv2.weight, backbone.stages.2.22.pwconv2.bias, backbone.stages.2.23.gamma, backbone.stages.2.23.dwconv.weight, backbone.stages.2.23.dwconv.bias, backbone.stages.2.23.norm.weight, backbone.stages.2.23.norm.bias, backbone.stages.2.23.pwconv1.weight, backbone.stages.2.23.pwconv1.bias, backbone.stages.2.23.pwconv2.weight, backbone.stages.2.23.pwconv2.bias, backbone.stages.2.24.gamma, backbone.stages.2.24.dwconv.weight, backbone.stages.2.24.dwconv.bias, backbone.stages.2.24.norm.weight, backbone.stages.2.24.norm.bias, backbone.stages.2.24.pwconv1.weight, backbone.stages.2.24.pwconv1.bias, backbone.stages.2.24.pwconv2.weight, backbone.stages.2.24.pwconv2.bias, backbone.stages.2.25.gamma, backbone.stages.2.25.dwconv.weight, backbone.stages.2.25.dwconv.bias, backbone.stages.2.25.norm.weight, backbone.stages.2.25.norm.bias, backbone.stages.2.25.pwconv1.weight, backbone.stages.2.25.pwconv1.bias, backbone.stages.2.25.pwconv2.weight, backbone.stages.2.25.pwconv2.bias, backbone.stages.2.26.gamma, backbone.stages.2.26.dwconv.weight, backbone.stages.2.26.dwconv.bias, backbone.stages.2.26.norm.weight, backbone.stages.2.26.norm.bias, backbone.stages.2.26.pwconv1.weight, backbone.stages.2.26.pwconv1.bias, 
backbone.stages.2.26.pwconv2.weight, backbone.stages.2.26.pwconv2.bias, backbone.stages.3.0.gamma, backbone.stages.3.0.dwconv.weight, backbone.stages.3.0.dwconv.bias, backbone.stages.3.0.norm.weight, backbone.stages.3.0.norm.bias, backbone.stages.3.0.pwconv1.weight, backbone.stages.3.0.pwconv1.bias, backbone.stages.3.0.pwconv2.weight, backbone.stages.3.0.pwconv2.bias, backbone.stages.3.1.gamma, backbone.stages.3.1.dwconv.weight, backbone.stages.3.1.dwconv.bias, backbone.stages.3.1.norm.weight, backbone.stages.3.1.norm.bias, backbone.stages.3.1.pwconv1.weight, backbone.stages.3.1.pwconv1.bias, backbone.stages.3.1.pwconv2.weight, backbone.stages.3.1.pwconv2.bias, backbone.stages.3.2.gamma, backbone.stages.3.2.dwconv.weight, backbone.stages.3.2.dwconv.bias, backbone.stages.3.2.norm.weight, backbone.stages.3.2.norm.bias, backbone.stages.3.2.pwconv1.weight, backbone.stages.3.2.pwconv1.bias, backbone.stages.3.2.pwconv2.weight, backbone.stages.3.2.pwconv2.bias, backbone.norm0.weight, backbone.norm0.bias, backbone.norm1.weight, backbone.norm1.bias, backbone.norm2.weight, backbone.norm2.bias, backbone.norm3.weight, backbone.norm3.bias, neck.lateral_convs.0.conv.weight, neck.lateral_convs.0.conv.bias, neck.lateral_convs.1.conv.weight, neck.lateral_convs.1.conv.bias, neck.lateral_convs.2.conv.weight, neck.lateral_convs.2.conv.bias, neck.lateral_convs.3.conv.weight, neck.lateral_convs.3.conv.bias, neck.fpn_convs.0.conv.weight, neck.fpn_convs.0.conv.bias, neck.fpn_convs.1.conv.weight, neck.fpn_convs.1.conv.bias, neck.fpn_convs.2.conv.weight, neck.fpn_convs.2.conv.bias, neck.fpn_convs.3.conv.weight, neck.fpn_convs.3.conv.bias, rpn_head.rpn_conv.weight, rpn_head.rpn_conv.bias, rpn_head.rpn_cls.weight, rpn_head.rpn_cls.bias, rpn_head.rpn_reg.weight, rpn_head.rpn_reg.bias, roi_head.bbox_head.0.fc_cls.weight, roi_head.bbox_head.0.fc_cls.bias, roi_head.bbox_head.0.fc_reg.weight, roi_head.bbox_head.0.fc_reg.bias, roi_head.bbox_head.0.shared_convs.0.conv.weight, roi_head.bbox_head.0.shared_convs.0.bn.weight, roi_head.bbox_head.0.shared_convs.0.bn.bias, roi_head.bbox_head.0.shared_convs.0.bn.running_mean, roi_head.bbox_head.0.shared_convs.0.bn.running_var, roi_head.bbox_head.0.shared_convs.0.bn.num_batches_tracked, roi_head.bbox_head.0.shared_convs.1.conv.weight, roi_head.bbox_head.0.shared_convs.1.bn.weight, roi_head.bbox_head.0.shared_convs.1.bn.bias, roi_head.bbox_head.0.shared_convs.1.bn.running_mean, roi_head.bbox_head.0.shared_convs.1.bn.running_var, roi_head.bbox_head.0.shared_convs.1.bn.num_batches_tracked, roi_head.bbox_head.0.shared_convs.2.conv.weight, roi_head.bbox_head.0.shared_convs.2.bn.weight, roi_head.bbox_head.0.shared_convs.2.bn.bias, roi_head.bbox_head.0.shared_convs.2.bn.running_mean, roi_head.bbox_head.0.shared_convs.2.bn.running_var, roi_head.bbox_head.0.shared_convs.2.bn.num_batches_tracked, roi_head.bbox_head.0.shared_convs.3.conv.weight, roi_head.bbox_head.0.shared_convs.3.bn.weight, roi_head.bbox_head.0.shared_convs.3.bn.bias, roi_head.bbox_head.0.shared_convs.3.bn.running_mean, roi_head.bbox_head.0.shared_convs.3.bn.running_var, roi_head.bbox_head.0.shared_convs.3.bn.num_batches_tracked, roi_head.bbox_head.0.shared_fcs.0.weight, roi_head.bbox_head.0.shared_fcs.0.bias, roi_head.bbox_head.1.fc_cls.weight, roi_head.bbox_head.1.fc_cls.bias, roi_head.bbox_head.1.fc_reg.weight, roi_head.bbox_head.1.fc_reg.bias, roi_head.bbox_head.1.shared_convs.0.conv.weight, roi_head.bbox_head.1.shared_convs.0.bn.weight, roi_head.bbox_head.1.shared_convs.0.bn.bias, 
roi_head.bbox_head.1.shared_convs.0.bn.running_mean, roi_head.bbox_head.1.shared_convs.0.bn.running_var, roi_head.bbox_head.1.shared_convs.0.bn.num_batches_tracked, roi_head.bbox_head.1.shared_convs.1.conv.weight, roi_head.bbox_head.1.shared_convs.1.bn.weight, roi_head.bbox_head.1.shared_convs.1.bn.bias, roi_head.bbox_head.1.shared_convs.1.bn.running_mean, roi_head.bbox_head.1.shared_convs.1.bn.running_var, roi_head.bbox_head.1.shared_convs.1.bn.num_batches_tracked, roi_head.bbox_head.1.shared_convs.2.conv.weight, roi_head.bbox_head.1.shared_convs.2.bn.weight, roi_head.bbox_head.1.shared_convs.2.bn.bias, roi_head.bbox_head.1.shared_convs.2.bn.running_mean, roi_head.bbox_head.1.shared_convs.2.bn.running_var, roi_head.bbox_head.1.shared_convs.2.bn.num_batches_tracked, roi_head.bbox_head.1.shared_convs.3.conv.weight, roi_head.bbox_head.1.shared_convs.3.bn.weight, roi_head.bbox_head.1.shared_convs.3.bn.bias, roi_head.bbox_head.1.shared_convs.3.bn.running_mean, roi_head.bbox_head.1.shared_convs.3.bn.running_var, roi_head.bbox_head.1.shared_convs.3.bn.num_batches_tracked, roi_head.bbox_head.1.shared_fcs.0.weight, roi_head.bbox_head.1.shared_fcs.0.bias, roi_head.bbox_head.2.fc_cls.weight, roi_head.bbox_head.2.fc_cls.bias, roi_head.bbox_head.2.fc_reg.weight, roi_head.bbox_head.2.fc_reg.bias, roi_head.bbox_head.2.shared_convs.0.conv.weight, roi_head.bbox_head.2.shared_convs.0.bn.weight, roi_head.bbox_head.2.shared_convs.0.bn.bias, roi_head.bbox_head.2.shared_convs.0.bn.running_mean, roi_head.bbox_head.2.shared_convs.0.bn.running_var, roi_head.bbox_head.2.shared_convs.0.bn.num_batches_tracked, roi_head.bbox_head.2.shared_convs.1.conv.weight, roi_head.bbox_head.2.shared_convs.1.bn.weight, roi_head.bbox_head.2.shared_convs.1.bn.bias, roi_head.bbox_head.2.shared_convs.1.bn.running_mean, roi_head.bbox_head.2.shared_convs.1.bn.running_var, roi_head.bbox_head.2.shared_convs.1.bn.num_batches_tracked, roi_head.bbox_head.2.shared_convs.2.conv.weight, roi_head.bbox_head.2.shared_convs.2.bn.weight, roi_head.bbox_head.2.shared_convs.2.bn.bias, roi_head.bbox_head.2.shared_convs.2.bn.running_mean, roi_head.bbox_head.2.shared_convs.2.bn.running_var, roi_head.bbox_head.2.shared_convs.2.bn.num_batches_tracked, roi_head.bbox_head.2.shared_convs.3.conv.weight, roi_head.bbox_head.2.shared_convs.3.bn.weight, roi_head.bbox_head.2.shared_convs.3.bn.bias, roi_head.bbox_head.2.shared_convs.3.bn.running_mean, roi_head.bbox_head.2.shared_convs.3.bn.running_var, roi_head.bbox_head.2.shared_convs.3.bn.num_batches_tracked, roi_head.bbox_head.2.shared_fcs.0.weight, roi_head.bbox_head.2.shared_fcs.0.bias, roi_head.mask_head.0.convs.0.conv.weight, roi_head.mask_head.0.convs.0.conv.bias, roi_head.mask_head.0.convs.1.conv.weight, roi_head.mask_head.0.convs.1.conv.bias, roi_head.mask_head.0.convs.2.conv.weight, roi_head.mask_head.0.convs.2.conv.bias, roi_head.mask_head.0.convs.3.conv.weight, roi_head.mask_head.0.convs.3.conv.bias, roi_head.mask_head.0.upsample.weight, roi_head.mask_head.0.upsample.bias, roi_head.mask_head.0.conv_logits.weight, roi_head.mask_head.0.conv_logits.bias, roi_head.mask_head.1.convs.0.conv.weight, roi_head.mask_head.1.convs.0.conv.bias, roi_head.mask_head.1.convs.1.conv.weight, roi_head.mask_head.1.convs.1.conv.bias, roi_head.mask_head.1.convs.2.conv.weight, roi_head.mask_head.1.convs.2.conv.bias, roi_head.mask_head.1.convs.3.conv.weight, roi_head.mask_head.1.convs.3.conv.bias, roi_head.mask_head.1.upsample.weight, roi_head.mask_head.1.upsample.bias, roi_head.mask_head.1.conv_logits.weight, 
roi_head.mask_head.1.conv_logits.bias, roi_head.mask_head.2.convs.0.conv.weight, roi_head.mask_head.2.convs.0.conv.bias, roi_head.mask_head.2.convs.1.conv.weight, roi_head.mask_head.2.convs.1.conv.bias, roi_head.mask_head.2.convs.2.conv.weight, roi_head.mask_head.2.convs.2.conv.bias, roi_head.mask_head.2.convs.3.conv.weight, roi_head.mask_head.2.convs.3.conv.bias, roi_head.mask_head.2.upsample.weight, roi_head.mask_head.2.upsample.bias, roi_head.mask_head.2.conv_logits.weight, roi_head.mask_head.2.conv_logits.bias

missing keys in source state_dict: downsample_layers.0.0.weight, downsample_layers.0.0.bias, downsample_layers.0.1.weight, downsample_layers.0.1.bias, downsample_layers.1.0.weight, downsample_layers.1.0.bias, downsample_layers.1.1.weight, downsample_layers.1.1.bias, downsample_layers.2.0.weight, downsample_layers.2.0.bias, downsample_layers.2.1.weight, downsample_layers.2.1.bias, downsample_layers.3.0.weight, downsample_layers.3.0.bias, downsample_layers.3.1.weight, downsample_layers.3.1.bias, stages.0.0.gamma, stages.0.0.dwconv.weight, stages.0.0.dwconv.bias, stages.0.0.norm.weight, stages.0.0.norm.bias, stages.0.0.pwconv1.weight, stages.0.0.pwconv1.bias, stages.0.0.pwconv2.weight, stages.0.0.pwconv2.bias, stages.0.1.gamma, stages.0.1.dwconv.weight, stages.0.1.dwconv.bias, stages.0.1.norm.weight, stages.0.1.norm.bias, stages.0.1.pwconv1.weight, stages.0.1.pwconv1.bias, stages.0.1.pwconv2.weight, stages.0.1.pwconv2.bias, stages.0.2.gamma, stages.0.2.dwconv.weight, stages.0.2.dwconv.bias, stages.0.2.norm.weight, stages.0.2.norm.bias, stages.0.2.pwconv1.weight, stages.0.2.pwconv1.bias, stages.0.2.pwconv2.weight, stages.0.2.pwconv2.bias, stages.1.0.gamma, stages.1.0.dwconv.weight, stages.1.0.dwconv.bias, stages.1.0.norm.weight, stages.1.0.norm.bias, stages.1.0.pwconv1.weight, stages.1.0.pwconv1.bias, stages.1.0.pwconv2.weight, stages.1.0.pwconv2.bias, stages.1.1.gamma, stages.1.1.dwconv.weight, stages.1.1.dwconv.bias, stages.1.1.norm.weight, stages.1.1.norm.bias, stages.1.1.pwconv1.weight, stages.1.1.pwconv1.bias, stages.1.1.pwconv2.weight, stages.1.1.pwconv2.bias, stages.1.2.gamma, stages.1.2.dwconv.weight, stages.1.2.dwconv.bias, stages.1.2.norm.weight, stages.1.2.norm.bias, stages.1.2.pwconv1.weight, stages.1.2.pwconv1.bias, stages.1.2.pwconv2.weight, stages.1.2.pwconv2.bias, stages.2.0.gamma, stages.2.0.dwconv.weight, stages.2.0.dwconv.bias, stages.2.0.norm.weight, stages.2.0.norm.bias, stages.2.0.pwconv1.weight, stages.2.0.pwconv1.bias, stages.2.0.pwconv2.weight, stages.2.0.pwconv2.bias, stages.2.1.gamma, stages.2.1.dwconv.weight, stages.2.1.dwconv.bias, stages.2.1.norm.weight, stages.2.1.norm.bias, stages.2.1.pwconv1.weight, stages.2.1.pwconv1.bias, stages.2.1.pwconv2.weight, stages.2.1.pwconv2.bias, stages.2.2.gamma, stages.2.2.dwconv.weight, stages.2.2.dwconv.bias, stages.2.2.norm.weight, stages.2.2.norm.bias, stages.2.2.pwconv1.weight, stages.2.2.pwconv1.bias, stages.2.2.pwconv2.weight, stages.2.2.pwconv2.bias, stages.2.3.gamma, stages.2.3.dwconv.weight, stages.2.3.dwconv.bias, stages.2.3.norm.weight, stages.2.3.norm.bias, stages.2.3.pwconv1.weight, stages.2.3.pwconv1.bias, stages.2.3.pwconv2.weight, stages.2.3.pwconv2.bias, stages.2.4.gamma, stages.2.4.dwconv.weight, stages.2.4.dwconv.bias, stages.2.4.norm.weight, stages.2.4.norm.bias, stages.2.4.pwconv1.weight, stages.2.4.pwconv1.bias, stages.2.4.pwconv2.weight, stages.2.4.pwconv2.bias, stages.2.5.gamma, stages.2.5.dwconv.weight, stages.2.5.dwconv.bias, stages.2.5.norm.weight, stages.2.5.norm.bias, stages.2.5.pwconv1.weight, stages.2.5.pwconv1.bias, stages.2.5.pwconv2.weight, stages.2.5.pwconv2.bias, stages.2.6.gamma, stages.2.6.dwconv.weight, stages.2.6.dwconv.bias, stages.2.6.norm.weight, stages.2.6.norm.bias, stages.2.6.pwconv1.weight, stages.2.6.pwconv1.bias, stages.2.6.pwconv2.weight, stages.2.6.pwconv2.bias, stages.2.7.gamma, stages.2.7.dwconv.weight, stages.2.7.dwconv.bias, stages.2.7.norm.weight, stages.2.7.norm.bias, stages.2.7.pwconv1.weight, stages.2.7.pwconv1.bias, stages.2.7.pwconv2.weight, stages.2.7.pwconv2.bias, 
stages.2.8.gamma, stages.2.8.dwconv.weight, stages.2.8.dwconv.bias, stages.2.8.norm.weight, stages.2.8.norm.bias, stages.2.8.pwconv1.weight, stages.2.8.pwconv1.bias, stages.2.8.pwconv2.weight, stages.2.8.pwconv2.bias, stages.2.9.gamma, stages.2.9.dwconv.weight, stages.2.9.dwconv.bias, stages.2.9.norm.weight, stages.2.9.norm.bias, stages.2.9.pwconv1.weight, stages.2.9.pwconv1.bias, stages.2.9.pwconv2.weight, stages.2.9.pwconv2.bias, stages.2.10.gamma, stages.2.10.dwconv.weight, stages.2.10.dwconv.bias, stages.2.10.norm.weight, stages.2.10.norm.bias, stages.2.10.pwconv1.weight, stages.2.10.pwconv1.bias, stages.2.10.pwconv2.weight, stages.2.10.pwconv2.bias, stages.2.11.gamma, stages.2.11.dwconv.weight, stages.2.11.dwconv.bias, stages.2.11.norm.weight, stages.2.11.norm.bias, stages.2.11.pwconv1.weight, stages.2.11.pwconv1.bias, stages.2.11.pwconv2.weight, stages.2.11.pwconv2.bias, stages.2.12.gamma, stages.2.12.dwconv.weight, stages.2.12.dwconv.bias, stages.2.12.norm.weight, stages.2.12.norm.bias, stages.2.12.pwconv1.weight, stages.2.12.pwconv1.bias, stages.2.12.pwconv2.weight, stages.2.12.pwconv2.bias, stages.2.13.gamma, stages.2.13.dwconv.weight, stages.2.13.dwconv.bias, stages.2.13.norm.weight, stages.2.13.norm.bias, stages.2.13.pwconv1.weight, stages.2.13.pwconv1.bias, stages.2.13.pwconv2.weight, stages.2.13.pwconv2.bias, stages.2.14.gamma, stages.2.14.dwconv.weight, stages.2.14.dwconv.bias, stages.2.14.norm.weight, stages.2.14.norm.bias, stages.2.14.pwconv1.weight, stages.2.14.pwconv1.bias, stages.2.14.pwconv2.weight, stages.2.14.pwconv2.bias, stages.2.15.gamma, stages.2.15.dwconv.weight, stages.2.15.dwconv.bias, stages.2.15.norm.weight, stages.2.15.norm.bias, stages.2.15.pwconv1.weight, stages.2.15.pwconv1.bias, stages.2.15.pwconv2.weight, stages.2.15.pwconv2.bias, stages.2.16.gamma, stages.2.16.dwconv.weight, stages.2.16.dwconv.bias, stages.2.16.norm.weight, stages.2.16.norm.bias, stages.2.16.pwconv1.weight, stages.2.16.pwconv1.bias, stages.2.16.pwconv2.weight, stages.2.16.pwconv2.bias, stages.2.17.gamma, stages.2.17.dwconv.weight, stages.2.17.dwconv.bias, stages.2.17.norm.weight, stages.2.17.norm.bias, stages.2.17.pwconv1.weight, stages.2.17.pwconv1.bias, stages.2.17.pwconv2.weight, stages.2.17.pwconv2.bias, stages.2.18.gamma, stages.2.18.dwconv.weight, stages.2.18.dwconv.bias, stages.2.18.norm.weight, stages.2.18.norm.bias, stages.2.18.pwconv1.weight, stages.2.18.pwconv1.bias, stages.2.18.pwconv2.weight, stages.2.18.pwconv2.bias, stages.2.19.gamma, stages.2.19.dwconv.weight, stages.2.19.dwconv.bias, stages.2.19.norm.weight, stages.2.19.norm.bias, stages.2.19.pwconv1.weight, stages.2.19.pwconv1.bias, stages.2.19.pwconv2.weight, stages.2.19.pwconv2.bias, stages.2.20.gamma, stages.2.20.dwconv.weight, stages.2.20.dwconv.bias, stages.2.20.norm.weight, stages.2.20.norm.bias, stages.2.20.pwconv1.weight, stages.2.20.pwconv1.bias, stages.2.20.pwconv2.weight, stages.2.20.pwconv2.bias, stages.2.21.gamma, stages.2.21.dwconv.weight, stages.2.21.dwconv.bias, stages.2.21.norm.weight, stages.2.21.norm.bias, stages.2.21.pwconv1.weight, stages.2.21.pwconv1.bias, stages.2.21.pwconv2.weight, stages.2.21.pwconv2.bias, stages.2.22.gamma, stages.2.22.dwconv.weight, stages.2.22.dwconv.bias, stages.2.22.norm.weight, stages.2.22.norm.bias, stages.2.22.pwconv1.weight, stages.2.22.pwconv1.bias, stages.2.22.pwconv2.weight, stages.2.22.pwconv2.bias, stages.2.23.gamma, stages.2.23.dwconv.weight, stages.2.23.dwconv.bias, stages.2.23.norm.weight, stages.2.23.norm.bias, stages.2.23.pwconv1.weight, 
stages.2.23.pwconv1.bias, stages.2.23.pwconv2.weight, stages.2.23.pwconv2.bias, stages.2.24.gamma, stages.2.24.dwconv.weight, stages.2.24.dwconv.bias, stages.2.24.norm.weight, stages.2.24.norm.bias, stages.2.24.pwconv1.weight, stages.2.24.pwconv1.bias, stages.2.24.pwconv2.weight, stages.2.24.pwconv2.bias, stages.2.25.gamma, stages.2.25.dwconv.weight, stages.2.25.dwconv.bias, stages.2.25.norm.weight, stages.2.25.norm.bias, stages.2.25.pwconv1.weight, stages.2.25.pwconv1.bias, stages.2.25.pwconv2.weight, stages.2.25.pwconv2.bias, stages.2.26.gamma, stages.2.26.dwconv.weight, stages.2.26.dwconv.bias, stages.2.26.norm.weight, stages.2.26.norm.bias, stages.2.26.pwconv1.weight, stages.2.26.pwconv1.bias, stages.2.26.pwconv2.weight, stages.2.26.pwconv2.bias, stages.3.0.gamma, stages.3.0.dwconv.weight, stages.3.0.dwconv.bias, stages.3.0.norm.weight, stages.3.0.norm.bias, stages.3.0.pwconv1.weight, stages.3.0.pwconv1.bias, stages.3.0.pwconv2.weight, stages.3.0.pwconv2.bias, stages.3.1.gamma, stages.3.1.dwconv.weight, stages.3.1.dwconv.bias, stages.3.1.norm.weight, stages.3.1.norm.bias, stages.3.1.pwconv1.weight, stages.3.1.pwconv1.bias, stages.3.1.pwconv2.weight, stages.3.1.pwconv2.bias, stages.3.2.gamma, stages.3.2.dwconv.weight, stages.3.2.dwconv.bias, stages.3.2.norm.weight, stages.3.2.norm.bias, stages.3.2.pwconv1.weight, stages.3.2.pwconv1.bias, stages.3.2.pwconv2.weight, stages.3.2.pwconv2.bias, norm0.weight, norm0.bias, norm1.weight, norm1.bias, norm2.weight, norm2.bias, norm3.weight, norm3.bias
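
If I read the warning correctly, the backbone's `pretrained` loader expects backbone-only keys (`downsample_layers.*`, `stages.*`, `norm*`), while `cascade_mask_rcnn_convnext_large_22k_3x.pth` is a full detector checkpoint whose keys carry a `backbone.` prefix. This is a minimal sketch of how I would strip that prefix into a backbone-only file (the output path is just a placeholder); the alternative would presumably be to set `pretrained=None` and point `load_from` at the full checkpoint instead:

```python
# Sketch: convert the full Cascade Mask R-CNN checkpoint into a backbone-only
# state dict whose keys match what the warning lists as "missing".
# Assumes the checkpoint stores its weights under 'state_dict'.
import torch

ckpt_in = 'checkpoint/cascade_mask_rcnn_convnext_large_22k_3x.pth'  # from my config
ckpt_out = 'checkpoint/convnext_large_backbone_only.pth'            # placeholder output

ckpt = torch.load(ckpt_in, map_location='cpu')
state_dict = ckpt.get('state_dict', ckpt)

# Keep only backbone weights and drop the 'backbone.' prefix
# (downsample_layers.*, stages.*, norm* remain).
backbone_sd = {
    k[len('backbone.'):]: v
    for k, v in state_dict.items()
    if k.startswith('backbone.')
}

torch.save({'state_dict': backbone_sd}, ckpt_out)
```

Is one of these the intended way to initialize from the released detection checkpoints?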

Thank you!
Anshu
