This repository was archived by the owner on Jan 26, 2022. It is now read-only.

support python2 #152

Open · wants to merge 3 commits into master
8 changes: 5 additions & 3 deletions lib/modeling/FPN.py

@@ -1,3 +1,5 @@
+from __future__ import division
+
 import collections
 import numpy as np

@@ -78,7 +80,7 @@ class fpn(nn.Module):
     similarly for spatial_scale: e.g [1/32, 1/16, 1/8, 1/4]
     """
     def __init__(self, conv_body_func, fpn_level_info, P2only=False):
-        super().__init__()
+        super(fpn, self).__init__()
         self.fpn_level_info = fpn_level_info
         self.P2only = P2only

@@ -261,7 +263,7 @@ def forward(self, x):
 class topdown_lateral_module(nn.Module):
     """Add a top-down lateral module."""
     def __init__(self, dim_in_top, dim_in_lateral):
-        super().__init__()
+        super(topdown_lateral_module, self).__init__()
         self.dim_in_top = dim_in_top
         self.dim_in_lateral = dim_in_lateral
         self.dim_out = dim_in_top

@@ -324,7 +326,7 @@ def get_min_max_levels():
 class fpn_rpn_outputs(nn.Module):
     """Add RPN on FPN specific outputs."""
     def __init__(self, dim_in, spatial_scales):
-        super().__init__()
+        super(fpn_rpn_outputs, self).__init__()
         self.dim_in = dim_in
         self.spatial_scales = spatial_scales
         self.dim_out = self.dim_in
10 changes: 6 additions & 4 deletions lib/modeling/ResNet.py

@@ -1,3 +1,5 @@
+from __future__ import division
+
 import os
 from collections import OrderedDict

@@ -41,7 +43,7 @@ def ResNet152_conv5_body():

 class ResNet_convX_body(nn.Module):
     def __init__(self, block_counts):
-        super().__init__()
+        super(ResNet_convX_body, self).__init__()
         self.block_counts = block_counts
         self.convX = len(block_counts) + 1
         self.num_layers = (sum(block_counts) + 3 * (self.convX == 4)) * 3 + 2

@@ -117,7 +119,7 @@ def forward(self, x):

 class ResNet_roi_conv5_head(nn.Module):
     def __init__(self, dim_in, roi_xform_func, spatial_scale):
-        super().__init__()
+        super(ResNet_roi_conv5_head, self).__init__()
         self.roi_xform = roi_xform_func
         self.spatial_scale = spatial_scale

@@ -248,7 +250,7 @@ class bottleneck_transformation(nn.Module):

     def __init__(self, inplanes, outplanes, innerplanes, stride=1, dilation=1, group=1,
                  downsample=None):
-        super().__init__()
+        super(bottleneck_transformation, self).__init__()
         # In original resnet, stride=2 is on 1x1.
         # In fb.torch resnet, stride=2 is on 3x3.
         (str1x1, str3x3) = (stride, 1) if cfg.RESNETS.STRIDE_1X1 else (1, stride)

@@ -298,7 +300,7 @@ class bottleneck_gn_transformation(nn.Module):

     def __init__(self, inplanes, outplanes, innerplanes, stride=1, dilation=1, group=1,
                  downsample=None):
-        super().__init__()
+        super(bottleneck_gn_transformation, self).__init__()
         # In original resnet, stride=2 is on 1x1.
         # In fb.torch resnet, stride=2 is on 3x3.
         (str1x1, str3x3) = (stride, 1) if cfg.RESNETS.STRIDE_1X1 else (1, stride)
2 changes: 1 addition & 1 deletion lib/modeling/collect_and_distribute_fpn_rpn_proposals.py

@@ -37,7 +37,7 @@ class CollectAndDistributeFpnRpnProposalsOp(nn.Module):
         [labels, bbox_targets, bbox_inside_weights, bbox_outside_weights].
     """
     def __init__(self):
-        super().__init__()
+        super(CollectAndDistributeFpnRpnProposalsOp, self).__init__()

     def forward(self, inputs, roidb, im_info):
         """
8 changes: 4 additions & 4 deletions lib/modeling/fast_rcnn_heads.py

@@ -11,7 +11,7 @@

 class fast_rcnn_outputs(nn.Module):
     def __init__(self, dim_in):
-        super().__init__()
+        super(fast_rcnn_outputs, self).__init__()
         self.cls_score = nn.Linear(dim_in, cfg.MODEL.NUM_CLASSES)
         if cfg.MODEL.CLS_AGNOSTIC_BBOX_REG:  # bg and fg
             self.bbox_pred = nn.Linear(dim_in, 4 * 2)

@@ -73,7 +73,7 @@ def fast_rcnn_losses(cls_score, bbox_pred, label_int32, bbox_targets,
 class roi_2mlp_head(nn.Module):
     """Add a ReLU MLP with two hidden layers."""
     def __init__(self, dim_in, roi_xform_func, spatial_scale):
-        super().__init__()
+        super(roi_2mlp_head, self).__init__()
         self.dim_in = dim_in
         self.roi_xform = roi_xform_func
         self.spatial_scale = spatial_scale

@@ -119,7 +119,7 @@ def forward(self, x, rpn_ret):
 class roi_Xconv1fc_head(nn.Module):
     """Add a X conv + 1fc head, as a reference if not using GroupNorm"""
     def __init__(self, dim_in, roi_xform_func, spatial_scale):
-        super().__init__()
+        super(roi_Xconv1fc_head, self).__init__()
         self.dim_in = dim_in
         self.roi_xform = roi_xform_func
         self.spatial_scale = spatial_scale

@@ -181,7 +181,7 @@ def forward(self, x, rpn_ret):
 class roi_Xconv1fc_gn_head(nn.Module):
     """Add a X conv + 1fc head, with GroupNorm"""
     def __init__(self, dim_in, roi_xform_func, spatial_scale):
-        super().__init__()
+        super(roi_Xconv1fc_gn_head, self).__init__()
         self.dim_in = dim_in
         self.roi_xform = roi_xform_func
         self.spatial_scale = spatial_scale
2 changes: 1 addition & 1 deletion lib/modeling/generate_proposal_labels.py

@@ -7,7 +7,7 @@

 class GenerateProposalLabelsOp(nn.Module):
     def __init__(self):
-        super().__init__()
+        super(GenerateProposalLabelsOp, self).__init__()

     def forward(self, rpn_rois, roidb, im_info):
         """Op for generating training labels for RPN proposals. This is used
2 changes: 1 addition & 1 deletion lib/modeling/generate_proposals.py

@@ -11,7 +11,7 @@

 class GenerateProposalsOp(nn.Module):
     def __init__(self, anchors, spatial_scale):
-        super().__init__()
+        super(GenerateProposalsOp, self).__init__()
         self._anchors = anchors
         self._num_anchors = self._anchors.shape[0]
         self._feat_stride = 1. / spatial_scale
6 changes: 4 additions & 2 deletions lib/modeling/keypoint_rcnn_heads.py

@@ -1,3 +1,5 @@
+from __future__ import division
+
 import numpy as np

 import torch

@@ -17,7 +19,7 @@
 class keypoint_outputs(nn.Module):
     """Mask R-CNN keypoint specific outputs: keypoint heatmaps."""
     def __init__(self, dim_in):
-        super().__init__()
+        super(keypoint_outputs, self).__init__()
         self.upsample_heatmap = (cfg.KRCNN.UP_SCALE > 1)

         if cfg.KRCNN.USE_DECONV:

@@ -129,7 +131,7 @@ def keypoint_losses(kps_pred, keypoint_locations_int32, keypoint_weights,
 class roi_pose_head_v1convX(nn.Module):
     """Mask R-CNN keypoint head. v1convX design: X * (conv)."""
     def __init__(self, dim_in, roi_xform_func, spatial_scale):
-        super().__init__()
+        super(roi_pose_head_v1convX, self).__init__()
         self.dim_in = dim_in
         self.roi_xform = roi_xform_func
         self.spatial_scale = spatial_scale
10 changes: 5 additions & 5 deletions lib/modeling/mask_rcnn_heads.py

@@ -20,7 +20,7 @@
 class mask_rcnn_outputs(nn.Module):
     """Mask R-CNN specific outputs: either mask logits or probs."""
     def __init__(self, dim_in):
-        super().__init__()
+        super(mask_rcnn_outputs, self).__init__()
         self.dim_in = dim_in

         n_classes = cfg.MODEL.NUM_CLASSES if cfg.MRCNN.CLS_SPECIFIC_MASK else 1

@@ -127,7 +127,7 @@ def mask_rcnn_fcn_head_v1up(dim_in, roi_xform_func, spatial_scale):
 class mask_rcnn_fcn_head_v1upXconvs(nn.Module):
     """v1upXconvs design: X * (conv 3x3), convT 2x2."""
     def __init__(self, dim_in, roi_xform_func, spatial_scale, num_convs):
-        super().__init__()
+        super(mask_rcnn_fcn_head_v1upXconvs, self).__init__()
         self.dim_in = dim_in
         self.roi_xform = roi_xform_func
         self.spatial_scale = spatial_scale

@@ -191,7 +191,7 @@ def forward(self, x, rpn_ret):
 class mask_rcnn_fcn_head_v1upXconvs_gn(nn.Module):
     """v1upXconvs design: X * (conv 3x3), convT 2x2, with GroupNorm"""
     def __init__(self, dim_in, roi_xform_func, spatial_scale, num_convs):
-        super().__init__()
+        super(mask_rcnn_fcn_head_v1upXconvs_gn, self).__init__()
         self.dim_in = dim_in
         self.roi_xform = roi_xform_func
         self.spatial_scale = spatial_scale

@@ -263,7 +263,7 @@ class mask_rcnn_fcn_head_v0upshare(nn.Module):
     v0upshare design: conv5, convT 2x2.
     """
     def __init__(self, dim_in, roi_xform_func, spatial_scale):
-        super().__init__()
+        super(mask_rcnn_fcn_head_v0upshare, self).__init__()
         self.dim_in = dim_in
         self.roi_xform = roi_xform_func
         self.spatial_scale = spatial_scale

@@ -330,7 +330,7 @@ def forward(self, x, rpn_ret, roi_has_mask_int32=None):
 class mask_rcnn_fcn_head_v0up(nn.Module):
     """v0up design: conv5, deconv 2x2 (no weight sharing with the box head)."""
     def __init__(self, dim_in, roi_xform_func, spatial_scale):
-        super().__init__()
+        super(mask_rcnn_fcn_head_v0up, self).__init__()
         self.dim_in = dim_in
         self.roi_xform = roi_xform_func
         self.spatial_scale = spatial_scale
2 changes: 1 addition & 1 deletion lib/modeling/model_builder.py

@@ -70,7 +70,7 @@ def wrapper(self, *args, **kwargs):

 class Generalized_RCNN(nn.Module):
     def __init__(self):
-        super().__init__()
+        super(Generalized_RCNN, self).__init__()

         # For cache
         self.mapping_to_detectron = None
4 changes: 3 additions & 1 deletion lib/modeling/rpn_heads.py

@@ -1,3 +1,5 @@
+from __future__ import division
+
 from torch import nn
 from torch.nn import init
 import torch.nn.functional as F

@@ -37,7 +39,7 @@ def generic_rpn_losses(*inputs, **kwargs):
 class single_scale_rpn_outputs(nn.Module):
     """Add RPN outputs to a single scale model (i.e., no FPN)."""
     def __init__(self, dim_in, spatial_scale):
-        super().__init__()
+        super(single_scale_rpn_outputs, self).__init__()
         self.dim_in = dim_in
         self.dim_out = dim_in if cfg.RPN.OUT_DIM_AS_IN_DIM else cfg.RPN.OUT_DIM
         anchors = generate_anchors(
2 changes: 1 addition & 1 deletion lib/nn/modules/affine.py

@@ -5,7 +5,7 @@
 class AffineChannel2d(nn.Module):
     """ A simple channel-wise affine transformation operation """
     def __init__(self, num_features):
-        super().__init__()
+        super(AffineChannel2d, self).__init__()
         self.num_features = num_features
         self.weight = nn.Parameter(torch.Tensor(num_features))
         self.bias = nn.Parameter(torch.Tensor(num_features))
2 changes: 1 addition & 1 deletion lib/nn/modules/normalization.py

@@ -8,7 +8,7 @@

 class GroupNorm(nn.Module):
     def __init__(self, num_groups, num_channels, eps=1e-5, affine=True):
-        super().__init__()
+        super(GroupNorm, self).__init__()
         self.num_groups = num_groups
         self.num_channels = num_channels
         self.eps = eps
2 changes: 1 addition & 1 deletion lib/nn/modules/upsample.py

@@ -15,7 +15,7 @@ class BilinearInterpolation2d(nn.Module):
     See: https://github.com/shelhamer/fcn.berkeleyvision.org/blob/master/surgery.py
     """
     def __init__(self, in_channels, out_channels, up_scale):
-        super().__init__()
+        super(BilinearInterpolation2d, self).__init__()
         assert in_channels == out_channels
         assert up_scale % 2 == 0, 'Scale should be even'
         self.in_channes = in_channels
2 changes: 2 additions & 0 deletions lib/utils/misc.py

@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
 import os
 import socket
 from collections import defaultdict, Iterable
3 changes: 3 additions & 0 deletions lib/utils/net.py

@@ -1,3 +1,6 @@
+from __future__ import absolute_import
+from __future__ import division
+
 import logging
 import os
 import numpy as np
2 changes: 2 additions & 0 deletions tools/train_net.py

@@ -1,4 +1,6 @@
 """ Training Script """
+from __future__ import absolute_import
+from __future__ import division

 import argparse
 import distutils.util
2 changes: 2 additions & 0 deletions tools/train_net_step.py

@@ -1,4 +1,6 @@
 """ Training script for steps_with_decay policy"""
+from __future__ import absolute_import
+from __future__ import division

 import argparse
 import os