
Commit

[Refactor] Move res-layer to models.utils (#537)
* move reslayer to utils

* update configs
fangyixiao18 committed Oct 28, 2022
1 parent ed9d4a7 commit 560446a
Showing 6 changed files with 47 additions and 40 deletions.
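
Note on the custom_imports change repeated in the three configs below: moving res_layer_extra_norm into mmselfsup.models.utils lets the configs import it from the installed package instead of depending on the repository's tools/benchmarks/mmdetection/ directory being on the Python path. When MMCV loads a config that defines custom_imports, it imports the listed modules before the model is built, and importing this module is what registers ResLayerExtraNorm. A minimal sketch of that mechanism, assuming mmcv 1.x and its import_modules_from_strings helper (not part of this commit):

# Roughly what the config loader does with the custom_imports dict.
from mmcv.utils import import_modules_from_strings

# Importing the module executes its top-level code, including the
# @SHARED_HEADS.register_module() decoration of ResLayerExtraNorm.
import_modules_from_strings(
    imports=['mmselfsup.models.utils.res_layer_extra_norm'],
    allow_failed_imports=False)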
@@ -4,6 +4,9 @@
     '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
 ]

+custom_imports = dict(
+    imports=['mmselfsup.models.utils.res_layer_extra_norm'],
+    allow_failed_imports=False)
 norm_cfg = dict(type='SyncBN', requires_grad=True)
 model = dict(
     backbone=dict(frozen_stages=-1, norm_cfg=norm_cfg, norm_eval=False),
@@ -30,7 +33,3 @@
 ]

 data = dict(train=dict(pipeline=train_pipeline))
-
-custom_imports = dict(
-    imports=['tools.benchmarks.mmdetection.res_layer_extra_norm'],
-    allow_failed_imports=False)
@@ -4,6 +4,9 @@
     '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py'
 ]

+custom_imports = dict(
+    imports=['mmselfsup.models.utils.res_layer_extra_norm'],
+    allow_failed_imports=False)
 norm_cfg = dict(type='SyncBN', requires_grad=True)
 model = dict(
     backbone=dict(frozen_stages=-1, norm_cfg=norm_cfg, norm_eval=False),
@@ -30,7 +33,3 @@
 ]

 data = dict(train=dict(pipeline=train_pipeline))
-
-custom_imports = dict(
-    imports=['tools.benchmarks.mmdetection.res_layer_extra_norm'],
-    allow_failed_imports=False)
@@ -3,6 +3,9 @@
     '../_base_/schedules/schedule_24k.py', '../_base_/default_runtime.py'
 ]

+custom_imports = dict(
+    imports=['mmselfsup.models.utils.res_layer_extra_norm'],
+    allow_failed_imports=False)
 norm_cfg = dict(type='SyncBN', requires_grad=True)
 model = dict(
     backbone=dict(frozen_stages=-1, norm_cfg=norm_cfg, norm_eval=False),
@@ -78,7 +81,3 @@
         dict(type='TextLoggerHook', by_epoch=False),
         # dict(type='TensorboardLoggerHook')
     ])
-
-custom_imports = dict(
-    imports=['tools.benchmarks.mmdetection.res_layer_extra_norm'],
-    allow_failed_imports=False)
8 changes: 7 additions & 1 deletion mmselfsup/models/utils/__init__.py
@@ -11,9 +11,15 @@
 from .transformer_blocks import (CAETransformerRegressorLayer,
                                  MultiheadAttention, TransformerEncoderLayer)

+try:
+    from .res_layer_extra_norm import ResLayerExtraNorm
+except ImportError:
+    ResLayerExtraNorm = None
+
 __all__ = [
     'Accuracy', 'accuracy', 'ExtractProcess', 'MultiExtractProcess',
     'GatherLayer', 'knn_classifier', 'MultiPooling', 'MultiPrototypes',
     'build_2d_sincos_position_embedding', 'Sobel', 'MultiheadAttention',
-    'TransformerEncoderLayer', 'CAETransformerRegressorLayer', 'Encoder'
+    'TransformerEncoderLayer', 'CAETransformerRegressorLayer', 'Encoder',
+    'ResLayerExtraNorm'
 ]
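
The try/except ImportError guard above keeps mmselfsup.models.utils importable when mmdet is not installed; in that case ResLayerExtraNorm is exported as None. A hypothetical downstream check, not part of this commit:

from mmselfsup.models.utils import ResLayerExtraNorm

if ResLayerExtraNorm is None:
    # mmdet is an optional dependency, only needed for the detection benchmarks.
    raise ImportError('Install mmdet to use ResLayerExtraNorm.')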
31 changes: 31 additions & 0 deletions mmselfsup/models/utils/res_layer_extra_norm.py
@@ -0,0 +1,31 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from mmcv.cnn import build_norm_layer
+from mmcv.runner import auto_fp16
+
+try:
+    from mmdet.models.backbones import ResNet
+    from mmdet.models.builder import SHARED_HEADS
+    from mmdet.models.roi_heads.shared_heads.res_layer import ResLayer
+
+    @SHARED_HEADS.register_module()
+    class ResLayerExtraNorm(ResLayer):
+
+        def __init__(self, *args, **kwargs):
+            super(ResLayerExtraNorm, self).__init__(*args, **kwargs)
+
+            block = ResNet.arch_settings[kwargs['depth']][0]
+            self.add_module(
+                'norm',
+                build_norm_layer(self.norm_cfg,
+                                 64 * 2**self.stage * block.expansion)[1])
+
+        @auto_fp16()
+        def forward(self, x):
+            res_layer = getattr(self, f'layer{self.stage + 1}')
+            norm = getattr(self, 'norm')
+            x = res_layer(x)
+            out = norm(x)
+            return out
+
+except ImportError:
+    pass
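
Since the class registers itself with MMDetection's SHARED_HEADS registry, detection configs refer to it by type name. The snippet below only illustrates how a C4-style roi_head could point at the shared head; the field values are assumptions for illustration, not taken from this commit:

# Illustrative config fragment (assumed values), not part of this commit.
norm_cfg = dict(type='SyncBN', requires_grad=True)
model = dict(
    roi_head=dict(
        shared_head=dict(
            type='ResLayerExtraNorm',  # looked up in mmdet's SHARED_HEADS registry
            depth=50,
            stage=3,
            stride=2,
            norm_cfg=norm_cfg)))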
27 changes: 0 additions & 27 deletions tools/benchmarks/mmdetection/res_layer_extra_norm.py

This file was deleted.
