

Source code for mmselfsup.models.necks.relative_loc_neck

# Copyright (c) OpenMMLab. All rights reserved.
import torch.nn as nn
from mmcv.cnn import build_norm_layer
from mmcv.runner import BaseModule

from ..builder import NECKS


@NECKS.register_module()
class RelativeLocNeck(BaseModule):
    """The neck of relative patch location: fc-bn-relu-dropout.

    Args:
        in_channels (int): Number of input channels.
        out_channels (int): Number of output channels.
        with_avg_pool (bool): Whether to apply the global average pooling
            after backbone. Defaults to True.
        norm_cfg (dict): Dictionary to construct and config norm layer.
            Defaults to dict(type='BN1d').
        init_cfg (dict or list[dict], optional): Initialization config dict.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 with_avg_pool=True,
                 norm_cfg=dict(type='BN1d'),
                 init_cfg=[
                     dict(type='Normal', std=0.01, layer='Linear'),
                     dict(
                         type='Constant',
                         val=1,
                         layer=['_BatchNorm', 'GroupNorm'])
                 ]):
        super(RelativeLocNeck, self).__init__(init_cfg)
        self.with_avg_pool = with_avg_pool
        if with_avg_pool:
            self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(in_channels * 2, out_channels)
        self.bn = build_norm_layer(
            dict(**norm_cfg, momentum=0.003), out_channels)[1]
        self.relu = nn.ReLU(inplace=True)
        self.dropout = nn.Dropout()

    def forward(self, x):
        assert len(x) == 1
        x = x[0]
        if self.with_avg_pool:
            x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)
        x = self.bn(x)
        x = self.relu(x)
        x = self.dropout(x)
        return [x]
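The listing above is the neck as shipped in MMSelfSup 0.x. As a minimal usage sketch (not part of the original source), the snippet below instantiates the neck directly and runs a dummy forward pass. The channel sizes (in_channels=2048, out_channels=4096) follow the ResNet-50 relative-loc config; the batch size and the 7x7 feature map are illustrative assumptions.

import torch

from mmselfsup.models.necks.relative_loc_neck import RelativeLocNeck

# Channel sizes follow the ResNet-50 relative-loc config; the neck itself
# does not mandate these values.
neck = RelativeLocNeck(in_channels=2048, out_channels=4096)
neck.eval()  # disable dropout for a deterministic shape check

# Relative patch location concatenates the backbone features of two patches
# along the channel axis, so the neck receives in_channels * 2 channels.
# Shape: (batch=4, 2048 * 2 channels, 7x7 spatial map).
feats = [torch.randn(4, 4096, 7, 7)]

with torch.no_grad():
    out = neck(feats)

print(out[0].shape)  # torch.Size([4, 4096])

In a full training config, the same module is normally built through the NECKS registry (it is decorated with @NECKS.register_module()), i.e. via a dict(type='RelativeLocNeck', in_channels=..., out_channels=...) entry in the model config rather than direct instantiation.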