
Source code for mmselfsup.models.algorithms.simsiam

# Copyright (c) OpenMMLab. All rights reserved.
from typing import Dict, List, Tuple

import torch

from mmselfsup.registry import MODELS
from mmselfsup.structures import SelfSupDataSample
from .base import BaseModel


@MODELS.register_module()
class SimSiam(BaseModel):
    """SimSiam.

    Implementation of `Exploring Simple Siamese Representation Learning
    <https://arxiv.org/abs/2011.10566>`_. The operation of fixing the learning
    rate of the predictor is in `engine/hooks/simsiam_hook.py`.
    """
    def extract_feat(self, inputs: List[torch.Tensor],
                     **kwargs) -> Tuple[torch.Tensor]:
        """Function to extract features from the backbone.

        Args:
            inputs (List[torch.Tensor]): The input images.

        Returns:
            Tuple[torch.Tensor]: Backbone outputs.
        """
        return self.backbone(inputs[0])
    def loss(self, inputs: List[torch.Tensor],
             data_samples: List[SelfSupDataSample],
             **kwargs) -> Dict[str, torch.Tensor]:
        """The forward function in training.

        Args:
            inputs (List[torch.Tensor]): The input images.
            data_samples (List[SelfSupDataSample]): All elements required
                during the forward function.

        Returns:
            Dict[str, Tensor]: A dictionary of loss components.
        """
        img_v1 = inputs[0]
        img_v2 = inputs[1]

        z1 = self.neck(self.backbone(img_v1))[0]  # NxC
        z2 = self.neck(self.backbone(img_v2))[0]  # NxC

        loss_1 = self.head(z1, z2)
        loss_2 = self.head(z2, z1)

        losses = dict(loss=0.5 * (loss_1 + loss_2))
        return losses
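
The symmetric loss above delegates the predictor and the similarity term to `self.head`. Below is a minimal, self-contained sketch of what each `self.head(z, z')` call amounts to, assuming the standard SimSiam formulation from the paper (a negative cosine similarity between the predictor output of one view and the stop-gradient projection of the other). This is an illustration, not the mmselfsup head implementation.

# Sketch of the symmetric SimSiam loss with stop-gradient (illustrative only).
import torch
import torch.nn as nn
import torch.nn.functional as F


def negative_cosine_similarity(p: torch.Tensor, z: torch.Tensor) -> torch.Tensor:
    """D(p, z) from the SimSiam paper; z is detached to apply the stop-gradient."""
    return -F.cosine_similarity(p, z.detach(), dim=-1).mean()


# Toy projections for two augmented views and a toy predictor.
predictor = nn.Linear(128, 128)
z1, z2 = torch.randn(4, 128), torch.randn(4, 128)

# 0.5 * (D(p1, z2) + D(p2, z1)), mirroring loss_1 and loss_2 above.
loss = 0.5 * (negative_cosine_similarity(predictor(z1), z2)
              + negative_cosine_similarity(predictor(z2), z1))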
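The class docstring points to `engine/hooks/simsiam_hook.py` for keeping the predictor's learning rate fixed during training. A hedged sketch of how such a hook is typically enabled in a training config follows; the `fix_pred_lr` and `lr` fields mirror the hook's options, but the exact values here are assumptions for illustration, not copied from a released config.

# Illustrative custom-hook entry for a SimSiam training config
# (values are assumptions, not taken from an official config).
custom_hooks = [
    dict(
        type='SimSiamHook',
        fix_pred_lr=True,  # keep the predictor's learning rate constant
        lr=0.05)           # learning rate used for the fixed predictor group
]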