Skip to content

Commit

Permalink
clean and add more comments
Browse files Browse the repository at this point in the history
  • Loading branch information
xinntao committed Nov 27, 2021
1 parent 0ff1cf7 commit be73d6d
Show file tree
Hide file tree
Showing 13 changed files with 336 additions and 225 deletions.
9 changes: 4 additions & 5 deletions .github/workflows/no-response.yml
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
name: No Response

# TODO: it seems not to work
# Modified from: https://raw.githubusercontent.com/github/docs/main/.github/workflows/no-response.yaml

# **What it does**: Closes issues that don't have enough information to be
# actionable.
# **Why we have it**: To remove the need for maintainers to remember to check
# back on issues periodically to see if contributors have
# responded.
# **What it does**: Closes issues that don't have enough information to be actionable.
# **Why we have it**: To remove the need for maintainers to remember to check back on issues periodically
# to see if contributors have responded.
# **Who does it impact**: Everyone that works on docs or docs-internal.

on:
Expand Down
2 changes: 1 addition & 1 deletion gfpgan/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,4 @@
from .data import *
from .models import *
from .utils import *
from .version import __gitsha__, __version__
from .version import *
66 changes: 57 additions & 9 deletions gfpgan/archs/arcface_arch.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,27 @@
from basicsr.utils.registry import ARCH_REGISTRY


def conv3x3(inplanes, outplanes, stride=1):
    """A simple wrapper for 3x3 convolution with padding.

    Args:
        inplanes (int): Channel number of inputs.
        outplanes (int): Channel number of outputs.
        stride (int): Stride in convolution. Default: 1.

    Returns:
        nn.Conv2d: The configured 3x3 convolution layer (no bias).
    """
    # padding=1 keeps the spatial size unchanged when stride == 1
    return nn.Conv2d(inplanes, outplanes, kernel_size=3, stride=stride, padding=1, bias=False)


class BasicBlock(nn.Module):
expansion = 1
"""Basic residual block used in the ResNetArcFace architecture.
Args:
inplanes (int): Channel number of inputs.
planes (int): Channel number of outputs.
stride (int): Stride in convolution. Default: 1.
downsample (nn.Module): The downsample module. Default: None.
"""
expansion = 1 # output channel expansion ratio

def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
Expand Down Expand Up @@ -40,7 +54,16 @@ def forward(self, x):


class IRBlock(nn.Module):
expansion = 1
"""Improved residual block (IR Block) used in the ResNetArcFace architecture.
Args:
inplanes (int): Channel number of inputs.
planes (int): Channel number of outputs.
stride (int): Stride in convolution. Default: 1.
downsample (nn.Module): The downsample module. Default: None.
use_se (bool): Whether to use the SEBlock (squeeze-and-excitation block). Default: True.
"""
expansion = 1 # output channel expansion ratio

def __init__(self, inplanes, planes, stride=1, downsample=None, use_se=True):
super(IRBlock, self).__init__()
Expand Down Expand Up @@ -78,7 +101,15 @@ def forward(self, x):


class Bottleneck(nn.Module):
expansion = 4
"""Bottleneck block used in the ResNetArcFace architecture.
Args:
inplanes (int): Channel number of inputs.
planes (int): Channel number of outputs.
stride (int): Stride in convolution. Default: 1.
downsample (nn.Module): The downsample module. Default: None.
"""
expansion = 4 # output channel expansion ratio

def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
Expand Down Expand Up @@ -116,10 +147,16 @@ def forward(self, x):


class SEBlock(nn.Module):
"""The squeeze-and-excitation block (SEBlock) used in the IRBlock.
Args:
channel (int): Channel number of inputs.
reduction (int): Channel reduction ratio. Default: 16.
"""

def __init__(self, channel, reduction=16):
super(SEBlock, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.avg_pool = nn.AdaptiveAvgPool2d(1) # pool to 1x1 without spatial information
self.fc = nn.Sequential(
nn.Linear(channel, channel // reduction), nn.PReLU(), nn.Linear(channel // reduction, channel),
nn.Sigmoid())
Expand All @@ -133,13 +170,23 @@ def forward(self, x):

@ARCH_REGISTRY.register()
class ResNetArcFace(nn.Module):
"""ArcFace with ResNet architectures.
Ref: ArcFace: Additive Angular Margin Loss for Deep Face Recognition.
Args:
block (str): Block used in the ArcFace architecture.
layers (tuple(int)): Block numbers in each layer.
use_se (bool): Whether to use the SEBlock (squeeze-and-excitation block). Default: True.
"""

def __init__(self, block, layers, use_se=True):
if block == 'IRBlock':
block = IRBlock
self.inplanes = 64
self.use_se = use_se
super(ResNetArcFace, self).__init__()

self.conv1 = nn.Conv2d(1, 64, kernel_size=3, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.prelu = nn.PReLU()
Expand All @@ -153,6 +200,7 @@ def __init__(self, block, layers, use_se=True):
self.fc5 = nn.Linear(512 * 8 * 8, 512)
self.bn5 = nn.BatchNorm1d(512)

# initialization
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.xavier_normal_(m.weight)
Expand All @@ -163,7 +211,7 @@ def __init__(self, block, layers, use_se=True):
nn.init.xavier_normal_(m.weight)
nn.init.constant_(m.bias, 0)

def _make_layer(self, block, planes, blocks, stride=1):
def _make_layer(self, block, planes, num_blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
Expand All @@ -173,7 +221,7 @@ def _make_layer(self, block, planes, blocks, stride=1):
layers = []
layers.append(block(self.inplanes, planes, stride, downsample, use_se=self.use_se))
self.inplanes = planes
for _ in range(1, blocks):
for _ in range(1, num_blocks):
layers.append(block(self.inplanes, planes, use_se=self.use_se))

return nn.Sequential(*layers)
Expand Down
Loading

0 comments on commit be73d6d

Please sign in to comment.