
[PaddlePaddle Hackathon] add wide resnet #36109

Closed · wants to merge 1 commit
9 changes: 9 additions & 0 deletions python/paddle/tests/test_vision_models.py
@@ -71,6 +71,15 @@ def test_resnet101(self):
    def test_resnet152(self):
        self.models_infer('resnet152')

    def test_wide_resnet50(self):
        self.models_infer('wide_resnet50')

    def test_wide_resnet101(self):
        self.models_infer('wide_resnet101')

    def test_wide_resnet101_pretrained(self):
        # The pretrained weight URLs in model_urls are still empty, so this
        # only exercises the pretrained keyword with pretrained=False.
        self.models_infer('wide_resnet101', pretrained=False)

    def test_vgg16_num_classes(self):
        vgg16 = models.__dict__['vgg16'](pretrained=False, num_classes=10)

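For reference, a minimal standalone smoke test mirroring what these unit tests exercise through models_infer; this assumes the wide_resnet50 entry point added by this PR:

import paddle
from paddle.vision.models import wide_resnet50

# Build the model without pretrained weights and run one forward pass.
model = wide_resnet50(pretrained=False)
model.eval()
x = paddle.rand([1, 3, 224, 224])
out = model(x)
print(out.shape)  # expect [1, 1000] with the default num_classes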
3 changes: 3 additions & 0 deletions python/paddle/vision/__init__.py
@@ -34,6 +34,9 @@
from .models import resnet50 # noqa: F401
from .models import resnet101 # noqa: F401
from .models import resnet152 # noqa: F401
from .models import WideResNet # noqa: F401
from .models import wide_resnet50 # noqa: F401
from .models import wide_resnet101 # noqa: F401
from .models import MobileNetV1 # noqa: F401
from .models import mobilenet_v1 # noqa: F401
from .models import MobileNetV2 # noqa: F401
3 changes: 3 additions & 0 deletions python/paddle/vision/models/__init__.py
@@ -18,6 +18,9 @@
from .resnet import resnet50 # noqa: F401
from .resnet import resnet101 # noqa: F401
from .resnet import resnet152 # noqa: F401
from .wideresnet import WideResNet # noqa: F401
from .wideresnet import wide_resnet50 # noqa: F401
from .wideresnet import wide_resnet101 # noqa: F401
from .mobilenetv1 import MobileNetV1 # noqa: F401
from .mobilenetv1 import mobilenet_v1 # noqa: F401
from .mobilenetv2 import MobileNetV2 # noqa: F401
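With these exports in place, the new symbols should resolve from both paddle.vision and paddle.vision.models; a quick import check, assuming the PR is applied:

import paddle.vision as vision
from paddle.vision.models import WideResNet, wide_resnet50, wide_resnet101

# Both namespaces should expose the same callables after this PR.
assert vision.wide_resnet50 is wide_resnet50
assert vision.wide_resnet101 is wide_resnet101
assert vision.WideResNet is WideResNet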
18 changes: 15 additions & 3 deletions python/paddle/vision/models/resnet.py
@@ -155,6 +155,7 @@ class ResNet(nn.Layer):
        depth (int): layers of resnet, default: 50.
        num_classes (int): output dim of last fc layer. If num_classes <= 0, the last fc layer
            will not be defined. Default: 1000.
        width_per_group (int): base channel width of each group in the bottleneck blocks. Default: 64.
        with_pool (bool): use pool before the last fc layer or not. Default: True.

    Examples:
@@ -169,7 +170,12 @@ class ResNet(nn.Layer):

    """

-    def __init__(self, block, depth, num_classes=1000, with_pool=True):
+    def __init__(self,
+                 block,
+                 depth,
+                 num_classes=1000,
+                 width_per_group=64,
+                 with_pool=True):
        super(ResNet, self).__init__()
        layer_cfg = {
            18: [2, 2, 2, 2],
@@ -180,6 +186,7 @@ def __init__(self, block, depth, num_classes=1000, with_pool=True):
        }
        layers = layer_cfg[depth]
        self.num_classes = num_classes
+        self.base_width = width_per_group
        self.with_pool = with_pool
        self._norm_layer = nn.BatchNorm2D

@@ -225,11 +232,16 @@ def _make_layer(self, block, planes, blocks, stride=1, dilate=False):

        layers = []
        layers.append(
-            block(self.inplanes, planes, stride, downsample, 1, 64,
+            block(self.inplanes, planes, stride, downsample, 1, self.base_width,
                  previous_dilation, norm_layer))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
-            layers.append(block(self.inplanes, planes, norm_layer=norm_layer))
+            layers.append(
+                block(
+                    self.inplanes,
+                    planes,
+                    base_width=self.base_width,
+                    norm_layer=norm_layer))

        return nn.Sequential(*layers)

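Threading self.base_width through here matters because BottleneckBlock derives its 3x3 conv width from it; a small sketch of the usual formula (this assumes Paddle's BottleneckBlock follows the common width = int(planes * (base_width / 64.)) * groups convention):

# Hypothetical helper reproducing the usual bottleneck width computation.
def bottleneck_width(planes, base_width=64, groups=1):
    return int(planes * (base_width / 64.)) * groups

print(bottleneck_width(64))       # 64: standard resnet50 first stage
print(bottleneck_width(64, 128))  # 128: the wide variant doubles the 3x3 conv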
121 changes: 121 additions & 0 deletions python/paddle/vision/models/wideresnet.py
@@ -0,0 +1,121 @@
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import division
from __future__ import print_function

import paddle
import paddle.nn as nn

from paddle.utils.download import get_weights_path_from_url
from paddle.vision.models.resnet import BottleneckBlock, ResNet

__all__ = []

# Pretrained weight URLs are not available yet; both entries are placeholders.
model_urls = {'wide_resnet50': ('', ''), 'wide_resnet101': ('', '')}


class WideResNet(nn.Layer):
    """Wide ResNet model from
    `"Wide Residual Networks" <https://arxiv.org/pdf/1605.07146.pdf>`_.

    The model is the same as ResNet except that the number of channels in the
    bottleneck is twice as large in every block. The number of channels in the
    outer 1x1 convolutions is the same.

    Args:
        depth (int): layers of wide resnet, 50 or 101.
        num_classes (int): output dim of last fc layer. If num_classes <= 0, the last fc layer
            will not be defined. Default: 1000.
        width_per_group (int): base channel width of each group. Default: 64.
        with_pool (bool): use pool before the last fc layer or not. Default: True.

    Examples:
        .. code-block:: python

            from paddle.vision.models import WideResNet

            wide_resnet50 = WideResNet(50)

            wide_resnet101 = WideResNet(101)

    """

    def __init__(self,
                 depth,
                 num_classes=1000,
                 width_per_group=64,
                 with_pool=True):
        super(WideResNet, self).__init__()
        # Double the base width so every bottleneck's 3x3 conv has twice the
        # channels of the corresponding ResNet block.
        self.layers = ResNet(BottleneckBlock, depth, num_classes,
                             width_per_group * 2, with_pool)

    def forward(self, x):
        return self.layers.forward(x)
Review comment (Contributor): you can just use self.layers(x) here.
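A minimal sketch of the reviewer's suggestion; calling the sublayer directly goes through nn.Layer.__call__, which dispatches to forward() and also runs any registered forward hooks:

def forward(self, x):
    # Calling self.layers rather than self.layers.forward keeps
    # registered pre/post forward hooks working.
    return self.layers(x)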



def _wide_resnet(arch, depth, pretrained, **kwargs):
    model = WideResNet(depth, **kwargs)
    if pretrained:
        assert arch in model_urls, "{} model does not have a pretrained model now, you should set pretrained=False".format(
            arch)
        weight_path = get_weights_path_from_url(model_urls[arch][0],
                                                model_urls[arch][1])

        param = paddle.load(weight_path)
        model.set_dict(param)

    return model


def wide_resnet50(pretrained=False, **kwargs):
    """Wide ResNet 50-layer model

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet

    Examples:
        .. code-block:: python

            from paddle.vision.models import wide_resnet50

            # build model
            model = wide_resnet50()

            # build model and load imagenet pretrained weight
            # model = wide_resnet50(pretrained=True)
    """
    # WideResNet already doubles width_per_group internally, so the default of
    # 64 yields the expected bottleneck width; doubling it again here (as in
    # torchvision's wide_resnet50_2) would quadruple the width.
    return _wide_resnet('wide_resnet50', 50, pretrained, **kwargs)


def wide_resnet101(pretrained=False, **kwargs):
    """Wide ResNet 101-layer model

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet

    Examples:
        .. code-block:: python

            from paddle.vision.models import wide_resnet101

            # build model
            model = wide_resnet101()

            # build model and load imagenet pretrained weight
            # model = wide_resnet101(pretrained=True)
    """
    return _wide_resnet('wide_resnet101', 101, pretrained, **kwargs)
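As a rough sanity check (a sketch assuming both entry points are importable once this PR is applied), the wide 50-layer variant should report roughly 69M parameters versus about 26M for the standard resnet50:

import numpy as np
from paddle.vision.models import resnet50, wide_resnet50

def n_params(model):
    # Total number of elements across all parameters.
    return sum(int(np.prod(p.shape)) for p in model.parameters())

print(n_params(resnet50()))       # roughly 25.6M
print(n_params(wide_resnet50()))  # roughly 68.9M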