pyramidal_embedding.py 5.7 KB

# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from paddle.nn.initializer import Normal, Constant
from paddle import ParamAttr

from .resnet import ResNet50, ResNet101
from ppdet.core.workspace import register

__all__ = ['PCBPyramid']

@register
class PCBPyramid(nn.Layer):
    """
    PCB (Part-based Convolutional Baseline), see https://arxiv.org/abs/1711.09349,
    Pyramidal Person Re-IDentification, see https://arxiv.org/abs/1810.12193

    Args:
        input_ch (int): Number of channels of the input feature.
        model_name (str): Backbone to use, one of 'ResNet50' or 'ResNet101'.
        num_stripes (int): Number of sub-parts.
        used_levels (tuple): Whether the level is used, 1 means used.
        num_classes (int): Number of classes for identities, default 751 in
            Market-1501 dataset.
        last_conv_stride (int): Stride of the last conv.
        last_conv_dilation (int): Dilation of the last conv.
        num_conv_out_channels (int): Number of channels of conv feature.
    """

    def __init__(self,
                 input_ch=2048,
                 model_name='ResNet101',
                 num_stripes=6,
                 used_levels=(1, 1, 1, 1, 1, 1),
                 num_classes=751,
                 last_conv_stride=1,
                 last_conv_dilation=1,
                 num_conv_out_channels=128):
        super(PCBPyramid, self).__init__()
        self.num_stripes = num_stripes
        self.used_levels = used_levels
        self.num_classes = num_classes

        self.num_in_each_level = [i for i in range(self.num_stripes, 0, -1)]
        self.num_branches = sum(self.num_in_each_level)

        assert model_name in ['ResNet50', 'ResNet101'
                              ], "Unsupported ReID arch: {}".format(model_name)
        self.base = eval(model_name)(
            lr_mult=0.1,
            last_conv_stride=last_conv_stride,
            last_conv_dilation=last_conv_dilation)
        self.dropout_layer = nn.Dropout(p=0.2)
        self.pyramid_conv_list0, self.pyramid_fc_list0 = self.basic_branch(
            num_conv_out_channels, input_ch)

    def basic_branch(self, num_conv_out_channels, input_ch):
        # the level indexes are defined from fine to coarse,
        # the branch will contain one more part than that of its previous level
        # the sliding step is set to 1
        pyramid_conv_list = nn.LayerList()
        pyramid_fc_list = nn.LayerList()

        idx_levels = 0
        for idx_branches in range(self.num_branches):
            if idx_branches >= sum(self.num_in_each_level[0:idx_levels + 1]):
                idx_levels += 1
            pyramid_conv_list.append(
                nn.Sequential(
                    nn.Conv2D(input_ch, num_conv_out_channels, 1),
                    nn.BatchNorm2D(num_conv_out_channels), nn.ReLU()))

        idx_levels = 0
        for idx_branches in range(self.num_branches):
            if idx_branches >= sum(self.num_in_each_level[0:idx_levels + 1]):
                idx_levels += 1
            fc = nn.Linear(
                in_features=num_conv_out_channels,
                out_features=self.num_classes,
                weight_attr=ParamAttr(initializer=Normal(
                    mean=0., std=0.001)),
                bias_attr=ParamAttr(initializer=Constant(value=0.)))
            pyramid_fc_list.append(fc)

        return pyramid_conv_list, pyramid_fc_list
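
    # Illustrative walk-through of the branch/level bookkeeping shared by
    # basic_branch and pyramid_forward (assuming the default num_stripes=6):
    #   num_in_each_level = [6, 5, 4, 3, 2, 1]  ->  num_branches = 21
    #   branches 0-5   sit at level 0 and each cover 1 stripe,
    #   branches 6-10  sit at level 1 and each cover 2 stripes,
    #   ...
    #   branch  20     sits at level 5 and covers all 6 stripes.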

    def pyramid_forward(self, feat):
        each_stripe_size = int(feat.shape[2] / self.num_stripes)

        feat_list, logits_list = [], []
        idx_levels = 0
        used_branches = 0
        for idx_branches in range(self.num_branches):
            if idx_branches >= sum(self.num_in_each_level[0:idx_levels + 1]):
                idx_levels += 1
            idx_in_each_level = idx_branches - sum(self.num_in_each_level[
                0:idx_levels])
            # a branch at level l pools a horizontal slice of (l + 1) stripes
            stripe_size_in_each_level = each_stripe_size * (idx_levels + 1)
            start = idx_in_each_level * each_stripe_size
            end = start + stripe_size_in_each_level

            # average- and max-pool the slice over its full spatial extent,
            # then fuse the two descriptors by summation
            k = feat.shape[-1]
            local_feat_avgpool = F.avg_pool2d(
                feat[:, :, start:end, :],
                kernel_size=(stripe_size_in_each_level, k))
            local_feat_maxpool = F.max_pool2d(
                feat[:, :, start:end, :],
                kernel_size=(stripe_size_in_each_level, k))
            local_feat = local_feat_avgpool + local_feat_maxpool

            local_feat = self.pyramid_conv_list0[used_branches](local_feat)
            local_feat = paddle.reshape(
                local_feat, shape=[local_feat.shape[0], -1])
            feat_list.append(local_feat)

            local_logits = self.pyramid_fc_list0[used_branches](
                self.dropout_layer(local_feat))
            logits_list.append(local_logits)

            used_branches += 1

        return feat_list, logits_list

    def forward(self, x):
        feat = self.base(x)
        assert feat.shape[2] % self.num_stripes == 0
        feat_list, logits_list = self.pyramid_forward(feat)
        feat_out = paddle.concat(feat_list, axis=-1)
        return feat_out
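
A minimal sketch of exercising this head on its own, assuming a working PaddlePaddle install and that the module is importable from ppdet.modeling.reid as the relative import above suggests; the input size is illustrative only, chosen so the backbone feature height stays divisible by num_stripes.

# Illustrative usage sketch (not part of the file above).
import paddle
from ppdet.modeling.reid.pyramidal_embedding import PCBPyramid  # assumed path

model = PCBPyramid(model_name='ResNet50', num_classes=751)
model.eval()
x = paddle.rand([2, 3, 384, 128])   # hypothetical batch of person crops
feat_out = model(x)                 # concatenation of all 21 branch embeddings
print(feat_out.shape)               # roughly [2, 21 * num_conv_out_channels]
                                    # if the backbone downsamples height by 16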