From d41b444a15cf86f08d633a6d1a49f60fb33495e9 Mon Sep 17 00:00:00 2001
From: 晨太狼
Date: Fri, 9 Aug 2019 18:44:47 +0800
Subject: [PATCH] Fix fuse (#440)

Fix fuse in models.py
---
 models.py | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git a/models.py b/models.py
index bcee5c67..1e952420 100755
--- a/models.py
+++ b/models.py
@@ -213,13 +213,14 @@ class Darknet(nn.Module):
         # Fuse Conv2d + BatchNorm2d layers throughout model
         fused_list = nn.ModuleList()
         for a in list(self.children())[0]:
-            for i, b in enumerate(a):
-                if isinstance(b, nn.modules.batchnorm.BatchNorm2d):
-                    # fuse this bn layer with the previous conv2d layer
-                    conv = a[i - 1]
-                    fused = torch_utils.fuse_conv_and_bn(conv, b)
-                    a = nn.Sequential(fused, *list(a.children())[i + 1:])
-                    break
+            if isinstance(a, nn.Sequential):
+                for i, b in enumerate(a):
+                    if isinstance(b, nn.modules.batchnorm.BatchNorm2d):
+                        # fuse this bn layer with the previous conv2d layer
+                        conv = a[i - 1]
+                        fused = torch_utils.fuse_conv_and_bn(conv, b)
+                        a = nn.Sequential(fused, *list(a.children())[i + 1:])
+                        break
         fused_list.append(a)
         self.module_list = fused_list
         # model_info(self) # yolov3-spp reduced from 225 to 152 layers
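
Note on the patch: the added isinstance(a, nn.Sequential) guard skips module_list entries that are not Sequential blocks (presumably the YOLO detection layers), which the old loop would try to enumerate and index. The actual folding is delegated to torch_utils.fuse_conv_and_bn. Below is a minimal sketch of what such a helper typically does, assuming standard Conv2d + BatchNorm2d folding with groups=1 (W' = diag(gamma / sqrt(var + eps)) @ W, b' = gamma * (b - mean) / sqrt(var + eps) + beta); it is an illustration, not the repository's exact implementation.

import torch
import torch.nn as nn


def fuse_conv_and_bn(conv, bn):
    # Sketch only: assumed behaviour of torch_utils.fuse_conv_and_bn.
    # Folds a BatchNorm2d into the preceding Conv2d so inference runs a
    # single convolution (standard conv assumed, groups/dilation ignored).
    fused = nn.Conv2d(conv.in_channels,
                      conv.out_channels,
                      kernel_size=conv.kernel_size,
                      stride=conv.stride,
                      padding=conv.padding,
                      bias=True)

    # Fold the BN scale into the conv weights:
    # W' = diag(gamma / sqrt(running_var + eps)) @ W
    w_conv = conv.weight.clone().view(conv.out_channels, -1)
    w_bn = torch.diag(bn.weight / torch.sqrt(bn.running_var + bn.eps))
    fused.weight.data.copy_(torch.mm(w_bn, w_conv).view(fused.weight.shape))

    # Fold the BN shift into the conv bias:
    # b' = gamma * (b - running_mean) / sqrt(running_var + eps) + beta
    b_conv = torch.zeros(conv.out_channels) if conv.bias is None else conv.bias.clone()
    b_bn = bn.bias - bn.weight * bn.running_mean / torch.sqrt(bn.running_var + bn.eps)
    fused.bias.data.copy_(torch.mm(w_bn, b_conv.reshape(-1, 1)).flatten() + b_bn)

    return fused


# Quick check: the fused conv should match conv -> bn in eval mode.
if __name__ == '__main__':
    conv = nn.Conv2d(3, 8, 3, padding=1, bias=False)
    bn = nn.BatchNorm2d(8).eval()
    x = torch.randn(1, 3, 16, 16)
    with torch.no_grad():
        print(torch.allclose(bn(conv(x)), fuse_conv_and_bn(conv, bn)(x), atol=1e-5))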