@@ -102,18 +102,14 @@ def __init__(self, in_channels, num_classes):
         self.low_level_project = nn.Sequential(nn.Conv2d(256, 48, 1, bias=False),
                                                nn.BatchNorm2d(48),
                                                nn.ReLU())
-        self.project = nn.Sequential(
-            nn.Conv2d(304, 304, kernel_size=3, stride=1, padding=1, groups=304, bias=False),
-            nn.Conv2d(304, 256, kernel_size=1, stride=1, padding=0, bias=False),
-            nn.BatchNorm2d(256),
-            nn.ReLU(),
-            nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1, groups=256, bias=False),
-            nn.Conv2d(256, 256, kernel_size=1, stride=1, padding=0, bias=False),
-            nn.BatchNorm2d(256),
-            nn.ReLU(),
-            nn.Dropout(0.1),
-            nn.Conv2d(256, num_classes, kernel_size=1, stride=1)
-        )
+        self.project = nn.Sequential(nn.Conv2d(304, 256, kernel_size=3, stride=1, padding=1, bias=False),
+                                     nn.BatchNorm2d(256),
+                                     nn.ReLU(),
+                                     nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1, bias=False),
+                                     nn.BatchNorm2d(256),
+                                     nn.ReLU(),
+                                     nn.Dropout(0.1),
+                                     nn.Conv2d(256, num_classes, kernel_size=1, stride=1))
 
     def forward(self, input_features):
         x, x_low = input_features
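For context, this hunk replaces the depthwise-separable 3×3 convolutions in the decoder's projection head (each a `groups=channels` depthwise conv followed by a 1×1 pointwise conv) with plain 3×3 convolutions. The sketch below rebuilds both heads outside the class and compares their output shapes and parameter counts; `num_classes = 21` and the 64×64 input are assumed for illustration and are not fixed by the diff.

```python
import torch
import torch.nn as nn

def separable_head(num_classes):
    # Removed head: depthwise 3x3 + pointwise 1x1 pairs over the
    # 304-channel concatenation (256 ASPP + 48 low-level features).
    return nn.Sequential(
        nn.Conv2d(304, 304, 3, padding=1, groups=304, bias=False),
        nn.Conv2d(304, 256, 1, bias=False),
        nn.BatchNorm2d(256),
        nn.ReLU(),
        nn.Conv2d(256, 256, 3, padding=1, groups=256, bias=False),
        nn.Conv2d(256, 256, 1, bias=False),
        nn.BatchNorm2d(256),
        nn.ReLU(),
        nn.Dropout(0.1),
        nn.Conv2d(256, num_classes, 1),
    )

def standard_head(num_classes):
    # Added head: plain dense 3x3 convolutions.
    return nn.Sequential(
        nn.Conv2d(304, 256, 3, padding=1, bias=False),
        nn.BatchNorm2d(256),
        nn.ReLU(),
        nn.Conv2d(256, 256, 3, padding=1, bias=False),
        nn.BatchNorm2d(256),
        nn.ReLU(),
        nn.Dropout(0.1),
        nn.Conv2d(256, num_classes, 1),
    )

if __name__ == "__main__":
    num_classes = 21  # assumed value (e.g. PASCAL VOC); not fixed by the diff
    x = torch.randn(1, 304, 64, 64)  # assumed decoder input resolution
    for name, head in [("separable", separable_head(num_classes)),
                       ("standard", standard_head(num_classes))]:
        params = sum(p.numel() for p in head.parameters())
        print(f"{name}: out {tuple(head(x).shape)}, params {params:,}")
```

Both heads map the same 304-channel input to `num_classes` channels at the same resolution; the trade-off is that the standard 3×3 layers carry roughly the kernel area (about 9×) more weights than their separable counterparts, in exchange for simpler, densely structured convolutions.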