self.apn = APN_Module(in_ch=128,out_ch=20)
# self.upsample = Interpolate(size=(512, 1024), mode="bilinear")
# self.output_conv = nn.ConvTranspose2d(16, num_classes, kernel_size=4, stride=2, padding=1, output_padding=0, bias=True)
# self.output_conv = nn.ConvTranspose2d(16, num_classes, kernel_size=3, stride=2, padding=1, output_padding=1, bias=True)
# self.output_conv = nn.ConvTranspose2d(16, num_classes, kernel_size=2, stride=2, padding=0, output_padding=0, bias=True)
def forward(self, input, output_size=(512, 1024)):
    """Run the attention pyramid head and bilinearly upsample to full resolution.

    Args:
        input: feature tensor fed to ``self.apn`` (presumably N×C×H×W — TODO confirm).
        output_size: target (height, width) of the upsampled logits.
            Defaults to (512, 1024) to preserve the original behavior.

    Returns:
        The APN output resized to ``output_size``.
    """
    features = self.apn(input)
    # NOTE(review): the output is resized to a fixed size regardless of the
    # input's aspect ratio; the preprocessing reportedly only scales the short
    # side to 512 without fixing the long side to 1024 -- confirm this matches
    # the dataset pipeline, or pass an explicit output_size per batch.
    return interpolate(features, size=output_size, mode="bilinear", align_corners=True)
# NOTE(review): 我并没有看到在输入端把图像resize到1024×512,仅仅是把短边等比例缩放到512,长边并没有固定到1024,为何输出的时候,就直接缩放到1024×512呢?
# (Translation: I don't see the input image being resized to 1024×512 -- only the short side is proportionally scaled to 512, the long side is not fixed to 1024 -- so why is the output interpolated directly to 1024×512?)
class Decoder(nn.Module):
    """Decoder head producing ``num_classes`` output channels."""

    def __init__(self, num_classes):
        # Fixed: the pasted source had `def init` / `super().init()` --
        # the `__init__` dunder underscores were stripped (markdown mangling),
        # which would define a plain `init` method and raise AttributeError.
        super().__init__()