# Mirror of https://huggingface.co/spaces/H-Liu1997/TANGO
# Synced 2026-04-22 16:17:09 +08:00
import torch.nn as nn
|
|
|
|
def make_linear_layers(feat_dims, relu_final=True, use_bn=False):
    """Build an MLP as ``nn.Sequential`` from a list of layer widths.

    Args:
        feat_dims: Sequence of feature sizes; one ``nn.Linear`` is created
            per consecutive pair.
        relu_final: If False, no activation (or BN) follows the last layer,
            so the network can be used as a raw estimation head.
        use_bn: If True, insert ``nn.BatchNorm1d`` before each ReLU.

    Returns:
        nn.Sequential of Linear (+ optional BatchNorm1d) + ReLU layers.
    """
    last = len(feat_dims) - 2
    modules = []
    for idx, (d_in, d_out) in enumerate(zip(feat_dims[:-1], feat_dims[1:])):
        modules.append(nn.Linear(d_in, d_out))
        # Skip the activation after the final layer unless requested.
        if idx != last or relu_final:
            if use_bn:
                modules.append(nn.BatchNorm1d(d_out))
            modules.append(nn.ReLU(inplace=True))
    return nn.Sequential(*modules)
|
|
|
|
def make_conv_layers(feat_dims, kernel=3, stride=1, padding=1, bnrelu_final=True):
    """Build a 2-D convolutional stack as ``nn.Sequential``.

    Args:
        feat_dims: Sequence of channel counts; one ``nn.Conv2d`` is created
            per consecutive pair.
        kernel: Convolution kernel size for every layer.
        stride: Convolution stride for every layer.
        padding: Convolution padding for every layer.
        bnrelu_final: If False, the last convolution is emitted bare — no
            BatchNorm/ReLU — so it can serve as an estimation head.

    Returns:
        nn.Sequential of Conv2d + BatchNorm2d + ReLU layers.
    """
    last = len(feat_dims) - 2
    modules = []
    for idx, (c_in, c_out) in enumerate(zip(feat_dims[:-1], feat_dims[1:])):
        conv = nn.Conv2d(
            in_channels=c_in,
            out_channels=c_out,
            kernel_size=kernel,
            stride=stride,
            padding=padding,
        )
        modules.append(conv)
        # Omit BN + ReLU after the final conv unless requested.
        if idx != last or bnrelu_final:
            modules.append(nn.BatchNorm2d(c_out))
            modules.append(nn.ReLU(inplace=True))
    return nn.Sequential(*modules)
|
|
|
|
def make_deconv_layers(feat_dims, bnrelu_final=True):
    """Build a 2x-upsampling transposed-convolution stack as ``nn.Sequential``.

    Each layer is a fixed ``ConvTranspose2d(kernel_size=4, stride=2,
    padding=1, bias=False)``, which doubles the spatial resolution.

    Args:
        feat_dims: Sequence of channel counts; one deconvolution is created
            per consecutive pair.
        bnrelu_final: If False, the last deconvolution is emitted without a
            trailing BatchNorm/ReLU, for use as an estimation head.

    Returns:
        nn.Sequential of ConvTranspose2d + BatchNorm2d + ReLU layers.
    """
    last = len(feat_dims) - 2
    modules = []
    for idx, (c_in, c_out) in enumerate(zip(feat_dims[:-1], feat_dims[1:])):
        deconv = nn.ConvTranspose2d(
            in_channels=c_in,
            out_channels=c_out,
            kernel_size=4,
            stride=2,
            padding=1,
            output_padding=0,
            bias=False,  # BatchNorm immediately after makes a bias redundant
        )
        modules.append(deconv)
        # Omit BN + ReLU after the final deconv unless requested.
        if idx != last or bnrelu_final:
            modules.append(nn.BatchNorm2d(c_out))
            modules.append(nn.ReLU(inplace=True))
    return nn.Sequential(*modules)
|
|
|