First, define an initialization function. Suppose we want to put nn.Conv1d to our own use (defining the network like class TrainNet(nn.Module): below is all that is needed). The branch that handles the 1-D convolution looks like this:
elif isinstance(m, nn.Conv1d):
    m.weight.data.fill_(1)
    # m.weight.data = m.weight.data  # you can also assign a tensor directly to m.weight.data this way
    m.bias.data.zero_()
Here I set every weight to 1 and, correspondingly, every bias to 0.
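If, instead of a constant fill, you want to load your own kernel values, the direct assignment hinted at in the comment above (m.weight.data = ...) works the same way. A minimal sketch, with a small made-up layer and kernel purely for illustration:

import torch
import torch.nn as nn

conv = nn.Conv1d(in_channels=2, out_channels=2, kernel_size=3)

# hypothetical custom kernel; the shape must be (out_channels, in_channels, kernel_size)
custom_kernel = torch.full((2, 2, 3), 0.5)
conv.weight.data = custom_kernel   # direct assignment to .data, as in the comment above
conv.bias.data.zero_()

print(conv.weight.data)            # shows the 0.5-filled kernel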
# initialize the network
net = TrainNet()
# set the parameters we want
initNetParams(net)
# run the 1-D convolution
net(data)
The full listing, with the imports it needs:

import torch
import torch.nn as nn
from torch.nn import init


def initNetParams(net):
    '''Init net parameters.'''
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            init.xavier_uniform_(m.weight)
            if m.bias is not None:
                init.constant_(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant_(m.weight, 1)
            init.constant_(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal_(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant_(m.bias, 0)
        elif isinstance(m, nn.Conv1d):
            m.weight.data.fill_(1)
            m.bias.data.zero_()


class TrainNet(nn.Module):
    def __init__(self):
        super().__init__()
        self.hidden_layer = nn.Sequential(
            nn.Conv1d(4096, 4096, 10, 1, 9),
        )

    def forward(self, xs):
        return self.hidden_layer(xs)


net = TrainNet()
initNetParams(net)
net(data)  # data should be a tensor of shape (batch, 4096, length)
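As a quick sanity check, you can confirm that the Conv1d layer really ends up with all-1 weights and all-0 biases, then push some dummy data through it. This is only a sketch; the batch size of 1 and sequence length of 20 are arbitrary assumptions:

import torch

net = TrainNet()
initNetParams(net)

conv = net.hidden_layer[0]
print(torch.all(conv.weight.data == 1))   # tensor(True): every weight is 1
print(torch.all(conv.bias.data == 0))     # tensor(True): every bias is 0

data = torch.randn(1, 4096, 20)           # (batch, channels, length)
out = net(data)
print(out.shape)                          # torch.Size([1, 4096, 29]) with kernel 10, stride 1, padding 9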