Batch Norm is broken for CUDAHalfType
Repro:
import torch.nn
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
class Net(nn.Module):
    """Minimal conv -> batch-norm -> relu -> max-pool network.

    Used to reproduce the reported BatchNorm failure for half-precision
    CUDA tensors (see issue title). Indentation restored — the pasted
    snippet had lost all leading whitespace and was not runnable as-is.
    """

    def __init__(self):
        super(Net, self).__init__()
        # 3 input channels, 6 output channels, 5x5 kernel.
        self.conv1 = nn.Conv2d(3, 6, 5)
        # 2x2 max pooling with stride 2 (halves spatial dimensions).
        self.pool = nn.MaxPool2d(2, 2)
        # Batch norm over the 6 conv output channels — the op reported broken.
        self.bn = nn.BatchNorm2d(6)

    def forward(self, x):
        # conv -> batch norm -> relu -> pool
        x = self.pool(F.relu(self.bn(self.conv1(x))))
        return x
# Build the model, then move it to the GPU in half precision and set train
# mode (train mode matters: BatchNorm computes batch statistics here).
# NOTE(review): these lines require a CUDA-capable GPU to run.
net = Net()
net = net.cuda().half().train()
# Single forward pass on a random half-precision CUDA input — per the issue
# title, this is the call that fails inside BatchNorm for CUDAHalfType.
net.forward(Variable(torch.randn(1,3,32,32).cuda().half()))
Possibly broken by #2170
@killeent