PyTorch NotImplementedError
I have been working on code to train and test an image dataset, but I am getting this error at every call of `output = model(images)`:
class ConvNeuralNet(nn.Module):
    """CNN for 3-channel images (the question's version).

    NOTE(review): this class defines only ``__init__`` and no ``forward()``
    method. Calling ``model(images)`` therefore falls through to
    ``nn.Module``'s default ``_forward_unimplemented``, which raises
    ``NotImplementedError`` — exactly the error described below.
    """
    # Determine what layers and their order in CNN object
    def __init__(self, num_classes):
        super(ConvNeuralNet, self).__init__()
        # Stage 1: two 3x3 convolutions, then 2x2 max-pool (stride 2).
        self.conv_layer1 = nn.Conv2d(in_channels=3, out_channels=32, kernel_size=3)
        self.conv_layer2 = nn.Conv2d(in_channels=32, out_channels=32, kernel_size=3)
        self.max_pool1 = nn.MaxPool2d(kernel_size = 2, stride = 2)
        # Stage 2: two more 3x3 convolutions, then a second 2x2 max-pool.
        self.conv_layer3 = nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3)
        self.conv_layer4 = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3)
        self.max_pool2 = nn.MaxPool2d(kernel_size = 2, stride = 2)
        # Classifier head: 1600 input features — presumably 64*5*5 from
        # 32x32 inputs after the conv/pool stack (TODO confirm input size).
        self.fc1 = nn.Linear(1600, 128)
        self.relu1 = nn.ReLU()
        self.fc2 = nn.Linear(128, num_classes)
# One epoch of training.
# NOTE(review): train_loader, model, criterion, optimizer, epoch and
# num_epochs are defined elsewhere in the surrounding script.
device = torch.device('cpu')  # hoisted: the target device never changes per batch
for i, (images, labels) in enumerate(train_loader):
    # Move tensors to the configured device
    images = images.to(device)
    labels = labels.to(device)
    # Forward pass
    outputs = model(images)
    loss = criterion(outputs, labels)
    # Backward and optimize
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
# Report the last batch's loss for this epoch.
print('Epoch [{}/{}], Loss: {:.4f}'.format(epoch+1, num_epochs, loss.item()))
Traceback (most recent call last):
  File "/usr/local/lib/python3.7/dist-packages/torch/nn/modules/module.py", in _forward_unimplemented(self, *input)
      199         registered hooks while the latter silently ignores them.
      200         """
  --> 201         raise NotImplementedError
NotImplementedError
I have checked that there is no indentation error, so I don't understand what's wrong here.
Solution 1:[1]
When you subclass nn.Module, you need to implement a forward() method.
Here's an update to your ConvNeuralNet class:
class ConvNeuralNet(nn.Module):
    """CNN classifier for 3-channel (RGB) images.

    The conv/pool stack assumes 32x32 inputs: 32 -> 30 -> 28 -> pool 14
    -> 12 -> 10 -> pool 5, giving 64*5*5 = 1600 features for ``fc1``.

    Args:
        num_classes: number of output classes (size of the final layer).
    """

    # Determine what layers and their order in CNN object
    def __init__(self, num_classes):
        super(ConvNeuralNet, self).__init__()
        # Stage 1: two 3x3 convolutions, then 2x2 max-pool (stride 2).
        self.conv_layer1 = nn.Conv2d(in_channels=3, out_channels=32, kernel_size=3)
        self.conv_layer2 = nn.Conv2d(in_channels=32, out_channels=32, kernel_size=3)
        self.max_pool1 = nn.MaxPool2d(kernel_size = 2, stride = 2)
        # Stage 2: two more 3x3 convolutions, then a second 2x2 max-pool.
        self.conv_layer3 = nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3)
        self.conv_layer4 = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3)
        self.max_pool2 = nn.MaxPool2d(kernel_size = 2, stride = 2)
        # Classifier head over the flattened 1600-feature conv output.
        self.fc1 = nn.Linear(1600, 128)
        self.relu1 = nn.ReLU()
        self.fc2 = nn.Linear(128, num_classes)

    ## UPDATE: Implement forward() method
    def forward(self, x):
        """Compute class logits for a batch ``x`` of shape (N, 3, 32, 32)."""
        # First three layers (from above)
        x = self.conv_layer1(x)
        x = self.conv_layer2(x)
        x = self.max_pool1(x)
        # Next three layers
        x = self.conv_layer3(x)
        x = self.conv_layer4(x)
        x = self.max_pool2(x)
        # BUG FIX: flatten the (N, 64, 5, 5) conv output to (N, 1600)
        # before the fully connected layer; without this, fc1 receives a
        # 4-D tensor and the matmul fails with a shape-mismatch error.
        x = torch.flatten(x, 1)
        # Final three layers
        x = self.fc1(x)
        x = self.relu1(x)
        x = self.fc2(x)
        return x
Notice how x (the input data) moves through each layer you defined and is eventually returned.
From the documentation for nn.Module:
forward(*input) — Defines the computation performed at every call.
Should be overridden by all subclasses.
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow
| Solution | Source |
|---|---|
| Solution 1 | Daniel Bourke |
