Expected 4-dimensional input for 4-dimensional weight [6, 1, 5, 5], but got 2-dimensional input of size [32, 784] instead

I'm trying to modify my already-built NN (also written in PyTorch) to solve the Fashion-MNIST problem with a CNN.

I set everything up and thought it would work, but I hit this error while training.

RuntimeError: Expected 4-dimensional input for 4-dimensional weight [6, 1, 5, 5], but got 2-dimensional input of size [32, 784] instead
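
The weight [6, 1, 5, 5] seems to be my first conv layer. As a quick sanity check, the same kind of error can be reproduced with a minimal sketch (assuming a batch of 32 flattened 28x28 images, as in my loaders):

import torch
import torch.nn as nn

conv = nn.Conv2d(in_channels=1, out_channels=6, kernel_size=5)  # weight shape: [6, 1, 5, 5]
x = torch.randn(32, 784)   # 2-dimensional input: (batch, features)
conv(x)                    # raises the RuntimeError above; conv layers want (N, C, H, W)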

Here is my CNN:

import torch.nn as nn
import torch.nn.functional as F
import torchmetrics
import pytorch_lightning as pl


class CNN(pl.LightningModule):

  def __init__(self, dropout, learn_rate, weight_decay, optimizer):
    super().__init__()
    
    self.conv1 = nn.Conv2d(in_channels = 1, out_channels = 6, kernel_size = 5)   
    self.conv2 = nn.Conv2d(in_channels = 6, out_channels = 12 , kernel_size = 5)

    self.fc1 = nn.Linear(in_features = 12*4*4, out_features = 120)
    self.fc1 = nn.Linear(in_features = 120, out_features = 60)
    self.fc1 = nn.Linear(in_features = 60, out_features = 10)
    

    self.do = nn.Dropout(dropout) #for overfitting issues
    self.loss = nn.CrossEntropyLoss()
    self.accuracy = torchmetrics.Accuracy()
    self.learn_rate = learn_rate
    #self.momentum = momentum
    self.weight_decay = weight_decay
    self.optimizer = optimizer 

    self.train_loss = []
    self.val_loss = []
    self.train_acc = []
    self.test_acc = []

  #forward step
  def forward(self, x, additional_out=False):
      #conv1
      x = self.conv1(x)
      x = F.relu(x)
      x = F.max_pool2d(x, kernel_size = 2, stride = 2)
      #conv2
      x = self.conv2(x)
      x = F.relu(x)
      x = F.max_pool2d(x, kernel_size = 2, stride = 2)
      #fully connected 1
      x = x.reshape(-1, 12*4*4)
      x = self.fc1(x)
      x = F.relu(x)
      x = self.do(x)
      #fully connected 2
      x = self.fc2(x)
      x = F.relu(x)
      x = self.do(x)
      #output
      x = self.out(x)
      return x

  #optimizer
  def configure_optimizers(self):
    #optimizer = self.optimizer(self.parameters(), lr = self.learn_rate, momentum = self.momentum, weight_decay = self.weight_decay)
    optimizer = self.optimizer(self.parameters(), lr = self.learn_rate, weight_decay = self.weight_decay)
    return optimizer


  #training step
  def training_step(self, batch, batch_idx):
    x, y = batch
    b = x.size(0)
    x = x.view(b, -1)
    logit = self(x)
    J = self.loss(logit, y)          # loss
    #self.train_loss.append(J)       # append loss
    acc = self.accuracy(logit, y)    # accuracy
    #self.train_acc.append(acc)      # append accuracy
    self.log("train_loss_cnn", J.item())
    self.log("train_acc_cnn", acc.item())
    return {'loss': J}

#Since I use TensorBoard for logging, I don't have to append the losses manually

  def test_step(self, batch, batch_idx):
    p, q = batch
    b = p.size(0)
    p = p.view(b, -1)
    logit = self(p)
    J = self.loss(logit, q)              # loss
    acc_test = self.accuracy(logit, q)   # accuracy
    #self.train_acc.append(acc_test)     # append accuracy
    #self.train_loss.append(J)           # append loss
    self.log("test_acc_cnn", acc_test.item())
    self.log("test_loss_cnn", J.item())


  def validation_step(self, batch, batch_idx=None):
    u, v = batch
    b = u.size(0)
    u = u.view(b, -1)
    logit = self(u)
    J = self.loss(logit, v)              # loss
    #self.val_loss.append(J)             # append loss
    acc_val = self.accuracy(logit, v)    # accuracy
    #self.train_acc.append(acc_val)      # append accuracy
    self.log("val_loss_cnn", J.item())
    self.log("val_acc_cnn", acc_val.item())

And this is the Optuna objective function; the error is raised when trainer.fit is called inside it:

import optuna
from torch import optim

def objective(trial):
    dropout = trial.suggest_float("dropout", 0.1, 0.3)
    n_layers = trial.suggest_int("n_layers", 1, 3)
    optimizer = trial.suggest_categorical('optimizer', [optim.SGD, optim.RMSprop, optim.Adam])   # first choose between optimizers, then eventually test momentum
    learn_rate = trial.suggest_loguniform("learn_rate", 0.0001, 0.1)
    #momentum = trial.suggest_float("momentum", 0.9, 0.99)
    weight_decay = trial.suggest_loguniform("weight_decay", 0.0001, 0.1)

    trainer = pl.Trainer(gpus=0, max_epochs=1, callbacks=[early_stopping], progress_bar_refresh_rate=20)

    #hyperparameters = dict(dropout=dropout, n_layers = n_layers, learn_rate = learn_rate, momentum = momentum, weight_decay = weight_decay, optimizer = optimizer)
    hyperparameters = dict(dropout=dropout, n_layers = n_layers, learn_rate = learn_rate, weight_decay = weight_decay, optimizer = optimizer)
    #trainer.logger.log_hyperparams(hyperparameters)
    
    #model = CNN(dropout, learn_rate, momentum, weight_decay, optimizer)
    model = CNN(dropout, learn_rate, weight_decay, optimizer)
    trainer.fit(model, train_loader, val_loader)  # error raised here!

    return trainer.callback_metrics["val_loss_cnn"].item()

pruner: optuna.pruners.BasePruner = optuna.pruners.NopPruner()
# print(pruner) <optuna.pruners._nop.NopPruner object at 0x7f4c2466ed50>
# print(type(pruner)) <class 'optuna.pruners._nop.NopPruner'>

study = optuna.create_study(direction="minimize", pruner=pruner)
study.optimize(objective, n_trials=3, timeout=300)

print("Number of finished trials: {}".format(len(study.trials)))

print("Best trial:")
trial = study.best_trial

print("  Value: {}".format(trial.value))

print("  Params: ")
for key, value in trial.params.items():
    print("    {}: {}".format(key, value))```




Can someone give me a hint about what this means? I know it's somehow related to the dimensions being passed around, but I can't see where the issue is.


Solution 1:[1]

OK, I solved it.

I had to change:

  • x = x.view(b, -1) to x = x.view(b, -1, 28, 28)

Also, my CNN was badly written, since I had defined self.fc1 three times. I renamed the three layers to:

  • self.fc1
  • self.fc2
  • self.out

And it worked.
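
Putting both fixes together, the changed lines look like this (a sketch of only the corrected parts; the rest of the class stays as in the question):

# in __init__: three distinct layers instead of redefining self.fc1
self.fc1 = nn.Linear(in_features=12*4*4, out_features=120)
self.fc2 = nn.Linear(in_features=120, out_features=60)
self.out = nn.Linear(in_features=60, out_features=10)

# in training_step / validation_step / test_step: keep the batch 4-dimensional
# so conv1 receives the (N, C, H, W) shape it expects; with 784 features per
# sample, -1 resolves to the single grayscale channel, giving (b, 1, 28, 28)
x = x.view(b, -1, 28, 28)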

Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow

Solution Source
Solution 1: Alessio Lovato