"Attributeerror: classifierFinal' Object has no attribute 'log_softmax" when trying to train a neural network using pytorch

Asked

Viewed 47 times

2

I’m learning to use the pytorch and I’ve stumbled upon the mistake that won’t let me continue.

My code:

import pandas as pd
import numpy as np
import torch.nn as nn
from skorch import NeuralNetClassifier #integracao com sklearn
from sklearn.model_selection import cross_val_score,GridSearchCV
from sklearn.preprocessing import LabelEncoder, MinMaxScaler
import torch
import torch.nn.functional as F
from torch import nn,optim

class classificadorFinal(nn.Module):
    def __init__(self, activation=F.tanh, neurons=16, initializer=torch.nn.init.uniform_, dropout=0.3):
        ##from melhores_parametros
        super().__init__()
        self.dense0 = nn.Linear(4, neurons)
        initializer(self.dense0.weight)
        self.activation0 = activation
        self.dense1 = nn.Linear(neurons, neurons)
        initializer(self.dense1.weight)
        self.activation1 = activation
        self.dense2 = nn.Linear(neurons, 3)

        self.dropout = nn.Dropout(dropout)

    def forward(self, X):
        X = self.dense0(X)
        X = self.activation0(X)
        X = self.dropout(X)
        X = self.dense1(X)
        X = self.activation1(X)
        X = self.dropout(X)
        X = self.dense2(X)
        return X


criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(classificador.parameters(), lr = 0.001, weight_decay = 0.0001)


#treino
for epoch in range(200):##from melhores_parametros

    running_loss = 0.
    running_accuracy = 0.

    for data in train_loader:
        inputs, labels = data

        optimizer.zero_grad()        

        outputs = classificadorFinal(inputs)

        loss = criterion(outputs, labels)###erro
        loss.backward()

        optimizer.step()

        running_loss += loss.item()

        ps = F.softmax(outputs)

        top_p, top_class = ps.topk(k = 1, dim = 1)

        equals = top_class == labels.view(*top_class.shape)

        running_accuracy += torch.mean(equals.type(torch.float))

    print('Época {:3d}: perda {:3.5f} - precisão {:3.5f}'.format(epoch + 1, running_loss/len(train_loader), running_accuracy/len(train_loader)))

The error occurs exactly in loss = criterion(outputs, labels):

AttributeError: 'classificadorFinal' object has no attribute 'log_softmax'

I found that this error is well known but did not understand the proposed solution:

disable aux_logits when the model is created with aux_logits=False.

How to solve?

No answers

Browser other questions tagged

You are not signed in. Login or sign up in order to post.