I have this Variational autoencoder and I want to use Adam for its optimizer but it has this error I don't know what is wrong here
class VAE(nn.Module):
def __init__(self):
super().__init__()
#encoder
self.enc = nn.Sequential(
nn.Linear(1200, 786),
nn.ReLU(),
nn.Flatten()
)
self.mean = nn.Linear(1200, 2)
self.log = nn.Linear(1200, 2)
#decoder
self.dec = nn.Sequential(
nn.Linear(2, 1200),
nn.ReLU(),
)
def param(self, mu, Log):
eps = torch.randn(2, 1200)
z = mu + (eps * torch.exp(Log * 0.5))
return z
def forward(self, x):
x = self.enc(x)
mu , log = self.mean(x), self.log(x)
z = self.param(mu, log)
x = self.dec(z)
return x, mu, log
model = VAE()
optim = torch.optim.Adam(model.param, lr=0.01)
criterion = nn.CrossEntropyLoss()
and here is the error
Traceback (most recent call last):
File "C:\Users\khashayar\PycharmProjects\pythonProject2\VAE.py", line 40, in <module>
optim = torch.optim.Adam(model.param, lr=0.01)
File "C:\Users\khashayar\anaconda3\envs\deeplearning\lib\site-packages\torch\optim\adam.py", line 48, in __init__
super(Adam, self).__init__(params, defaults)
File "C:\Users\khashayar\anaconda3\envs\deeplearning\lib\site-packages\torch\optim\optimizer.py", line 47, in __init__
param_groups = list(params)
TypeError: 'method' object is not iterable
how I can solve this?
The problem is probably in model.param
.
param is a method, and as write in the error : "'method' object is not iterable". The optimizer should receive the model parameters, and not the method "param" of the model class.
Try convert optim = torch.optim.Adam(model.param, lr=0.01)
To optim = torch.optim.Adam(model.parameters(), lr=0.01)