Hello I am trying to do a simple test, I want to show it a number at t=0 and then I want it to output that number k step in the future. Meanwhile the network is going to be shown zeros. But I am getting an error when I am doing backward. I am not sure how to read the error message.
Here is the code I wrote:
import argparse
import gym
import numpy as np
from itertools import count
from collections import namedtuple
import os
import torch
import random
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.autograd as autograd
from torch.autograd import Variable
import torchvision.transforms as T
import cv2
import pickle
import glob
import time
import subprocess
from collections import namedtuple
import resource
import math
class Policy(nn.Module):
def __init__(self):
super(Policy, self).__init__()
self.fc1 = nn.Linear(5,5)
self.lstm = nn.LSTMCell(5, 2)
self.fc2 = nn.Linear(2,1)
def forward(self, x, hidden):
y = self.fc1(x)
hx,cx = self.lstm(y,hidden)
y = self.fc2(hx)
return y, hx,cx
model = Policy()
optimizer = optim.Adam(model.parameters(),lr=1)
step = 10
for i in range(100):
yhat = Variable(torch.zeros(step,1))
target = Variable(torch.zeros(step,1))
target[-1,0] = 1
cx = Variable(torch.zeros(1,2))
hx = Variable(torch.zeros(1,2))
hidden= [hx,cx]
for j in range(step):
x = Variable(torch.zeros(1,5))
if j is 0:
x += 1
y, hx,cx = model(x,hidden)
print (hx.data.numpy())
hidden = (hx,cx)
yhat[j] = y.clone()
print ('done - Hoping the last value should be zero')
#learning
optimizer.zero_grad()
error = ((yhat-target)*(yhat-target)).mean()
error.backward()
optimizer.step()
Here is the error I get,
RuntimeError: matrices expected, got 1D, 2D tensors at /data/users/soumith/miniconda2/conda-bld/pytorch-cuda80-0.1.10_1488756735684/work/torch/lib/TH/generic/THTensorMath.c:1224
I am sure I am just using something like a silly person.