# Using logistic regression without vectorization to find the weights of an AND gate. My code does not seem to learn — help me find the bug.

``````number_of_features = 2
import torch

number_of_features = 2
number_of_examples = 4

# Each COLUMN of X is one training example (x1, x2); Y holds the AND labels:
# (1,1)->1, (0,0)->0, (1,0)->0, (0,1)->0.
X = torch.tensor([[1.0, 0.0, 1.0, 0.0],
                  [1.0, 0.0, 0.0, 1.0]])
Y = torch.tensor([[1.0, 0.0, 0.0, 0.0]])

# Weights as a 1 x n_features row vector, plus a scalar bias.
# BUG FIX 1: the original model had no bias term, so for the (0,0) example
# z = w.x was always 0 and the prediction was stuck at sigmoid(0) = 0.5 --
# the AND function cannot be fit without a bias. The original code even
# computed db but never applied it to any parameter.
w = torch.zeros(1, number_of_features)
b = torch.zeros(1, 1)

# BUG FIX 2: lr = 0.001 over only 10 epochs moves the weights imperceptibly,
# which is why "the code does not seem to learn". Use a larger step and
# enough epochs for the loss to visibly fall.
learning_rate = 0.5
num_epochs = 2000

for epoch in range(num_epochs):
    # Per-epoch gradient accumulators (batch gradient descent).
    dw = torch.zeros(1, number_of_features)
    db = torch.zeros(1, 1)
    loss = torch.zeros(1, 1)
    for i in range(number_of_examples):
        # Column i as an (n_features, 1) vector.
        # BUG FIX 3: use reshape instead of the deprecated in-place
        # Tensor.resize, which mutates storage and is unsafe on views.
        x_i = X[:, i].reshape(number_of_features, 1)
        z = torch.mm(w, x_i) + b          # forward pass: z = w.x + b
        a = torch.sigmoid(z)              # prediction in (0, 1)
        # Binary cross-entropy loss for this example.
        loss = loss + (-(Y[0, i] * torch.log(a) + (1 - Y[0, i]) * torch.log(1 - a)))
        dz = a - Y[0, i]                  # dL/dz for sigmoid + BCE
        # (the original's "for j in range(1)" wrapper did nothing; removed)
        dw = dw + dz * x_i.t()            # accumulate dL/dw, shape 1 x n_features
        db = db + dz                      # accumulate dL/db
    # Average over the batch, then take one gradient-descent step.
    loss = loss / number_of_examples
    dw = dw / number_of_examples
    db = db / number_of_examples          # BUG FIX 4: original never averaged db
    w = w - learning_rate * dw
    b = b - learning_rate * db            # ... and never updated a bias at all

print("loss")
print(loss)

``````