import torch

# Logistic regression on 4 two-feature examples, trained with manual
# (hand-derived) batch gradient descent — no autograd.
number_of_features = 2
number_of_examples = 4
# X: (features, examples) — each COLUMN is one example.
X = torch.tensor([[1.0, 0.0, 1.0, 0.0],
                  [1.0, 0.0, 0.0, 1.0]])
# Y: (1, examples) binary labels.
Y = torch.tensor([[1.0, 0.0, 0.0, 0.0]])

w = torch.zeros(number_of_features, 1)  # column weight vector (was torch.Tensor(...))
b = torch.zeros(1)                      # bias — the original computed db but never used it
learning_rate = 0.001

for epoch in range(10):
    # Reset the batch accumulators each epoch.
    dw = torch.zeros_like(w)
    db = torch.zeros(1)
    loss = torch.zeros(1)
    for i in range(number_of_examples):
        # view() instead of the deprecated in-place resize().
        x_i = X[:, i].view(number_of_features, 1)
        z = torch.mm(w.t(), x_i) + b        # (1,1) logit; bias now actually applied
        a = torch.sigmoid(z)
        y_i = Y[0, i]
        # Binary cross-entropy contribution of this example.
        loss = loss + (-(y_i * torch.log(a) + (1 - y_i) * torch.log(1 - a))).view(1)
        # d(loss)/dz for sigmoid + cross-entropy simplifies to (a - y).
        dz = (a - y_i).view(1)
        dw = dw + x_i * dz                  # accumulate over the batch (no dummy j-loop)
        db = db + dz
    # Average over the batch, then take one gradient-descent step.
    loss = loss / number_of_examples
    dw = dw / number_of_examples
    db = db / number_of_examples            # bug fix: db is averaged like dw
    w = w - learning_rate * dw
    b = b - learning_rate * db              # bug fix: bias is now updated
    print("loss")
    print(loss)
Coming back to this after almost four years. Below is the solution:
import torch


def train(epochs=100000, learning_rate=0.1, log_every=1000, verbose=True):
    """Train a 2-feature logistic-regression model with manual gradients.

    The dataset is fixed: X has one example per column, Y holds the binary
    labels. Gradients (dw, db) are derived by hand from the sigmoid +
    binary-cross-entropy loss, so no autograd is used.

    Args:
        epochs: number of full-batch gradient-descent steps.
        learning_rate: step size for the parameter updates.
        log_every: print the loss every this many epochs (when verbose).
        verbose: suppress progress printing when False.

    Returns:
        (w, b, loss): the learned (2, 1) weight tensor, the bias tensor,
        and the final averaged loss tensor.
    """
    number_of_features = 2
    number_of_examples = 4
    X = torch.tensor([[1.0, 0.0, 1.0, 0.0], [1.0, 0.0, 0.0, 1.0]])
    Y = torch.tensor([1.0, 0.0, 0.0, 0.0])
    w = torch.zeros((number_of_features, 1))
    b = torch.zeros(1)
    loss = torch.zeros(())

    for epoch in range(epochs):
        dw = torch.zeros_like(w)
        db = torch.zeros(1)
        loss = torch.zeros(())
        for i in range(number_of_examples):
            x_i = X[:, i].view(-1, 1)
            y_i = Y[i]
            z = torch.mm(w.t(), x_i) + b
            # Bug fix: sigmoid was computed twice; keep the squeezed scalar.
            a = torch.sigmoid(z).squeeze()
            loss += -(y_i * torch.log(a) + (1 - y_i) * torch.log(1 - a))
            # d(loss)/dz for sigmoid + cross-entropy is (a - y).
            dz = a - y_i
            dw += x_i * dz
            db += dz
        # Average the batch, then take one gradient-descent step.
        loss /= number_of_examples
        dw /= number_of_examples
        db /= number_of_examples
        w -= learning_rate * dw
        b -= learning_rate * db
        if verbose and epoch % log_every == 0:
            print(f"Epoch {epoch}, Loss: {loss.item()}")
    return w, b, loss


if __name__ == "__main__":
    w, b, loss = train()
    print("Weights:")
    print(w)
    print("Bias:")
    print(b)
1 Like