# Multilayer Neural Network doesn't work

HI guys!
I’m college student who is studying ML.

I made Binary Classifier for classifying which one is horse picture or human picture.
It worked in one layer NN but it did’t work in multi layers like two or three.

This is my code for train part.

if someone know the problem on my code , Let me know what is wrong

Thanks!!

#This is code

for epoch in range(NUM_EPOCH+1):
#forward propagation(train)
trZ1=np.dot(u,trainX)+a#Layer 1
trZ2=np.dot(v,trA1)+b#Layer 2
trA2=sigmoid(trZ2)

``````trZ3=np.dot(w,trA2)+c#Layer 3
trA3=sigmoid(trZ3)

#get train loss
trloss=-(np.multiply(trainY,np.log(trA3))+np.multiply((1-trainY),np.log(1-trA3)))
trloss=1/trDataNum*np.sum(trloss)
trLossArray[epoch]=trloss

#forward propagation(test)
tZ1=np.dot(u,testX)+a#Layer 1
tA1=sigmoid(tZ1)

tZ2=np.dot(v,tA1)+b#Layer 2
tA2=sigmoid(tZ2)

tZ3=np.dot(w,tA2)+c#Layer 3
tA3=sigmoid(tZ3)

#get test loss
tloss=-(np.multiply(testY,np.log(tA3))+np.multiply((1-testY),np.log(1-tA3)))
tloss=1/tDataNum*np.sum(tloss)
tLossArray[epoch]=tloss

#get Accuracy
trainPY=np.where(trA3>=0.5,1.,0.)
trAccuracy=((trainPY == trainY).sum())/trDataNum
trAcArray[epoch]=trAccuracy

testPY=np.where(tA3>=0.5,1.,0.)
tAccuracy=((testPY == testY).sum())/tDataNum
tAcArray[epoch]=tAccuracy

#backward propagation
dz3=trA3-trainY
dw=1/trDataNum*np.dot(dz3,trA2.T)
dc=1/tDataNum*np.sum(dz3,axis=1,keepdims=True)

dz2=np.multiply(np.dot(w.T,dz3),sigmoid(trA2,True))
dv=1/trDataNum*np.dot(dz2,trA1.T)
db=1/tDataNum*np.sum(dz2,axis=1,keepdims=True)

dz1=np.multiply(np.dot(v.T,dz2),sigmoid(trA1,True))
du=1/trDataNum*np.dot(dz1,trainX.T)
da=1/tDataNum*np.sum(dz1,axis=1,keepdims=True)

#update weight and bias
u=u-lr*du
a=a-lr*da

v=v-lr*dv
b=b-lr*db

w=w-lr*dw
c=c-lr*dc

#check the data per epoch 50
if epoch%50==0:

print("epoch :" + str(epoch+1))
print("train loss :  " +np.array2string(trloss))
print("test loss :  " +np.array2string(tloss))
print("train accuracy :  " +np.array2string(trAccuracy))
print("test accuracy :  " +np.array2string(tAccuracy))``````