Do I need to make multiple instances of a neural network in PyTorch to test multiple loss functions?

I have written a neural network in PyTorch, and I would like to compare the results of two different loss functions on this one network.
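
For context, the network itself is nothing special; a stripped-down stand-in for it would look roughly like this (the layer sizes are made up here, my real model is larger):

import torch.nn as nn
import torch.nn.functional as F

class ANN(nn.Module):  # placeholder: the real model is bigger, but same idea
    def __init__(self):
        super(ANN, self).__init__()
        self.fc1 = nn.Linear(10, 32)   # made-up layer sizes
        self.fc2 = nn.Linear(32, 1)

    def forward(self, x):
        x = F.relu(self.fc1(x))
        return self.fc2(x)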

Should I create two separate instances of the network and train each one with its own loss function, like this:

import torch.optim as optim

network_w_loss_1 = ANN().cuda()
network_w_loss_2 = ANN().cuda()

crit_loss_1 = loss_1()
crit_loss_2 = loss_2()

opt_loss_1 = optim.SGD(network_w_loss_1.parameters(), lr=0.01)  # lr is just a placeholder value
opt_loss_2 = optim.SGD(network_w_loss_2.parameters(), lr=0.01)

for epoch in range(num_epochs):
    for i, dat in enumerate(data_loader):
        inputs, targets = dat  # unpack the batch (placeholders for my real data)
        opt_loss_1.zero_grad()
        opt_loss_2.zero_grad()
        output1 = network_w_loss_1(inputs)
        output2 = network_w_loss_2(inputs)
        los_1 = crit_loss_1(output1, targets)
        los_2 = crit_loss_2(output2, targets)
        los_1.backward()
        los_2.backward()
        opt_loss_1.step()
        opt_loss_2.step()

Or can I get away with reusing a single network, like this?

network = ANN().cuda()

crit_loss_1 = loss_1()
crit_loss_2 = loss_2()

opt = optim.SGD(network.parameters(), lr=0.01)

for epoch in range(num_epochs):
    for i, dat in enumerate(data_loader):
        inputs, targets = dat  # unpack the batch (placeholders for my real data)
        opt.zero_grad()
        output1 = network(inputs)
        output2 = network(inputs)
        los_1 = crit_loss_1(output1, targets)
        los_2 = crit_loss_2(output2, targets)
        los_1.backward()
        los_2.backward()
        opt.step()
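
Either way, the actual comparison I have in mind is just recording both loss values as training goes on, roughly like this (the loss_history_* lists are hypothetical names I'm using here; .item() is available as of PyTorch 0.4.0):

loss_history_1 = []  # hypothetical lists to collect loss values for the comparison
loss_history_2 = []

# inside the inner training loop, after the backward()/step() calls:
loss_history_1.append(los_1.item())  # .item() returns a plain Python float
loss_history_2.append(los_2.item())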

I am using Python 3.6.5 and PyTorch 0.4.0.