My understanding (and experience …) is that PyTorch will throw an exception if you try to compute gradients in such a situation. Here is some example code to test this:
import torch
import torch.nn as nn

# All steps are "differentiable" by PyTorch
model = nn.Sequential(nn.Linear(10, 2), nn.Linear(2, 1))

def forward(model, x):
    y = model(x)
    z = torch.sin(y)  # stays inside torch, so it is recorded in the autograd graph
    return z

x = torch.randn(10, requires_grad=True)
z = forward(model, x)
loss = torch.norm(z)
loss.backward()
print(x.grad)  # gradient of the loss w.r.t. x
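In this first version every operation stays inside PyTorch, so the output still carries a grad_fn and backward() can reach all the way back to x. A quick way to confirm the graph is intact is to inspect the output directly:

z = forward(model, x)
print(z.requires_grad)  # True: z is still attached to the autograd graph
print(z.grad_fn)        # e.g. <SinBackward0 ...>: the last op recorded in the graph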
# Going via NumPy to throw off the PyTorch gradient tracker
import numpy as np

model = nn.Sequential(nn.Linear(10, 2), nn.Linear(2, 1))

def forward(model, x):
    y = model(x)
    y_np = y.detach().numpy()  # detach() drops the autograd history
    z_np = np.sin(y_np)        # NumPy op: invisible to autograd
    z = torch.tensor(z_np)     # fresh leaf tensor, requires_grad=False
    return z

x = torch.randn(10, requires_grad=True)
z = forward(model, x)
loss = torch.norm(z)
loss.backward()  # Exception here
print(x.grad)    # never reached
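The second version fails because detach() and the round trip through NumPy sever z from the autograd graph: z comes back as a fresh leaf tensor with no history, so there is nothing for backward() to differentiate and PyTorch raises a RuntimeError along the lines of "element 0 of tensors does not require grad and does not have a grad_fn". You can see this directly:

z = forward(model, x)
print(z.requires_grad)  # False: the NumPy round trip dropped gradient tracking
print(z.grad_fn)        # None: z has no recorded history

If a NumPy step in the middle of the forward pass is unavoidable, one way to keep gradients flowing is to wrap it in a custom torch.autograd.Function and write the backward pass by hand. A minimal sketch of that idea (the class name NumpySin is just for illustration):

class NumpySin(torch.autograd.Function):
    # sin computed in NumPy, with a hand-written backward pass
    @staticmethod
    def forward(ctx, y):
        ctx.save_for_backward(y)                   # keep the input for the backward pass
        z_np = np.sin(y.detach().cpu().numpy())    # leaves the graph on purpose
        return torch.as_tensor(z_np, dtype=y.dtype)

    @staticmethod
    def backward(ctx, grad_output):
        (y,) = ctx.saved_tensors
        return grad_output * torch.cos(y)          # d/dy sin(y) = cos(y)

Calling NumpySin.apply(y) instead of going through np.sin directly keeps the graph connected, and loss.backward() no longer raises.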