How does one reproduce the deepcopy error for tensors that were created by operations (non-leaf tensors)?

I managed to create my own minimal example:

import torch
import torch.nn as nn

import copy
from collections import OrderedDict

# Minimal reproduction: deepcopy of a module fails after attaching a
# non-leaf tensor to it via setattr with a dotted attribute name.
model = nn.Sequential( OrderedDict( [ ('fc0', nn.Linear(3,1)) ] ) )
#model.fc0.weight = nn.Parameter( torch.randn(3,1) + 3 )
print(model.fc0.weight)
w = model.fc0.weight
# w_new is produced by autograd operations on w, so it is a non-leaf
# tensor (checked by the is_leaf print below). The loop recomputes the
# same expression each iteration; only the final w_new is used.
for i in range(5):
    w_new = w - 2*(w)
print()
print(w_new.is_leaf)
# Wrapping in nn.Parameter would register w_new as a fresh leaf instead:
#model.fc0.weight = nn.Parameter( w_new )
# NOTE(review): 'fc0.weight' contains a dot, so this presumably does NOT
# replace the fc0 submodule's registered parameter — it likely stores w_new
# as a plain attribute literally named 'fc0.weight' on `model`; confirm
# against nn.Module.__setattr__.
setattr(model,'fc0.weight', w_new )
print(model.fc0.weight.is_leaf)
print(model.fc0.weight)
# This deepcopy raises the RuntimeError discussed below: copying the module
# reaches the non-leaf w_new, which Tensor/Variable __deepcopy__ rejects.
model_copy = copy.deepcopy(model)

It seems that the issue is with this section of the PyTorch source code:

 def __deepcopy__(self, memo): 
     # Deepcopy is only supported for graph leaves — tensors created
     # directly by the user rather than produced by autograd operations.
     if not self.is_leaf: 
         raise RuntimeError("Only Variables created explicitly by the user " 
                            "(graph leaves) support the deepcopy protocol at the moment") 
     # Clone the underlying storage and copy the autograd flags onto a new
     # instance of the same type. (The .volatile attribute dates this
     # snippet to the pre-0.4 Variable API.)
     result = type(self)(self.data.clone()) 
     result.requires_grad = self.requires_grad 
     result.volatile = self.volatile 
     # Record the copy in memo so shared references within one deepcopy
     # resolve to the same new object.
     memo[id(self)] = result 
     return result 

PyTorch determines this by checking whether tensors are leaves. Wrapping the tensor with nn.Parameter(w_new) marks it as a new leaf.

However, using setattr directly skips that nn.Parameter wrapping, so the non-leaf tensor is stored as-is, which introduces the error.