def pdist(A, B = None, squared = False, eps = 1e-4):
B = B if B is not None else A
prod = torch.mm(A, B.t())
normA = (A * A).sum(1).unsqueeze(1)
normB = (B * B).sum(1).unsqueeze(1)
D = clamp((normA.expand_as(prod) + normB.t().expand_as(prod) - 2 * prod), mn=0)
return D if squared else clamp(clamp(D, mn=eps).sqrt(), mn=eps)
Getting error,
RuntimeError: “clamp_min_cpu” not implemented for ‘ComplexFloat’
pytorch 1.11.0
How do I bypass this error?