# Benchmark of scatter_ into increasingly large destination tensors.
# BUG in the original: torch.zeros(size, 10, ...) was created *inside* the
# timed region, so the measurement was dominated by allocating/zeroing the
# ever-larger destination tensor, not by scatter_ itself.  Fix: allocate
# the destination before starting the clock.
for size in [10, 10000, 10000000]:
    src = torch.arange(10)[None, :]        # shape (1, 10), values 0..9
    index = torch.ones_like(src)           # all ones -> every column scatters into row 1
    dest = torch.zeros(size, 10, dtype=src.dtype)  # allocated outside the timer
    t = time.time()
    dest.scatter_(0, index, src)           # dest[index[0, j], j] = src[0, j]
    print('time', time.time() - t)
Hi Sam!
The timed section of your code includes the creation of the increasingly
large destination tensors, so you are mostly measuring allocation (and
zero-initialization) cost rather than the scatter itself.
Best.
K. Frank
Try moving the tensor allocation out of the timed section — something like this:
# Time only the scatter_ call itself: the destination buffer is allocated
# (and zeroed) before the clock starts, so allocation cost is excluded
# from the measurement.
for n in [100, 100000, 100000000]:
    source = torch.arange(100)[None, :]    # shape (1, 100), values 0..99
    idx = torch.ones_like(source)          # all ones -> every column lands in column 1
    dest = torch.zeros(n, dtype=source.dtype)
    start = time.time()
    dest.view(-1, 100).scatter_(1, idx, source)
    print('time', time.time() - start)
1 Like
Hi Sam!
I'm not seeing the slow-down that you allude to:
>>> import torch
>>> torch.__version__
'1.10.0'
>>> import time
>>> for size in [100, 100000, 100000000]:
... src = torch.arange(100)[None, :]
... index = torch.ones_like(src)
... x = torch.zeros(size, dtype=src.dtype)
... t = time.time()
... x.view(-1, 100).scatter_(1, index, src)
... print('time', time.time() - t)
...
tensor([[ 0, 99, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
time 0.0041332244873046875
tensor([[ 0, 99, 0, ..., 0, 0, 0],
[ 0, 0, 0, ..., 0, 0, 0],
[ 0, 0, 0, ..., 0, 0, 0],
...,
[ 0, 0, 0, ..., 0, 0, 0],
[ 0, 0, 0, ..., 0, 0, 0],
[ 0, 0, 0, ..., 0, 0, 0]])
time 0.0012705326080322266
tensor([[ 0, 99, 0, ..., 0, 0, 0],
[ 0, 0, 0, ..., 0, 0, 0],
[ 0, 0, 0, ..., 0, 0, 0],
...,
[ 0, 0, 0, ..., 0, 0, 0],
[ 0, 0, 0, ..., 0, 0, 0],
[ 0, 0, 0, ..., 0, 0, 0]])
time 0.0011951923370361328
(I attribute the longer time for the first, smaller size
to a warm-up effect.)
Best.
K. Frank