How to build a stack (data structure)?

I mean I want a stack, the first-in-last-out data structure one, not the torch.stack function or Neural Stack or something like this. And I want my stack can push and pop in batch-level, so I add a mask parameter, whose type is bool, in both push and pop. My code looks like below.

import torch


class Stack(object):
    def __init__(self, batch_size, num_frames, input_size):
        super(Stack, self).__init__()

        self.data = torch.zeros((batch_size, num_frames, input_size), dtype=torch.float32)
        self.prev = torch.zeros((batch_size, num_frames), dtype=torch.long)
        self.top = torch.zeros((batch_size,), dtype=torch.long)
        self.idx = torch.zeros((batch_size,), dtype=torch.long)

    def push(self, x, mask):
        self.data[mask, self.idx[mask]] = x
        self.prev[mask, self.idx[mask]] = self.top[mask]
        self.top[mask] = self.idx[mask]
        self.idx[mask] = self.idx[mask] + 1

    def pop(self, mask):
        self.top[mask] = self.prev[mask, self.top[mask]]

    def get_top(self, mask):
        return self.data[mask, self.top[mask]]


if __name__ == '__main__':
    with torch.autograd.set_detect_anomaly(True):
        stack = Stack(3, 10, 2)
        x = torch.rand(2, 2, requires_grad=True)
        y = torch.rand(2, 2, requires_grad=True)
        m = torch.tensor([True, False, True])

        stack.push(x, m)

        t = stack.get_top(m)
        stack.push(y + t, m)

        loss = stack.get_top(m).mean()
        loss.backward()

but I got this,

../torch/csrc/autograd/python_anomaly_mode.cpp:57: UserWarning: Traceback of forward call that caused the error:
  File "~/Library/Preferences/PyCharm2019.1/scratches/scratch.py", line 35, in <module>
    t = stack.get_top(m)
  File "~/Library/Preferences/PyCharm2019.1/scratches/scratch.py", line 23, in get_top
    return self.data[mask, self.top[mask]]

Traceback (most recent call last):
  File "~/Library/Preferences/PyCharm2019.1/scratches/scratch.py", line 39, in <module>
    loss.backward()
  File "/usr/local/lib/python3.7/site-packages/torch/tensor.py", line 118, in backward
    torch.autograd.backward(self, gradient, retain_graph, create_graph)
  File "/usr/local/lib/python3.7/site-packages/torch/autograd/__init__.py", line 93, in backward
    allow_unreachable=True)  # allow_unreachable flag
RuntimeError: one of the variables needed for gradient computation has been modified by an inplace operation: [torch.FloatTensor [3, 10, 2]], which is output 0 of IndexPutBackward, is at version 2; expected version 1 instead. Hint: the backtrace further above shows the operation that failed to compute its gradient. The variable in question was changed in there or anywhere later. Good luck!

Process finished with exit code 1

any idea about how to fix this?