PyTorch memory leak when using multithreading?

Hi,
I'm trying to run the prediction step with multithreading, but I find that memory keeps increasing…
Is there anything wrong with my code?

My development environment is:
Ubuntu 14.04
PyTorch 1.0.0 (installed via Anaconda)
Python 3.6.5

import threading

import torch
import torch.nn as nn

class TestTorch(nn.Module):
    def __init__(self):
        super(TestTorch, self).__init__()
        self.linear = nn.Sequential(
                nn.Linear(10, 5000),
                nn.ReLU(),
                nn.Linear(5000,6)
                )

    def forward(self, x):
        # Inference only: no_grad keeps autograd from building a graph
        with torch.no_grad():
            x_tensor = torch.FloatTensor(x)
            y_tensor = self.linear(x_tensor)
        return y_tensor

def worker(handler):
    # Each thread runs a single forward pass on a fixed dummy input
    x = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    result = handler(x)


if __name__ == '__main__':
    handler = TestTorch()
    thread_pool = []
    while True:
        # Keep spawning fresh threads, joining them in batches of ~10
        t = threading.Thread(target=worker, args=(handler,))
        thread_pool.append(t)
        t.start()
        if len(thread_pool) > 10:
            for tmp in thread_pool:
                tmp.join()
            thread_pool = []

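To confirm it really is the process footprint growing, I watch the resident set size between batches of threads. A minimal sketch, assuming psutil is available (the batch size and iteration count are arbitrary; TestTorch and worker are the definitions from the script above):

import os
import threading

import psutil  # assumed installed: pip install psutil

proc = psutil.Process(os.getpid())
handler = TestTorch()  # model from the script above

for batch in range(100):
    # Spawn a fresh batch of threads, exactly like the repro loop
    threads = [threading.Thread(target=worker, args=(handler,))
               for _ in range(10)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    # Resident set size in MiB after every fully joined batch;
    # for me this number climbs steadily even though all threads exited
    print(batch, round(proc.memory_info().rss / 2**20, 1))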

But when I run the same forward pass in a single thread, memory stays stable.

import torch
import torch.nn as nn

class TestTorch(nn.Module):
    def __init__(self):
        super(TestTorch, self).__init__()
        self.linear = nn.Sequential(
                nn.Linear(10, 5000),
                nn.ReLU(),
                nn.Linear(5000,6)
                )

    def forward(self, x):
        with torch.no_grad():
            x_tensor = torch.FloatTensor(x)
            y_tensor = self.linear(x_tensor)
        return y_tensor


if __name__ == '__main__':
    handler = TestTorch()
    while True:
        # Same forward pass, but always on the main thread
        x = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
        result = handler(x)


Maybe the problem is the same as this issue.
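
If the growth comes from per-thread state that gets allocated for every freshly spawned thread, then reusing a fixed pool of long-lived threads instead of creating new ones each round might avoid it. A sketch of what I have in mind (untested, standard-library concurrent.futures only; TestTorch and worker as defined above):

from concurrent.futures import ThreadPoolExecutor

handler = TestTorch()

# The same 10 threads serve every prediction, so whatever
# thread-local state exists is allocated once, not once per spawn
with ThreadPoolExecutor(max_workers=10) as pool:
    while True:
        futures = [pool.submit(worker, handler) for _ in range(10)]
        for f in futures:
            f.result()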