Regarding weights of layer

When I saved the weights of a layer from a pretrained model into a dictionary, only some of the values are shown (the output is truncated). I need all of the weights — how can I see all of those numbers?
OrderedDict([('weight', tensor([[[[-3.4618e-02, -3.3801e-02, -2.2497e-02],
[-3.2275e-02, -2.8543e-02, -1.5026e-02],
[-3.1307e-02, -1.8263e-02, -1.1662e-02]],

     [[-6.7787e-03, -5.1631e-03, -1.1498e-02],
      [-3.1353e-03, -9.5856e-03, -1.1940e-02],
      [-1.6923e-02, -1.5893e-02, -9.2058e-03]],

     [[ 2.1721e-02,  1.6138e-03, -1.2762e-02],
      [ 1.1541e-02,  3.9117e-03, -1.7966e-02],
      [ 8.2566e-03, -7.2988e-03, -1.8711e-02]],


     [[ 1.1666e-02,  5.8356e-03,  5.1317e-03],
      [ 2.7172e-02,  3.0096e-02,  2.2107e-02],
      [ 5.0745e-02,  5.0426e-02,  5.2568e-02]],

     [[ 1.5641e-02,  6.1531e-03,  1.0435e-02],
      [-4.9628e-03, -1.0472e-02, -9.7399e-03],
      [-1.3043e-02, -1.1895e-02, -8.3080e-03]],

     [[-7.5200e-03, -8.6918e-03, -7.7642e-03],
      [-2.1090e-02, -1.2527e-02, -1.4423e-02],
      [-1.5833e-02, -1.0271e-02, -6.5119e-03]]],

    [[[ 2.9262e-02,  2.4535e-02,  9.8333e-03],
      [ 1.3875e-02,  1.1888e-02,  1.5868e-03],
      [ 3.2187e-02,  2.7896e-02,  2.1428e-02]],

     [[-1.2907e-02, -1.6465e-02, -1.0769e-02],
      [-1.2730e-02, -8.0963e-04, -9.7610e-03],
      [-1.7824e-02, -1.9745e-03, -6.0412e-03]],

     [[ 7.3908e-03,  1.3782e-03, -2.7653e-03],
      [ 2.2362e-02,  3.1297e-03,  7.8103e-03],
      [-3.6186e-03,  4.7948e-03, -3.9560e-03]],


     [[ 5.8102e-03,  6.1981e-04, -9.2120e-04],
      [-2.2606e-03, -2.2002e-02, -3.1097e-03],
      [-9.4541e-03, -2.8003e-02, -2.2712e-02]],

     [[ 2.0525e-03, -3.4167e-03,  1.0797e-02],
      [ 8.3235e-03, -7.1854e-03,  8.9610e-03],
      [ 1.1586e-02, -4.0099e-03,  1.9634e-02]],

     [[ 7.4921e-03, -1.0478e-03,  1.5801e-02],
      [ 7.9514e-04, -1.6881e-02, -2.4732e-04],
      [ 1.5973e-02,  1.6192e-02,  3.7039e-03]]],

    [[[-2.3103e-02, -2.4085e-02, -2.1704e-02],
      [ 1.0276e-02, -7.1129e-03,  6.8848e-03],
      [ 8.5694e-03, -1.9288e-03, -2.8843e-03]],

     [[-1.3841e-02, -1.0492e-02, -2.4102e-02],
      [-2.7612e-02, -2.5791e-02, -1.2960e-02],
      [-1.5598e-03,  3.6200e-04,  2.1741e-02]],

     [[ 2.7855e-02,  2.6277e-02,  2.2290e-02],
      [ 2.4780e-02,  2.5150e-02,  2.5566e-03],
      [-1.9022e-03,  1.0823e-02, -8.6423e-03]],


     [[-6.3089e-03,  1.8908e-03,  6.9918e-03],
      [-6.9307e-03, -8.7947e-03, -7.2253e-03],
      [-3.5385e-03, -4.9549e-03, -8.7845e-03]],

     [[ 8.2309e-03,  1.8546e-04, -9.4308e-04],
      [ 2.4796e-04, -1.3734e-03, -6.4844e-03],
      [ 1.0330e-02, -6.3096e-03,  2.9899e-03]],

     [[-3.2615e-03,  1.1206e-03,  2.0341e-03],
      [ 3.0882e-03, -1.5877e-02, -3.2855e-03],
      [-1.0955e-02, -5.5521e-03, -1.2025e-04]]],


    [[[-1.7462e-02, -3.8714e-02, -3.6843e-02],
      [-2.3918e-02, -3.9357e-02, -2.3753e-02],
      [-6.3624e-03, -1.7850e-02,  1.0370e-03]],

     [[ 2.8356e-02,  1.4364e-02,  8.2898e-03],
      [ 1.7873e-02, -1.1615e-03,  5.5851e-03],
      [-1.2643e-03, -8.9554e-03, -3.0568e-03]],

     [[ 5.1422e-03,  4.6503e-04,  9.0143e-03],
      [ 1.9823e-02,  3.4737e-02,  2.8242e-02],
      [ 3.2088e-02,  2.3840e-02,  2.2072e-02]],


     [[ 3.4983e-03,  9.9425e-03,  4.5399e-03],
      [ 1.1027e-02,  4.5929e-03,  2.6449e-03],
      [ 1.7532e-02, -1.0313e-03, -8.3937e-03]],

     [[-1.0549e-02, -1.0171e-02, -5.7416e-03],
      [-8.2740e-03, -8.3159e-03, -1.4377e-02],
      [-1.1800e-02, -1.1000e-02, -1.6324e-03]],

     [[-1.3096e-02, -3.1618e-02, -2.5536e-02],
      [-4.8419e-03, -1.3772e-02, -1.3568e-02],
      [-1.5098e-02, -8.9054e-03, -1.2804e-02]]],

    [[[ 3.8384e-03, -6.2667e-03, -1.1664e-02],
      [ 3.3901e-03, -1.1676e-02, -6.1101e-03],
      [-1.7972e-02, -3.3468e-02, -9.4137e-03]],

     [[ 1.0985e-03,  1.3939e-04,  5.5219e-03],
      [-9.3755e-03, -3.6006e-03, -3.1602e-05],
      [-2.7297e-03, -5.7176e-03, -3.4485e-03]],

     [[ 1.9236e-02,  1.2842e-02,  8.1627e-03],
      [ 1.5062e-02,  4.3942e-04,  1.0795e-03],
      [ 6.7360e-03,  9.0229e-03,  2.8575e-03]],


     [[-7.0815e-03, -4.9577e-03,  1.6272e-03],
      [ 2.0418e-03, -8.2440e-03,  8.4400e-03],
      [ 6.3961e-03, -5.5046e-03,  1.0984e-03]],

     [[ 9.5378e-04, -1.1439e-02, -3.1516e-03],
      [ 1.0669e-03, -1.8538e-02, -1.2831e-02],
      [ 5.5736e-03, -1.1039e-02, -1.3992e-02]],

     [[-1.9465e-02, -1.2852e-02, -1.8318e-02],
      [-2.6698e-02, -1.8011e-02, -1.0662e-02],
      [-1.6479e-02, -1.4482e-02, -1.6194e-02]]],

    [[[-2.3728e-02, -3.6695e-02, -3.7500e-02],
      [-1.3462e-03, -3.9573e-04,  3.2643e-03],
      [ 1.5174e-02,  2.6909e-02,  1.3840e-02]],

     [[ 2.8536e-04, -1.5611e-02, -6.2689e-03],
      [-4.9708e-04, -2.2292e-02, -2.0291e-02],
      [ 1.2825e-02, -1.6755e-02, -2.1885e-02]],

     [[ 8.5642e-03,  6.9444e-03,  9.6820e-03],
      [ 3.2590e-03,  1.7821e-02,  7.3303e-03],
      [ 1.2386e-02,  2.1082e-02, -7.5897e-03]],


     [[ 7.6673e-03,  1.3758e-02,  1.1945e-02],
      [ 8.6567e-03,  1.0310e-02, -1.6190e-03],
      [ 1.7062e-02, -6.1529e-03, -4.0647e-03]],

     [[ 1.5687e-02,  3.6550e-03, -1.1310e-02],
      [-1.4527e-04,  8.9713e-04,  2.0525e-03],
      [-1.0805e-02, -5.5566e-03,  8.8173e-03]],

     [[-1.4262e-02, -1.7638e-02, -6.3932e-03],
      [-2.4702e-03, -1.2362e-02, -2.2834e-02],
      [-4.4622e-03, -9.5870e-03, -1.7204e-02]]]], device='cuda:0'))

Thanks for any help

You could serialize them to a text file, e.g. using numpy's savetxt method, in case you need to access them directly. Alternatively, torch.set_printoptions(profile="full") will make PyTorch print the entire tensor instead of truncating it.

May I ask what your use case is that you need to see all values?

Thanks — I just want to visualize the weights of the layer.

In that case, you could also try using imshow from matplotlib, which might be clearer in case your tensor is huge.