import torch
import torch.nn as nn
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
def describe(x):
    """Print a short summary of tensor ``x``: its type, shape, and values.

    Args:
        x: a ``torch.Tensor`` (anything with ``.type()`` and ``.shape``).

    Returns:
        None — output goes to stdout only.
    """
    # NOTE(fix): the original body lines were pasted without indentation,
    # which made this file a SyntaxError; the statements are otherwise unchanged.
    print("Type: {}".format(x.type()))
    print("Shape/size: {}".format(x.shape))
    print("Values: \n{}".format(x))
# Three right-padded sequences of unequal true length ("abcd", "efg", "h");
# zeros fill the pad positions so every row has length 4.
_rows = [
    [1, 2, 3, 4],  # "abcd": uses every slot
    [5, 6, 7, 0],  # "efg": one pad slot
    [8, 0, 0, 0],  # "h": three pad slots
]
abcd_padded, efg_padded, h_padded = (
    torch.tensor(row, dtype=torch.float32) for row in _rows
)
# Stack into a single (3, 4) batch, longest sequence first.
padded_tensor = torch.stack([abcd_padded, efg_padded, h_padded])
describe(padded_tensor)
# Output:
# Type: torch.FloatTensor
# Shape/size: torch.Size([3, 4])
# Values:
# tensor([[1., 2., 3., 4.],
#         [5., 6., 7., 0.],
#         [8., 0., 0., 0.]])
# True (unpadded) length of each row of padded_tensor, in descending order
# as pack_padded_sequence requires by default.
lengths = [4, 3, 1]
# Pack the padded batch so that consumers (e.g. RNNs) skip pad positions;
# batch_first=True matches the (batch, seq) layout built above.
packed_tensor = pack_padded_sequence(padded_tensor, lengths,
                                     batch_first=True)
# NOTE(fix): the original line was the bare expression `packed_tensor`,
# which only displays a value in a REPL and is a no-op in a script;
# print() reproduces the intended display.
print(packed_tensor)
# Output:
# PackedSequence(data=tensor([1., 5., 8., 2., 6., 3., 7., 4.]),
#                batch_sizes=tensor([3, 2, 2, 1]))
# Invert the packing: recover the zero-padded (3, 4) batch and the
# per-row true lengths from the PackedSequence.
unpacked_tensor, unpacked_lengths = pad_packed_sequence(
    packed_tensor, batch_first=True
)
describe(unpacked_tensor)
describe(unpacked_lengths)
# Output:
# Type: torch.FloatTensor
# Shape/size: torch.Size([3, 4])
# Values:
# tensor([[1., 2., 3., 4.],
#         [5., 6., 7., 0.],
#         [8., 0., 0., 0.]])
# Type: torch.LongTensor
# Shape/size: torch.Size([3])
# Values:
# tensor([4, 3, 1])