# utils.py
import os
import shutil
import torch
import torch.nn.functional as F
import numpy as np
import matplotlib
matplotlib.use("Agg")
from matplotlib import pyplot as plt

def process_meta(meta_path):
    # Parse a metadata file with one "name|text|speaker_id" record per line.
    with open(meta_path, "r", encoding="utf-8") as f:
        text = []
        name = []
        sid = []
        for line in f.readlines():
            n, t, s = line.strip('\n').split('|')
            name.append(n)
            text.append(t)
            sid.append(s)
        return name, text, sid
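
# Usage sketch (hypothetical file and record; the ID below is an
# LJSpeech-style example, not taken from this repo):
#   train.txt line:  LJ001-0001|Printing, in the only sense ...|0
#   name, text, sid = process_meta("train.txt")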

def get_param_num(model):
    # Count all parameters in the model.
    num_param = sum(param.numel() for param in model.parameters())
    return num_param

def plot_data(data, titles=None, filename=None):
    # Plot a stack of spectrograms, one panel per entry, and save to filename.
    fig, axes = plt.subplots(len(data), 1, squeeze=False)
    fig.tight_layout()
    if titles is None:
        titles = [None for i in range(len(data))]
    for i in range(len(data)):
        spectrogram = data[i]
        axes[i][0].imshow(spectrogram, origin='lower')
        axes[i][0].set_aspect(2.5, adjustable='box')
        axes[i][0].set_ylim(0, 80)
        axes[i][0].set_title(titles[i], fontsize='medium')
        axes[i][0].tick_params(labelsize='x-small', left=False, labelleft=False)
        axes[i][0].set_anchor('W')
    plt.savefig(filename, dpi=200)
    plt.close()
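
# Usage sketch (mel_pred and mel_target are hypothetical arrays
# shaped [n_mels, frames]):
#   plot_data([mel_pred, mel_target],
#             titles=["Synthesized", "Ground truth"],
#             filename="mel_comparison.png")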

def get_mask_from_lengths(lengths, max_len=None):
    # Boolean padding mask: True marks positions at or beyond each length.
    # Note: the mask is built on the GPU, so this assumes CUDA is available.
    batch_size = lengths.shape[0]
    if max_len is None:
        max_len = torch.max(lengths).item()
    ids = torch.arange(0, max_len).unsqueeze(0).expand(batch_size, -1).cuda()
    mask = (ids >= lengths.unsqueeze(1).expand(-1, max_len))
    return mask
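
# Usage sketch (assumes a CUDA device, matching the .cuda() call above):
#   lengths = torch.tensor([2, 4]).cuda()
#   get_mask_from_lengths(lengths)
#   -> tensor([[False, False,  True,  True],
#              [False, False, False, False]], device='cuda:0')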

def pad_1D(inputs, PAD=0):
    # Pad each 1-D array with PAD up to the longest length, then stack.
    def pad_data(x, length, PAD):
        x_padded = np.pad(x, (0, length - x.shape[0]),
                          mode='constant',
                          constant_values=PAD)
        return x_padded

    max_len = max(len(x) for x in inputs)
    padded = np.stack([pad_data(x, max_len, PAD) for x in inputs])
    return padded
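
# Usage sketch:
#   pad_1D([np.array([1, 2, 3]), np.array([4])])
#   -> array([[1, 2, 3],
#             [4, 0, 0]])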

def pad_2D(inputs, maxlen=None):
    # Pad each 2-D array along the first (time) axis, then stack.
    def pad(x, max_len):
        PAD = 0
        if np.shape(x)[0] > max_len:
            raise ValueError("input is longer than max_len")
        s = np.shape(x)[1]
        # np.pad with a single (before, after) pair pads every axis, so the
        # second axis is trimmed back to its original width below.
        x_padded = np.pad(x, (0, max_len - np.shape(x)[0]),
                          mode='constant',
                          constant_values=PAD)
        return x_padded[:, :s]

    if maxlen:
        output = np.stack([pad(x, maxlen) for x in inputs])
    else:
        max_len = max(np.shape(x)[0] for x in inputs)
        output = np.stack([pad(x, max_len) for x in inputs])
    return output
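
# Usage sketch: only the first axis grows, the second is preserved.
#   pad_2D([np.ones((2, 3)), np.ones((1, 3))]).shape
#   -> (2, 2, 3)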

def pad(input_ele, mel_max_length=None):
    # Pad a list of 1-D or 2-D tensors along the time axis with zeros, then stack.
    if mel_max_length:
        max_len = mel_max_length
    else:
        max_len = max([input_ele[i].size(0) for i in range(len(input_ele))])

    out_list = list()
    for i, batch in enumerate(input_ele):
        if len(batch.shape) == 1:
            one_batch_padded = F.pad(
                batch, (0, max_len - batch.size(0)), "constant", 0.0)
        elif len(batch.shape) == 2:
            one_batch_padded = F.pad(
                batch, (0, 0, 0, max_len - batch.size(0)), "constant", 0.0)
        out_list.append(one_batch_padded)
    out_padded = torch.stack(out_list)
    return out_padded
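
# Usage sketch (hypothetical tensors; 2-D inputs are padded along dim 0 only):
#   pad([torch.ones(2), torch.ones(4)]).shape        -> torch.Size([2, 4])
#   pad([torch.ones(2, 5), torch.ones(4, 5)]).shape  -> torch.Size([2, 4, 5])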

class AttrDict(dict):
    # Dict subclass whose keys are also accessible as attributes.
    def __init__(self, *args, **kwargs):
        super(AttrDict, self).__init__(*args, **kwargs)
        self.__dict__ = self
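
# Usage sketch (HiFi-GAN-style config access):
#   h = AttrDict({"sampling_rate": 22050})
#   h.sampling_rate  -> 22050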

def build_env(config, config_name, path):
    # Copy the config file into the run directory unless it is already there.
    t_path = os.path.join(path, config_name)
    if config != t_path:
        os.makedirs(path, exist_ok=True)
        shutil.copyfile(config, t_path)
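
# Usage sketch (hypothetical paths):
#   build_env("config.json", "config.json", "checkpoints/run1")
#   copies config.json into checkpoints/run1/, creating the directory if needed.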