Code:

def __init__(self, train_list, train_path, **kwargs):
    self.data_list, self.data_length, self.data_label = [], [], []
    self.train_path = train_path
    self.datas = []
    lines = open(train_list).read().splitlines()
    # Get the ground-truth labels, which are used to compute NMI for post-analysis.
    dictkeys = list(set([x.split()[0] for x in lines]))
    dictkeys.sort()
    dictkeys = {key: ii for ii, key in enumerate(dictkeys)}
    for lidx, line in enumerate(lines):
        data = line.split()
        file_name = data[1]
        speaker_label = dictkeys[data[0]]
        self.data_list.append(file_name)       # Filename
        self.data_label.append(speaker_label)  # GT speaker label
    self.minibatch = []
    batch_size = 32
    for i in range(0, len(self.data_list), batch_size):
        batch_data = self.data_list[i:i + batch_size]
        batch_label = self.data_label[i:i + batch_size]
        self.minibatch.append([batch_data, batch_label])
    # sort the training set by the length of the audios; audios with similar length are saved together.

def __getitem__(self, index):
    data_lists, data_labels = self.minibatch[index]  # Get one minibatch
    filenames, labels, segments = [], [], []
    for num in range(len(data_lists)):
        filename = data_lists[num]  # Read filename
        label = data_labels[num]    # Read GT label
        file = os.path.join(self.train_path, filename)
        signal = pd.read_csv(file, header=None, usecols=[0], skiprows=[0], engine='python').values.flatten()
        segments.append(signal)
        filenames.append(filename)
        labels.append(label)
    print(segments)
    segments = torch.FloatTensor(numpy.array(segments))
    return segments, filenames, labels
Error:

Traceback (most recent call last):
  File "main_train.py", line 65, in <module>
    dic_label, NMI = Trainer.cluster_network(loader = clusterLoader, n_cluster = args.n_cluster) # Do clustering
  File "/home/data/pxy/apython/loss2/Stage2/model.py", line 45, in cluster_network
    for data, filenames, labels in tqdm.tqdm(loader):
  File "/home/data/anaconda3/envs/loss/lib/python3.8/site-packages/tqdm/std.py", line 1178, in __iter__
    for obj in iterable:
  File "/home/data/anaconda3/envs/loss/lib/python3.8/site-packages/torch/utils/data/dataloader.py", line 435, in __next__
    data = self._next_data()
  File "/home/data/anaconda3/envs/loss/lib/python3.8/site-packages/torch/utils/data/dataloader.py", line 1085, in _next_data
    return self._process_data(data)
  File "/home/data/anaconda3/envs/loss/lib/python3.8/site-packages/torch/utils/data/dataloader.py", line 1111, in _process_data
    data.reraise()
  File "/home/data/anaconda3/envs/loss/lib/python3.8/site-packages/torch/_utils.py", line 428, in reraise
    raise self.exc_type(msg)
TypeError: Caught TypeError in DataLoader worker process 0.
Original Traceback (most recent call last):
  File "/home/data/anaconda3/envs/loss/lib/python3.8/site-packages/torch/utils/data/_utils/worker.py", line 198, in _worker_loop
    data = fetcher.fetch(index)
  File "/home/data/anaconda3/envs/loss/lib/python3.8/site-packages/torch/utils/data/_utils/fetch.py", line 44, in fetch
    data = [self.dataset[idx] for idx in possibly_batched_index]
  File "/home/data/anaconda3/envs/loss/lib/python3.8/site-packages/torch/utils/data/_utils/fetch.py", line 44, in <listcomp>
    data = [self.dataset[idx] for idx in possibly_batched_index]
  File "/home/data/pxy/apython/loss2/Stage2/dataLoader.py", line 83, in __getitem__
    segments = torch.FloatTensor(numpy.array(segments))
TypeError: can't convert np.ndarray of type numpy.object_. The only supported types are: float64, float32, float16, complex64, complex128, int64, int32, int16, int8, uint8, and bool.
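If it helps, I can reproduce the same TypeError outside the DataLoader with just two arrays of different lengths (the lengths below are made up for illustration):

import numpy
import torch

a = numpy.zeros(16000)        # a shorter signal (length chosen arbitrarily)
b = numpy.zeros(24000)        # a longer signal
batch = numpy.array([a, b])   # ragged input -> dtype=object (newer NumPy versions raise ValueError here instead)
print(batch.dtype)            # prints: object
torch.FloatTensor(batch)      # raises: TypeError: can't convert np.ndarray of type numpy.object_ ...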
The printed segments:
[array([-0.00082384, -0.00082384, 0.00082384, ..., 0.00020596,
-0.00020596, -0.00020596]), array([ 0.00020596, 0. , 0.00041192, ..., 0. ,
0.00020596, -0.00020596]), array([ 0.00020596, 0.00020596, 0.0010298 , ..., -0.00020596,
0.00082384, -0.00020596]), array([-0.00061788, 0.00041192, 0. , ..., -0.0010298 ,
-0.00020596, -0.00082384]), array([-0.00185364, -0.0010298 , 0. , ..., -0.00041192,
0. , -0.00020596]), array([-0.00020596, -0.00061788, 0.00020596, ..., -0.00082384,
0. , -0.00020596]), array([-0.00020596, 0. , 0. , ..., -0.00020596,
0.00041192, 0. ]), array([ 0. , 0. , 0.00020596, ..., -0.00082384,
-0.00020596, 0. ]), array([-0.00041192, -0.00082384, -0.00082384, ..., -0.00041192,
0.00020596, -0.00020596]), array([-0.00020596, 0.00020596, -0.0010298 , ..., 0.00020596,
0.00041192, -0.0010298 ]), array([ 0.00061788, 0.00164768, 0.00041192, ..., 0. ,
-0.00020596, -0.00082384]), array([-0.00061788, -0.00020596, -0.00061788, ..., 0.00082384,
0.00061788, 0.00041192]), array([ 0.00020596, 0.00041192, -0.0010298 , ..., 0. ,
0.00164768, 0.00041192]), array([0. , 0.00082384, 0. , ..., 0.00041192, 0.00061788,
0.00061788]), array([ 0.00041192, 0.00041192, -0.00041192, ..., 0.00020596,
0.00020596, -0.00061788]), array([-0.00061788, 0.00082384, -0.00020596, ..., -0.00185364,
-0.00061788, -0.00123576]), array([-0.00123576, 0.00020596, -0.0010298 , ..., 0.00020596,
0.00082384, -0.00082384])]
[array([-0.00061788, -0.00061788, -0.00144172, ..., -0.00061788,
0.00020596, -0.00061788]), array([-0.00041192, -0.00061788, -0.0010298 , ..., -0.00020596,
0.00020596, -0.00041192]), array([-0.00082384, 0.00082384, 0.00020596, ..., 0.00020596,
0.00061788, 0.00020596]), array([ 0.00020596, 0.00061788, -0.00020596, ..., 0.00041192,
0.00061788, 0. ]), array([ 0.00041192, 0.00061788, 0.00020596, ..., -0.00061788,
-0.00041192, -0.00061788]), array([ 0. , 0.00041192, -0.00041192, ..., -0.00020596,
0.00020596, -0.0010298 ]), array([-0.00041192, 0.00020596, -0.0010298 , ..., -0.00164768,
-0.00061788, -0.00061788]), array([ 0.00164704, 0.00082352, 0.00082352, ..., 0.00041176,
-0.00123528, -0.00041176]), array([-0.00041176, -0.00041176, 0.00041176, ..., -0.00041176,
-0.00041176, 0. ]), array([ 0.00082352, 0.00082352, 0.00082352, ..., 0.00082352,
-0.00041176, 0. ]), array([ 0.00041176, -0.00041176, 0.00123528, ..., -0.00164704,
-0.00041176, -0.00082352]), array([ 0.00041176, -0.00041176, 0.00082352, ..., 0.00123528,
0.00082352, 0. ]), array([ 0. , 0.00041176, 0.00041176, ..., 0. ,
-0.00041176, 0. ]), array([-0.00041176, -0.00082352, 0.00041176, ..., 0.00164704,
0.00082352, 0.00164704]), array([0.00164704, 0.00082352, 0.00082352, ..., 0.00041176, 0.00041176,
0.00041176]), array([ 0.00082352, 0.00041176, 0.00082352, ..., 0. ,
-0.00041176, -0.00041176]), array([-0.00226556, -0.00082384, -0.00164768, ..., -0.00061788,
0.00041192, -0.00061788]), array([-0.00041192, -0.00041192, 0.00020596, ..., 0. ,
-0.00020596, -0.00082384]), array([-0.00020596, 0.00041192, -0.00061788, ..., -0.00082384,
0.00082384, -0.00041192]), array([-0.00041192, 0.00061788, -0.00020596, ..., 0. ,
-0.00041192, -0.00082384]), array([-0.00041192, 0.00041192, -0.00020596, ..., -0.00082384,
-0.00041192, 0.00020596]), array([-0.00020596, 0.00041192, -0.0010298 , ..., 0.00061788,
0. , -0.00020596]), array([ 0.00020596, 0.00082384, 0. , ..., -0.00061788,
0.00020596, 0.00020596]), array([ 0. , 0.00082384, -0.00020596, ..., 0. ,
0.00020596, 0.00041192]), array([-0.00020596, 0.00082384, -0.00061788, ..., -0.00041192,
0. , 0.00020596]), array([-0.00205288, 0. , 0. , ..., 0. ,
0. , 0.00205288]), array([-0.00205288, 0. , -0.00205288, ..., 0. ,
0. , -0.00205288]), array([0., 0., 0., ..., 0., 0., 0.]), array([ 0. , -0.00205288, -0.00410576, ..., -0.00205288,
0. , 0. ]), array([-0.00226556, -0.00164768, -0.00082384, ..., -0.00082384,
-0.00164768, -0.00144172]), array([-0.00082384, -0.00082384, -0.00061788, ..., 0.00123576,
0.00123576, 0.00082384]), array([0.00061788, 0.00061788, 0.00082384, ..., 0.00041192, 0.00020596,
0.00041192])]
[array([ 0.00020596, 0. , 0.0010298 , ..., -0.00020596,
-0.00061788, 0.00061788]), array([ 0. , -0.00061788, 0.0010298 , ..., 0.00061788,
0.00041192, 0.0010298 ]), array([ 0.00123576, 0. , 0.00041192, ..., 0.00041192,
-0.00082384, 0.00061788]), array([-0.00820832, -0.00820832, -0.00820832, ..., 0. ,
0. , -0.01641664]), array([-0.00820832, 0. , 0. , ..., -0.00820832,
-0.00820832, -0.00820832]), array([-0.00820832, -0.00820832, -0.00820832, ..., -0.00820832,
-0.00820832, -0.00820832]), array([-0.01641664, -0.01641664, -0.00820832, ..., -0.00820832,
-0.00820832, 0.00820832]), array([-0.00820832, -0.00820832, 0. , ..., -0.00820832,
-0.00820832, -0.00820832]), array([ 0.00820832, -0.00820832, -0.00820832, ..., -0.01641664,
0. , -0.00820832]), array([-0.00820832, -0.00820832, -0.00820832, ..., -0.01641664,
-0.01641664, -0.00820832]), array([-0.00820832, 0. , -0.02462496, ..., -0.01641664,
-0.00820832, -0.00820832]), array([ 0. , 0. , 0.00164704, ..., 0.00123528,
-0.00082352, -0.00082352]), array([-0.00041176, -0.00041176, 0. , ..., -0.00041176,
-0.00123528, 0. ]), array([-0.00082352, -0.00082352, -0.00041176, ..., 0.00123528,
0. , -0.00082352]), array([ 0.00082352, 0.00082352, 0. , ..., -0.00041176,
-0.00123528, -0.00123528]), array([-0.00082352, -0.00041176, 0. , ..., -0.00082352,
-0.00082352, -0.00041176]), array([ 0.00041176, -0.00041176, 0. , ..., 0.00082352,
0.00041176, 0. ]), array([ 0. , 0. , 0.00082352, ..., 0.00123528,
-0.00041176, 0.00082352]), array([ 0.00041176, 0. , -0.00041176, ..., -0.00082352,
-0.00041176, -0.00082352]), array([-0.00041176, -0.00041176, -0.00123528, ..., 0.00041176,
-0.00082352, -0.00041176]), array([-0.00041192, 0. , 0.00020596, ..., 0.0010298 ,
0.00123576, 0.00082384]), array([0.00020596, 0.00123576, 0.00061788, ..., 0.00020596, 0.00020596,
0.00061788]), array([ 0. , 0.00020596, 0.00061788, ..., 0.00061788,
-0.00020596, -0.00020596]), array([-0.0010298 , 0.00020596, 0.00041192, ..., 0.00144172,
0.00082384, 0.00082384]), array([0.00020596, 0.00041192, 0.00082384, ..., 0.00082384, 0.00144172,
0.00082384]), array([ 0.00082384, 0.0010298 , 0.00082384, ..., -0.00020596,
-0.00020596, -0.00020596]), array([-0.00020596, -0.00123576, 0.00041192, ..., -0.00061788,
0.00082384, -0.00082384]), array([ 0.00041192, 0.00061788, -0.00041192, ..., 0. ,
-0.00041192, 0.00041192]), array([ 0.00020596, 0. , 0.00041192, ..., -0.00061788,
-0.00041192, 0.00020596]), array([ 0.00185364, 0.00144172, 0.00061788, ..., -0.00041192,
-0.00020596, 0.00020596]), array([ 0.00041192, 0. , 0.00082384, ..., 0.00041192,
-0.00041192, 0.00020596]), array([ 0.00061788, -0.00020596, 0.00082384, ..., -0.00061788,
0. , 0.00041192])]
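From the printout, the signals inside a single minibatch have different lengths, which is presumably why numpy.array(segments) falls back to dtype=object. Below is a rough sketch of the kind of change I am considering at the end of __getitem__ (truncating every signal to the shortest one in the batch is only my guess; zero-padding, or cropping fixed-length segments when the audio is read, might be the intended approach):

    min_len = min(len(s) for s in segments)               # shortest signal in this minibatch
    segments = [s[:min_len] for s in segments]            # truncate so all signals have equal length
    segments = torch.FloatTensor(numpy.array(segments))   # now a regular 2-D float array
    return segments, filenames, labels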
How should I fix this?