diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_b/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu120_xset/xset_b/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_b/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions import Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, 
**kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 
22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_b/config.yaml b/ckpt/Others/MST-GCN/ntu120_xset/xset_b/config.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..40121dc5aaa5a864c63ebb858206770f0eeda32a --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_b/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu120/xset_b.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + - 448 + - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 120 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu120/xset_b/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu120/xset_b diff --git 
a/ckpt/Others/MST-GCN/ntu120_xset/xset_b/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu120_xset/xset_b/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..adb9fb1a445b78a705adc926098ee961bfdb3638 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_b/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ef06ec9a637932e5101e90189ee5aa4064133280b97542e0b0ef61f8e5f1ac0 +size 34946665 diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_b/log.txt b/ckpt/Others/MST-GCN/ntu120_xset/xset_b/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..f398fdd20911ca38e051c319793ab884c8420bf2 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_b/log.txt @@ -0,0 +1,631 @@ +[ Wed Sep 7 21:35:36 2022 ] # generator parameters: 2.922995 M. +[ Wed Sep 7 21:35:37 2022 ] Parameters: +{'work_dir': './work_dir/ntu120/xset_b', 'model_saved_name': './runs/ntu120/xset_b/runs', 'config': 'config/ntu120/xset_b.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], 
[224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Wed Sep 7 21:35:37 2022 ] Training epoch: 1 +[ Wed Sep 7 21:35:37 2022 ] Learning rate: 0.015 +[ Wed Sep 7 21:40:00 2022 ] Mean training loss: 3.6086. +[ Wed Sep 7 21:40:00 2022 ] Time consumption: [Data]01%, [Network]98% +[ Wed Sep 7 21:40:00 2022 ] Training epoch: 2 +[ Wed Sep 7 21:40:00 2022 ] Learning rate: 0.03 +[ Wed Sep 7 21:44:23 2022 ] Mean training loss: 2.7809. +[ Wed Sep 7 21:44:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:44:23 2022 ] Training epoch: 3 +[ Wed Sep 7 21:44:23 2022 ] Learning rate: 0.045 +[ Wed Sep 7 21:48:46 2022 ] Mean training loss: 2.3419. +[ Wed Sep 7 21:48:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:48:46 2022 ] Training epoch: 4 +[ Wed Sep 7 21:48:46 2022 ] Learning rate: 0.06 +[ Wed Sep 7 21:53:09 2022 ] Mean training loss: 2.0429. +[ Wed Sep 7 21:53:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:53:09 2022 ] Training epoch: 5 +[ Wed Sep 7 21:53:09 2022 ] Learning rate: 0.075 +[ Wed Sep 7 21:57:32 2022 ] Mean training loss: 1.8437. +[ Wed Sep 7 21:57:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:57:32 2022 ] Training epoch: 6 +[ Wed Sep 7 21:57:32 2022 ] Learning rate: 0.09 +[ Wed Sep 7 22:01:55 2022 ] Mean training loss: 1.7147. 
+[ Wed Sep 7 22:01:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:01:55 2022 ] Training epoch: 7 +[ Wed Sep 7 22:01:55 2022 ] Learning rate: 0.10500000000000001 +[ Wed Sep 7 22:06:17 2022 ] Mean training loss: 1.5963. +[ Wed Sep 7 22:06:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:06:17 2022 ] Training epoch: 8 +[ Wed Sep 7 22:06:17 2022 ] Learning rate: 0.12 +[ Wed Sep 7 22:10:40 2022 ] Mean training loss: 1.5492. +[ Wed Sep 7 22:10:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:10:40 2022 ] Training epoch: 9 +[ Wed Sep 7 22:10:40 2022 ] Learning rate: 0.13499999999999998 +[ Wed Sep 7 22:15:03 2022 ] Mean training loss: 1.4981. +[ Wed Sep 7 22:15:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:15:03 2022 ] Training epoch: 10 +[ Wed Sep 7 22:15:03 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:19:25 2022 ] Mean training loss: 1.4923. +[ Wed Sep 7 22:19:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:19:25 2022 ] Training epoch: 11 +[ Wed Sep 7 22:19:25 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:23:48 2022 ] Mean training loss: 1.3916. +[ Wed Sep 7 22:23:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:23:48 2022 ] Training epoch: 12 +[ Wed Sep 7 22:23:48 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:28:11 2022 ] Mean training loss: 1.3541. +[ Wed Sep 7 22:28:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:28:11 2022 ] Training epoch: 13 +[ Wed Sep 7 22:28:11 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:32:34 2022 ] Mean training loss: 1.2929. +[ Wed Sep 7 22:32:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:32:34 2022 ] Training epoch: 14 +[ Wed Sep 7 22:32:34 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:36:57 2022 ] Mean training loss: 1.2638. 
+[ Wed Sep 7 22:36:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:36:57 2022 ] Training epoch: 15 +[ Wed Sep 7 22:36:57 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:41:20 2022 ] Mean training loss: 1.2265. +[ Wed Sep 7 22:41:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:41:20 2022 ] Training epoch: 16 +[ Wed Sep 7 22:41:20 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:45:42 2022 ] Mean training loss: 1.1839. +[ Wed Sep 7 22:45:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:45:42 2022 ] Training epoch: 17 +[ Wed Sep 7 22:45:42 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:50:05 2022 ] Mean training loss: 1.1774. +[ Wed Sep 7 22:50:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:50:05 2022 ] Training epoch: 18 +[ Wed Sep 7 22:50:05 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:54:28 2022 ] Mean training loss: 1.1332. +[ Wed Sep 7 22:54:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:54:28 2022 ] Training epoch: 19 +[ Wed Sep 7 22:54:28 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:58:50 2022 ] Mean training loss: 1.1246. +[ Wed Sep 7 22:58:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:58:50 2022 ] Training epoch: 20 +[ Wed Sep 7 22:58:50 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:03:13 2022 ] Mean training loss: 1.1059. +[ Wed Sep 7 23:03:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:03:13 2022 ] Training epoch: 21 +[ Wed Sep 7 23:03:13 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:07:35 2022 ] Mean training loss: 1.0691. +[ Wed Sep 7 23:07:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:07:35 2022 ] Training epoch: 22 +[ Wed Sep 7 23:07:35 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:11:58 2022 ] Mean training loss: 1.0708. 
+[ Wed Sep 7 23:11:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:11:58 2022 ] Training epoch: 23 +[ Wed Sep 7 23:11:58 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:16:20 2022 ] Mean training loss: 1.0656. +[ Wed Sep 7 23:16:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:16:20 2022 ] Training epoch: 24 +[ Wed Sep 7 23:16:20 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:20:42 2022 ] Mean training loss: 1.0507. +[ Wed Sep 7 23:20:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:20:42 2022 ] Training epoch: 25 +[ Wed Sep 7 23:20:42 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:25:05 2022 ] Mean training loss: 1.0367. +[ Wed Sep 7 23:25:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:25:05 2022 ] Training epoch: 26 +[ Wed Sep 7 23:25:05 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:29:27 2022 ] Mean training loss: 1.0236. +[ Wed Sep 7 23:29:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:29:27 2022 ] Training epoch: 27 +[ Wed Sep 7 23:29:27 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:33:48 2022 ] Mean training loss: 1.0079. +[ Wed Sep 7 23:33:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:33:48 2022 ] Training epoch: 28 +[ Wed Sep 7 23:33:48 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:38:11 2022 ] Mean training loss: 0.9952. +[ Wed Sep 7 23:38:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:38:11 2022 ] Training epoch: 29 +[ Wed Sep 7 23:38:11 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:42:33 2022 ] Mean training loss: 1.0019. +[ Wed Sep 7 23:42:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:42:33 2022 ] Training epoch: 30 +[ Wed Sep 7 23:42:33 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:46:56 2022 ] Mean training loss: 0.9858. 
+[ Wed Sep 7 23:46:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:46:56 2022 ] Training epoch: 31 +[ Wed Sep 7 23:46:56 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:51:18 2022 ] Mean training loss: 0.9633. +[ Wed Sep 7 23:51:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:51:18 2022 ] Training epoch: 32 +[ Wed Sep 7 23:51:18 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:55:40 2022 ] Mean training loss: 0.9670. +[ Wed Sep 7 23:55:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:55:40 2022 ] Training epoch: 33 +[ Wed Sep 7 23:55:40 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:00:02 2022 ] Mean training loss: 0.9783. +[ Thu Sep 8 00:00:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:00:02 2022 ] Training epoch: 34 +[ Thu Sep 8 00:00:02 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:04:24 2022 ] Mean training loss: 0.9647. +[ Thu Sep 8 00:04:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:04:24 2022 ] Training epoch: 35 +[ Thu Sep 8 00:04:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:08:46 2022 ] Mean training loss: 0.9546. +[ Thu Sep 8 00:08:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:08:46 2022 ] Training epoch: 36 +[ Thu Sep 8 00:08:46 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:13:08 2022 ] Mean training loss: 0.9352. +[ Thu Sep 8 00:13:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:13:08 2022 ] Training epoch: 37 +[ Thu Sep 8 00:13:08 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:17:30 2022 ] Mean training loss: 0.9385. +[ Thu Sep 8 00:17:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:17:30 2022 ] Training epoch: 38 +[ Thu Sep 8 00:17:30 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:21:52 2022 ] Mean training loss: 0.9287. 
+[ Thu Sep 8 00:21:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:21:52 2022 ] Training epoch: 39 +[ Thu Sep 8 00:21:52 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:26:14 2022 ] Mean training loss: 0.9366. +[ Thu Sep 8 00:26:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:26:14 2022 ] Training epoch: 40 +[ Thu Sep 8 00:26:14 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:30:37 2022 ] Mean training loss: 0.9410. +[ Thu Sep 8 00:30:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:30:37 2022 ] Training epoch: 41 +[ Thu Sep 8 00:30:37 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:34:59 2022 ] Mean training loss: 0.9381. +[ Thu Sep 8 00:34:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:34:59 2022 ] Training epoch: 42 +[ Thu Sep 8 00:34:59 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:39:21 2022 ] Mean training loss: 0.9230. +[ Thu Sep 8 00:39:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:39:21 2022 ] Training epoch: 43 +[ Thu Sep 8 00:39:21 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:43:43 2022 ] Mean training loss: 0.9217. +[ Thu Sep 8 00:43:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:43:43 2022 ] Training epoch: 44 +[ Thu Sep 8 00:43:43 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:48:05 2022 ] Mean training loss: 0.9077. +[ Thu Sep 8 00:48:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:48:05 2022 ] Training epoch: 45 +[ Thu Sep 8 00:48:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:52:26 2022 ] Mean training loss: 0.9124. +[ Thu Sep 8 00:52:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:52:26 2022 ] Training epoch: 46 +[ Thu Sep 8 00:52:26 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:56:48 2022 ] Mean training loss: 0.9260. 
+[ Thu Sep 8 00:56:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:56:48 2022 ] Training epoch: 47 +[ Thu Sep 8 00:56:48 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:01:10 2022 ] Mean training loss: 0.9053. +[ Thu Sep 8 01:01:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:01:10 2022 ] Training epoch: 48 +[ Thu Sep 8 01:01:10 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:05:33 2022 ] Mean training loss: 0.8866. +[ Thu Sep 8 01:05:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:05:33 2022 ] Training epoch: 49 +[ Thu Sep 8 01:05:33 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:09:54 2022 ] Mean training loss: 0.8913. +[ Thu Sep 8 01:09:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:09:54 2022 ] Training epoch: 50 +[ Thu Sep 8 01:09:54 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:14:15 2022 ] Mean training loss: 0.8890. +[ Thu Sep 8 01:14:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:14:15 2022 ] Training epoch: 51 +[ Thu Sep 8 01:14:15 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:18:37 2022 ] Mean training loss: 0.4241. +[ Thu Sep 8 01:18:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:18:37 2022 ] Eval epoch: 51 +[ Thu Sep 8 01:26:33 2022 ] Epoch 51 Curr Acc: (34810/59477)58.53% +[ Thu Sep 8 01:26:33 2022 ] Epoch 51 Best Acc 58.53% +[ Thu Sep 8 01:26:33 2022 ] Training epoch: 52 +[ Thu Sep 8 01:26:33 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:30:54 2022 ] Mean training loss: 0.2982. +[ Thu Sep 8 01:30:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:30:54 2022 ] Eval epoch: 52 +[ Thu Sep 8 01:38:43 2022 ] Epoch 52 Curr Acc: (35818/59477)60.22% +[ Thu Sep 8 01:38:43 2022 ] Epoch 52 Best Acc 60.22% +[ Thu Sep 8 01:38:43 2022 ] Training epoch: 53 +[ Thu Sep 8 01:38:43 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:43:04 2022 ] Mean training loss: 0.2447. 
+[ Thu Sep 8 01:43:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:43:04 2022 ] Eval epoch: 53 +[ Thu Sep 8 01:50:52 2022 ] Epoch 53 Curr Acc: (36031/59477)60.58% +[ Thu Sep 8 01:50:52 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 01:50:52 2022 ] Training epoch: 54 +[ Thu Sep 8 01:50:52 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:55:14 2022 ] Mean training loss: 0.2083. +[ Thu Sep 8 01:55:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:55:14 2022 ] Eval epoch: 54 +[ Thu Sep 8 02:03:02 2022 ] Epoch 54 Curr Acc: (35908/59477)60.37% +[ Thu Sep 8 02:03:02 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 02:03:02 2022 ] Training epoch: 55 +[ Thu Sep 8 02:03:02 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:07:23 2022 ] Mean training loss: 0.1723. +[ Thu Sep 8 02:07:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:07:23 2022 ] Eval epoch: 55 +[ Thu Sep 8 02:15:11 2022 ] Epoch 55 Curr Acc: (35825/59477)60.23% +[ Thu Sep 8 02:15:11 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 02:15:11 2022 ] Training epoch: 56 +[ Thu Sep 8 02:15:11 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:19:32 2022 ] Mean training loss: 0.1578. +[ Thu Sep 8 02:19:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:19:32 2022 ] Eval epoch: 56 +[ Thu Sep 8 02:27:21 2022 ] Epoch 56 Curr Acc: (35967/59477)60.47% +[ Thu Sep 8 02:27:21 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 02:27:21 2022 ] Training epoch: 57 +[ Thu Sep 8 02:27:21 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:31:42 2022 ] Mean training loss: 0.1368. +[ Thu Sep 8 02:31:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:31:42 2022 ] Eval epoch: 57 +[ Thu Sep 8 02:39:30 2022 ] Epoch 57 Curr Acc: (35911/59477)60.38% +[ Thu Sep 8 02:39:30 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 02:39:30 2022 ] Training epoch: 58 +[ Thu Sep 8 02:39:30 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:43:52 2022 ] Mean training loss: 0.1265. 
+[ Thu Sep 8 02:43:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:43:52 2022 ] Eval epoch: 58 +[ Thu Sep 8 02:51:40 2022 ] Epoch 58 Curr Acc: (35536/59477)59.75% +[ Thu Sep 8 02:51:40 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 02:51:40 2022 ] Training epoch: 59 +[ Thu Sep 8 02:51:40 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:56:01 2022 ] Mean training loss: 0.1084. +[ Thu Sep 8 02:56:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:56:01 2022 ] Eval epoch: 59 +[ Thu Sep 8 03:03:49 2022 ] Epoch 59 Curr Acc: (35434/59477)59.58% +[ Thu Sep 8 03:03:49 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 03:03:49 2022 ] Training epoch: 60 +[ Thu Sep 8 03:03:49 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:08:11 2022 ] Mean training loss: 0.0970. +[ Thu Sep 8 03:08:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:08:11 2022 ] Eval epoch: 60 +[ Thu Sep 8 03:15:59 2022 ] Epoch 60 Curr Acc: (35254/59477)59.27% +[ Thu Sep 8 03:15:59 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 03:15:59 2022 ] Training epoch: 61 +[ Thu Sep 8 03:15:59 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:20:20 2022 ] Mean training loss: 0.0871. +[ Thu Sep 8 03:20:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:20:20 2022 ] Eval epoch: 61 +[ Thu Sep 8 03:28:08 2022 ] Epoch 61 Curr Acc: (35169/59477)59.13% +[ Thu Sep 8 03:28:08 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 03:28:08 2022 ] Training epoch: 62 +[ Thu Sep 8 03:28:08 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:32:29 2022 ] Mean training loss: 0.0778. +[ Thu Sep 8 03:32:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:32:29 2022 ] Eval epoch: 62 +[ Thu Sep 8 03:40:17 2022 ] Epoch 62 Curr Acc: (35937/59477)60.42% +[ Thu Sep 8 03:40:17 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 03:40:17 2022 ] Training epoch: 63 +[ Thu Sep 8 03:40:17 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:44:38 2022 ] Mean training loss: 0.0739. 
+[ Thu Sep 8 03:44:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:44:38 2022 ] Eval epoch: 63 +[ Thu Sep 8 03:52:26 2022 ] Epoch 63 Curr Acc: (35673/59477)59.98% +[ Thu Sep 8 03:52:26 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 03:52:26 2022 ] Training epoch: 64 +[ Thu Sep 8 03:52:26 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:56:47 2022 ] Mean training loss: 0.0715. +[ Thu Sep 8 03:56:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:56:47 2022 ] Eval epoch: 64 +[ Thu Sep 8 04:04:36 2022 ] Epoch 64 Curr Acc: (35520/59477)59.72% +[ Thu Sep 8 04:04:36 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 04:04:36 2022 ] Training epoch: 65 +[ Thu Sep 8 04:04:36 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:08:57 2022 ] Mean training loss: 0.0650. +[ Thu Sep 8 04:08:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:08:57 2022 ] Eval epoch: 65 +[ Thu Sep 8 04:16:45 2022 ] Epoch 65 Curr Acc: (35293/59477)59.34% +[ Thu Sep 8 04:16:45 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 04:16:45 2022 ] Training epoch: 66 +[ Thu Sep 8 04:16:45 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:21:06 2022 ] Mean training loss: 0.0668. +[ Thu Sep 8 04:21:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:21:06 2022 ] Eval epoch: 66 +[ Thu Sep 8 04:28:54 2022 ] Epoch 66 Curr Acc: (35144/59477)59.09% +[ Thu Sep 8 04:28:54 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 04:28:54 2022 ] Training epoch: 67 +[ Thu Sep 8 04:28:54 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:33:15 2022 ] Mean training loss: 0.0558. +[ Thu Sep 8 04:33:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:33:16 2022 ] Eval epoch: 67 +[ Thu Sep 8 04:41:04 2022 ] Epoch 67 Curr Acc: (34918/59477)58.71% +[ Thu Sep 8 04:41:04 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 04:41:04 2022 ] Training epoch: 68 +[ Thu Sep 8 04:41:04 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:45:25 2022 ] Mean training loss: 0.0578. 
+[ Thu Sep 8 04:45:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:45:25 2022 ] Eval epoch: 68 +[ Thu Sep 8 04:53:13 2022 ] Epoch 68 Curr Acc: (35221/59477)59.22% +[ Thu Sep 8 04:53:13 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 04:53:13 2022 ] Training epoch: 69 +[ Thu Sep 8 04:53:13 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:57:34 2022 ] Mean training loss: 0.0592. +[ Thu Sep 8 04:57:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:57:34 2022 ] Eval epoch: 69 +[ Thu Sep 8 05:05:22 2022 ] Epoch 69 Curr Acc: (35637/59477)59.92% +[ Thu Sep 8 05:05:22 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 05:05:22 2022 ] Training epoch: 70 +[ Thu Sep 8 05:05:22 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:09:42 2022 ] Mean training loss: 0.0493. +[ Thu Sep 8 05:09:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:09:43 2022 ] Eval epoch: 70 +[ Thu Sep 8 05:17:31 2022 ] Epoch 70 Curr Acc: (34949/59477)58.76% +[ Thu Sep 8 05:17:31 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 05:17:31 2022 ] Training epoch: 71 +[ Thu Sep 8 05:17:31 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:21:52 2022 ] Mean training loss: 0.0381. +[ Thu Sep 8 05:21:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:21:52 2022 ] Eval epoch: 71 +[ Thu Sep 8 05:29:40 2022 ] Epoch 71 Curr Acc: (35652/59477)59.94% +[ Thu Sep 8 05:29:40 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 05:29:40 2022 ] Training epoch: 72 +[ Thu Sep 8 05:29:40 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:34:01 2022 ] Mean training loss: 0.0282. 
+[ Thu Sep 8 05:34:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:34:02 2022 ] Eval epoch: 72 +[ Thu Sep 8 05:41:50 2022 ] Epoch 72 Curr Acc: (35606/59477)59.87% +[ Thu Sep 8 05:41:50 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 05:41:50 2022 ] Training epoch: 73 +[ Thu Sep 8 05:41:50 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:46:11 2022 ] Mean training loss: 0.0261. +[ Thu Sep 8 05:46:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:46:11 2022 ] Eval epoch: 73 +[ Thu Sep 8 05:53:59 2022 ] Epoch 73 Curr Acc: (35464/59477)59.63% +[ Thu Sep 8 05:53:59 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 05:53:59 2022 ] Training epoch: 74 +[ Thu Sep 8 05:53:59 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:58:20 2022 ] Mean training loss: 0.0262. +[ Thu Sep 8 05:58:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:58:20 2022 ] Eval epoch: 74 +[ Thu Sep 8 06:06:08 2022 ] Epoch 74 Curr Acc: (35752/59477)60.11% +[ Thu Sep 8 06:06:08 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 06:06:08 2022 ] Training epoch: 75 +[ Thu Sep 8 06:06:08 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:10:29 2022 ] Mean training loss: 0.0240. +[ Thu Sep 8 06:10:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:10:29 2022 ] Eval epoch: 75 +[ Thu Sep 8 06:18:17 2022 ] Epoch 75 Curr Acc: (35418/59477)59.55% +[ Thu Sep 8 06:18:17 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 06:18:17 2022 ] Training epoch: 76 +[ Thu Sep 8 06:18:17 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:22:38 2022 ] Mean training loss: 0.0237. 
+[ Thu Sep 8 06:22:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:22:38 2022 ] Eval epoch: 76 +[ Thu Sep 8 06:30:26 2022 ] Epoch 76 Curr Acc: (35509/59477)59.70% +[ Thu Sep 8 06:30:26 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 06:30:26 2022 ] Training epoch: 77 +[ Thu Sep 8 06:30:26 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:34:47 2022 ] Mean training loss: 0.0215. +[ Thu Sep 8 06:34:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:34:47 2022 ] Eval epoch: 77 +[ Thu Sep 8 06:42:35 2022 ] Epoch 77 Curr Acc: (35933/59477)60.41% +[ Thu Sep 8 06:42:35 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 06:42:35 2022 ] Training epoch: 78 +[ Thu Sep 8 06:42:35 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:46:56 2022 ] Mean training loss: 0.0203. +[ Thu Sep 8 06:46:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:46:56 2022 ] Eval epoch: 78 +[ Thu Sep 8 06:54:44 2022 ] Epoch 78 Curr Acc: (35729/59477)60.07% +[ Thu Sep 8 06:54:44 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 06:54:44 2022 ] Training epoch: 79 +[ Thu Sep 8 06:54:44 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:59:04 2022 ] Mean training loss: 0.0199. +[ Thu Sep 8 06:59:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:59:04 2022 ] Eval epoch: 79 +[ Thu Sep 8 07:06:52 2022 ] Epoch 79 Curr Acc: (35253/59477)59.27% +[ Thu Sep 8 07:06:52 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 07:06:52 2022 ] Training epoch: 80 +[ Thu Sep 8 07:06:52 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:11:13 2022 ] Mean training loss: 0.0214. 
+[ Thu Sep 8 07:11:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:11:13 2022 ] Eval epoch: 80 +[ Thu Sep 8 07:19:01 2022 ] Epoch 80 Curr Acc: (35619/59477)59.89% +[ Thu Sep 8 07:19:01 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 07:19:01 2022 ] Training epoch: 81 +[ Thu Sep 8 07:19:01 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:23:22 2022 ] Mean training loss: 0.0198. +[ Thu Sep 8 07:23:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:23:22 2022 ] Eval epoch: 81 +[ Thu Sep 8 07:31:10 2022 ] Epoch 81 Curr Acc: (35872/59477)60.31% +[ Thu Sep 8 07:31:10 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 07:31:10 2022 ] Training epoch: 82 +[ Thu Sep 8 07:31:10 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:35:31 2022 ] Mean training loss: 0.0198. +[ Thu Sep 8 07:35:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:35:31 2022 ] Eval epoch: 82 +[ Thu Sep 8 07:43:19 2022 ] Epoch 82 Curr Acc: (35989/59477)60.51% +[ Thu Sep 8 07:43:19 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 07:43:19 2022 ] Training epoch: 83 +[ Thu Sep 8 07:43:19 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:47:41 2022 ] Mean training loss: 0.0192. +[ Thu Sep 8 07:47:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:47:41 2022 ] Eval epoch: 83 +[ Thu Sep 8 07:55:29 2022 ] Epoch 83 Curr Acc: (35826/59477)60.24% +[ Thu Sep 8 07:55:29 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 07:55:29 2022 ] Training epoch: 84 +[ Thu Sep 8 07:55:29 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:59:49 2022 ] Mean training loss: 0.0196. 
+[ Thu Sep 8 07:59:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:59:49 2022 ] Eval epoch: 84 +[ Thu Sep 8 08:07:37 2022 ] Epoch 84 Curr Acc: (35643/59477)59.93% +[ Thu Sep 8 08:07:37 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 08:07:37 2022 ] Training epoch: 85 +[ Thu Sep 8 08:07:37 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:11:57 2022 ] Mean training loss: 0.0183. +[ Thu Sep 8 08:11:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:11:57 2022 ] Eval epoch: 85 +[ Thu Sep 8 08:19:45 2022 ] Epoch 85 Curr Acc: (35881/59477)60.33% +[ Thu Sep 8 08:19:45 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 08:19:45 2022 ] Training epoch: 86 +[ Thu Sep 8 08:19:45 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:24:05 2022 ] Mean training loss: 0.0176. +[ Thu Sep 8 08:24:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:24:05 2022 ] Eval epoch: 86 +[ Thu Sep 8 08:31:53 2022 ] Epoch 86 Curr Acc: (35653/59477)59.94% +[ Thu Sep 8 08:31:53 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 08:31:53 2022 ] Training epoch: 87 +[ Thu Sep 8 08:31:53 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:36:13 2022 ] Mean training loss: 0.0165. +[ Thu Sep 8 08:36:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:36:13 2022 ] Eval epoch: 87 +[ Thu Sep 8 08:44:01 2022 ] Epoch 87 Curr Acc: (35749/59477)60.11% +[ Thu Sep 8 08:44:01 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 08:44:01 2022 ] Training epoch: 88 +[ Thu Sep 8 08:44:01 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:48:21 2022 ] Mean training loss: 0.0172. 
+[ Thu Sep 8 08:48:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:48:21 2022 ] Eval epoch: 88 +[ Thu Sep 8 08:56:09 2022 ] Epoch 88 Curr Acc: (35747/59477)60.10% +[ Thu Sep 8 08:56:09 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 08:56:09 2022 ] Training epoch: 89 +[ Thu Sep 8 08:56:09 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:00:29 2022 ] Mean training loss: 0.0170. +[ Thu Sep 8 09:00:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:00:29 2022 ] Eval epoch: 89 +[ Thu Sep 8 09:08:17 2022 ] Epoch 89 Curr Acc: (35815/59477)60.22% +[ Thu Sep 8 09:08:17 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 09:08:17 2022 ] Training epoch: 90 +[ Thu Sep 8 09:08:17 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:12:38 2022 ] Mean training loss: 0.0171. +[ Thu Sep 8 09:12:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:12:38 2022 ] Eval epoch: 90 +[ Thu Sep 8 09:20:26 2022 ] Epoch 90 Curr Acc: (35771/59477)60.14% +[ Thu Sep 8 09:20:26 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 09:20:26 2022 ] Training epoch: 91 +[ Thu Sep 8 09:20:26 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 09:24:47 2022 ] Mean training loss: 0.0170. +[ Thu Sep 8 09:24:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:24:47 2022 ] Eval epoch: 91 +[ Thu Sep 8 09:32:35 2022 ] Epoch 91 Curr Acc: (35898/59477)60.36% +[ Thu Sep 8 09:32:35 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 09:32:35 2022 ] Training epoch: 92 +[ Thu Sep 8 09:32:35 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 09:36:56 2022 ] Mean training loss: 0.0163. 
+[ Thu Sep 8 09:36:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:36:56 2022 ] Eval epoch: 92 +[ Thu Sep 8 09:44:44 2022 ] Epoch 92 Curr Acc: (35934/59477)60.42% +[ Thu Sep 8 09:44:44 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 09:44:44 2022 ] Training epoch: 93 +[ Thu Sep 8 09:44:44 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 09:49:06 2022 ] Mean training loss: 0.0174. +[ Thu Sep 8 09:49:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:49:06 2022 ] Eval epoch: 93 +[ Thu Sep 8 09:56:54 2022 ] Epoch 93 Curr Acc: (35876/59477)60.32% +[ Thu Sep 8 09:56:54 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 09:56:54 2022 ] Training epoch: 94 +[ Thu Sep 8 09:56:54 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:01:16 2022 ] Mean training loss: 0.0173. +[ Thu Sep 8 10:01:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:01:16 2022 ] Eval epoch: 94 +[ Thu Sep 8 10:09:04 2022 ] Epoch 94 Curr Acc: (35933/59477)60.41% +[ Thu Sep 8 10:09:04 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 10:09:04 2022 ] Training epoch: 95 +[ Thu Sep 8 10:09:04 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:13:25 2022 ] Mean training loss: 0.0171. +[ Thu Sep 8 10:13:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:13:26 2022 ] Eval epoch: 95 +[ Thu Sep 8 10:21:13 2022 ] Epoch 95 Curr Acc: (35639/59477)59.92% +[ Thu Sep 8 10:21:13 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 10:21:13 2022 ] Training epoch: 96 +[ Thu Sep 8 10:21:13 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:25:34 2022 ] Mean training loss: 0.0186. 
+[ Thu Sep 8 10:25:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:25:34 2022 ] Eval epoch: 96 +[ Thu Sep 8 10:33:22 2022 ] Epoch 96 Curr Acc: (35774/59477)60.15% +[ Thu Sep 8 10:33:22 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 10:33:22 2022 ] Training epoch: 97 +[ Thu Sep 8 10:33:22 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:37:44 2022 ] Mean training loss: 0.0168. +[ Thu Sep 8 10:37:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:37:44 2022 ] Eval epoch: 97 +[ Thu Sep 8 10:45:32 2022 ] Epoch 97 Curr Acc: (35688/59477)60.00% +[ Thu Sep 8 10:45:32 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 10:45:32 2022 ] Training epoch: 98 +[ Thu Sep 8 10:45:32 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:49:54 2022 ] Mean training loss: 0.0166. +[ Thu Sep 8 10:49:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:49:54 2022 ] Eval epoch: 98 +[ Thu Sep 8 10:57:43 2022 ] Epoch 98 Curr Acc: (35811/59477)60.21% +[ Thu Sep 8 10:57:43 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 10:57:43 2022 ] Training epoch: 99 +[ Thu Sep 8 10:57:43 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:02:05 2022 ] Mean training loss: 0.0165. +[ Thu Sep 8 11:02:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:02:05 2022 ] Eval epoch: 99 +[ Thu Sep 8 11:09:54 2022 ] Epoch 99 Curr Acc: (36018/59477)60.56% +[ Thu Sep 8 11:09:54 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 11:09:54 2022 ] Training epoch: 100 +[ Thu Sep 8 11:09:54 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:14:15 2022 ] Mean training loss: 0.0160. 
+[ Thu Sep 8 11:14:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:14:15 2022 ] Eval epoch: 100 +[ Thu Sep 8 11:22:04 2022 ] Epoch 100 Curr Acc: (35906/59477)60.37% +[ Thu Sep 8 11:22:04 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 11:22:04 2022 ] Training epoch: 101 +[ Thu Sep 8 11:22:04 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:26:26 2022 ] Mean training loss: 0.0159. +[ Thu Sep 8 11:26:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:26:26 2022 ] Eval epoch: 101 +[ Thu Sep 8 11:34:15 2022 ] Epoch 101 Curr Acc: (35758/59477)60.12% +[ Thu Sep 8 11:34:15 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 11:34:15 2022 ] Training epoch: 102 +[ Thu Sep 8 11:34:15 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:38:38 2022 ] Mean training loss: 0.0157. +[ Thu Sep 8 11:38:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:38:38 2022 ] Eval epoch: 102 +[ Thu Sep 8 11:46:27 2022 ] Epoch 102 Curr Acc: (35932/59477)60.41% +[ Thu Sep 8 11:46:27 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 11:46:27 2022 ] Training epoch: 103 +[ Thu Sep 8 11:46:27 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:50:49 2022 ] Mean training loss: 0.0176. +[ Thu Sep 8 11:50:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:50:49 2022 ] Eval epoch: 103 +[ Thu Sep 8 11:58:38 2022 ] Epoch 103 Curr Acc: (35839/59477)60.26% +[ Thu Sep 8 11:58:38 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 11:58:38 2022 ] Training epoch: 104 +[ Thu Sep 8 11:58:38 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:03:00 2022 ] Mean training loss: 0.0184. 
+[ Thu Sep 8 12:03:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:03:00 2022 ] Eval epoch: 104 +[ Thu Sep 8 12:10:49 2022 ] Epoch 104 Curr Acc: (35819/59477)60.22% +[ Thu Sep 8 12:10:49 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 12:10:49 2022 ] Training epoch: 105 +[ Thu Sep 8 12:10:49 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:15:11 2022 ] Mean training loss: 0.0157. +[ Thu Sep 8 12:15:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:15:11 2022 ] Eval epoch: 105 +[ Thu Sep 8 12:23:00 2022 ] Epoch 105 Curr Acc: (35630/59477)59.91% +[ Thu Sep 8 12:23:00 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 12:23:00 2022 ] Training epoch: 106 +[ Thu Sep 8 12:23:00 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:27:23 2022 ] Mean training loss: 0.0172. +[ Thu Sep 8 12:27:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:27:23 2022 ] Eval epoch: 106 +[ Thu Sep 8 12:35:11 2022 ] Epoch 106 Curr Acc: (35892/59477)60.35% +[ Thu Sep 8 12:35:11 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 12:35:12 2022 ] Training epoch: 107 +[ Thu Sep 8 12:35:12 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:39:34 2022 ] Mean training loss: 0.0157. +[ Thu Sep 8 12:39:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:39:34 2022 ] Eval epoch: 107 +[ Thu Sep 8 12:47:23 2022 ] Epoch 107 Curr Acc: (35688/59477)60.00% +[ Thu Sep 8 12:47:23 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 12:47:23 2022 ] Training epoch: 108 +[ Thu Sep 8 12:47:23 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:51:46 2022 ] Mean training loss: 0.0163. 
+[ Thu Sep 8 12:51:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:51:46 2022 ] Eval epoch: 108 +[ Thu Sep 8 12:59:34 2022 ] Epoch 108 Curr Acc: (35900/59477)60.36% +[ Thu Sep 8 12:59:34 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 12:59:34 2022 ] Training epoch: 109 +[ Thu Sep 8 12:59:34 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:03:57 2022 ] Mean training loss: 0.0162. +[ Thu Sep 8 13:03:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:03:57 2022 ] Eval epoch: 109 +[ Thu Sep 8 13:11:46 2022 ] Epoch 109 Curr Acc: (35867/59477)60.30% +[ Thu Sep 8 13:11:46 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 13:11:46 2022 ] Training epoch: 110 +[ Thu Sep 8 13:11:46 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:16:09 2022 ] Mean training loss: 0.0158. +[ Thu Sep 8 13:16:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:16:09 2022 ] Eval epoch: 110 +[ Thu Sep 8 13:23:58 2022 ] Epoch 110 Curr Acc: (35741/59477)60.09% +[ Thu Sep 8 13:23:58 2022 ] Epoch 53 Best Acc 60.58% +[ Thu Sep 8 13:23:58 2022 ] epoch: 53, best accuracy: 0.6057971989172285 +[ Thu Sep 8 13:23:58 2022 ] Experiment: ./work_dir/ntu120/xset_b +[ Thu Sep 8 13:23:58 2022 ] # generator parameters: 2.922995 M. +[ Thu Sep 8 13:23:58 2022 ] Load weights from ./runs/ntu120/xset_b/runs-52-68741.pt. 
+[ Thu Sep 8 13:23:58 2022 ] Eval epoch: 1 +[ Thu Sep 8 13:31:47 2022 ] Epoch 1 Curr Acc: (36031/59477)60.58% +[ Thu Sep 8 13:31:47 2022 ] Epoch 53 Best Acc 60.58% diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_bm/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu120_xset/xset_bm/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_bm/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions import Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + 
self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return 
self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for 
param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_bm/config.yaml b/ckpt/Others/MST-GCN/ntu120_xset/xset_bm/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b9d59a72fdd2369ec2dfa112d73c876f280f7c13 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_bm/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu120/xset_bm.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + - 448 + - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 120 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu120/xset_bm/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone_motion.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone_motion.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl + normalization: false + 
random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu120/xset_bm diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_bm/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu120_xset/xset_bm/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..633416082dce43b329db00d6a2b5c891329061ec --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_bm/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4623e5e955d6059c5eea10e3c6bbcc860f03e1765f0a7b768011e8bd6aada7a4 +size 34946665 diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_bm/log.txt b/ckpt/Others/MST-GCN/ntu120_xset/xset_bm/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..4bb507e18a7047a970e78ee7bfab7d95c529fd2a --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_bm/log.txt @@ -0,0 +1,631 @@ +[ Wed Sep 7 21:35:43 2022 ] # generator parameters: 2.922995 M. 
+[ Wed Sep 7 21:35:43 2022 ] Parameters: +{'work_dir': './work_dir/ntu120/xset_bm', 'model_saved_name': './runs/ntu120/xset_bm/runs', 'config': 'config/ntu120/xset_bm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Wed Sep 7 21:35:43 2022 ] Training epoch: 1 +[ Wed Sep 7 21:35:43 2022 ] Learning rate: 0.015 +[ Wed Sep 7 21:40:09 2022 ] Mean training loss: 
3.7616. +[ Wed Sep 7 21:40:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:40:09 2022 ] Training epoch: 2 +[ Wed Sep 7 21:40:09 2022 ] Learning rate: 0.03 +[ Wed Sep 7 21:44:32 2022 ] Mean training loss: 2.8929. +[ Wed Sep 7 21:44:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:44:32 2022 ] Training epoch: 3 +[ Wed Sep 7 21:44:32 2022 ] Learning rate: 0.045 +[ Wed Sep 7 21:48:57 2022 ] Mean training loss: 2.3294. +[ Wed Sep 7 21:48:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:48:57 2022 ] Training epoch: 4 +[ Wed Sep 7 21:48:57 2022 ] Learning rate: 0.06 +[ Wed Sep 7 21:53:21 2022 ] Mean training loss: 1.9961. +[ Wed Sep 7 21:53:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:53:21 2022 ] Training epoch: 5 +[ Wed Sep 7 21:53:21 2022 ] Learning rate: 0.075 +[ Wed Sep 7 21:57:45 2022 ] Mean training loss: 1.7605. +[ Wed Sep 7 21:57:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:57:45 2022 ] Training epoch: 6 +[ Wed Sep 7 21:57:45 2022 ] Learning rate: 0.09 +[ Wed Sep 7 22:02:10 2022 ] Mean training loss: 1.6430. +[ Wed Sep 7 22:02:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:02:10 2022 ] Training epoch: 7 +[ Wed Sep 7 22:02:10 2022 ] Learning rate: 0.10500000000000001 +[ Wed Sep 7 22:06:34 2022 ] Mean training loss: 1.5372. +[ Wed Sep 7 22:06:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:06:34 2022 ] Training epoch: 8 +[ Wed Sep 7 22:06:34 2022 ] Learning rate: 0.12 +[ Wed Sep 7 22:10:58 2022 ] Mean training loss: 1.4878. +[ Wed Sep 7 22:10:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:10:58 2022 ] Training epoch: 9 +[ Wed Sep 7 22:10:58 2022 ] Learning rate: 0.13499999999999998 +[ Wed Sep 7 22:15:22 2022 ] Mean training loss: 1.4265. 
+[ Wed Sep 7 22:15:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:15:22 2022 ] Training epoch: 10 +[ Wed Sep 7 22:15:22 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:19:46 2022 ] Mean training loss: 1.4175. +[ Wed Sep 7 22:19:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:19:46 2022 ] Training epoch: 11 +[ Wed Sep 7 22:19:46 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:24:11 2022 ] Mean training loss: 1.3220. +[ Wed Sep 7 22:24:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:24:11 2022 ] Training epoch: 12 +[ Wed Sep 7 22:24:11 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:28:35 2022 ] Mean training loss: 1.3016. +[ Wed Sep 7 22:28:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:28:35 2022 ] Training epoch: 13 +[ Wed Sep 7 22:28:35 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:32:59 2022 ] Mean training loss: 1.2391. +[ Wed Sep 7 22:32:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:32:59 2022 ] Training epoch: 14 +[ Wed Sep 7 22:32:59 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:37:24 2022 ] Mean training loss: 1.1996. +[ Wed Sep 7 22:37:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:37:24 2022 ] Training epoch: 15 +[ Wed Sep 7 22:37:24 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:41:48 2022 ] Mean training loss: 1.1829. +[ Wed Sep 7 22:41:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:41:48 2022 ] Training epoch: 16 +[ Wed Sep 7 22:41:48 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:46:13 2022 ] Mean training loss: 1.1456. +[ Wed Sep 7 22:46:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:46:13 2022 ] Training epoch: 17 +[ Wed Sep 7 22:46:13 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:50:38 2022 ] Mean training loss: 1.1325. 
+[ Wed Sep 7 22:50:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:50:38 2022 ] Training epoch: 18 +[ Wed Sep 7 22:50:38 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:55:02 2022 ] Mean training loss: 1.1006. +[ Wed Sep 7 22:55:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:55:02 2022 ] Training epoch: 19 +[ Wed Sep 7 22:55:02 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:59:26 2022 ] Mean training loss: 1.0961. +[ Wed Sep 7 22:59:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:59:26 2022 ] Training epoch: 20 +[ Wed Sep 7 22:59:26 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:03:51 2022 ] Mean training loss: 1.0554. +[ Wed Sep 7 23:03:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:03:51 2022 ] Training epoch: 21 +[ Wed Sep 7 23:03:51 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:08:15 2022 ] Mean training loss: 1.0497. +[ Wed Sep 7 23:08:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:08:15 2022 ] Training epoch: 22 +[ Wed Sep 7 23:08:15 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:12:39 2022 ] Mean training loss: 1.0341. +[ Wed Sep 7 23:12:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:12:39 2022 ] Training epoch: 23 +[ Wed Sep 7 23:12:39 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:17:04 2022 ] Mean training loss: 1.0167. +[ Wed Sep 7 23:17:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:17:04 2022 ] Training epoch: 24 +[ Wed Sep 7 23:17:04 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:21:29 2022 ] Mean training loss: 1.0323. +[ Wed Sep 7 23:21:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:21:29 2022 ] Training epoch: 25 +[ Wed Sep 7 23:21:29 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:25:54 2022 ] Mean training loss: 1.0009. 
+[ Wed Sep 7 23:25:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:25:54 2022 ] Training epoch: 26 +[ Wed Sep 7 23:25:54 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:30:19 2022 ] Mean training loss: 1.0010. +[ Wed Sep 7 23:30:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:30:19 2022 ] Training epoch: 27 +[ Wed Sep 7 23:30:19 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:34:43 2022 ] Mean training loss: 0.9783. +[ Wed Sep 7 23:34:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:34:43 2022 ] Training epoch: 28 +[ Wed Sep 7 23:34:43 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:39:08 2022 ] Mean training loss: 0.9682. +[ Wed Sep 7 23:39:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:39:08 2022 ] Training epoch: 29 +[ Wed Sep 7 23:39:08 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:43:32 2022 ] Mean training loss: 0.9655. +[ Wed Sep 7 23:43:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:43:32 2022 ] Training epoch: 30 +[ Wed Sep 7 23:43:32 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:47:57 2022 ] Mean training loss: 0.9336. +[ Wed Sep 7 23:47:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:47:57 2022 ] Training epoch: 31 +[ Wed Sep 7 23:47:57 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:52:22 2022 ] Mean training loss: 0.9539. +[ Wed Sep 7 23:52:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:52:22 2022 ] Training epoch: 32 +[ Wed Sep 7 23:52:22 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:56:47 2022 ] Mean training loss: 0.9466. +[ Wed Sep 7 23:56:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:56:47 2022 ] Training epoch: 33 +[ Wed Sep 7 23:56:47 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:01:12 2022 ] Mean training loss: 0.9424. 
+[ Thu Sep 8 00:01:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:01:12 2022 ] Training epoch: 34 +[ Thu Sep 8 00:01:12 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:05:36 2022 ] Mean training loss: 0.9185. +[ Thu Sep 8 00:05:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:05:36 2022 ] Training epoch: 35 +[ Thu Sep 8 00:05:36 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:10:01 2022 ] Mean training loss: 0.9306. +[ Thu Sep 8 00:10:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:10:01 2022 ] Training epoch: 36 +[ Thu Sep 8 00:10:01 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:14:26 2022 ] Mean training loss: 0.9035. +[ Thu Sep 8 00:14:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:14:26 2022 ] Training epoch: 37 +[ Thu Sep 8 00:14:26 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:18:50 2022 ] Mean training loss: 0.9184. +[ Thu Sep 8 00:18:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:18:50 2022 ] Training epoch: 38 +[ Thu Sep 8 00:18:50 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:23:14 2022 ] Mean training loss: 0.9169. +[ Thu Sep 8 00:23:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:23:14 2022 ] Training epoch: 39 +[ Thu Sep 8 00:23:14 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:27:39 2022 ] Mean training loss: 0.9008. +[ Thu Sep 8 00:27:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:27:39 2022 ] Training epoch: 40 +[ Thu Sep 8 00:27:39 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:32:03 2022 ] Mean training loss: 0.9175. +[ Thu Sep 8 00:32:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:32:03 2022 ] Training epoch: 41 +[ Thu Sep 8 00:32:03 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:36:27 2022 ] Mean training loss: 0.8860. 
+[ Thu Sep 8 00:36:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:36:27 2022 ] Training epoch: 42 +[ Thu Sep 8 00:36:27 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:40:52 2022 ] Mean training loss: 0.9037. +[ Thu Sep 8 00:40:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:40:52 2022 ] Training epoch: 43 +[ Thu Sep 8 00:40:52 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:45:15 2022 ] Mean training loss: 0.8643. +[ Thu Sep 8 00:45:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:45:15 2022 ] Training epoch: 44 +[ Thu Sep 8 00:45:15 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:49:39 2022 ] Mean training loss: 0.8888. +[ Thu Sep 8 00:49:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:49:39 2022 ] Training epoch: 45 +[ Thu Sep 8 00:49:39 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:54:03 2022 ] Mean training loss: 0.8861. +[ Thu Sep 8 00:54:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:54:03 2022 ] Training epoch: 46 +[ Thu Sep 8 00:54:03 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:58:25 2022 ] Mean training loss: 0.9013. +[ Thu Sep 8 00:58:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:58:25 2022 ] Training epoch: 47 +[ Thu Sep 8 00:58:25 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:02:49 2022 ] Mean training loss: 0.8633. +[ Thu Sep 8 01:02:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:02:49 2022 ] Training epoch: 48 +[ Thu Sep 8 01:02:49 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:07:12 2022 ] Mean training loss: 0.8594. +[ Thu Sep 8 01:07:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:07:12 2022 ] Training epoch: 49 +[ Thu Sep 8 01:07:12 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:11:37 2022 ] Mean training loss: 0.8595. 
+[ Thu Sep 8 01:11:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:11:37 2022 ] Training epoch: 50 +[ Thu Sep 8 01:11:37 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:16:01 2022 ] Mean training loss: 0.8746. +[ Thu Sep 8 01:16:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:16:01 2022 ] Training epoch: 51 +[ Thu Sep 8 01:16:01 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:20:24 2022 ] Mean training loss: 0.4132. +[ Thu Sep 8 01:20:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:20:24 2022 ] Eval epoch: 51 +[ Thu Sep 8 01:28:23 2022 ] Epoch 51 Curr Acc: (32764/59477)55.09% +[ Thu Sep 8 01:28:23 2022 ] Epoch 51 Best Acc 55.09% +[ Thu Sep 8 01:28:23 2022 ] Training epoch: 52 +[ Thu Sep 8 01:28:23 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:32:48 2022 ] Mean training loss: 0.2703. +[ Thu Sep 8 01:32:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:32:48 2022 ] Eval epoch: 52 +[ Thu Sep 8 01:40:37 2022 ] Epoch 52 Curr Acc: (33744/59477)56.73% +[ Thu Sep 8 01:40:37 2022 ] Epoch 52 Best Acc 56.73% +[ Thu Sep 8 01:40:37 2022 ] Training epoch: 53 +[ Thu Sep 8 01:40:37 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:45:02 2022 ] Mean training loss: 0.2134. +[ Thu Sep 8 01:45:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:45:02 2022 ] Eval epoch: 53 +[ Thu Sep 8 01:52:50 2022 ] Epoch 53 Curr Acc: (34759/59477)58.44% +[ Thu Sep 8 01:52:50 2022 ] Epoch 53 Best Acc 58.44% +[ Thu Sep 8 01:52:50 2022 ] Training epoch: 54 +[ Thu Sep 8 01:52:50 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:57:15 2022 ] Mean training loss: 0.1782. 
+[ Thu Sep 8 01:57:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:57:15 2022 ] Eval epoch: 54 +[ Thu Sep 8 02:05:04 2022 ] Epoch 54 Curr Acc: (34526/59477)58.05% +[ Thu Sep 8 02:05:04 2022 ] Epoch 53 Best Acc 58.44% +[ Thu Sep 8 02:05:04 2022 ] Training epoch: 55 +[ Thu Sep 8 02:05:04 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:09:28 2022 ] Mean training loss: 0.1372. +[ Thu Sep 8 02:09:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:09:28 2022 ] Eval epoch: 55 +[ Thu Sep 8 02:17:17 2022 ] Epoch 55 Curr Acc: (34180/59477)57.47% +[ Thu Sep 8 02:17:17 2022 ] Epoch 53 Best Acc 58.44% +[ Thu Sep 8 02:17:17 2022 ] Training epoch: 56 +[ Thu Sep 8 02:17:17 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:21:41 2022 ] Mean training loss: 0.1177. +[ Thu Sep 8 02:21:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:21:41 2022 ] Eval epoch: 56 +[ Thu Sep 8 02:29:30 2022 ] Epoch 56 Curr Acc: (33575/59477)56.45% +[ Thu Sep 8 02:29:30 2022 ] Epoch 53 Best Acc 58.44% +[ Thu Sep 8 02:29:30 2022 ] Training epoch: 57 +[ Thu Sep 8 02:29:30 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:33:55 2022 ] Mean training loss: 0.0980. +[ Thu Sep 8 02:33:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:33:55 2022 ] Eval epoch: 57 +[ Thu Sep 8 02:41:44 2022 ] Epoch 57 Curr Acc: (34946/59477)58.76% +[ Thu Sep 8 02:41:44 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 02:41:44 2022 ] Training epoch: 58 +[ Thu Sep 8 02:41:44 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:46:07 2022 ] Mean training loss: 0.0823. +[ Thu Sep 8 02:46:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:46:07 2022 ] Eval epoch: 58 +[ Thu Sep 8 02:53:56 2022 ] Epoch 58 Curr Acc: (34670/59477)58.29% +[ Thu Sep 8 02:53:56 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 02:53:56 2022 ] Training epoch: 59 +[ Thu Sep 8 02:53:56 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:58:20 2022 ] Mean training loss: 0.0653. 
+[ Thu Sep 8 02:58:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:58:20 2022 ] Eval epoch: 59 +[ Thu Sep 8 03:06:08 2022 ] Epoch 59 Curr Acc: (34472/59477)57.96% +[ Thu Sep 8 03:06:08 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 03:06:08 2022 ] Training epoch: 60 +[ Thu Sep 8 03:06:08 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:10:31 2022 ] Mean training loss: 0.0597. +[ Thu Sep 8 03:10:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:10:31 2022 ] Eval epoch: 60 +[ Thu Sep 8 03:18:19 2022 ] Epoch 60 Curr Acc: (34501/59477)58.01% +[ Thu Sep 8 03:18:19 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 03:18:19 2022 ] Training epoch: 61 +[ Thu Sep 8 03:18:19 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:22:42 2022 ] Mean training loss: 0.0531. +[ Thu Sep 8 03:22:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:22:42 2022 ] Eval epoch: 61 +[ Thu Sep 8 03:30:30 2022 ] Epoch 61 Curr Acc: (33065/59477)55.59% +[ Thu Sep 8 03:30:30 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 03:30:30 2022 ] Training epoch: 62 +[ Thu Sep 8 03:30:30 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:34:52 2022 ] Mean training loss: 0.0472. +[ Thu Sep 8 03:34:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:34:52 2022 ] Eval epoch: 62 +[ Thu Sep 8 03:42:40 2022 ] Epoch 62 Curr Acc: (34525/59477)58.05% +[ Thu Sep 8 03:42:40 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 03:42:40 2022 ] Training epoch: 63 +[ Thu Sep 8 03:42:40 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:47:01 2022 ] Mean training loss: 0.0389. +[ Thu Sep 8 03:47:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:47:02 2022 ] Eval epoch: 63 +[ Thu Sep 8 03:54:50 2022 ] Epoch 63 Curr Acc: (34550/59477)58.09% +[ Thu Sep 8 03:54:50 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 03:54:50 2022 ] Training epoch: 64 +[ Thu Sep 8 03:54:50 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:59:11 2022 ] Mean training loss: 0.0394. 
+[ Thu Sep 8 03:59:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:59:11 2022 ] Eval epoch: 64 +[ Thu Sep 8 04:06:59 2022 ] Epoch 64 Curr Acc: (34375/59477)57.80% +[ Thu Sep 8 04:06:59 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 04:06:59 2022 ] Training epoch: 65 +[ Thu Sep 8 04:06:59 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:11:20 2022 ] Mean training loss: 0.0407. +[ Thu Sep 8 04:11:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:11:20 2022 ] Eval epoch: 65 +[ Thu Sep 8 04:19:08 2022 ] Epoch 65 Curr Acc: (34053/59477)57.25% +[ Thu Sep 8 04:19:08 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 04:19:08 2022 ] Training epoch: 66 +[ Thu Sep 8 04:19:08 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:23:30 2022 ] Mean training loss: 0.0376. +[ Thu Sep 8 04:23:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:23:30 2022 ] Eval epoch: 66 +[ Thu Sep 8 04:31:18 2022 ] Epoch 66 Curr Acc: (34359/59477)57.77% +[ Thu Sep 8 04:31:18 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 04:31:18 2022 ] Training epoch: 67 +[ Thu Sep 8 04:31:18 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:35:40 2022 ] Mean training loss: 0.0326. +[ Thu Sep 8 04:35:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:35:40 2022 ] Eval epoch: 67 +[ Thu Sep 8 04:43:29 2022 ] Epoch 67 Curr Acc: (34243/59477)57.57% +[ Thu Sep 8 04:43:29 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 04:43:29 2022 ] Training epoch: 68 +[ Thu Sep 8 04:43:29 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:47:50 2022 ] Mean training loss: 0.0375. +[ Thu Sep 8 04:47:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:47:50 2022 ] Eval epoch: 68 +[ Thu Sep 8 04:55:38 2022 ] Epoch 68 Curr Acc: (33569/59477)56.44% +[ Thu Sep 8 04:55:38 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 04:55:38 2022 ] Training epoch: 69 +[ Thu Sep 8 04:55:38 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:00:00 2022 ] Mean training loss: 0.0333. 
+[ Thu Sep 8 05:00:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:00:00 2022 ] Eval epoch: 69 +[ Thu Sep 8 05:07:48 2022 ] Epoch 69 Curr Acc: (34417/59477)57.87% +[ Thu Sep 8 05:07:48 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 05:07:48 2022 ] Training epoch: 70 +[ Thu Sep 8 05:07:48 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:12:11 2022 ] Mean training loss: 0.0329. +[ Thu Sep 8 05:12:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:12:11 2022 ] Eval epoch: 70 +[ Thu Sep 8 05:19:58 2022 ] Epoch 70 Curr Acc: (33863/59477)56.93% +[ Thu Sep 8 05:19:58 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 05:19:58 2022 ] Training epoch: 71 +[ Thu Sep 8 05:19:58 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:24:20 2022 ] Mean training loss: 0.0224. +[ Thu Sep 8 05:24:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:24:20 2022 ] Eval epoch: 71 +[ Thu Sep 8 05:32:08 2022 ] Epoch 71 Curr Acc: (34442/59477)57.91% +[ Thu Sep 8 05:32:08 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 05:32:08 2022 ] Training epoch: 72 +[ Thu Sep 8 05:32:08 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:36:30 2022 ] Mean training loss: 0.0190. +[ Thu Sep 8 05:36:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:36:30 2022 ] Eval epoch: 72 +[ Thu Sep 8 05:44:17 2022 ] Epoch 72 Curr Acc: (34665/59477)58.28% +[ Thu Sep 8 05:44:17 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 05:44:17 2022 ] Training epoch: 73 +[ Thu Sep 8 05:44:17 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:48:39 2022 ] Mean training loss: 0.0169. 
+[ Thu Sep 8 05:48:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:48:39 2022 ] Eval epoch: 73 +[ Thu Sep 8 05:56:27 2022 ] Epoch 73 Curr Acc: (34484/59477)57.98% +[ Thu Sep 8 05:56:27 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 05:56:27 2022 ] Training epoch: 74 +[ Thu Sep 8 05:56:27 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:00:48 2022 ] Mean training loss: 0.0175. +[ Thu Sep 8 06:00:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:00:48 2022 ] Eval epoch: 74 +[ Thu Sep 8 06:08:36 2022 ] Epoch 74 Curr Acc: (34740/59477)58.41% +[ Thu Sep 8 06:08:36 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 06:08:36 2022 ] Training epoch: 75 +[ Thu Sep 8 06:08:36 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:12:58 2022 ] Mean training loss: 0.0159. +[ Thu Sep 8 06:12:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:12:58 2022 ] Eval epoch: 75 +[ Thu Sep 8 06:20:45 2022 ] Epoch 75 Curr Acc: (34399/59477)57.84% +[ Thu Sep 8 06:20:45 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 06:20:45 2022 ] Training epoch: 76 +[ Thu Sep 8 06:20:45 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:25:07 2022 ] Mean training loss: 0.0149. +[ Thu Sep 8 06:25:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:25:07 2022 ] Eval epoch: 76 +[ Thu Sep 8 06:32:54 2022 ] Epoch 76 Curr Acc: (34653/59477)58.26% +[ Thu Sep 8 06:32:54 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 06:32:54 2022 ] Training epoch: 77 +[ Thu Sep 8 06:32:54 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:37:16 2022 ] Mean training loss: 0.0167. 
+[ Thu Sep 8 06:37:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:37:16 2022 ] Eval epoch: 77 +[ Thu Sep 8 06:45:03 2022 ] Epoch 77 Curr Acc: (34762/59477)58.45% +[ Thu Sep 8 06:45:03 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 06:45:03 2022 ] Training epoch: 78 +[ Thu Sep 8 06:45:03 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:49:25 2022 ] Mean training loss: 0.0148. +[ Thu Sep 8 06:49:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:49:25 2022 ] Eval epoch: 78 +[ Thu Sep 8 06:57:13 2022 ] Epoch 78 Curr Acc: (34756/59477)58.44% +[ Thu Sep 8 06:57:13 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 06:57:13 2022 ] Training epoch: 79 +[ Thu Sep 8 06:57:13 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:01:34 2022 ] Mean training loss: 0.0150. +[ Thu Sep 8 07:01:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:01:34 2022 ] Eval epoch: 79 +[ Thu Sep 8 07:09:21 2022 ] Epoch 79 Curr Acc: (33861/59477)56.93% +[ Thu Sep 8 07:09:21 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 07:09:21 2022 ] Training epoch: 80 +[ Thu Sep 8 07:09:21 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:13:44 2022 ] Mean training loss: 0.0138. +[ Thu Sep 8 07:13:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:13:44 2022 ] Eval epoch: 80 +[ Thu Sep 8 07:21:31 2022 ] Epoch 80 Curr Acc: (34681/59477)58.31% +[ Thu Sep 8 07:21:31 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 07:21:31 2022 ] Training epoch: 81 +[ Thu Sep 8 07:21:31 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:25:54 2022 ] Mean training loss: 0.0137. 
+[ Thu Sep 8 07:25:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:25:54 2022 ] Eval epoch: 81 +[ Thu Sep 8 07:33:41 2022 ] Epoch 81 Curr Acc: (34671/59477)58.29% +[ Thu Sep 8 07:33:41 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 07:33:41 2022 ] Training epoch: 82 +[ Thu Sep 8 07:33:41 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:38:03 2022 ] Mean training loss: 0.0154. +[ Thu Sep 8 07:38:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:38:03 2022 ] Eval epoch: 82 +[ Thu Sep 8 07:45:51 2022 ] Epoch 82 Curr Acc: (34766/59477)58.45% +[ Thu Sep 8 07:45:51 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 07:45:51 2022 ] Training epoch: 83 +[ Thu Sep 8 07:45:51 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:50:13 2022 ] Mean training loss: 0.0134. +[ Thu Sep 8 07:50:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:50:13 2022 ] Eval epoch: 83 +[ Thu Sep 8 07:58:00 2022 ] Epoch 83 Curr Acc: (34705/59477)58.35% +[ Thu Sep 8 07:58:00 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 07:58:00 2022 ] Training epoch: 84 +[ Thu Sep 8 07:58:00 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:02:22 2022 ] Mean training loss: 0.0133. +[ Thu Sep 8 08:02:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:02:22 2022 ] Eval epoch: 84 +[ Thu Sep 8 08:10:09 2022 ] Epoch 84 Curr Acc: (34637/59477)58.24% +[ Thu Sep 8 08:10:09 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 08:10:10 2022 ] Training epoch: 85 +[ Thu Sep 8 08:10:10 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:14:31 2022 ] Mean training loss: 0.0128. 
+[ Thu Sep 8 08:14:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:14:31 2022 ] Eval epoch: 85 +[ Thu Sep 8 08:22:19 2022 ] Epoch 85 Curr Acc: (34649/59477)58.26% +[ Thu Sep 8 08:22:19 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 08:22:19 2022 ] Training epoch: 86 +[ Thu Sep 8 08:22:19 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:26:40 2022 ] Mean training loss: 0.0135. +[ Thu Sep 8 08:26:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:26:40 2022 ] Eval epoch: 86 +[ Thu Sep 8 08:34:28 2022 ] Epoch 86 Curr Acc: (34534/59477)58.06% +[ Thu Sep 8 08:34:28 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 08:34:28 2022 ] Training epoch: 87 +[ Thu Sep 8 08:34:28 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:38:49 2022 ] Mean training loss: 0.0114. +[ Thu Sep 8 08:38:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:38:50 2022 ] Eval epoch: 87 +[ Thu Sep 8 08:46:37 2022 ] Epoch 87 Curr Acc: (34684/59477)58.31% +[ Thu Sep 8 08:46:37 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 08:46:37 2022 ] Training epoch: 88 +[ Thu Sep 8 08:46:37 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:50:59 2022 ] Mean training loss: 0.0127. +[ Thu Sep 8 08:50:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:50:59 2022 ] Eval epoch: 88 +[ Thu Sep 8 08:58:46 2022 ] Epoch 88 Curr Acc: (33591/59477)56.48% +[ Thu Sep 8 08:58:46 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 08:58:47 2022 ] Training epoch: 89 +[ Thu Sep 8 08:58:47 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:03:09 2022 ] Mean training loss: 0.0123. 
+[ Thu Sep 8 09:03:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:03:09 2022 ] Eval epoch: 89 +[ Thu Sep 8 09:10:56 2022 ] Epoch 89 Curr Acc: (34079/59477)57.30% +[ Thu Sep 8 09:10:56 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 09:10:56 2022 ] Training epoch: 90 +[ Thu Sep 8 09:10:56 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:15:18 2022 ] Mean training loss: 0.0137. +[ Thu Sep 8 09:15:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:15:18 2022 ] Eval epoch: 90 +[ Thu Sep 8 09:23:05 2022 ] Epoch 90 Curr Acc: (34595/59477)58.17% +[ Thu Sep 8 09:23:05 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 09:23:05 2022 ] Training epoch: 91 +[ Thu Sep 8 09:23:05 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 09:27:27 2022 ] Mean training loss: 0.0133. +[ Thu Sep 8 09:27:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:27:27 2022 ] Eval epoch: 91 +[ Thu Sep 8 09:35:14 2022 ] Epoch 91 Curr Acc: (34765/59477)58.45% +[ Thu Sep 8 09:35:14 2022 ] Epoch 57 Best Acc 58.76% +[ Thu Sep 8 09:35:14 2022 ] Training epoch: 92 +[ Thu Sep 8 09:35:14 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 09:39:36 2022 ] Mean training loss: 0.0131. +[ Thu Sep 8 09:39:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:39:36 2022 ] Eval epoch: 92 +[ Thu Sep 8 09:47:24 2022 ] Epoch 92 Curr Acc: (34956/59477)58.77% +[ Thu Sep 8 09:47:24 2022 ] Epoch 92 Best Acc 58.77% +[ Thu Sep 8 09:47:24 2022 ] Training epoch: 93 +[ Thu Sep 8 09:47:24 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 09:51:45 2022 ] Mean training loss: 0.0126. 
+[ Thu Sep 8 09:51:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:51:45 2022 ] Eval epoch: 93 +[ Thu Sep 8 09:59:33 2022 ] Epoch 93 Curr Acc: (34953/59477)58.77% +[ Thu Sep 8 09:59:33 2022 ] Epoch 92 Best Acc 58.77% +[ Thu Sep 8 09:59:33 2022 ] Training epoch: 94 +[ Thu Sep 8 09:59:33 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:03:54 2022 ] Mean training loss: 0.0135. +[ Thu Sep 8 10:03:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:03:54 2022 ] Eval epoch: 94 +[ Thu Sep 8 10:11:42 2022 ] Epoch 94 Curr Acc: (35044/59477)58.92% +[ Thu Sep 8 10:11:42 2022 ] Epoch 94 Best Acc 58.92% +[ Thu Sep 8 10:11:42 2022 ] Training epoch: 95 +[ Thu Sep 8 10:11:42 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:16:03 2022 ] Mean training loss: 0.0117. +[ Thu Sep 8 10:16:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:16:03 2022 ] Eval epoch: 95 +[ Thu Sep 8 10:23:51 2022 ] Epoch 95 Curr Acc: (33790/59477)56.81% +[ Thu Sep 8 10:23:51 2022 ] Epoch 94 Best Acc 58.92% +[ Thu Sep 8 10:23:51 2022 ] Training epoch: 96 +[ Thu Sep 8 10:23:51 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:28:13 2022 ] Mean training loss: 0.0124. +[ Thu Sep 8 10:28:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:28:13 2022 ] Eval epoch: 96 +[ Thu Sep 8 10:36:00 2022 ] Epoch 96 Curr Acc: (34878/59477)58.64% +[ Thu Sep 8 10:36:00 2022 ] Epoch 94 Best Acc 58.92% +[ Thu Sep 8 10:36:00 2022 ] Training epoch: 97 +[ Thu Sep 8 10:36:00 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:40:24 2022 ] Mean training loss: 0.0122. 
+[ Thu Sep 8 10:40:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:40:24 2022 ] Eval epoch: 97 +[ Thu Sep 8 10:48:12 2022 ] Epoch 97 Curr Acc: (34601/59477)58.18% +[ Thu Sep 8 10:48:12 2022 ] Epoch 94 Best Acc 58.92% +[ Thu Sep 8 10:48:12 2022 ] Training epoch: 98 +[ Thu Sep 8 10:48:12 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:52:34 2022 ] Mean training loss: 0.0127. +[ Thu Sep 8 10:52:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:52:34 2022 ] Eval epoch: 98 +[ Thu Sep 8 11:00:22 2022 ] Epoch 98 Curr Acc: (34640/59477)58.24% +[ Thu Sep 8 11:00:22 2022 ] Epoch 94 Best Acc 58.92% +[ Thu Sep 8 11:00:22 2022 ] Training epoch: 99 +[ Thu Sep 8 11:00:22 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:04:45 2022 ] Mean training loss: 0.0119. +[ Thu Sep 8 11:04:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:04:45 2022 ] Eval epoch: 99 +[ Thu Sep 8 11:12:32 2022 ] Epoch 99 Curr Acc: (34937/59477)58.74% +[ Thu Sep 8 11:12:32 2022 ] Epoch 94 Best Acc 58.92% +[ Thu Sep 8 11:12:33 2022 ] Training epoch: 100 +[ Thu Sep 8 11:12:33 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:16:55 2022 ] Mean training loss: 0.0120. +[ Thu Sep 8 11:16:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:16:55 2022 ] Eval epoch: 100 +[ Thu Sep 8 11:24:43 2022 ] Epoch 100 Curr Acc: (34501/59477)58.01% +[ Thu Sep 8 11:24:43 2022 ] Epoch 94 Best Acc 58.92% +[ Thu Sep 8 11:24:43 2022 ] Training epoch: 101 +[ Thu Sep 8 11:24:43 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:29:05 2022 ] Mean training loss: 0.0122. 
+[ Thu Sep 8 11:29:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:29:05 2022 ] Eval epoch: 101 +[ Thu Sep 8 11:36:54 2022 ] Epoch 101 Curr Acc: (34881/59477)58.65% +[ Thu Sep 8 11:36:54 2022 ] Epoch 94 Best Acc 58.92% +[ Thu Sep 8 11:36:54 2022 ] Training epoch: 102 +[ Thu Sep 8 11:36:54 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:41:16 2022 ] Mean training loss: 0.0118. +[ Thu Sep 8 11:41:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:41:16 2022 ] Eval epoch: 102 +[ Thu Sep 8 11:49:04 2022 ] Epoch 102 Curr Acc: (35078/59477)58.98% +[ Thu Sep 8 11:49:04 2022 ] Epoch 102 Best Acc 58.98% +[ Thu Sep 8 11:49:04 2022 ] Training epoch: 103 +[ Thu Sep 8 11:49:04 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:53:27 2022 ] Mean training loss: 0.0119. +[ Thu Sep 8 11:53:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:53:27 2022 ] Eval epoch: 103 +[ Thu Sep 8 12:01:15 2022 ] Epoch 103 Curr Acc: (34416/59477)57.86% +[ Thu Sep 8 12:01:15 2022 ] Epoch 102 Best Acc 58.98% +[ Thu Sep 8 12:01:15 2022 ] Training epoch: 104 +[ Thu Sep 8 12:01:15 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:05:38 2022 ] Mean training loss: 0.0125. +[ Thu Sep 8 12:05:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:05:38 2022 ] Eval epoch: 104 +[ Thu Sep 8 12:13:26 2022 ] Epoch 104 Curr Acc: (34796/59477)58.50% +[ Thu Sep 8 12:13:26 2022 ] Epoch 102 Best Acc 58.98% +[ Thu Sep 8 12:13:26 2022 ] Training epoch: 105 +[ Thu Sep 8 12:13:26 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:17:48 2022 ] Mean training loss: 0.0115. 
+[ Thu Sep 8 12:17:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:17:49 2022 ] Eval epoch: 105 +[ Thu Sep 8 12:25:37 2022 ] Epoch 105 Curr Acc: (34704/59477)58.35% +[ Thu Sep 8 12:25:37 2022 ] Epoch 102 Best Acc 58.98% +[ Thu Sep 8 12:25:37 2022 ] Training epoch: 106 +[ Thu Sep 8 12:25:37 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:29:59 2022 ] Mean training loss: 0.0128. +[ Thu Sep 8 12:29:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:29:59 2022 ] Eval epoch: 106 +[ Thu Sep 8 12:37:47 2022 ] Epoch 106 Curr Acc: (34773/59477)58.46% +[ Thu Sep 8 12:37:47 2022 ] Epoch 102 Best Acc 58.98% +[ Thu Sep 8 12:37:47 2022 ] Training epoch: 107 +[ Thu Sep 8 12:37:47 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:42:09 2022 ] Mean training loss: 0.0120. +[ Thu Sep 8 12:42:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:42:09 2022 ] Eval epoch: 107 +[ Thu Sep 8 12:49:58 2022 ] Epoch 107 Curr Acc: (34658/59477)58.27% +[ Thu Sep 8 12:49:58 2022 ] Epoch 102 Best Acc 58.98% +[ Thu Sep 8 12:49:58 2022 ] Training epoch: 108 +[ Thu Sep 8 12:49:58 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:54:19 2022 ] Mean training loss: 0.0108. +[ Thu Sep 8 12:54:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:54:19 2022 ] Eval epoch: 108 +[ Thu Sep 8 13:02:07 2022 ] Epoch 108 Curr Acc: (34883/59477)58.65% +[ Thu Sep 8 13:02:07 2022 ] Epoch 102 Best Acc 58.98% +[ Thu Sep 8 13:02:07 2022 ] Training epoch: 109 +[ Thu Sep 8 13:02:07 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:06:29 2022 ] Mean training loss: 0.0124. 
+[ Thu Sep 8 13:06:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:06:29 2022 ] Eval epoch: 109 +[ Thu Sep 8 13:14:17 2022 ] Epoch 109 Curr Acc: (34702/59477)58.35% +[ Thu Sep 8 13:14:17 2022 ] Epoch 102 Best Acc 58.98% +[ Thu Sep 8 13:14:17 2022 ] Training epoch: 110 +[ Thu Sep 8 13:14:17 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:18:38 2022 ] Mean training loss: 0.0120. +[ Thu Sep 8 13:18:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:18:38 2022 ] Eval epoch: 110 +[ Thu Sep 8 13:26:27 2022 ] Epoch 110 Curr Acc: (34520/59477)58.04% +[ Thu Sep 8 13:26:27 2022 ] Epoch 102 Best Acc 58.98% +[ Thu Sep 8 13:26:27 2022 ] epoch: 102, best accuracy: 0.5897741984296451 +[ Thu Sep 8 13:26:27 2022 ] Experiment: ./work_dir/ntu120/xset_bm +[ Thu Sep 8 13:26:27 2022 ] # generator parameters: 2.922995 M. +[ Thu Sep 8 13:26:27 2022 ] Load weights from ./runs/ntu120/xset_bm/runs-101-132294.pt. +[ Thu Sep 8 13:26:27 2022 ] Eval epoch: 1 +[ Thu Sep 8 13:34:15 2022 ] Epoch 1 Curr Acc: (35078/59477)58.98% +[ Thu Sep 8 13:34:15 2022 ] Epoch 102 Best Acc 58.98% diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_j/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu120_xset/xset_j/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_j/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions 
import Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_j/config.yaml b/ckpt/Others/MST-GCN/ntu120_xset/xset_j/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..684ed104e2d981595fd046a834130546400b53dc --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_j/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu120/xset_j.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + - 
448 + - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 120 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu120/xset_j/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu120/xset_j diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_j/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu120_xset/xset_j/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..6b756a1f01f788f4d8159df20657d25e11d7d983 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_j/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:07508146e0acf0d9152cf573420ce523856f97de6dad49a42ab2bd14d86cebbe +size 34946665 diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_j/log.txt b/ckpt/Others/MST-GCN/ntu120_xset/xset_j/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..342ec7f481113f20bc686ca3b50f1f203e7a532b --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_j/log.txt @@ -0,0 +1,631 @@ +[ Wed Sep 7 
21:35:51 2022 ] # generator parameters: 2.922995 M. +[ Wed Sep 7 21:35:51 2022 ] Parameters: +{'work_dir': './work_dir/ntu120/xset_j', 'model_saved_name': './runs/ntu120/xset_j/runs', 'config': 'config/ntu120/xset_j.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Wed Sep 7 21:35:51 2022 ] Training epoch: 1 +[ Wed Sep 7 21:35:51 2022 ] Learning rate: 0.015 +[ Wed Sep 
7 21:40:17 2022 ] Mean training loss: 3.6800. +[ Wed Sep 7 21:40:17 2022 ] Time consumption: [Data]01%, [Network]98% +[ Wed Sep 7 21:40:17 2022 ] Training epoch: 2 +[ Wed Sep 7 21:40:17 2022 ] Learning rate: 0.03 +[ Wed Sep 7 21:44:42 2022 ] Mean training loss: 2.8223. +[ Wed Sep 7 21:44:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:44:42 2022 ] Training epoch: 3 +[ Wed Sep 7 21:44:42 2022 ] Learning rate: 0.045 +[ Wed Sep 7 21:49:05 2022 ] Mean training loss: 2.3654. +[ Wed Sep 7 21:49:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:49:05 2022 ] Training epoch: 4 +[ Wed Sep 7 21:49:05 2022 ] Learning rate: 0.06 +[ Wed Sep 7 21:53:27 2022 ] Mean training loss: 2.0752. +[ Wed Sep 7 21:53:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:53:27 2022 ] Training epoch: 5 +[ Wed Sep 7 21:53:27 2022 ] Learning rate: 0.075 +[ Wed Sep 7 21:57:50 2022 ] Mean training loss: 1.8883. +[ Wed Sep 7 21:57:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:57:50 2022 ] Training epoch: 6 +[ Wed Sep 7 21:57:50 2022 ] Learning rate: 0.09 +[ Wed Sep 7 22:02:13 2022 ] Mean training loss: 1.7515. +[ Wed Sep 7 22:02:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:02:13 2022 ] Training epoch: 7 +[ Wed Sep 7 22:02:13 2022 ] Learning rate: 0.10500000000000001 +[ Wed Sep 7 22:06:35 2022 ] Mean training loss: 1.6382. +[ Wed Sep 7 22:06:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:06:35 2022 ] Training epoch: 8 +[ Wed Sep 7 22:06:35 2022 ] Learning rate: 0.12 +[ Wed Sep 7 22:10:58 2022 ] Mean training loss: 1.5736. +[ Wed Sep 7 22:10:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:10:58 2022 ] Training epoch: 9 +[ Wed Sep 7 22:10:58 2022 ] Learning rate: 0.13499999999999998 +[ Wed Sep 7 22:15:20 2022 ] Mean training loss: 1.5032. 
+[ Wed Sep 7 22:15:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:15:20 2022 ] Training epoch: 10 +[ Wed Sep 7 22:15:20 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:19:43 2022 ] Mean training loss: 1.4871. +[ Wed Sep 7 22:19:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:19:43 2022 ] Training epoch: 11 +[ Wed Sep 7 22:19:43 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:24:05 2022 ] Mean training loss: 1.3952. +[ Wed Sep 7 22:24:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:24:05 2022 ] Training epoch: 12 +[ Wed Sep 7 22:24:05 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:28:28 2022 ] Mean training loss: 1.3432. +[ Wed Sep 7 22:28:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:28:28 2022 ] Training epoch: 13 +[ Wed Sep 7 22:28:28 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:32:51 2022 ] Mean training loss: 1.2907. +[ Wed Sep 7 22:32:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:32:51 2022 ] Training epoch: 14 +[ Wed Sep 7 22:32:51 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:37:13 2022 ] Mean training loss: 1.2527. +[ Wed Sep 7 22:37:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:37:13 2022 ] Training epoch: 15 +[ Wed Sep 7 22:37:13 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:41:36 2022 ] Mean training loss: 1.2237. +[ Wed Sep 7 22:41:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:41:36 2022 ] Training epoch: 16 +[ Wed Sep 7 22:41:36 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:45:59 2022 ] Mean training loss: 1.1655. +[ Wed Sep 7 22:45:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:45:59 2022 ] Training epoch: 17 +[ Wed Sep 7 22:45:59 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:50:23 2022 ] Mean training loss: 1.1655. 
+[ Wed Sep 7 22:50:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:50:23 2022 ] Training epoch: 18 +[ Wed Sep 7 22:50:23 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:54:46 2022 ] Mean training loss: 1.1225. +[ Wed Sep 7 22:54:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:54:46 2022 ] Training epoch: 19 +[ Wed Sep 7 22:54:46 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:59:09 2022 ] Mean training loss: 1.1175. +[ Wed Sep 7 22:59:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:59:09 2022 ] Training epoch: 20 +[ Wed Sep 7 22:59:09 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:03:32 2022 ] Mean training loss: 1.0823. +[ Wed Sep 7 23:03:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:03:32 2022 ] Training epoch: 21 +[ Wed Sep 7 23:03:32 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:07:55 2022 ] Mean training loss: 1.0720. +[ Wed Sep 7 23:07:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:07:55 2022 ] Training epoch: 22 +[ Wed Sep 7 23:07:55 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:12:18 2022 ] Mean training loss: 1.0578. +[ Wed Sep 7 23:12:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:12:18 2022 ] Training epoch: 23 +[ Wed Sep 7 23:12:18 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:16:42 2022 ] Mean training loss: 1.0478. +[ Wed Sep 7 23:16:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:16:42 2022 ] Training epoch: 24 +[ Wed Sep 7 23:16:42 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:21:06 2022 ] Mean training loss: 1.0217. +[ Wed Sep 7 23:21:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:21:06 2022 ] Training epoch: 25 +[ Wed Sep 7 23:21:06 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:25:29 2022 ] Mean training loss: 1.0131. 
+[ Wed Sep 7 23:25:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:25:29 2022 ] Training epoch: 26 +[ Wed Sep 7 23:25:29 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:29:53 2022 ] Mean training loss: 0.9915. +[ Wed Sep 7 23:29:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:29:53 2022 ] Training epoch: 27 +[ Wed Sep 7 23:29:53 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:34:17 2022 ] Mean training loss: 0.9798. +[ Wed Sep 7 23:34:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:34:17 2022 ] Training epoch: 28 +[ Wed Sep 7 23:34:17 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:38:40 2022 ] Mean training loss: 0.9867. +[ Wed Sep 7 23:38:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:38:40 2022 ] Training epoch: 29 +[ Wed Sep 7 23:38:40 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:43:03 2022 ] Mean training loss: 0.9692. +[ Wed Sep 7 23:43:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:43:03 2022 ] Training epoch: 30 +[ Wed Sep 7 23:43:03 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:47:27 2022 ] Mean training loss: 0.9718. +[ Wed Sep 7 23:47:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:47:27 2022 ] Training epoch: 31 +[ Wed Sep 7 23:47:27 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:51:50 2022 ] Mean training loss: 0.9417. +[ Wed Sep 7 23:51:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:51:50 2022 ] Training epoch: 32 +[ Wed Sep 7 23:51:50 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:56:14 2022 ] Mean training loss: 0.9465. +[ Wed Sep 7 23:56:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:56:14 2022 ] Training epoch: 33 +[ Wed Sep 7 23:56:14 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:00:37 2022 ] Mean training loss: 0.9442. 
+[ Thu Sep 8 00:00:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:00:37 2022 ] Training epoch: 34 +[ Thu Sep 8 00:00:37 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:05:00 2022 ] Mean training loss: 0.9245. +[ Thu Sep 8 00:05:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:05:00 2022 ] Training epoch: 35 +[ Thu Sep 8 00:05:00 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:09:24 2022 ] Mean training loss: 0.9371. +[ Thu Sep 8 00:09:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:09:24 2022 ] Training epoch: 36 +[ Thu Sep 8 00:09:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:13:47 2022 ] Mean training loss: 0.9328. +[ Thu Sep 8 00:13:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:13:47 2022 ] Training epoch: 37 +[ Thu Sep 8 00:13:47 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:18:11 2022 ] Mean training loss: 0.9232. +[ Thu Sep 8 00:18:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:18:11 2022 ] Training epoch: 38 +[ Thu Sep 8 00:18:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:22:35 2022 ] Mean training loss: 0.9113. +[ Thu Sep 8 00:22:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:22:35 2022 ] Training epoch: 39 +[ Thu Sep 8 00:22:35 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:26:58 2022 ] Mean training loss: 0.9030. +[ Thu Sep 8 00:26:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:26:58 2022 ] Training epoch: 40 +[ Thu Sep 8 00:26:58 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:31:22 2022 ] Mean training loss: 0.9017. +[ Thu Sep 8 00:31:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:31:22 2022 ] Training epoch: 41 +[ Thu Sep 8 00:31:22 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:35:45 2022 ] Mean training loss: 0.8869. 
+[ Thu Sep 8 00:35:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:35:45 2022 ] Training epoch: 42 +[ Thu Sep 8 00:35:45 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:40:08 2022 ] Mean training loss: 0.8835. +[ Thu Sep 8 00:40:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:40:08 2022 ] Training epoch: 43 +[ Thu Sep 8 00:40:08 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:44:31 2022 ] Mean training loss: 0.8969. +[ Thu Sep 8 00:44:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:44:31 2022 ] Training epoch: 44 +[ Thu Sep 8 00:44:31 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:48:55 2022 ] Mean training loss: 0.8744. +[ Thu Sep 8 00:48:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:48:55 2022 ] Training epoch: 45 +[ Thu Sep 8 00:48:55 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:53:18 2022 ] Mean training loss: 0.8795. +[ Thu Sep 8 00:53:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:53:18 2022 ] Training epoch: 46 +[ Thu Sep 8 00:53:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:57:42 2022 ] Mean training loss: 0.8780. +[ Thu Sep 8 00:57:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:57:42 2022 ] Training epoch: 47 +[ Thu Sep 8 00:57:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:02:05 2022 ] Mean training loss: 0.8804. +[ Thu Sep 8 01:02:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:02:05 2022 ] Training epoch: 48 +[ Thu Sep 8 01:02:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:06:28 2022 ] Mean training loss: 0.8462. +[ Thu Sep 8 01:06:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:06:28 2022 ] Training epoch: 49 +[ Thu Sep 8 01:06:28 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:10:51 2022 ] Mean training loss: 0.8762. 
+[ Thu Sep 8 01:10:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:10:51 2022 ] Training epoch: 50 +[ Thu Sep 8 01:10:51 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:15:14 2022 ] Mean training loss: 0.8819. +[ Thu Sep 8 01:15:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:15:14 2022 ] Training epoch: 51 +[ Thu Sep 8 01:15:14 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:19:37 2022 ] Mean training loss: 0.4295. +[ Thu Sep 8 01:19:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:19:37 2022 ] Eval epoch: 51 +[ Thu Sep 8 01:27:35 2022 ] Epoch 51 Curr Acc: (34662/59477)58.28% +[ Thu Sep 8 01:27:35 2022 ] Epoch 51 Best Acc 58.28% +[ Thu Sep 8 01:27:36 2022 ] Training epoch: 52 +[ Thu Sep 8 01:27:36 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:31:58 2022 ] Mean training loss: 0.2936. +[ Thu Sep 8 01:31:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:31:58 2022 ] Eval epoch: 52 +[ Thu Sep 8 01:39:49 2022 ] Epoch 52 Curr Acc: (36032/59477)60.58% +[ Thu Sep 8 01:39:49 2022 ] Epoch 52 Best Acc 60.58% +[ Thu Sep 8 01:39:49 2022 ] Training epoch: 53 +[ Thu Sep 8 01:39:49 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:44:12 2022 ] Mean training loss: 0.2383. +[ Thu Sep 8 01:44:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:44:12 2022 ] Eval epoch: 53 +[ Thu Sep 8 01:52:03 2022 ] Epoch 53 Curr Acc: (36164/59477)60.80% +[ Thu Sep 8 01:52:03 2022 ] Epoch 53 Best Acc 60.80% +[ Thu Sep 8 01:52:03 2022 ] Training epoch: 54 +[ Thu Sep 8 01:52:03 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:56:25 2022 ] Mean training loss: 0.2054. 
+[ Thu Sep 8 01:56:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:56:25 2022 ] Eval epoch: 54 +[ Thu Sep 8 02:04:16 2022 ] Epoch 54 Curr Acc: (36324/59477)61.07% +[ Thu Sep 8 02:04:16 2022 ] Epoch 54 Best Acc 61.07% +[ Thu Sep 8 02:04:16 2022 ] Training epoch: 55 +[ Thu Sep 8 02:04:16 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:08:40 2022 ] Mean training loss: 0.1660. +[ Thu Sep 8 02:08:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:08:40 2022 ] Eval epoch: 55 +[ Thu Sep 8 02:16:31 2022 ] Epoch 55 Curr Acc: (36200/59477)60.86% +[ Thu Sep 8 02:16:31 2022 ] Epoch 54 Best Acc 61.07% +[ Thu Sep 8 02:16:31 2022 ] Training epoch: 56 +[ Thu Sep 8 02:16:31 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:20:54 2022 ] Mean training loss: 0.1457. +[ Thu Sep 8 02:20:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:20:54 2022 ] Eval epoch: 56 +[ Thu Sep 8 02:28:45 2022 ] Epoch 56 Curr Acc: (36422/59477)61.24% +[ Thu Sep 8 02:28:45 2022 ] Epoch 56 Best Acc 61.24% +[ Thu Sep 8 02:28:45 2022 ] Training epoch: 57 +[ Thu Sep 8 02:28:45 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:33:08 2022 ] Mean training loss: 0.1336. +[ Thu Sep 8 02:33:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:33:08 2022 ] Eval epoch: 57 +[ Thu Sep 8 02:40:59 2022 ] Epoch 57 Curr Acc: (36565/59477)61.48% +[ Thu Sep 8 02:40:59 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 02:40:59 2022 ] Training epoch: 58 +[ Thu Sep 8 02:40:59 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:45:22 2022 ] Mean training loss: 0.1069. +[ Thu Sep 8 02:45:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:45:23 2022 ] Eval epoch: 58 +[ Thu Sep 8 02:53:13 2022 ] Epoch 58 Curr Acc: (35997/59477)60.52% +[ Thu Sep 8 02:53:13 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 02:53:13 2022 ] Training epoch: 59 +[ Thu Sep 8 02:53:13 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:57:36 2022 ] Mean training loss: 0.0936. 
+[ Thu Sep 8 02:57:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:57:36 2022 ] Eval epoch: 59 +[ Thu Sep 8 03:05:27 2022 ] Epoch 59 Curr Acc: (36102/59477)60.70% +[ Thu Sep 8 03:05:27 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 03:05:27 2022 ] Training epoch: 60 +[ Thu Sep 8 03:05:27 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:09:50 2022 ] Mean training loss: 0.0901. +[ Thu Sep 8 03:09:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:09:50 2022 ] Eval epoch: 60 +[ Thu Sep 8 03:17:41 2022 ] Epoch 60 Curr Acc: (36186/59477)60.84% +[ Thu Sep 8 03:17:41 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 03:17:41 2022 ] Training epoch: 61 +[ Thu Sep 8 03:17:41 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:22:05 2022 ] Mean training loss: 0.0733. +[ Thu Sep 8 03:22:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:22:05 2022 ] Eval epoch: 61 +[ Thu Sep 8 03:29:55 2022 ] Epoch 61 Curr Acc: (35863/59477)60.30% +[ Thu Sep 8 03:29:55 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 03:29:55 2022 ] Training epoch: 62 +[ Thu Sep 8 03:29:55 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:34:18 2022 ] Mean training loss: 0.0733. +[ Thu Sep 8 03:34:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:34:19 2022 ] Eval epoch: 62 +[ Thu Sep 8 03:42:09 2022 ] Epoch 62 Curr Acc: (36012/59477)60.55% +[ Thu Sep 8 03:42:09 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 03:42:09 2022 ] Training epoch: 63 +[ Thu Sep 8 03:42:09 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:46:31 2022 ] Mean training loss: 0.0674. +[ Thu Sep 8 03:46:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:46:31 2022 ] Eval epoch: 63 +[ Thu Sep 8 03:54:22 2022 ] Epoch 63 Curr Acc: (35929/59477)60.41% +[ Thu Sep 8 03:54:22 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 03:54:22 2022 ] Training epoch: 64 +[ Thu Sep 8 03:54:22 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:58:45 2022 ] Mean training loss: 0.0624. 
+[ Thu Sep 8 03:58:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:58:45 2022 ] Eval epoch: 64 +[ Thu Sep 8 04:06:36 2022 ] Epoch 64 Curr Acc: (35778/59477)60.15% +[ Thu Sep 8 04:06:36 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 04:06:36 2022 ] Training epoch: 65 +[ Thu Sep 8 04:06:36 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:11:00 2022 ] Mean training loss: 0.0549. +[ Thu Sep 8 04:11:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:11:00 2022 ] Eval epoch: 65 +[ Thu Sep 8 04:18:50 2022 ] Epoch 65 Curr Acc: (36043/59477)60.60% +[ Thu Sep 8 04:18:50 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 04:18:50 2022 ] Training epoch: 66 +[ Thu Sep 8 04:18:50 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:23:13 2022 ] Mean training loss: 0.0514. +[ Thu Sep 8 04:23:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:23:13 2022 ] Eval epoch: 66 +[ Thu Sep 8 04:31:04 2022 ] Epoch 66 Curr Acc: (35771/59477)60.14% +[ Thu Sep 8 04:31:04 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 04:31:04 2022 ] Training epoch: 67 +[ Thu Sep 8 04:31:04 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:35:28 2022 ] Mean training loss: 0.0475. +[ Thu Sep 8 04:35:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:35:28 2022 ] Eval epoch: 67 +[ Thu Sep 8 04:43:19 2022 ] Epoch 67 Curr Acc: (35898/59477)60.36% +[ Thu Sep 8 04:43:19 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 04:43:19 2022 ] Training epoch: 68 +[ Thu Sep 8 04:43:19 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:47:42 2022 ] Mean training loss: 0.0568. +[ Thu Sep 8 04:47:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:47:42 2022 ] Eval epoch: 68 +[ Thu Sep 8 04:55:33 2022 ] Epoch 68 Curr Acc: (35158/59477)59.11% +[ Thu Sep 8 04:55:33 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 04:55:33 2022 ] Training epoch: 69 +[ Thu Sep 8 04:55:33 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:59:55 2022 ] Mean training loss: 0.0533. 
+[ Thu Sep 8 04:59:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:59:55 2022 ] Eval epoch: 69 +[ Thu Sep 8 05:07:46 2022 ] Epoch 69 Curr Acc: (35877/59477)60.32% +[ Thu Sep 8 05:07:46 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 05:07:46 2022 ] Training epoch: 70 +[ Thu Sep 8 05:07:46 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:12:07 2022 ] Mean training loss: 0.0444. +[ Thu Sep 8 05:12:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:12:07 2022 ] Eval epoch: 70 +[ Thu Sep 8 05:19:58 2022 ] Epoch 70 Curr Acc: (35739/59477)60.09% +[ Thu Sep 8 05:19:58 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 05:19:58 2022 ] Training epoch: 71 +[ Thu Sep 8 05:19:58 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:24:20 2022 ] Mean training loss: 0.0331. +[ Thu Sep 8 05:24:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:24:20 2022 ] Eval epoch: 71 +[ Thu Sep 8 05:32:11 2022 ] Epoch 71 Curr Acc: (36238/59477)60.93% +[ Thu Sep 8 05:32:11 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 05:32:11 2022 ] Training epoch: 72 +[ Thu Sep 8 05:32:11 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:36:32 2022 ] Mean training loss: 0.0255. +[ Thu Sep 8 05:36:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:36:32 2022 ] Eval epoch: 72 +[ Thu Sep 8 05:44:23 2022 ] Epoch 72 Curr Acc: (36186/59477)60.84% +[ Thu Sep 8 05:44:23 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 05:44:23 2022 ] Training epoch: 73 +[ Thu Sep 8 05:44:23 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:48:44 2022 ] Mean training loss: 0.0223. 
+[ Thu Sep 8 05:48:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:48:45 2022 ] Eval epoch: 73 +[ Thu Sep 8 05:56:35 2022 ] Epoch 73 Curr Acc: (36274/59477)60.99% +[ Thu Sep 8 05:56:35 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 05:56:35 2022 ] Training epoch: 74 +[ Thu Sep 8 05:56:35 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:00:58 2022 ] Mean training loss: 0.0225. +[ Thu Sep 8 06:00:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:00:58 2022 ] Eval epoch: 74 +[ Thu Sep 8 06:08:48 2022 ] Epoch 74 Curr Acc: (36436/59477)61.26% +[ Thu Sep 8 06:08:48 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 06:08:48 2022 ] Training epoch: 75 +[ Thu Sep 8 06:08:48 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:13:09 2022 ] Mean training loss: 0.0216. +[ Thu Sep 8 06:13:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:13:10 2022 ] Eval epoch: 75 +[ Thu Sep 8 06:21:00 2022 ] Epoch 75 Curr Acc: (35925/59477)60.40% +[ Thu Sep 8 06:21:00 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 06:21:00 2022 ] Training epoch: 76 +[ Thu Sep 8 06:21:00 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:25:23 2022 ] Mean training loss: 0.0199. +[ Thu Sep 8 06:25:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:25:23 2022 ] Eval epoch: 76 +[ Thu Sep 8 06:33:13 2022 ] Epoch 76 Curr Acc: (36147/59477)60.77% +[ Thu Sep 8 06:33:13 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 06:33:13 2022 ] Training epoch: 77 +[ Thu Sep 8 06:33:13 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:37:36 2022 ] Mean training loss: 0.0186. 
+[ Thu Sep 8 06:37:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:37:36 2022 ] Eval epoch: 77 +[ Thu Sep 8 06:45:27 2022 ] Epoch 77 Curr Acc: (36519/59477)61.40% +[ Thu Sep 8 06:45:27 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 06:45:27 2022 ] Training epoch: 78 +[ Thu Sep 8 06:45:27 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:49:50 2022 ] Mean training loss: 0.0201. +[ Thu Sep 8 06:49:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:49:50 2022 ] Eval epoch: 78 +[ Thu Sep 8 06:57:41 2022 ] Epoch 78 Curr Acc: (36282/59477)61.00% +[ Thu Sep 8 06:57:41 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 06:57:41 2022 ] Training epoch: 79 +[ Thu Sep 8 06:57:41 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:02:04 2022 ] Mean training loss: 0.0191. +[ Thu Sep 8 07:02:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:02:04 2022 ] Eval epoch: 79 +[ Thu Sep 8 07:09:55 2022 ] Epoch 79 Curr Acc: (35872/59477)60.31% +[ Thu Sep 8 07:09:55 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 07:09:55 2022 ] Training epoch: 80 +[ Thu Sep 8 07:09:55 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:14:17 2022 ] Mean training loss: 0.0175. +[ Thu Sep 8 07:14:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:14:17 2022 ] Eval epoch: 80 +[ Thu Sep 8 07:22:08 2022 ] Epoch 80 Curr Acc: (36153/59477)60.78% +[ Thu Sep 8 07:22:08 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 07:22:08 2022 ] Training epoch: 81 +[ Thu Sep 8 07:22:08 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:26:32 2022 ] Mean training loss: 0.0167. 
+[ Thu Sep 8 07:26:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:26:32 2022 ] Eval epoch: 81 +[ Thu Sep 8 07:34:22 2022 ] Epoch 81 Curr Acc: (36225/59477)60.91% +[ Thu Sep 8 07:34:22 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 07:34:22 2022 ] Training epoch: 82 +[ Thu Sep 8 07:34:22 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:38:45 2022 ] Mean training loss: 0.0176. +[ Thu Sep 8 07:38:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:38:45 2022 ] Eval epoch: 82 +[ Thu Sep 8 07:46:35 2022 ] Epoch 82 Curr Acc: (36389/59477)61.18% +[ Thu Sep 8 07:46:35 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 07:46:35 2022 ] Training epoch: 83 +[ Thu Sep 8 07:46:35 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:50:58 2022 ] Mean training loss: 0.0177. +[ Thu Sep 8 07:50:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:50:58 2022 ] Eval epoch: 83 +[ Thu Sep 8 07:58:48 2022 ] Epoch 83 Curr Acc: (35930/59477)60.41% +[ Thu Sep 8 07:58:48 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 07:58:48 2022 ] Training epoch: 84 +[ Thu Sep 8 07:58:48 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:03:11 2022 ] Mean training loss: 0.0180. +[ Thu Sep 8 08:03:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:03:11 2022 ] Eval epoch: 84 +[ Thu Sep 8 08:11:00 2022 ] Epoch 84 Curr Acc: (36213/59477)60.89% +[ Thu Sep 8 08:11:00 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 08:11:00 2022 ] Training epoch: 85 +[ Thu Sep 8 08:11:00 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:15:23 2022 ] Mean training loss: 0.0158. 
+[ Thu Sep 8 08:15:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:15:23 2022 ] Eval epoch: 85 +[ Thu Sep 8 08:23:13 2022 ] Epoch 85 Curr Acc: (36517/59477)61.40% +[ Thu Sep 8 08:23:13 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 08:23:13 2022 ] Training epoch: 86 +[ Thu Sep 8 08:23:13 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:27:36 2022 ] Mean training loss: 0.0171. +[ Thu Sep 8 08:27:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:27:36 2022 ] Eval epoch: 86 +[ Thu Sep 8 08:35:25 2022 ] Epoch 86 Curr Acc: (36145/59477)60.77% +[ Thu Sep 8 08:35:25 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 08:35:25 2022 ] Training epoch: 87 +[ Thu Sep 8 08:35:25 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:39:48 2022 ] Mean training loss: 0.0155. +[ Thu Sep 8 08:39:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:39:48 2022 ] Eval epoch: 87 +[ Thu Sep 8 08:47:38 2022 ] Epoch 87 Curr Acc: (36236/59477)60.92% +[ Thu Sep 8 08:47:38 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 08:47:38 2022 ] Training epoch: 88 +[ Thu Sep 8 08:47:38 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:52:01 2022 ] Mean training loss: 0.0173. +[ Thu Sep 8 08:52:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:52:01 2022 ] Eval epoch: 88 +[ Thu Sep 8 08:59:51 2022 ] Epoch 88 Curr Acc: (36280/59477)61.00% +[ Thu Sep 8 08:59:51 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 08:59:51 2022 ] Training epoch: 89 +[ Thu Sep 8 08:59:51 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:04:13 2022 ] Mean training loss: 0.0153. 
+[ Thu Sep 8 09:04:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:04:13 2022 ] Eval epoch: 89 +[ Thu Sep 8 09:12:03 2022 ] Epoch 89 Curr Acc: (36354/59477)61.12% +[ Thu Sep 8 09:12:03 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 09:12:03 2022 ] Training epoch: 90 +[ Thu Sep 8 09:12:03 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:16:26 2022 ] Mean training loss: 0.0148. +[ Thu Sep 8 09:16:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:16:26 2022 ] Eval epoch: 90 +[ Thu Sep 8 09:24:16 2022 ] Epoch 90 Curr Acc: (36149/59477)60.78% +[ Thu Sep 8 09:24:16 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 09:24:16 2022 ] Training epoch: 91 +[ Thu Sep 8 09:24:16 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 09:28:39 2022 ] Mean training loss: 0.0165. +[ Thu Sep 8 09:28:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:28:39 2022 ] Eval epoch: 91 +[ Thu Sep 8 09:36:29 2022 ] Epoch 91 Curr Acc: (36326/59477)61.08% +[ Thu Sep 8 09:36:29 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 09:36:29 2022 ] Training epoch: 92 +[ Thu Sep 8 09:36:29 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 09:40:52 2022 ] Mean training loss: 0.0171. +[ Thu Sep 8 09:40:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:40:52 2022 ] Eval epoch: 92 +[ Thu Sep 8 09:48:43 2022 ] Epoch 92 Curr Acc: (36436/59477)61.26% +[ Thu Sep 8 09:48:43 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 09:48:43 2022 ] Training epoch: 93 +[ Thu Sep 8 09:48:43 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 09:53:06 2022 ] Mean training loss: 0.0143. 
+[ Thu Sep 8 09:53:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:53:06 2022 ] Eval epoch: 93 +[ Thu Sep 8 10:00:57 2022 ] Epoch 93 Curr Acc: (36333/59477)61.09% +[ Thu Sep 8 10:00:57 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 10:00:57 2022 ] Training epoch: 94 +[ Thu Sep 8 10:00:57 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:05:19 2022 ] Mean training loss: 0.0159. +[ Thu Sep 8 10:05:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:05:19 2022 ] Eval epoch: 94 +[ Thu Sep 8 10:13:10 2022 ] Epoch 94 Curr Acc: (36458/59477)61.30% +[ Thu Sep 8 10:13:10 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 10:13:10 2022 ] Training epoch: 95 +[ Thu Sep 8 10:13:10 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:17:32 2022 ] Mean training loss: 0.0151. +[ Thu Sep 8 10:17:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:17:32 2022 ] Eval epoch: 95 +[ Thu Sep 8 10:25:23 2022 ] Epoch 95 Curr Acc: (36264/59477)60.97% +[ Thu Sep 8 10:25:23 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 10:25:23 2022 ] Training epoch: 96 +[ Thu Sep 8 10:25:23 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:29:46 2022 ] Mean training loss: 0.0157. +[ Thu Sep 8 10:29:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:29:46 2022 ] Eval epoch: 96 +[ Thu Sep 8 10:37:37 2022 ] Epoch 96 Curr Acc: (36347/59477)61.11% +[ Thu Sep 8 10:37:37 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 10:37:37 2022 ] Training epoch: 97 +[ Thu Sep 8 10:37:37 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:42:00 2022 ] Mean training loss: 0.0149. 
+[ Thu Sep 8 10:42:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:42:00 2022 ] Eval epoch: 97 +[ Thu Sep 8 10:49:51 2022 ] Epoch 97 Curr Acc: (36200/59477)60.86% +[ Thu Sep 8 10:49:51 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 10:49:51 2022 ] Training epoch: 98 +[ Thu Sep 8 10:49:51 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:54:14 2022 ] Mean training loss: 0.0158. +[ Thu Sep 8 10:54:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:54:14 2022 ] Eval epoch: 98 +[ Thu Sep 8 11:02:05 2022 ] Epoch 98 Curr Acc: (36354/59477)61.12% +[ Thu Sep 8 11:02:05 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 11:02:05 2022 ] Training epoch: 99 +[ Thu Sep 8 11:02:05 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:06:29 2022 ] Mean training loss: 0.0161. +[ Thu Sep 8 11:06:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:06:29 2022 ] Eval epoch: 99 +[ Thu Sep 8 11:14:20 2022 ] Epoch 99 Curr Acc: (36426/59477)61.24% +[ Thu Sep 8 11:14:20 2022 ] Epoch 57 Best Acc 61.48% +[ Thu Sep 8 11:14:20 2022 ] Training epoch: 100 +[ Thu Sep 8 11:14:20 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:18:42 2022 ] Mean training loss: 0.0150. +[ Thu Sep 8 11:18:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:18:43 2022 ] Eval epoch: 100 +[ Thu Sep 8 11:26:34 2022 ] Epoch 100 Curr Acc: (36627/59477)61.58% +[ Thu Sep 8 11:26:34 2022 ] Epoch 100 Best Acc 61.58% +[ Thu Sep 8 11:26:34 2022 ] Training epoch: 101 +[ Thu Sep 8 11:26:34 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:30:57 2022 ] Mean training loss: 0.0143. 
+[ Thu Sep 8 11:30:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:30:57 2022 ] Eval epoch: 101 +[ Thu Sep 8 11:38:48 2022 ] Epoch 101 Curr Acc: (36316/59477)61.06% +[ Thu Sep 8 11:38:48 2022 ] Epoch 100 Best Acc 61.58% +[ Thu Sep 8 11:38:48 2022 ] Training epoch: 102 +[ Thu Sep 8 11:38:48 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:43:11 2022 ] Mean training loss: 0.0143. +[ Thu Sep 8 11:43:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:43:11 2022 ] Eval epoch: 102 +[ Thu Sep 8 11:51:02 2022 ] Epoch 102 Curr Acc: (36425/59477)61.24% +[ Thu Sep 8 11:51:02 2022 ] Epoch 100 Best Acc 61.58% +[ Thu Sep 8 11:51:02 2022 ] Training epoch: 103 +[ Thu Sep 8 11:51:02 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:55:25 2022 ] Mean training loss: 0.0154. +[ Thu Sep 8 11:55:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:55:25 2022 ] Eval epoch: 103 +[ Thu Sep 8 12:03:16 2022 ] Epoch 103 Curr Acc: (36413/59477)61.22% +[ Thu Sep 8 12:03:16 2022 ] Epoch 100 Best Acc 61.58% +[ Thu Sep 8 12:03:16 2022 ] Training epoch: 104 +[ Thu Sep 8 12:03:16 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:07:39 2022 ] Mean training loss: 0.0147. +[ Thu Sep 8 12:07:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:07:39 2022 ] Eval epoch: 104 +[ Thu Sep 8 12:15:31 2022 ] Epoch 104 Curr Acc: (36372/59477)61.15% +[ Thu Sep 8 12:15:31 2022 ] Epoch 100 Best Acc 61.58% +[ Thu Sep 8 12:15:31 2022 ] Training epoch: 105 +[ Thu Sep 8 12:15:31 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:19:52 2022 ] Mean training loss: 0.0152. 
+[ Thu Sep 8 12:19:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:19:52 2022 ] Eval epoch: 105 +[ Thu Sep 8 12:27:43 2022 ] Epoch 105 Curr Acc: (36226/59477)60.91% +[ Thu Sep 8 12:27:43 2022 ] Epoch 100 Best Acc 61.58% +[ Thu Sep 8 12:27:43 2022 ] Training epoch: 106 +[ Thu Sep 8 12:27:43 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:32:06 2022 ] Mean training loss: 0.0142. +[ Thu Sep 8 12:32:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:32:06 2022 ] Eval epoch: 106 +[ Thu Sep 8 12:39:58 2022 ] Epoch 106 Curr Acc: (36352/59477)61.12% +[ Thu Sep 8 12:39:58 2022 ] Epoch 100 Best Acc 61.58% +[ Thu Sep 8 12:39:58 2022 ] Training epoch: 107 +[ Thu Sep 8 12:39:58 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:44:21 2022 ] Mean training loss: 0.0143. +[ Thu Sep 8 12:44:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:44:21 2022 ] Eval epoch: 107 +[ Thu Sep 8 12:52:12 2022 ] Epoch 107 Curr Acc: (36122/59477)60.73% +[ Thu Sep 8 12:52:12 2022 ] Epoch 100 Best Acc 61.58% +[ Thu Sep 8 12:52:12 2022 ] Training epoch: 108 +[ Thu Sep 8 12:52:12 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:56:35 2022 ] Mean training loss: 0.0145. +[ Thu Sep 8 12:56:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:56:36 2022 ] Eval epoch: 108 +[ Thu Sep 8 13:04:27 2022 ] Epoch 108 Curr Acc: (36278/59477)61.00% +[ Thu Sep 8 13:04:27 2022 ] Epoch 100 Best Acc 61.58% +[ Thu Sep 8 13:04:27 2022 ] Training epoch: 109 +[ Thu Sep 8 13:04:27 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:08:50 2022 ] Mean training loss: 0.0153. 
+[ Thu Sep 8 13:08:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:08:50 2022 ] Eval epoch: 109 +[ Thu Sep 8 13:16:41 2022 ] Epoch 109 Curr Acc: (36406/59477)61.21% +[ Thu Sep 8 13:16:41 2022 ] Epoch 100 Best Acc 61.58% +[ Thu Sep 8 13:16:41 2022 ] Training epoch: 110 +[ Thu Sep 8 13:16:41 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:21:05 2022 ] Mean training loss: 0.0155. +[ Thu Sep 8 13:21:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:21:05 2022 ] Eval epoch: 110 +[ Thu Sep 8 13:28:56 2022 ] Epoch 110 Curr Acc: (36290/59477)61.02% +[ Thu Sep 8 13:28:56 2022 ] Epoch 100 Best Acc 61.58% +[ Thu Sep 8 13:28:56 2022 ] epoch: 100, best accuracy: 0.6158178791801873 +[ Thu Sep 8 13:28:56 2022 ] Experiment: ./work_dir/ntu120/xset_j +[ Thu Sep 8 13:28:56 2022 ] # generator parameters: 2.922995 M. +[ Thu Sep 8 13:28:56 2022 ] Load weights from ./runs/ntu120/xset_j/runs-99-129700.pt. +[ Thu Sep 8 13:28:57 2022 ] Eval epoch: 1 +[ Thu Sep 8 13:36:47 2022 ] Epoch 1 Curr Acc: (36627/59477)61.58% +[ Thu Sep 8 13:36:47 2022 ] Epoch 100 Best Acc 61.58% diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_jm/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu120_xset/xset_jm/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_jm/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions 
import Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_jm/config.yaml b/ckpt/Others/MST-GCN/ntu120_xset/xset_jm/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9dbeef896fdc70094ee71ba39204eed201c6e96b --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_jm/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu120/xset_jm.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + 
- 448 + - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 120 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu120/xset_jm/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint_motion.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint_motion.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu120/xset_jm diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_jm/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu120_xset/xset_jm/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..ae4507aee753df49305ec6af404b497b46a1651b --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_jm/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f7933a777dd27a03db074d25e7aa8607a961bc65883dbcfcfae7fb523d2ac29f +size 34946665 diff --git a/ckpt/Others/MST-GCN/ntu120_xset/xset_jm/log.txt b/ckpt/Others/MST-GCN/ntu120_xset/xset_jm/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..d117126272ebd353f03518e4226bda06bc2b916c --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xset/xset_jm/log.txt @@ -0,0 
+1,631 @@ +[ Wed Sep 7 21:35:56 2022 ] # generator parameters: 2.922995 M. +[ Wed Sep 7 21:35:57 2022 ] Parameters: +{'work_dir': './work_dir/ntu120/xset_jm', 'model_saved_name': './runs/ntu120/xset_jm/runs', 'config': 'config/ntu120/xset_jm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Wed Sep 7 21:35:57 2022 ] Training epoch: 1 +[ Wed Sep 7 21:35:57 
2022 ] Learning rate: 0.015 +[ Wed Sep 7 21:40:23 2022 ] Mean training loss: 3.7301. +[ Wed Sep 7 21:40:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:40:23 2022 ] Training epoch: 2 +[ Wed Sep 7 21:40:23 2022 ] Learning rate: 0.03 +[ Wed Sep 7 21:44:51 2022 ] Mean training loss: 2.7842. +[ Wed Sep 7 21:44:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:44:51 2022 ] Training epoch: 3 +[ Wed Sep 7 21:44:51 2022 ] Learning rate: 0.045 +[ Wed Sep 7 21:49:19 2022 ] Mean training loss: 2.2637. +[ Wed Sep 7 21:49:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:49:19 2022 ] Training epoch: 4 +[ Wed Sep 7 21:49:19 2022 ] Learning rate: 0.06 +[ Wed Sep 7 21:53:47 2022 ] Mean training loss: 1.9632. +[ Wed Sep 7 21:53:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:53:47 2022 ] Training epoch: 5 +[ Wed Sep 7 21:53:47 2022 ] Learning rate: 0.075 +[ Wed Sep 7 21:58:16 2022 ] Mean training loss: 1.7516. +[ Wed Sep 7 21:58:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:58:16 2022 ] Training epoch: 6 +[ Wed Sep 7 21:58:16 2022 ] Learning rate: 0.09 +[ Wed Sep 7 22:02:44 2022 ] Mean training loss: 1.6499. +[ Wed Sep 7 22:02:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:02:44 2022 ] Training epoch: 7 +[ Wed Sep 7 22:02:44 2022 ] Learning rate: 0.10500000000000001 +[ Wed Sep 7 22:07:13 2022 ] Mean training loss: 1.5352. +[ Wed Sep 7 22:07:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:07:13 2022 ] Training epoch: 8 +[ Wed Sep 7 22:07:13 2022 ] Learning rate: 0.12 +[ Wed Sep 7 22:11:41 2022 ] Mean training loss: 1.4923. +[ Wed Sep 7 22:11:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:11:41 2022 ] Training epoch: 9 +[ Wed Sep 7 22:11:41 2022 ] Learning rate: 0.13499999999999998 +[ Wed Sep 7 22:16:10 2022 ] Mean training loss: 1.4588. 
+[ Wed Sep 7 22:16:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:16:10 2022 ] Training epoch: 10 +[ Wed Sep 7 22:16:10 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:20:37 2022 ] Mean training loss: 1.4317. +[ Wed Sep 7 22:20:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:20:37 2022 ] Training epoch: 11 +[ Wed Sep 7 22:20:37 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:25:06 2022 ] Mean training loss: 1.3728. +[ Wed Sep 7 22:25:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:25:06 2022 ] Training epoch: 12 +[ Wed Sep 7 22:25:06 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:29:34 2022 ] Mean training loss: 1.3204. +[ Wed Sep 7 22:29:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:29:34 2022 ] Training epoch: 13 +[ Wed Sep 7 22:29:34 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:34:01 2022 ] Mean training loss: 1.2679. +[ Wed Sep 7 22:34:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:34:01 2022 ] Training epoch: 14 +[ Wed Sep 7 22:34:01 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:38:29 2022 ] Mean training loss: 1.2497. +[ Wed Sep 7 22:38:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:38:29 2022 ] Training epoch: 15 +[ Wed Sep 7 22:38:29 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:42:56 2022 ] Mean training loss: 1.2155. +[ Wed Sep 7 22:42:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:42:56 2022 ] Training epoch: 16 +[ Wed Sep 7 22:42:56 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:47:23 2022 ] Mean training loss: 1.1775. +[ Wed Sep 7 22:47:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:47:23 2022 ] Training epoch: 17 +[ Wed Sep 7 22:47:23 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:51:52 2022 ] Mean training loss: 1.1661. 
+[ Wed Sep 7 22:51:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:51:52 2022 ] Training epoch: 18 +[ Wed Sep 7 22:51:52 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:56:19 2022 ] Mean training loss: 1.1315. +[ Wed Sep 7 22:56:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:56:19 2022 ] Training epoch: 19 +[ Wed Sep 7 22:56:19 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:00:47 2022 ] Mean training loss: 1.1277. +[ Wed Sep 7 23:00:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:00:47 2022 ] Training epoch: 20 +[ Wed Sep 7 23:00:47 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:05:14 2022 ] Mean training loss: 1.1047. +[ Wed Sep 7 23:05:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:05:14 2022 ] Training epoch: 21 +[ Wed Sep 7 23:05:14 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:09:42 2022 ] Mean training loss: 1.0754. +[ Wed Sep 7 23:09:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:09:42 2022 ] Training epoch: 22 +[ Wed Sep 7 23:09:42 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:14:10 2022 ] Mean training loss: 1.0693. +[ Wed Sep 7 23:14:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:14:10 2022 ] Training epoch: 23 +[ Wed Sep 7 23:14:10 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:18:37 2022 ] Mean training loss: 1.0565. +[ Wed Sep 7 23:18:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:18:37 2022 ] Training epoch: 24 +[ Wed Sep 7 23:18:37 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:23:05 2022 ] Mean training loss: 1.0643. +[ Wed Sep 7 23:23:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:23:05 2022 ] Training epoch: 25 +[ Wed Sep 7 23:23:05 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:27:33 2022 ] Mean training loss: 1.0342. 
+[ Wed Sep 7 23:27:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:27:33 2022 ] Training epoch: 26 +[ Wed Sep 7 23:27:33 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:32:01 2022 ] Mean training loss: 1.0300. +[ Wed Sep 7 23:32:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:32:01 2022 ] Training epoch: 27 +[ Wed Sep 7 23:32:01 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:36:30 2022 ] Mean training loss: 1.0195. +[ Wed Sep 7 23:36:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:36:30 2022 ] Training epoch: 28 +[ Wed Sep 7 23:36:30 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:40:58 2022 ] Mean training loss: 1.0123. +[ Wed Sep 7 23:40:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:40:58 2022 ] Training epoch: 29 +[ Wed Sep 7 23:40:58 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:45:26 2022 ] Mean training loss: 0.9886. +[ Wed Sep 7 23:45:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:45:26 2022 ] Training epoch: 30 +[ Wed Sep 7 23:45:26 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:49:54 2022 ] Mean training loss: 0.9750. +[ Wed Sep 7 23:49:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:49:54 2022 ] Training epoch: 31 +[ Wed Sep 7 23:49:54 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:54:22 2022 ] Mean training loss: 0.9614. +[ Wed Sep 7 23:54:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:54:22 2022 ] Training epoch: 32 +[ Wed Sep 7 23:54:22 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:58:50 2022 ] Mean training loss: 0.9800. +[ Wed Sep 7 23:58:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:58:50 2022 ] Training epoch: 33 +[ Wed Sep 7 23:58:50 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:03:18 2022 ] Mean training loss: 0.9875. 
+[ Thu Sep 8 00:03:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:03:18 2022 ] Training epoch: 34 +[ Thu Sep 8 00:03:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:07:45 2022 ] Mean training loss: 0.9604. +[ Thu Sep 8 00:07:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:07:45 2022 ] Training epoch: 35 +[ Thu Sep 8 00:07:45 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:12:12 2022 ] Mean training loss: 0.9519. +[ Thu Sep 8 00:12:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:12:12 2022 ] Training epoch: 36 +[ Thu Sep 8 00:12:12 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:16:39 2022 ] Mean training loss: 0.9461. +[ Thu Sep 8 00:16:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:16:39 2022 ] Training epoch: 37 +[ Thu Sep 8 00:16:39 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:21:05 2022 ] Mean training loss: 0.9580. +[ Thu Sep 8 00:21:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:21:05 2022 ] Training epoch: 38 +[ Thu Sep 8 00:21:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:25:32 2022 ] Mean training loss: 0.9211. +[ Thu Sep 8 00:25:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:25:32 2022 ] Training epoch: 39 +[ Thu Sep 8 00:25:32 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:29:59 2022 ] Mean training loss: 0.9380. +[ Thu Sep 8 00:29:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:29:59 2022 ] Training epoch: 40 +[ Thu Sep 8 00:29:59 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:34:26 2022 ] Mean training loss: 0.9347. +[ Thu Sep 8 00:34:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:34:26 2022 ] Training epoch: 41 +[ Thu Sep 8 00:34:26 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:38:53 2022 ] Mean training loss: 0.9027. 
+[ Thu Sep 8 00:38:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:38:53 2022 ] Training epoch: 42 +[ Thu Sep 8 00:38:53 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:43:20 2022 ] Mean training loss: 0.9365. +[ Thu Sep 8 00:43:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:43:20 2022 ] Training epoch: 43 +[ Thu Sep 8 00:43:20 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:47:48 2022 ] Mean training loss: 0.9236. +[ Thu Sep 8 00:47:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:47:48 2022 ] Training epoch: 44 +[ Thu Sep 8 00:47:48 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:52:16 2022 ] Mean training loss: 0.9109. +[ Thu Sep 8 00:52:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:52:16 2022 ] Training epoch: 45 +[ Thu Sep 8 00:52:16 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:56:44 2022 ] Mean training loss: 0.9087. +[ Thu Sep 8 00:56:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:56:44 2022 ] Training epoch: 46 +[ Thu Sep 8 00:56:44 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:01:11 2022 ] Mean training loss: 0.9218. +[ Thu Sep 8 01:01:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:01:11 2022 ] Training epoch: 47 +[ Thu Sep 8 01:01:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:05:39 2022 ] Mean training loss: 0.8873. +[ Thu Sep 8 01:05:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:05:39 2022 ] Training epoch: 48 +[ Thu Sep 8 01:05:39 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:10:07 2022 ] Mean training loss: 0.8890. +[ Thu Sep 8 01:10:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:10:07 2022 ] Training epoch: 49 +[ Thu Sep 8 01:10:07 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:14:35 2022 ] Mean training loss: 0.8973. 
+[ Thu Sep 8 01:14:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:14:35 2022 ] Training epoch: 50 +[ Thu Sep 8 01:14:35 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:19:02 2022 ] Mean training loss: 0.9033. +[ Thu Sep 8 01:19:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:19:02 2022 ] Training epoch: 51 +[ Thu Sep 8 01:19:02 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:23:31 2022 ] Mean training loss: 0.4222. +[ Thu Sep 8 01:23:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:23:31 2022 ] Eval epoch: 51 +[ Thu Sep 8 01:31:34 2022 ] Epoch 51 Curr Acc: (32813/59477)55.17% +[ Thu Sep 8 01:31:34 2022 ] Epoch 51 Best Acc 55.17% +[ Thu Sep 8 01:31:34 2022 ] Training epoch: 52 +[ Thu Sep 8 01:31:34 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:36:01 2022 ] Mean training loss: 0.2876. +[ Thu Sep 8 01:36:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:36:01 2022 ] Eval epoch: 52 +[ Thu Sep 8 01:43:52 2022 ] Epoch 52 Curr Acc: (33442/59477)56.23% +[ Thu Sep 8 01:43:52 2022 ] Epoch 52 Best Acc 56.23% +[ Thu Sep 8 01:43:52 2022 ] Training epoch: 53 +[ Thu Sep 8 01:43:52 2022 ] Learning rate: 0.015 +[ Thu Sep 8 01:48:19 2022 ] Mean training loss: 0.2285. +[ Thu Sep 8 01:48:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:48:19 2022 ] Eval epoch: 53 +[ Thu Sep 8 01:56:10 2022 ] Epoch 53 Curr Acc: (34407/59477)57.85% +[ Thu Sep 8 01:56:10 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 01:56:10 2022 ] Training epoch: 54 +[ Thu Sep 8 01:56:10 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:00:37 2022 ] Mean training loss: 0.1980. 
+[ Thu Sep 8 02:00:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:00:37 2022 ] Eval epoch: 54 +[ Thu Sep 8 02:08:28 2022 ] Epoch 54 Curr Acc: (33565/59477)56.43% +[ Thu Sep 8 02:08:28 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 02:08:28 2022 ] Training epoch: 55 +[ Thu Sep 8 02:08:28 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:12:55 2022 ] Mean training loss: 0.1544. +[ Thu Sep 8 02:12:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:12:55 2022 ] Eval epoch: 55 +[ Thu Sep 8 02:20:46 2022 ] Epoch 55 Curr Acc: (33778/59477)56.79% +[ Thu Sep 8 02:20:46 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 02:20:46 2022 ] Training epoch: 56 +[ Thu Sep 8 02:20:46 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:25:13 2022 ] Mean training loss: 0.1342. +[ Thu Sep 8 02:25:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:25:13 2022 ] Eval epoch: 56 +[ Thu Sep 8 02:33:04 2022 ] Epoch 56 Curr Acc: (32699/59477)54.98% +[ Thu Sep 8 02:33:04 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 02:33:04 2022 ] Training epoch: 57 +[ Thu Sep 8 02:33:04 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:37:30 2022 ] Mean training loss: 0.1154. +[ Thu Sep 8 02:37:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:37:30 2022 ] Eval epoch: 57 +[ Thu Sep 8 02:45:21 2022 ] Epoch 57 Curr Acc: (34318/59477)57.70% +[ Thu Sep 8 02:45:21 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 02:45:21 2022 ] Training epoch: 58 +[ Thu Sep 8 02:45:21 2022 ] Learning rate: 0.015 +[ Thu Sep 8 02:49:49 2022 ] Mean training loss: 0.0991. +[ Thu Sep 8 02:49:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:49:49 2022 ] Eval epoch: 58 +[ Thu Sep 8 02:57:40 2022 ] Epoch 58 Curr Acc: (33961/59477)57.10% +[ Thu Sep 8 02:57:40 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 02:57:40 2022 ] Training epoch: 59 +[ Thu Sep 8 02:57:40 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:02:07 2022 ] Mean training loss: 0.0811. 
+[ Thu Sep 8 03:02:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:02:07 2022 ] Eval epoch: 59 +[ Thu Sep 8 03:09:59 2022 ] Epoch 59 Curr Acc: (34302/59477)57.67% +[ Thu Sep 8 03:09:59 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 03:09:59 2022 ] Training epoch: 60 +[ Thu Sep 8 03:09:59 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:14:26 2022 ] Mean training loss: 0.0705. +[ Thu Sep 8 03:14:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:14:26 2022 ] Eval epoch: 60 +[ Thu Sep 8 03:22:17 2022 ] Epoch 60 Curr Acc: (33822/59477)56.87% +[ Thu Sep 8 03:22:17 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 03:22:17 2022 ] Training epoch: 61 +[ Thu Sep 8 03:22:17 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:26:43 2022 ] Mean training loss: 0.0648. +[ Thu Sep 8 03:26:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:26:43 2022 ] Eval epoch: 61 +[ Thu Sep 8 03:34:34 2022 ] Epoch 61 Curr Acc: (31688/59477)53.28% +[ Thu Sep 8 03:34:34 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 03:34:34 2022 ] Training epoch: 62 +[ Thu Sep 8 03:34:34 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:39:00 2022 ] Mean training loss: 0.0592. +[ Thu Sep 8 03:39:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:39:00 2022 ] Eval epoch: 62 +[ Thu Sep 8 03:46:51 2022 ] Epoch 62 Curr Acc: (33950/59477)57.08% +[ Thu Sep 8 03:46:51 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 03:46:51 2022 ] Training epoch: 63 +[ Thu Sep 8 03:46:51 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:51:18 2022 ] Mean training loss: 0.0518. +[ Thu Sep 8 03:51:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:51:18 2022 ] Eval epoch: 63 +[ Thu Sep 8 03:59:09 2022 ] Epoch 63 Curr Acc: (34179/59477)57.47% +[ Thu Sep 8 03:59:09 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 03:59:09 2022 ] Training epoch: 64 +[ Thu Sep 8 03:59:09 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:03:36 2022 ] Mean training loss: 0.0489. 
+[ Thu Sep 8 04:03:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:03:36 2022 ] Eval epoch: 64 +[ Thu Sep 8 04:11:26 2022 ] Epoch 64 Curr Acc: (34018/59477)57.20% +[ Thu Sep 8 04:11:26 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 04:11:26 2022 ] Training epoch: 65 +[ Thu Sep 8 04:11:26 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:15:53 2022 ] Mean training loss: 0.0428. +[ Thu Sep 8 04:15:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:15:53 2022 ] Eval epoch: 65 +[ Thu Sep 8 04:23:44 2022 ] Epoch 65 Curr Acc: (32835/59477)55.21% +[ Thu Sep 8 04:23:44 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 04:23:44 2022 ] Training epoch: 66 +[ Thu Sep 8 04:23:44 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:28:10 2022 ] Mean training loss: 0.0445. +[ Thu Sep 8 04:28:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:28:11 2022 ] Eval epoch: 66 +[ Thu Sep 8 04:36:01 2022 ] Epoch 66 Curr Acc: (33433/59477)56.21% +[ Thu Sep 8 04:36:01 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 04:36:01 2022 ] Training epoch: 67 +[ Thu Sep 8 04:36:01 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:40:27 2022 ] Mean training loss: 0.0374. +[ Thu Sep 8 04:40:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:40:27 2022 ] Eval epoch: 67 +[ Thu Sep 8 04:48:18 2022 ] Epoch 67 Curr Acc: (33614/59477)56.52% +[ Thu Sep 8 04:48:18 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 04:48:18 2022 ] Training epoch: 68 +[ Thu Sep 8 04:48:18 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:52:44 2022 ] Mean training loss: 0.0423. +[ Thu Sep 8 04:52:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:52:44 2022 ] Eval epoch: 68 +[ Thu Sep 8 05:00:34 2022 ] Epoch 68 Curr Acc: (33131/59477)55.70% +[ Thu Sep 8 05:00:34 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 05:00:35 2022 ] Training epoch: 69 +[ Thu Sep 8 05:00:35 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:05:00 2022 ] Mean training loss: 0.0395. 
+[ Thu Sep 8 05:05:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:05:00 2022 ] Eval epoch: 69 +[ Thu Sep 8 05:12:51 2022 ] Epoch 69 Curr Acc: (33446/59477)56.23% +[ Thu Sep 8 05:12:51 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 05:12:51 2022 ] Training epoch: 70 +[ Thu Sep 8 05:12:51 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:17:17 2022 ] Mean training loss: 0.0333. +[ Thu Sep 8 05:17:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:17:17 2022 ] Eval epoch: 70 +[ Thu Sep 8 05:25:07 2022 ] Epoch 70 Curr Acc: (33792/59477)56.82% +[ Thu Sep 8 05:25:07 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 05:25:08 2022 ] Training epoch: 71 +[ Thu Sep 8 05:25:08 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:29:33 2022 ] Mean training loss: 0.0233. +[ Thu Sep 8 05:29:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:29:33 2022 ] Eval epoch: 71 +[ Thu Sep 8 05:37:24 2022 ] Epoch 71 Curr Acc: (33925/59477)57.04% +[ Thu Sep 8 05:37:24 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 05:37:24 2022 ] Training epoch: 72 +[ Thu Sep 8 05:37:24 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:41:49 2022 ] Mean training loss: 0.0224. +[ Thu Sep 8 05:41:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:41:49 2022 ] Eval epoch: 72 +[ Thu Sep 8 05:49:40 2022 ] Epoch 72 Curr Acc: (34114/59477)57.36% +[ Thu Sep 8 05:49:40 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 05:49:40 2022 ] Training epoch: 73 +[ Thu Sep 8 05:49:40 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 05:54:06 2022 ] Mean training loss: 0.0193. 
+[ Thu Sep 8 05:54:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:54:06 2022 ] Eval epoch: 73 +[ Thu Sep 8 06:01:57 2022 ] Epoch 73 Curr Acc: (33739/59477)56.73% +[ Thu Sep 8 06:01:57 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 06:01:57 2022 ] Training epoch: 74 +[ Thu Sep 8 06:01:57 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:06:23 2022 ] Mean training loss: 0.0176. +[ Thu Sep 8 06:06:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:06:23 2022 ] Eval epoch: 74 +[ Thu Sep 8 06:14:13 2022 ] Epoch 74 Curr Acc: (34195/59477)57.49% +[ Thu Sep 8 06:14:13 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 06:14:13 2022 ] Training epoch: 75 +[ Thu Sep 8 06:14:13 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:18:39 2022 ] Mean training loss: 0.0177. +[ Thu Sep 8 06:18:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:18:39 2022 ] Eval epoch: 75 +[ Thu Sep 8 06:26:30 2022 ] Epoch 75 Curr Acc: (34027/59477)57.21% +[ Thu Sep 8 06:26:30 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 06:26:30 2022 ] Training epoch: 76 +[ Thu Sep 8 06:26:30 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:30:56 2022 ] Mean training loss: 0.0164. +[ Thu Sep 8 06:30:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:30:56 2022 ] Eval epoch: 76 +[ Thu Sep 8 06:38:47 2022 ] Epoch 76 Curr Acc: (34208/59477)57.51% +[ Thu Sep 8 06:38:47 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 06:38:47 2022 ] Training epoch: 77 +[ Thu Sep 8 06:38:47 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:43:13 2022 ] Mean training loss: 0.0160. 
+[ Thu Sep 8 06:43:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:43:13 2022 ] Eval epoch: 77 +[ Thu Sep 8 06:51:03 2022 ] Epoch 77 Curr Acc: (34368/59477)57.78% +[ Thu Sep 8 06:51:03 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 06:51:03 2022 ] Training epoch: 78 +[ Thu Sep 8 06:51:03 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 06:55:29 2022 ] Mean training loss: 0.0170. +[ Thu Sep 8 06:55:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:55:29 2022 ] Eval epoch: 78 +[ Thu Sep 8 07:03:20 2022 ] Epoch 78 Curr Acc: (34327/59477)57.71% +[ Thu Sep 8 07:03:20 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 07:03:20 2022 ] Training epoch: 79 +[ Thu Sep 8 07:03:20 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:07:45 2022 ] Mean training loss: 0.0164. +[ Thu Sep 8 07:07:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:07:45 2022 ] Eval epoch: 79 +[ Thu Sep 8 07:15:36 2022 ] Epoch 79 Curr Acc: (33012/59477)55.50% +[ Thu Sep 8 07:15:36 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 07:15:36 2022 ] Training epoch: 80 +[ Thu Sep 8 07:15:36 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:20:01 2022 ] Mean training loss: 0.0164. +[ Thu Sep 8 07:20:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:20:01 2022 ] Eval epoch: 80 +[ Thu Sep 8 07:27:52 2022 ] Epoch 80 Curr Acc: (34292/59477)57.66% +[ Thu Sep 8 07:27:52 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 07:27:52 2022 ] Training epoch: 81 +[ Thu Sep 8 07:27:52 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:32:18 2022 ] Mean training loss: 0.0143. 
+[ Thu Sep 8 07:32:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:32:18 2022 ] Eval epoch: 81 +[ Thu Sep 8 07:40:09 2022 ] Epoch 81 Curr Acc: (34228/59477)57.55% +[ Thu Sep 8 07:40:09 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 07:40:09 2022 ] Training epoch: 82 +[ Thu Sep 8 07:40:09 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:44:35 2022 ] Mean training loss: 0.0158. +[ Thu Sep 8 07:44:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:44:35 2022 ] Eval epoch: 82 +[ Thu Sep 8 07:52:25 2022 ] Epoch 82 Curr Acc: (34370/59477)57.79% +[ Thu Sep 8 07:52:25 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 07:52:25 2022 ] Training epoch: 83 +[ Thu Sep 8 07:52:25 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:56:50 2022 ] Mean training loss: 0.0141. +[ Thu Sep 8 07:56:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:56:50 2022 ] Eval epoch: 83 +[ Thu Sep 8 08:04:41 2022 ] Epoch 83 Curr Acc: (34255/59477)57.59% +[ Thu Sep 8 08:04:41 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 08:04:41 2022 ] Training epoch: 84 +[ Thu Sep 8 08:04:41 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:09:06 2022 ] Mean training loss: 0.0151. +[ Thu Sep 8 08:09:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:09:06 2022 ] Eval epoch: 84 +[ Thu Sep 8 08:16:57 2022 ] Epoch 84 Curr Acc: (34008/59477)57.18% +[ Thu Sep 8 08:16:57 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 08:16:57 2022 ] Training epoch: 85 +[ Thu Sep 8 08:16:57 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:21:22 2022 ] Mean training loss: 0.0156. 
+[ Thu Sep 8 08:21:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:21:22 2022 ] Eval epoch: 85 +[ Thu Sep 8 08:29:13 2022 ] Epoch 85 Curr Acc: (33965/59477)57.11% +[ Thu Sep 8 08:29:13 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 08:29:13 2022 ] Training epoch: 86 +[ Thu Sep 8 08:29:13 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:33:37 2022 ] Mean training loss: 0.0137. +[ Thu Sep 8 08:33:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:33:37 2022 ] Eval epoch: 86 +[ Thu Sep 8 08:41:28 2022 ] Epoch 86 Curr Acc: (34226/59477)57.54% +[ Thu Sep 8 08:41:28 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 08:41:28 2022 ] Training epoch: 87 +[ Thu Sep 8 08:41:28 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:45:54 2022 ] Mean training loss: 0.0140. +[ Thu Sep 8 08:45:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:45:54 2022 ] Eval epoch: 87 +[ Thu Sep 8 08:53:44 2022 ] Epoch 87 Curr Acc: (34184/59477)57.47% +[ Thu Sep 8 08:53:44 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 08:53:44 2022 ] Training epoch: 88 +[ Thu Sep 8 08:53:44 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:58:10 2022 ] Mean training loss: 0.0153. +[ Thu Sep 8 08:58:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:58:10 2022 ] Eval epoch: 88 +[ Thu Sep 8 09:06:01 2022 ] Epoch 88 Curr Acc: (32881/59477)55.28% +[ Thu Sep 8 09:06:01 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 09:06:01 2022 ] Training epoch: 89 +[ Thu Sep 8 09:06:01 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:10:27 2022 ] Mean training loss: 0.0135. 
+[ Thu Sep 8 09:10:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:10:27 2022 ] Eval epoch: 89 +[ Thu Sep 8 09:18:18 2022 ] Epoch 89 Curr Acc: (33262/59477)55.92% +[ Thu Sep 8 09:18:18 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 09:18:18 2022 ] Training epoch: 90 +[ Thu Sep 8 09:18:18 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:22:44 2022 ] Mean training loss: 0.0137. +[ Thu Sep 8 09:22:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:22:44 2022 ] Eval epoch: 90 +[ Thu Sep 8 09:30:35 2022 ] Epoch 90 Curr Acc: (33971/59477)57.12% +[ Thu Sep 8 09:30:35 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 09:30:35 2022 ] Training epoch: 91 +[ Thu Sep 8 09:30:35 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 09:35:01 2022 ] Mean training loss: 0.0136. +[ Thu Sep 8 09:35:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:35:01 2022 ] Eval epoch: 91 +[ Thu Sep 8 09:42:51 2022 ] Epoch 91 Curr Acc: (34256/59477)57.60% +[ Thu Sep 8 09:42:51 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 09:42:51 2022 ] Training epoch: 92 +[ Thu Sep 8 09:42:51 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 09:47:17 2022 ] Mean training loss: 0.0141. +[ Thu Sep 8 09:47:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:47:17 2022 ] Eval epoch: 92 +[ Thu Sep 8 09:55:08 2022 ] Epoch 92 Curr Acc: (34344/59477)57.74% +[ Thu Sep 8 09:55:08 2022 ] Epoch 53 Best Acc 57.85% +[ Thu Sep 8 09:55:08 2022 ] Training epoch: 93 +[ Thu Sep 8 09:55:08 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 09:59:34 2022 ] Mean training loss: 0.0136. 
+[ Thu Sep 8 09:59:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:59:34 2022 ] Eval epoch: 93 +[ Thu Sep 8 10:07:25 2022 ] Epoch 93 Curr Acc: (34425/59477)57.88% +[ Thu Sep 8 10:07:25 2022 ] Epoch 93 Best Acc 57.88% +[ Thu Sep 8 10:07:25 2022 ] Training epoch: 94 +[ Thu Sep 8 10:07:25 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:11:52 2022 ] Mean training loss: 0.0139. +[ Thu Sep 8 10:11:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:11:52 2022 ] Eval epoch: 94 +[ Thu Sep 8 10:19:43 2022 ] Epoch 94 Curr Acc: (34310/59477)57.69% +[ Thu Sep 8 10:19:43 2022 ] Epoch 93 Best Acc 57.88% +[ Thu Sep 8 10:19:43 2022 ] Training epoch: 95 +[ Thu Sep 8 10:19:43 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:24:09 2022 ] Mean training loss: 0.0134. +[ Thu Sep 8 10:24:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:24:09 2022 ] Eval epoch: 95 +[ Thu Sep 8 10:32:00 2022 ] Epoch 95 Curr Acc: (32562/59477)54.75% +[ Thu Sep 8 10:32:00 2022 ] Epoch 93 Best Acc 57.88% +[ Thu Sep 8 10:32:00 2022 ] Training epoch: 96 +[ Thu Sep 8 10:32:00 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:36:26 2022 ] Mean training loss: 0.0124. +[ Thu Sep 8 10:36:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:36:26 2022 ] Eval epoch: 96 +[ Thu Sep 8 10:44:17 2022 ] Epoch 96 Curr Acc: (34062/59477)57.27% +[ Thu Sep 8 10:44:17 2022 ] Epoch 93 Best Acc 57.88% +[ Thu Sep 8 10:44:17 2022 ] Training epoch: 97 +[ Thu Sep 8 10:44:17 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 10:48:43 2022 ] Mean training loss: 0.0130. 
+[ Thu Sep 8 10:48:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:48:43 2022 ] Eval epoch: 97 +[ Thu Sep 8 10:56:34 2022 ] Epoch 97 Curr Acc: (34155/59477)57.43% +[ Thu Sep 8 10:56:34 2022 ] Epoch 93 Best Acc 57.88% +[ Thu Sep 8 10:56:34 2022 ] Training epoch: 98 +[ Thu Sep 8 10:56:34 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:01:01 2022 ] Mean training loss: 0.0125. +[ Thu Sep 8 11:01:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:01:01 2022 ] Eval epoch: 98 +[ Thu Sep 8 11:08:52 2022 ] Epoch 98 Curr Acc: (34319/59477)57.70% +[ Thu Sep 8 11:08:52 2022 ] Epoch 93 Best Acc 57.88% +[ Thu Sep 8 11:08:52 2022 ] Training epoch: 99 +[ Thu Sep 8 11:08:52 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:13:18 2022 ] Mean training loss: 0.0125. +[ Thu Sep 8 11:13:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:13:18 2022 ] Eval epoch: 99 +[ Thu Sep 8 11:21:10 2022 ] Epoch 99 Curr Acc: (34330/59477)57.72% +[ Thu Sep 8 11:21:10 2022 ] Epoch 93 Best Acc 57.88% +[ Thu Sep 8 11:21:10 2022 ] Training epoch: 100 +[ Thu Sep 8 11:21:10 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:25:36 2022 ] Mean training loss: 0.0134. +[ Thu Sep 8 11:25:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:25:36 2022 ] Eval epoch: 100 +[ Thu Sep 8 11:33:27 2022 ] Epoch 100 Curr Acc: (33592/59477)56.48% +[ Thu Sep 8 11:33:27 2022 ] Epoch 93 Best Acc 57.88% +[ Thu Sep 8 11:33:28 2022 ] Training epoch: 101 +[ Thu Sep 8 11:33:28 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:37:54 2022 ] Mean training loss: 0.0127. 
+[ Thu Sep 8 11:37:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:37:54 2022 ] Eval epoch: 101 +[ Thu Sep 8 11:45:45 2022 ] Epoch 101 Curr Acc: (34314/59477)57.69% +[ Thu Sep 8 11:45:45 2022 ] Epoch 93 Best Acc 57.88% +[ Thu Sep 8 11:45:45 2022 ] Training epoch: 102 +[ Thu Sep 8 11:45:45 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 11:50:12 2022 ] Mean training loss: 0.0129. +[ Thu Sep 8 11:50:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:50:12 2022 ] Eval epoch: 102 +[ Thu Sep 8 11:58:03 2022 ] Epoch 102 Curr Acc: (34594/59477)58.16% +[ Thu Sep 8 11:58:03 2022 ] Epoch 102 Best Acc 58.16% +[ Thu Sep 8 11:58:03 2022 ] Training epoch: 103 +[ Thu Sep 8 11:58:03 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:02:30 2022 ] Mean training loss: 0.0132. +[ Thu Sep 8 12:02:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:02:30 2022 ] Eval epoch: 103 +[ Thu Sep 8 12:10:21 2022 ] Epoch 103 Curr Acc: (33746/59477)56.74% +[ Thu Sep 8 12:10:21 2022 ] Epoch 102 Best Acc 58.16% +[ Thu Sep 8 12:10:21 2022 ] Training epoch: 104 +[ Thu Sep 8 12:10:21 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:14:48 2022 ] Mean training loss: 0.0133. +[ Thu Sep 8 12:14:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:14:48 2022 ] Eval epoch: 104 +[ Thu Sep 8 12:22:39 2022 ] Epoch 104 Curr Acc: (34022/59477)57.20% +[ Thu Sep 8 12:22:39 2022 ] Epoch 102 Best Acc 58.16% +[ Thu Sep 8 12:22:39 2022 ] Training epoch: 105 +[ Thu Sep 8 12:22:39 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:27:06 2022 ] Mean training loss: 0.0124. 
+[ Thu Sep 8 12:27:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:27:06 2022 ] Eval epoch: 105 +[ Thu Sep 8 12:34:57 2022 ] Epoch 105 Curr Acc: (34214/59477)57.52% +[ Thu Sep 8 12:34:57 2022 ] Epoch 102 Best Acc 58.16% +[ Thu Sep 8 12:34:57 2022 ] Training epoch: 106 +[ Thu Sep 8 12:34:57 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:39:24 2022 ] Mean training loss: 0.0126. +[ Thu Sep 8 12:39:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:39:24 2022 ] Eval epoch: 106 +[ Thu Sep 8 12:47:15 2022 ] Epoch 106 Curr Acc: (34257/59477)57.60% +[ Thu Sep 8 12:47:15 2022 ] Epoch 102 Best Acc 58.16% +[ Thu Sep 8 12:47:15 2022 ] Training epoch: 107 +[ Thu Sep 8 12:47:15 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:51:42 2022 ] Mean training loss: 0.0123. +[ Thu Sep 8 12:51:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:51:42 2022 ] Eval epoch: 107 +[ Thu Sep 8 12:59:33 2022 ] Epoch 107 Curr Acc: (33921/59477)57.03% +[ Thu Sep 8 12:59:33 2022 ] Epoch 102 Best Acc 58.16% +[ Thu Sep 8 12:59:33 2022 ] Training epoch: 108 +[ Thu Sep 8 12:59:33 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:04:00 2022 ] Mean training loss: 0.0124. +[ Thu Sep 8 13:04:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:04:00 2022 ] Eval epoch: 108 +[ Thu Sep 8 13:11:52 2022 ] Epoch 108 Curr Acc: (34280/59477)57.64% +[ Thu Sep 8 13:11:52 2022 ] Epoch 102 Best Acc 58.16% +[ Thu Sep 8 13:11:52 2022 ] Training epoch: 109 +[ Thu Sep 8 13:11:52 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:16:18 2022 ] Mean training loss: 0.0128. 
+[ Thu Sep 8 13:16:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:16:18 2022 ] Eval epoch: 109 +[ Thu Sep 8 13:24:10 2022 ] Epoch 109 Curr Acc: (34290/59477)57.65% +[ Thu Sep 8 13:24:10 2022 ] Epoch 102 Best Acc 58.16% +[ Thu Sep 8 13:24:10 2022 ] Training epoch: 110 +[ Thu Sep 8 13:24:10 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:28:36 2022 ] Mean training loss: 0.0126. +[ Thu Sep 8 13:28:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:28:36 2022 ] Eval epoch: 110 +[ Thu Sep 8 13:36:27 2022 ] Epoch 110 Curr Acc: (34164/59477)57.44% +[ Thu Sep 8 13:36:27 2022 ] Epoch 102 Best Acc 58.16% +[ Thu Sep 8 13:36:27 2022 ] epoch: 102, best accuracy: 0.5816365990214705 +[ Thu Sep 8 13:36:27 2022 ] Experiment: ./work_dir/ntu120/xset_jm +[ Thu Sep 8 13:36:27 2022 ] # generator parameters: 2.922995 M. +[ Thu Sep 8 13:36:27 2022 ] Load weights from ./runs/ntu120/xset_jm/runs-101-132294.pt. +[ Thu Sep 8 13:36:27 2022 ] Eval epoch: 1 +[ Thu Sep 8 13:44:18 2022 ] Epoch 1 Curr Acc: (34594/59477)58.16% +[ Thu Sep 8 13:44:18 2022 ] Epoch 102 Best Acc 58.16% diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions 
import Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/config.yaml b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9590298a2d9715b10942252c546d543470a4e258 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu120/xsub_b.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + - 
448 + - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 120 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu120/xsub_b/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu120/xsub_b diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..76b24208be9bc8e27ff89f1d1dd06802a07b1127 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:21e6be77baaebfe7bd8e924bcec2a71be07bea20e769b2e3999c7d38e5d33951 +size 29946137 diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/log.txt b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..e2adcedd8df7b3b3c2d8aeaae1f7eded0b72e409 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/log.txt @@ -0,0 +1,631 @@ +[ Wed Sep 7 
21:34:12 2022 ] # generator parameters: 2.922995 M. +[ Wed Sep 7 21:34:19 2022 ] Parameters: +{'work_dir': './work_dir/ntu120/xsub_b', 'model_saved_name': './runs/ntu120/xsub_b/runs', 'config': 'config/ntu120/xsub_b.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Wed Sep 7 21:34:19 2022 ] Training epoch: 1 +[ Wed Sep 7 21:34:19 2022 ] Learning rate: 0.015 +[ Wed Sep 7 
21:40:53 2022 ] Mean training loss: 3.3579. +[ Wed Sep 7 21:40:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:40:53 2022 ] Training epoch: 2 +[ Wed Sep 7 21:40:53 2022 ] Learning rate: 0.03 +[ Wed Sep 7 21:47:29 2022 ] Mean training loss: 2.4453. +[ Wed Sep 7 21:47:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:47:29 2022 ] Training epoch: 3 +[ Wed Sep 7 21:47:29 2022 ] Learning rate: 0.045 +[ Wed Sep 7 21:54:04 2022 ] Mean training loss: 1.9786. +[ Wed Sep 7 21:54:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:54:04 2022 ] Training epoch: 4 +[ Wed Sep 7 21:54:04 2022 ] Learning rate: 0.06 +[ Wed Sep 7 22:00:40 2022 ] Mean training loss: 1.7189. +[ Wed Sep 7 22:00:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:00:41 2022 ] Training epoch: 5 +[ Wed Sep 7 22:00:41 2022 ] Learning rate: 0.075 +[ Wed Sep 7 22:07:16 2022 ] Mean training loss: 1.5571. +[ Wed Sep 7 22:07:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:07:16 2022 ] Training epoch: 6 +[ Wed Sep 7 22:07:16 2022 ] Learning rate: 0.09 +[ Wed Sep 7 22:13:52 2022 ] Mean training loss: 1.4579. +[ Wed Sep 7 22:13:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:13:52 2022 ] Training epoch: 7 +[ Wed Sep 7 22:13:52 2022 ] Learning rate: 0.10500000000000001 +[ Wed Sep 7 22:20:27 2022 ] Mean training loss: 1.3949. +[ Wed Sep 7 22:20:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:20:27 2022 ] Training epoch: 8 +[ Wed Sep 7 22:20:27 2022 ] Learning rate: 0.12 +[ Wed Sep 7 22:27:02 2022 ] Mean training loss: 1.3628. +[ Wed Sep 7 22:27:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:27:02 2022 ] Training epoch: 9 +[ Wed Sep 7 22:27:02 2022 ] Learning rate: 0.13499999999999998 +[ Wed Sep 7 22:33:37 2022 ] Mean training loss: 1.3236. 
+[ Wed Sep 7 22:33:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:33:37 2022 ] Training epoch: 10 +[ Wed Sep 7 22:33:37 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:40:12 2022 ] Mean training loss: 1.3199. +[ Wed Sep 7 22:40:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:40:12 2022 ] Training epoch: 11 +[ Wed Sep 7 22:40:12 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:46:48 2022 ] Mean training loss: 1.2531. +[ Wed Sep 7 22:46:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:46:48 2022 ] Training epoch: 12 +[ Wed Sep 7 22:46:48 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:53:23 2022 ] Mean training loss: 1.2180. +[ Wed Sep 7 22:53:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:53:23 2022 ] Training epoch: 13 +[ Wed Sep 7 22:53:23 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:59:59 2022 ] Mean training loss: 1.1717. +[ Wed Sep 7 22:59:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:59:59 2022 ] Training epoch: 14 +[ Wed Sep 7 22:59:59 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:06:34 2022 ] Mean training loss: 1.1578. +[ Wed Sep 7 23:06:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:06:34 2022 ] Training epoch: 15 +[ Wed Sep 7 23:06:34 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:13:10 2022 ] Mean training loss: 1.1288. +[ Wed Sep 7 23:13:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:13:10 2022 ] Training epoch: 16 +[ Wed Sep 7 23:13:10 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:19:46 2022 ] Mean training loss: 1.1052. +[ Wed Sep 7 23:19:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:19:46 2022 ] Training epoch: 17 +[ Wed Sep 7 23:19:46 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:26:21 2022 ] Mean training loss: 1.0852. 
+[ Wed Sep 7 23:26:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:26:21 2022 ] Training epoch: 18 +[ Wed Sep 7 23:26:21 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:32:57 2022 ] Mean training loss: 1.0627. +[ Wed Sep 7 23:32:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:32:57 2022 ] Training epoch: 19 +[ Wed Sep 7 23:32:57 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:39:33 2022 ] Mean training loss: 1.0540. +[ Wed Sep 7 23:39:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:39:33 2022 ] Training epoch: 20 +[ Wed Sep 7 23:39:33 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:46:08 2022 ] Mean training loss: 1.0431. +[ Wed Sep 7 23:46:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:46:08 2022 ] Training epoch: 21 +[ Wed Sep 7 23:46:08 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:52:43 2022 ] Mean training loss: 1.0358. +[ Wed Sep 7 23:52:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:52:43 2022 ] Training epoch: 22 +[ Wed Sep 7 23:52:43 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:59:19 2022 ] Mean training loss: 1.0118. +[ Wed Sep 7 23:59:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:59:19 2022 ] Training epoch: 23 +[ Wed Sep 7 23:59:19 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:05:54 2022 ] Mean training loss: 1.0057. +[ Thu Sep 8 00:05:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:05:54 2022 ] Training epoch: 24 +[ Thu Sep 8 00:05:54 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:12:31 2022 ] Mean training loss: 1.0026. +[ Thu Sep 8 00:12:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:12:31 2022 ] Training epoch: 25 +[ Thu Sep 8 00:12:31 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:19:08 2022 ] Mean training loss: 0.9769. 
+[ Thu Sep 8 00:19:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:19:08 2022 ] Training epoch: 26 +[ Thu Sep 8 00:19:08 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:25:43 2022 ] Mean training loss: 0.9739. +[ Thu Sep 8 00:25:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:25:43 2022 ] Training epoch: 27 +[ Thu Sep 8 00:25:43 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:32:20 2022 ] Mean training loss: 0.9803. +[ Thu Sep 8 00:32:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:32:20 2022 ] Training epoch: 28 +[ Thu Sep 8 00:32:20 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:38:55 2022 ] Mean training loss: 0.9674. +[ Thu Sep 8 00:38:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:38:55 2022 ] Training epoch: 29 +[ Thu Sep 8 00:38:55 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:45:31 2022 ] Mean training loss: 0.9484. +[ Thu Sep 8 00:45:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:45:31 2022 ] Training epoch: 30 +[ Thu Sep 8 00:45:31 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:52:07 2022 ] Mean training loss: 0.9565. +[ Thu Sep 8 00:52:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:52:07 2022 ] Training epoch: 31 +[ Thu Sep 8 00:52:07 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:58:42 2022 ] Mean training loss: 0.9433. +[ Thu Sep 8 00:58:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:58:42 2022 ] Training epoch: 32 +[ Thu Sep 8 00:58:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:05:17 2022 ] Mean training loss: 0.9634. +[ Thu Sep 8 01:05:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:05:17 2022 ] Training epoch: 33 +[ Thu Sep 8 01:05:17 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:11:52 2022 ] Mean training loss: 0.9298. 
+[ Thu Sep 8 01:11:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:11:52 2022 ] Training epoch: 34 +[ Thu Sep 8 01:11:52 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:18:27 2022 ] Mean training loss: 0.9412. +[ Thu Sep 8 01:18:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:18:27 2022 ] Training epoch: 35 +[ Thu Sep 8 01:18:27 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:25:03 2022 ] Mean training loss: 0.9290. +[ Thu Sep 8 01:25:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:25:03 2022 ] Training epoch: 36 +[ Thu Sep 8 01:25:03 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:31:39 2022 ] Mean training loss: 0.9283. +[ Thu Sep 8 01:31:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:31:39 2022 ] Training epoch: 37 +[ Thu Sep 8 01:31:39 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:38:14 2022 ] Mean training loss: 0.9286. +[ Thu Sep 8 01:38:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:38:14 2022 ] Training epoch: 38 +[ Thu Sep 8 01:38:14 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:44:49 2022 ] Mean training loss: 0.9283. +[ Thu Sep 8 01:44:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:44:49 2022 ] Training epoch: 39 +[ Thu Sep 8 01:44:49 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:51:25 2022 ] Mean training loss: 0.9218. +[ Thu Sep 8 01:51:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:51:25 2022 ] Training epoch: 40 +[ Thu Sep 8 01:51:25 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:58:00 2022 ] Mean training loss: 0.9245. +[ Thu Sep 8 01:58:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:58:00 2022 ] Training epoch: 41 +[ Thu Sep 8 01:58:00 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:04:35 2022 ] Mean training loss: 0.9271. 
+[ Thu Sep 8 02:04:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:04:35 2022 ] Training epoch: 42 +[ Thu Sep 8 02:04:35 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:11:10 2022 ] Mean training loss: 0.9300. +[ Thu Sep 8 02:11:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:11:10 2022 ] Training epoch: 43 +[ Thu Sep 8 02:11:10 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:17:46 2022 ] Mean training loss: 0.9108. +[ Thu Sep 8 02:17:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:17:46 2022 ] Training epoch: 44 +[ Thu Sep 8 02:17:46 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:24:21 2022 ] Mean training loss: 0.9156. +[ Thu Sep 8 02:24:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:24:21 2022 ] Training epoch: 45 +[ Thu Sep 8 02:24:21 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:30:56 2022 ] Mean training loss: 0.9297. +[ Thu Sep 8 02:30:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:30:56 2022 ] Training epoch: 46 +[ Thu Sep 8 02:30:56 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:37:30 2022 ] Mean training loss: 0.9178. +[ Thu Sep 8 02:37:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:37:30 2022 ] Training epoch: 47 +[ Thu Sep 8 02:37:30 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:44:05 2022 ] Mean training loss: 0.9152. +[ Thu Sep 8 02:44:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:44:05 2022 ] Training epoch: 48 +[ Thu Sep 8 02:44:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:50:42 2022 ] Mean training loss: 0.9138. +[ Thu Sep 8 02:50:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:50:42 2022 ] Training epoch: 49 +[ Thu Sep 8 02:50:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:57:20 2022 ] Mean training loss: 0.9059. 
+[ Thu Sep 8 02:57:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:57:20 2022 ] Training epoch: 50 +[ Thu Sep 8 02:57:20 2022 ] Learning rate: 0.15 +[ Thu Sep 8 03:03:57 2022 ] Mean training loss: 0.9049. +[ Thu Sep 8 03:03:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:03:57 2022 ] Training epoch: 51 +[ Thu Sep 8 03:03:57 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:10:35 2022 ] Mean training loss: 0.4705. +[ Thu Sep 8 03:10:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:10:35 2022 ] Eval epoch: 51 +[ Thu Sep 8 03:17:25 2022 ] Epoch 51 Curr Acc: (28300/50919)55.58% +[ Thu Sep 8 03:17:25 2022 ] Epoch 51 Best Acc 55.58% +[ Thu Sep 8 03:17:25 2022 ] Training epoch: 52 +[ Thu Sep 8 03:17:25 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:24:00 2022 ] Mean training loss: 0.3493. +[ Thu Sep 8 03:24:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:24:00 2022 ] Eval epoch: 52 +[ Thu Sep 8 03:30:44 2022 ] Epoch 52 Curr Acc: (29876/50919)58.67% +[ Thu Sep 8 03:30:44 2022 ] Epoch 52 Best Acc 58.67% +[ Thu Sep 8 03:30:44 2022 ] Training epoch: 53 +[ Thu Sep 8 03:30:44 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:37:19 2022 ] Mean training loss: 0.2904. +[ Thu Sep 8 03:37:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:37:19 2022 ] Eval epoch: 53 +[ Thu Sep 8 03:44:02 2022 ] Epoch 53 Curr Acc: (29712/50919)58.35% +[ Thu Sep 8 03:44:02 2022 ] Epoch 52 Best Acc 58.67% +[ Thu Sep 8 03:44:02 2022 ] Training epoch: 54 +[ Thu Sep 8 03:44:02 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:50:37 2022 ] Mean training loss: 0.2570. 
+[ Thu Sep 8 03:50:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:50:37 2022 ] Eval epoch: 54 +[ Thu Sep 8 03:57:20 2022 ] Epoch 54 Curr Acc: (29786/50919)58.50% +[ Thu Sep 8 03:57:20 2022 ] Epoch 52 Best Acc 58.67% +[ Thu Sep 8 03:57:20 2022 ] Training epoch: 55 +[ Thu Sep 8 03:57:20 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:03:56 2022 ] Mean training loss: 0.2286. +[ Thu Sep 8 04:03:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:03:56 2022 ] Eval epoch: 55 +[ Thu Sep 8 04:10:39 2022 ] Epoch 55 Curr Acc: (29113/50919)57.18% +[ Thu Sep 8 04:10:39 2022 ] Epoch 52 Best Acc 58.67% +[ Thu Sep 8 04:10:39 2022 ] Training epoch: 56 +[ Thu Sep 8 04:10:39 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:17:14 2022 ] Mean training loss: 0.2013. +[ Thu Sep 8 04:17:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:17:14 2022 ] Eval epoch: 56 +[ Thu Sep 8 04:23:57 2022 ] Epoch 56 Curr Acc: (30500/50919)59.90% +[ Thu Sep 8 04:23:57 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 04:23:57 2022 ] Training epoch: 57 +[ Thu Sep 8 04:23:57 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:30:32 2022 ] Mean training loss: 0.1756. +[ Thu Sep 8 04:30:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:30:32 2022 ] Eval epoch: 57 +[ Thu Sep 8 04:37:16 2022 ] Epoch 57 Curr Acc: (29922/50919)58.76% +[ Thu Sep 8 04:37:16 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 04:37:16 2022 ] Training epoch: 58 +[ Thu Sep 8 04:37:16 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:43:52 2022 ] Mean training loss: 0.1621. +[ Thu Sep 8 04:43:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:43:52 2022 ] Eval epoch: 58 +[ Thu Sep 8 04:50:35 2022 ] Epoch 58 Curr Acc: (29356/50919)57.65% +[ Thu Sep 8 04:50:35 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 04:50:35 2022 ] Training epoch: 59 +[ Thu Sep 8 04:50:35 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:57:10 2022 ] Mean training loss: 0.1457. 
+[ Thu Sep 8 04:57:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:57:10 2022 ] Eval epoch: 59 +[ Thu Sep 8 05:03:53 2022 ] Epoch 59 Curr Acc: (29717/50919)58.36% +[ Thu Sep 8 05:03:53 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 05:03:53 2022 ] Training epoch: 60 +[ Thu Sep 8 05:03:53 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:10:28 2022 ] Mean training loss: 0.1316. +[ Thu Sep 8 05:10:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:10:29 2022 ] Eval epoch: 60 +[ Thu Sep 8 05:17:11 2022 ] Epoch 60 Curr Acc: (29844/50919)58.61% +[ Thu Sep 8 05:17:11 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 05:17:11 2022 ] Training epoch: 61 +[ Thu Sep 8 05:17:11 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:23:47 2022 ] Mean training loss: 0.1264. +[ Thu Sep 8 05:23:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:23:47 2022 ] Eval epoch: 61 +[ Thu Sep 8 05:30:30 2022 ] Epoch 61 Curr Acc: (29379/50919)57.70% +[ Thu Sep 8 05:30:30 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 05:30:30 2022 ] Training epoch: 62 +[ Thu Sep 8 05:30:30 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:37:06 2022 ] Mean training loss: 0.1152. +[ Thu Sep 8 05:37:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:37:06 2022 ] Eval epoch: 62 +[ Thu Sep 8 05:43:49 2022 ] Epoch 62 Curr Acc: (29449/50919)57.83% +[ Thu Sep 8 05:43:49 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 05:43:49 2022 ] Training epoch: 63 +[ Thu Sep 8 05:43:49 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:50:24 2022 ] Mean training loss: 0.1107. +[ Thu Sep 8 05:50:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:50:24 2022 ] Eval epoch: 63 +[ Thu Sep 8 05:57:07 2022 ] Epoch 63 Curr Acc: (29466/50919)57.87% +[ Thu Sep 8 05:57:07 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 05:57:07 2022 ] Training epoch: 64 +[ Thu Sep 8 05:57:07 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:03:43 2022 ] Mean training loss: 0.1082. 
+[ Thu Sep 8 06:03:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:03:43 2022 ] Eval epoch: 64 +[ Thu Sep 8 06:10:26 2022 ] Epoch 64 Curr Acc: (28773/50919)56.51% +[ Thu Sep 8 06:10:26 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 06:10:26 2022 ] Training epoch: 65 +[ Thu Sep 8 06:10:26 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:17:01 2022 ] Mean training loss: 0.1056. +[ Thu Sep 8 06:17:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:17:02 2022 ] Eval epoch: 65 +[ Thu Sep 8 06:23:44 2022 ] Epoch 65 Curr Acc: (28107/50919)55.20% +[ Thu Sep 8 06:23:44 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 06:23:44 2022 ] Training epoch: 66 +[ Thu Sep 8 06:23:44 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:30:19 2022 ] Mean training loss: 0.1126. +[ Thu Sep 8 06:30:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:30:19 2022 ] Eval epoch: 66 +[ Thu Sep 8 06:37:02 2022 ] Epoch 66 Curr Acc: (28915/50919)56.79% +[ Thu Sep 8 06:37:02 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 06:37:02 2022 ] Training epoch: 67 +[ Thu Sep 8 06:37:02 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:43:37 2022 ] Mean training loss: 0.1096. +[ Thu Sep 8 06:43:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:43:37 2022 ] Eval epoch: 67 +[ Thu Sep 8 06:50:20 2022 ] Epoch 67 Curr Acc: (27970/50919)54.93% +[ Thu Sep 8 06:50:20 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 06:50:20 2022 ] Training epoch: 68 +[ Thu Sep 8 06:50:20 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:56:56 2022 ] Mean training loss: 0.1034. +[ Thu Sep 8 06:56:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:56:56 2022 ] Eval epoch: 68 +[ Thu Sep 8 07:03:39 2022 ] Epoch 68 Curr Acc: (28812/50919)56.58% +[ Thu Sep 8 07:03:39 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 07:03:39 2022 ] Training epoch: 69 +[ Thu Sep 8 07:03:39 2022 ] Learning rate: 0.015 +[ Thu Sep 8 07:10:14 2022 ] Mean training loss: 0.1027. 
+[ Thu Sep 8 07:10:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:10:14 2022 ] Eval epoch: 69 +[ Thu Sep 8 07:16:56 2022 ] Epoch 69 Curr Acc: (28696/50919)56.36% +[ Thu Sep 8 07:16:56 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 07:16:56 2022 ] Training epoch: 70 +[ Thu Sep 8 07:16:56 2022 ] Learning rate: 0.015 +[ Thu Sep 8 07:23:31 2022 ] Mean training loss: 0.1057. +[ Thu Sep 8 07:23:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:23:31 2022 ] Eval epoch: 70 +[ Thu Sep 8 07:30:14 2022 ] Epoch 70 Curr Acc: (27995/50919)54.98% +[ Thu Sep 8 07:30:14 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 07:30:14 2022 ] Training epoch: 71 +[ Thu Sep 8 07:30:14 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:36:49 2022 ] Mean training loss: 0.0602. +[ Thu Sep 8 07:36:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:36:49 2022 ] Eval epoch: 71 +[ Thu Sep 8 07:43:32 2022 ] Epoch 71 Curr Acc: (29615/50919)58.16% +[ Thu Sep 8 07:43:32 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 07:43:32 2022 ] Training epoch: 72 +[ Thu Sep 8 07:43:32 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:50:08 2022 ] Mean training loss: 0.0393. +[ Thu Sep 8 07:50:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:50:08 2022 ] Eval epoch: 72 +[ Thu Sep 8 07:56:50 2022 ] Epoch 72 Curr Acc: (29373/50919)57.69% +[ Thu Sep 8 07:56:50 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 07:56:50 2022 ] Training epoch: 73 +[ Thu Sep 8 07:56:50 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:03:26 2022 ] Mean training loss: 0.0340. 
+[ Thu Sep 8 08:03:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:03:26 2022 ] Eval epoch: 73 +[ Thu Sep 8 08:10:08 2022 ] Epoch 73 Curr Acc: (29514/50919)57.96% +[ Thu Sep 8 08:10:08 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 08:10:08 2022 ] Training epoch: 74 +[ Thu Sep 8 08:10:08 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:16:44 2022 ] Mean training loss: 0.0307. +[ Thu Sep 8 08:16:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:16:44 2022 ] Eval epoch: 74 +[ Thu Sep 8 08:23:26 2022 ] Epoch 74 Curr Acc: (30247/50919)59.40% +[ Thu Sep 8 08:23:26 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 08:23:26 2022 ] Training epoch: 75 +[ Thu Sep 8 08:23:26 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:30:01 2022 ] Mean training loss: 0.0268. +[ Thu Sep 8 08:30:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:30:01 2022 ] Eval epoch: 75 +[ Thu Sep 8 08:36:44 2022 ] Epoch 75 Curr Acc: (29983/50919)58.88% +[ Thu Sep 8 08:36:44 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 08:36:44 2022 ] Training epoch: 76 +[ Thu Sep 8 08:36:44 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:43:19 2022 ] Mean training loss: 0.0262. +[ Thu Sep 8 08:43:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:43:19 2022 ] Eval epoch: 76 +[ Thu Sep 8 08:50:02 2022 ] Epoch 76 Curr Acc: (29562/50919)58.06% +[ Thu Sep 8 08:50:02 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 08:50:02 2022 ] Training epoch: 77 +[ Thu Sep 8 08:50:02 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:56:37 2022 ] Mean training loss: 0.0256. 
+[ Thu Sep 8 08:56:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:56:37 2022 ] Eval epoch: 77 +[ Thu Sep 8 09:03:20 2022 ] Epoch 77 Curr Acc: (28726/50919)56.42% +[ Thu Sep 8 09:03:20 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 09:03:20 2022 ] Training epoch: 78 +[ Thu Sep 8 09:03:20 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:09:55 2022 ] Mean training loss: 0.0236. +[ Thu Sep 8 09:09:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:09:55 2022 ] Eval epoch: 78 +[ Thu Sep 8 09:16:37 2022 ] Epoch 78 Curr Acc: (29965/50919)58.85% +[ Thu Sep 8 09:16:37 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 09:16:37 2022 ] Training epoch: 79 +[ Thu Sep 8 09:16:37 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:23:12 2022 ] Mean training loss: 0.0202. +[ Thu Sep 8 09:23:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:23:12 2022 ] Eval epoch: 79 +[ Thu Sep 8 09:29:55 2022 ] Epoch 79 Curr Acc: (29629/50919)58.19% +[ Thu Sep 8 09:29:55 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 09:29:55 2022 ] Training epoch: 80 +[ Thu Sep 8 09:29:55 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:36:30 2022 ] Mean training loss: 0.0205. +[ Thu Sep 8 09:36:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:36:30 2022 ] Eval epoch: 80 +[ Thu Sep 8 09:43:12 2022 ] Epoch 80 Curr Acc: (30249/50919)59.41% +[ Thu Sep 8 09:43:12 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 09:43:12 2022 ] Training epoch: 81 +[ Thu Sep 8 09:43:12 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:49:47 2022 ] Mean training loss: 0.0199. 
+[ Thu Sep 8 09:49:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:49:47 2022 ] Eval epoch: 81 +[ Thu Sep 8 09:56:30 2022 ] Epoch 81 Curr Acc: (30134/50919)59.18% +[ Thu Sep 8 09:56:30 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 09:56:30 2022 ] Training epoch: 82 +[ Thu Sep 8 09:56:30 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:03:05 2022 ] Mean training loss: 0.0194. +[ Thu Sep 8 10:03:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:03:05 2022 ] Eval epoch: 82 +[ Thu Sep 8 10:09:48 2022 ] Epoch 82 Curr Acc: (30251/50919)59.41% +[ Thu Sep 8 10:09:48 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 10:09:48 2022 ] Training epoch: 83 +[ Thu Sep 8 10:09:48 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:16:24 2022 ] Mean training loss: 0.0175. +[ Thu Sep 8 10:16:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:16:24 2022 ] Eval epoch: 83 +[ Thu Sep 8 10:23:07 2022 ] Epoch 83 Curr Acc: (29964/50919)58.85% +[ Thu Sep 8 10:23:07 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 10:23:07 2022 ] Training epoch: 84 +[ Thu Sep 8 10:23:07 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:29:43 2022 ] Mean training loss: 0.0183. +[ Thu Sep 8 10:29:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:29:43 2022 ] Eval epoch: 84 +[ Thu Sep 8 10:36:26 2022 ] Epoch 84 Curr Acc: (29913/50919)58.75% +[ Thu Sep 8 10:36:26 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 10:36:26 2022 ] Training epoch: 85 +[ Thu Sep 8 10:36:26 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:43:01 2022 ] Mean training loss: 0.0177. 
+[ Thu Sep 8 10:43:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:43:02 2022 ] Eval epoch: 85 +[ Thu Sep 8 10:49:45 2022 ] Epoch 85 Curr Acc: (29971/50919)58.86% +[ Thu Sep 8 10:49:45 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 10:49:45 2022 ] Training epoch: 86 +[ Thu Sep 8 10:49:45 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:56:20 2022 ] Mean training loss: 0.0173. +[ Thu Sep 8 10:56:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:56:20 2022 ] Eval epoch: 86 +[ Thu Sep 8 11:03:03 2022 ] Epoch 86 Curr Acc: (29880/50919)58.68% +[ Thu Sep 8 11:03:03 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 11:03:03 2022 ] Training epoch: 87 +[ Thu Sep 8 11:03:03 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:09:39 2022 ] Mean training loss: 0.0179. +[ Thu Sep 8 11:09:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:09:39 2022 ] Eval epoch: 87 +[ Thu Sep 8 11:16:23 2022 ] Epoch 87 Curr Acc: (30201/50919)59.31% +[ Thu Sep 8 11:16:23 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 11:16:23 2022 ] Training epoch: 88 +[ Thu Sep 8 11:16:23 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:22:59 2022 ] Mean training loss: 0.0164. +[ Thu Sep 8 11:22:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:22:59 2022 ] Eval epoch: 88 +[ Thu Sep 8 11:29:42 2022 ] Epoch 88 Curr Acc: (30104/50919)59.12% +[ Thu Sep 8 11:29:42 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 11:29:42 2022 ] Training epoch: 89 +[ Thu Sep 8 11:29:42 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:36:17 2022 ] Mean training loss: 0.0170. 
+[ Thu Sep 8 11:36:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:36:17 2022 ] Eval epoch: 89 +[ Thu Sep 8 11:43:01 2022 ] Epoch 89 Curr Acc: (30149/50919)59.21% +[ Thu Sep 8 11:43:01 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 11:43:01 2022 ] Training epoch: 90 +[ Thu Sep 8 11:43:01 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:49:36 2022 ] Mean training loss: 0.0174. +[ Thu Sep 8 11:49:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:49:36 2022 ] Eval epoch: 90 +[ Thu Sep 8 11:56:19 2022 ] Epoch 90 Curr Acc: (29425/50919)57.79% +[ Thu Sep 8 11:56:19 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 11:56:20 2022 ] Training epoch: 91 +[ Thu Sep 8 11:56:20 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:02:55 2022 ] Mean training loss: 0.0153. +[ Thu Sep 8 12:02:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:02:55 2022 ] Eval epoch: 91 +[ Thu Sep 8 12:09:38 2022 ] Epoch 91 Curr Acc: (30085/50919)59.08% +[ Thu Sep 8 12:09:38 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 12:09:38 2022 ] Training epoch: 92 +[ Thu Sep 8 12:09:38 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:16:14 2022 ] Mean training loss: 0.0160. +[ Thu Sep 8 12:16:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:16:14 2022 ] Eval epoch: 92 +[ Thu Sep 8 12:22:57 2022 ] Epoch 92 Curr Acc: (29728/50919)58.38% +[ Thu Sep 8 12:22:57 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 12:22:57 2022 ] Training epoch: 93 +[ Thu Sep 8 12:22:57 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:29:32 2022 ] Mean training loss: 0.0155. 
+[ Thu Sep 8 12:29:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:29:32 2022 ] Eval epoch: 93 +[ Thu Sep 8 12:36:15 2022 ] Epoch 93 Curr Acc: (29908/50919)58.74% +[ Thu Sep 8 12:36:15 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 12:36:15 2022 ] Training epoch: 94 +[ Thu Sep 8 12:36:15 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:42:50 2022 ] Mean training loss: 0.0155. +[ Thu Sep 8 12:42:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:42:50 2022 ] Eval epoch: 94 +[ Thu Sep 8 12:49:33 2022 ] Epoch 94 Curr Acc: (30040/50919)59.00% +[ Thu Sep 8 12:49:33 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 12:49:33 2022 ] Training epoch: 95 +[ Thu Sep 8 12:49:33 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:56:08 2022 ] Mean training loss: 0.0153. +[ Thu Sep 8 12:56:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:56:08 2022 ] Eval epoch: 95 +[ Thu Sep 8 13:02:51 2022 ] Epoch 95 Curr Acc: (29757/50919)58.44% +[ Thu Sep 8 13:02:51 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 13:02:51 2022 ] Training epoch: 96 +[ Thu Sep 8 13:02:51 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:09:26 2022 ] Mean training loss: 0.0157. +[ Thu Sep 8 13:09:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:09:26 2022 ] Eval epoch: 96 +[ Thu Sep 8 13:16:09 2022 ] Epoch 96 Curr Acc: (29844/50919)58.61% +[ Thu Sep 8 13:16:09 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 13:16:09 2022 ] Training epoch: 97 +[ Thu Sep 8 13:16:09 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:22:44 2022 ] Mean training loss: 0.0158. 
+[ Thu Sep 8 13:22:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:22:44 2022 ] Eval epoch: 97 +[ Thu Sep 8 13:29:27 2022 ] Epoch 97 Curr Acc: (30111/50919)59.14% +[ Thu Sep 8 13:29:27 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 13:29:27 2022 ] Training epoch: 98 +[ Thu Sep 8 13:29:27 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:36:03 2022 ] Mean training loss: 0.0149. +[ Thu Sep 8 13:36:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:36:03 2022 ] Eval epoch: 98 +[ Thu Sep 8 13:42:46 2022 ] Epoch 98 Curr Acc: (30025/50919)58.97% +[ Thu Sep 8 13:42:46 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 13:42:46 2022 ] Training epoch: 99 +[ Thu Sep 8 13:42:46 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:49:22 2022 ] Mean training loss: 0.0149. +[ Thu Sep 8 13:49:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:49:22 2022 ] Eval epoch: 99 +[ Thu Sep 8 13:56:04 2022 ] Epoch 99 Curr Acc: (29153/50919)57.25% +[ Thu Sep 8 13:56:04 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 13:56:05 2022 ] Training epoch: 100 +[ Thu Sep 8 13:56:05 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:02:39 2022 ] Mean training loss: 0.0151. +[ Thu Sep 8 14:02:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:02:39 2022 ] Eval epoch: 100 +[ Thu Sep 8 14:09:21 2022 ] Epoch 100 Curr Acc: (30215/50919)59.34% +[ Thu Sep 8 14:09:21 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 14:09:21 2022 ] Training epoch: 101 +[ Thu Sep 8 14:09:21 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:15:56 2022 ] Mean training loss: 0.0152. 
+[ Thu Sep 8 14:15:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:15:56 2022 ] Eval epoch: 101 +[ Thu Sep 8 14:22:39 2022 ] Epoch 101 Curr Acc: (29818/50919)58.56% +[ Thu Sep 8 14:22:39 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 14:22:39 2022 ] Training epoch: 102 +[ Thu Sep 8 14:22:39 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:29:14 2022 ] Mean training loss: 0.0155. +[ Thu Sep 8 14:29:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:29:14 2022 ] Eval epoch: 102 +[ Thu Sep 8 14:35:56 2022 ] Epoch 102 Curr Acc: (29704/50919)58.34% +[ Thu Sep 8 14:35:56 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 14:35:56 2022 ] Training epoch: 103 +[ Thu Sep 8 14:35:56 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:42:31 2022 ] Mean training loss: 0.0153. +[ Thu Sep 8 14:42:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:42:31 2022 ] Eval epoch: 103 +[ Thu Sep 8 14:49:14 2022 ] Epoch 103 Curr Acc: (30147/50919)59.21% +[ Thu Sep 8 14:49:14 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 14:49:14 2022 ] Training epoch: 104 +[ Thu Sep 8 14:49:14 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:55:48 2022 ] Mean training loss: 0.0139. +[ Thu Sep 8 14:55:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:55:48 2022 ] Eval epoch: 104 +[ Thu Sep 8 15:02:31 2022 ] Epoch 104 Curr Acc: (30201/50919)59.31% +[ Thu Sep 8 15:02:31 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 15:02:31 2022 ] Training epoch: 105 +[ Thu Sep 8 15:02:31 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:09:05 2022 ] Mean training loss: 0.0146. 
+[ Thu Sep 8 15:09:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:09:05 2022 ] Eval epoch: 105 +[ Thu Sep 8 15:15:48 2022 ] Epoch 105 Curr Acc: (29992/50919)58.90% +[ Thu Sep 8 15:15:48 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 15:15:48 2022 ] Training epoch: 106 +[ Thu Sep 8 15:15:48 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:22:22 2022 ] Mean training loss: 0.0144. +[ Thu Sep 8 15:22:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:22:22 2022 ] Eval epoch: 106 +[ Thu Sep 8 15:29:04 2022 ] Epoch 106 Curr Acc: (30148/50919)59.21% +[ Thu Sep 8 15:29:04 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 15:29:04 2022 ] Training epoch: 107 +[ Thu Sep 8 15:29:04 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:35:39 2022 ] Mean training loss: 0.0143. +[ Thu Sep 8 15:35:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:35:39 2022 ] Eval epoch: 107 +[ Thu Sep 8 15:42:22 2022 ] Epoch 107 Curr Acc: (29915/50919)58.75% +[ Thu Sep 8 15:42:22 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 15:42:22 2022 ] Training epoch: 108 +[ Thu Sep 8 15:42:22 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:48:55 2022 ] Mean training loss: 0.0157. +[ Thu Sep 8 15:48:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:48:55 2022 ] Eval epoch: 108 +[ Thu Sep 8 15:55:37 2022 ] Epoch 108 Curr Acc: (30345/50919)59.59% +[ Thu Sep 8 15:55:37 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 15:55:37 2022 ] Training epoch: 109 +[ Thu Sep 8 15:55:37 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 16:02:12 2022 ] Mean training loss: 0.0140. 
+[ Thu Sep 8 16:02:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 16:02:12 2022 ] Eval epoch: 109 +[ Thu Sep 8 16:08:54 2022 ] Epoch 109 Curr Acc: (29940/50919)58.80% +[ Thu Sep 8 16:08:54 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 16:08:54 2022 ] Training epoch: 110 +[ Thu Sep 8 16:08:54 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 16:15:29 2022 ] Mean training loss: 0.0155. +[ Thu Sep 8 16:15:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 16:15:29 2022 ] Eval epoch: 110 +[ Thu Sep 8 16:22:12 2022 ] Epoch 110 Curr Acc: (29913/50919)58.75% +[ Thu Sep 8 16:22:12 2022 ] Epoch 56 Best Acc 59.90% +[ Thu Sep 8 16:22:12 2022 ] epoch: 56, best accuracy: 0.5989905536243838 +[ Thu Sep 8 16:22:12 2022 ] Experiment: ./work_dir/ntu120/xsub_b +[ Thu Sep 8 16:22:12 2022 ] # generator parameters: 2.922995 M. +[ Thu Sep 8 16:22:12 2022 ] Load weights from ./runs/ntu120/xsub_b/runs-55-109200.pt. +[ Thu Sep 8 16:22:12 2022 ] Eval epoch: 1 +[ Thu Sep 8 16:28:54 2022 ] Epoch 1 Curr Acc: (30500/50919)59.90% +[ Thu Sep 8 16:28:54 2022 ] Epoch 56 Best Acc 59.90% diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions import 
Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/config.yaml b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b0a109fff2d0825c5cdbdd608e0173996bb8b35d --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu120/xsub_bm.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + 
- 448 + - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 120 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu120/xsub_bm/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone_motion.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone_motion.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu120/xsub_bm diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..dbaa58940644af35a329133b5fc76b06acc0dbd0 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5ed3be9094c4a76dd87e77bea5421bd2466a9547d072311f8ef389544269d6c1 +size 29946137 diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/log.txt b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..d3ff0b8965a362916d21ab07edb31e3cd7456b14 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/log.txt @@ -0,0 
+1,631 @@ +[ Wed Sep 7 21:34:40 2022 ] # generator parameters: 2.922995 M. +[ Wed Sep 7 21:34:40 2022 ] Parameters: +{'work_dir': './work_dir/ntu120/xsub_bm', 'model_saved_name': './runs/ntu120/xsub_bm/runs', 'config': 'config/ntu120/xsub_bm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Wed Sep 7 21:34:40 2022 ] Training epoch: 1 +[ Wed Sep 7 21:34:40 
2022 ] Learning rate: 0.015 +[ Wed Sep 7 21:41:19 2022 ] Mean training loss: 3.5177. +[ Wed Sep 7 21:41:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:41:19 2022 ] Training epoch: 2 +[ Wed Sep 7 21:41:19 2022 ] Learning rate: 0.03 +[ Wed Sep 7 21:47:58 2022 ] Mean training loss: 2.4868. +[ Wed Sep 7 21:47:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:47:58 2022 ] Training epoch: 3 +[ Wed Sep 7 21:47:58 2022 ] Learning rate: 0.045 +[ Wed Sep 7 21:54:36 2022 ] Mean training loss: 1.9641. +[ Wed Sep 7 21:54:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:54:36 2022 ] Training epoch: 4 +[ Wed Sep 7 21:54:36 2022 ] Learning rate: 0.06 +[ Wed Sep 7 22:01:14 2022 ] Mean training loss: 1.6675. +[ Wed Sep 7 22:01:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:01:14 2022 ] Training epoch: 5 +[ Wed Sep 7 22:01:14 2022 ] Learning rate: 0.075 +[ Wed Sep 7 22:07:53 2022 ] Mean training loss: 1.5121. +[ Wed Sep 7 22:07:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:07:53 2022 ] Training epoch: 6 +[ Wed Sep 7 22:07:53 2022 ] Learning rate: 0.09 +[ Wed Sep 7 22:14:31 2022 ] Mean training loss: 1.4097. +[ Wed Sep 7 22:14:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:14:31 2022 ] Training epoch: 7 +[ Wed Sep 7 22:14:31 2022 ] Learning rate: 0.10500000000000001 +[ Wed Sep 7 22:21:09 2022 ] Mean training loss: 1.3619. +[ Wed Sep 7 22:21:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:21:09 2022 ] Training epoch: 8 +[ Wed Sep 7 22:21:09 2022 ] Learning rate: 0.12 +[ Wed Sep 7 22:27:46 2022 ] Mean training loss: 1.3424. +[ Wed Sep 7 22:27:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:27:46 2022 ] Training epoch: 9 +[ Wed Sep 7 22:27:46 2022 ] Learning rate: 0.13499999999999998 +[ Wed Sep 7 22:34:25 2022 ] Mean training loss: 1.3057. 
+[ Wed Sep 7 22:34:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:34:25 2022 ] Training epoch: 10 +[ Wed Sep 7 22:34:25 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:41:02 2022 ] Mean training loss: 1.3086. +[ Wed Sep 7 22:41:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:41:02 2022 ] Training epoch: 11 +[ Wed Sep 7 22:41:02 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:47:42 2022 ] Mean training loss: 1.2515. +[ Wed Sep 7 22:47:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:47:42 2022 ] Training epoch: 12 +[ Wed Sep 7 22:47:42 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:54:20 2022 ] Mean training loss: 1.2029. +[ Wed Sep 7 22:54:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:54:20 2022 ] Training epoch: 13 +[ Wed Sep 7 22:54:20 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:01:00 2022 ] Mean training loss: 1.1772. +[ Wed Sep 7 23:01:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:01:00 2022 ] Training epoch: 14 +[ Wed Sep 7 23:01:00 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:07:38 2022 ] Mean training loss: 1.1704. +[ Wed Sep 7 23:07:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:07:38 2022 ] Training epoch: 15 +[ Wed Sep 7 23:07:38 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:14:16 2022 ] Mean training loss: 1.1350. +[ Wed Sep 7 23:14:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:14:16 2022 ] Training epoch: 16 +[ Wed Sep 7 23:14:16 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:20:54 2022 ] Mean training loss: 1.1048. +[ Wed Sep 7 23:20:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:20:54 2022 ] Training epoch: 17 +[ Wed Sep 7 23:20:54 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:27:33 2022 ] Mean training loss: 1.0995. 
+[ Wed Sep 7 23:27:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:27:33 2022 ] Training epoch: 18 +[ Wed Sep 7 23:27:33 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:34:11 2022 ] Mean training loss: 1.0836. +[ Wed Sep 7 23:34:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:34:11 2022 ] Training epoch: 19 +[ Wed Sep 7 23:34:11 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:40:50 2022 ] Mean training loss: 1.0612. +[ Wed Sep 7 23:40:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:40:50 2022 ] Training epoch: 20 +[ Wed Sep 7 23:40:50 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:47:30 2022 ] Mean training loss: 1.0556. +[ Wed Sep 7 23:47:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:47:30 2022 ] Training epoch: 21 +[ Wed Sep 7 23:47:30 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:54:08 2022 ] Mean training loss: 1.0510. +[ Wed Sep 7 23:54:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:54:08 2022 ] Training epoch: 22 +[ Wed Sep 7 23:54:08 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:00:47 2022 ] Mean training loss: 1.0254. +[ Thu Sep 8 00:00:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:00:47 2022 ] Training epoch: 23 +[ Thu Sep 8 00:00:47 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:07:26 2022 ] Mean training loss: 1.0141. +[ Thu Sep 8 00:07:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:07:26 2022 ] Training epoch: 24 +[ Thu Sep 8 00:07:26 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:14:05 2022 ] Mean training loss: 1.0105. +[ Thu Sep 8 00:14:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:14:05 2022 ] Training epoch: 25 +[ Thu Sep 8 00:14:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:20:45 2022 ] Mean training loss: 0.9963. 
+[ Thu Sep 8 00:20:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:20:45 2022 ] Training epoch: 26 +[ Thu Sep 8 00:20:45 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:27:23 2022 ] Mean training loss: 1.0034. +[ Thu Sep 8 00:27:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:27:23 2022 ] Training epoch: 27 +[ Thu Sep 8 00:27:23 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:34:03 2022 ] Mean training loss: 0.9867. +[ Thu Sep 8 00:34:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:34:03 2022 ] Training epoch: 28 +[ Thu Sep 8 00:34:03 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:40:42 2022 ] Mean training loss: 1.0034. +[ Thu Sep 8 00:40:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:40:42 2022 ] Training epoch: 29 +[ Thu Sep 8 00:40:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:47:21 2022 ] Mean training loss: 0.9684. +[ Thu Sep 8 00:47:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:47:21 2022 ] Training epoch: 30 +[ Thu Sep 8 00:47:21 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:53:59 2022 ] Mean training loss: 0.9827. +[ Thu Sep 8 00:53:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:53:59 2022 ] Training epoch: 31 +[ Thu Sep 8 00:53:59 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:00:38 2022 ] Mean training loss: 0.9715. +[ Thu Sep 8 01:00:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:00:38 2022 ] Training epoch: 32 +[ Thu Sep 8 01:00:38 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:07:17 2022 ] Mean training loss: 0.9609. +[ Thu Sep 8 01:07:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:07:17 2022 ] Training epoch: 33 +[ Thu Sep 8 01:07:17 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:13:56 2022 ] Mean training loss: 0.9581. 
+[ Thu Sep 8 01:13:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:13:56 2022 ] Training epoch: 34 +[ Thu Sep 8 01:13:56 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:20:35 2022 ] Mean training loss: 0.9673. +[ Thu Sep 8 01:20:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:20:35 2022 ] Training epoch: 35 +[ Thu Sep 8 01:20:35 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:27:17 2022 ] Mean training loss: 0.9638. +[ Thu Sep 8 01:27:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:27:17 2022 ] Training epoch: 36 +[ Thu Sep 8 01:27:17 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:33:58 2022 ] Mean training loss: 0.9563. +[ Thu Sep 8 01:33:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:33:58 2022 ] Training epoch: 37 +[ Thu Sep 8 01:33:58 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:40:39 2022 ] Mean training loss: 0.9399. +[ Thu Sep 8 01:40:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:40:39 2022 ] Training epoch: 38 +[ Thu Sep 8 01:40:39 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:47:18 2022 ] Mean training loss: 0.9570. +[ Thu Sep 8 01:47:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:47:18 2022 ] Training epoch: 39 +[ Thu Sep 8 01:47:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:53:59 2022 ] Mean training loss: 0.9351. +[ Thu Sep 8 01:53:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:53:59 2022 ] Training epoch: 40 +[ Thu Sep 8 01:53:59 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:00:40 2022 ] Mean training loss: 0.9485. +[ Thu Sep 8 02:00:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:00:40 2022 ] Training epoch: 41 +[ Thu Sep 8 02:00:40 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:07:22 2022 ] Mean training loss: 0.9391. 
+[ Thu Sep 8 02:07:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:07:22 2022 ] Training epoch: 42 +[ Thu Sep 8 02:07:22 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:14:02 2022 ] Mean training loss: 0.9480. +[ Thu Sep 8 02:14:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:14:02 2022 ] Training epoch: 43 +[ Thu Sep 8 02:14:02 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:20:42 2022 ] Mean training loss: 0.9306. +[ Thu Sep 8 02:20:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:20:42 2022 ] Training epoch: 44 +[ Thu Sep 8 02:20:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:27:22 2022 ] Mean training loss: 0.9404. +[ Thu Sep 8 02:27:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:27:22 2022 ] Training epoch: 45 +[ Thu Sep 8 02:27:22 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:34:02 2022 ] Mean training loss: 0.9390. +[ Thu Sep 8 02:34:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:34:02 2022 ] Training epoch: 46 +[ Thu Sep 8 02:34:02 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:40:42 2022 ] Mean training loss: 0.9434. +[ Thu Sep 8 02:40:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:40:42 2022 ] Training epoch: 47 +[ Thu Sep 8 02:40:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:47:24 2022 ] Mean training loss: 0.9347. +[ Thu Sep 8 02:47:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:47:24 2022 ] Training epoch: 48 +[ Thu Sep 8 02:47:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:54:05 2022 ] Mean training loss: 0.9452. +[ Thu Sep 8 02:54:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:54:05 2022 ] Training epoch: 49 +[ Thu Sep 8 02:54:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 03:00:46 2022 ] Mean training loss: 0.9305. 
+[ Thu Sep 8 03:00:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:00:46 2022 ] Training epoch: 50 +[ Thu Sep 8 03:00:46 2022 ] Learning rate: 0.15 +[ Thu Sep 8 03:07:27 2022 ] Mean training loss: 0.9310. +[ Thu Sep 8 03:07:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:07:27 2022 ] Training epoch: 51 +[ Thu Sep 8 03:07:27 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:14:07 2022 ] Mean training loss: 0.4632. +[ Thu Sep 8 03:14:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:14:07 2022 ] Eval epoch: 51 +[ Thu Sep 8 03:21:03 2022 ] Epoch 51 Curr Acc: (26802/50919)52.64% +[ Thu Sep 8 03:21:03 2022 ] Epoch 51 Best Acc 52.64% +[ Thu Sep 8 03:21:03 2022 ] Training epoch: 52 +[ Thu Sep 8 03:21:03 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:27:41 2022 ] Mean training loss: 0.3347. +[ Thu Sep 8 03:27:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:27:41 2022 ] Eval epoch: 52 +[ Thu Sep 8 03:34:29 2022 ] Epoch 52 Curr Acc: (28067/50919)55.12% +[ Thu Sep 8 03:34:29 2022 ] Epoch 52 Best Acc 55.12% +[ Thu Sep 8 03:34:29 2022 ] Training epoch: 53 +[ Thu Sep 8 03:34:29 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:41:07 2022 ] Mean training loss: 0.2764. +[ Thu Sep 8 03:41:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:41:07 2022 ] Eval epoch: 53 +[ Thu Sep 8 03:47:55 2022 ] Epoch 53 Curr Acc: (28580/50919)56.13% +[ Thu Sep 8 03:47:55 2022 ] Epoch 53 Best Acc 56.13% +[ Thu Sep 8 03:47:55 2022 ] Training epoch: 54 +[ Thu Sep 8 03:47:55 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:54:33 2022 ] Mean training loss: 0.2362. 
+[ Thu Sep 8 03:54:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:54:33 2022 ] Eval epoch: 54 +[ Thu Sep 8 04:01:20 2022 ] Epoch 54 Curr Acc: (28779/50919)56.52% +[ Thu Sep 8 04:01:20 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 04:01:20 2022 ] Training epoch: 55 +[ Thu Sep 8 04:01:20 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:07:58 2022 ] Mean training loss: 0.1985. +[ Thu Sep 8 04:07:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:07:58 2022 ] Eval epoch: 55 +[ Thu Sep 8 04:14:46 2022 ] Epoch 55 Curr Acc: (28006/50919)55.00% +[ Thu Sep 8 04:14:46 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 04:14:46 2022 ] Training epoch: 56 +[ Thu Sep 8 04:14:46 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:21:23 2022 ] Mean training loss: 0.1702. +[ Thu Sep 8 04:21:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:21:23 2022 ] Eval epoch: 56 +[ Thu Sep 8 04:28:11 2022 ] Epoch 56 Curr Acc: (28411/50919)55.80% +[ Thu Sep 8 04:28:11 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 04:28:11 2022 ] Training epoch: 57 +[ Thu Sep 8 04:28:11 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:34:50 2022 ] Mean training loss: 0.1436. +[ Thu Sep 8 04:34:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:34:51 2022 ] Eval epoch: 57 +[ Thu Sep 8 04:41:38 2022 ] Epoch 57 Curr Acc: (28130/50919)55.24% +[ Thu Sep 8 04:41:38 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 04:41:38 2022 ] Training epoch: 58 +[ Thu Sep 8 04:41:38 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:48:17 2022 ] Mean training loss: 0.1257. +[ Thu Sep 8 04:48:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:48:17 2022 ] Eval epoch: 58 +[ Thu Sep 8 04:55:04 2022 ] Epoch 58 Curr Acc: (27163/50919)53.35% +[ Thu Sep 8 04:55:04 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 04:55:04 2022 ] Training epoch: 59 +[ Thu Sep 8 04:55:04 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:01:44 2022 ] Mean training loss: 0.1122. 
+[ Thu Sep 8 05:01:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:01:44 2022 ] Eval epoch: 59 +[ Thu Sep 8 05:08:32 2022 ] Epoch 59 Curr Acc: (28446/50919)55.87% +[ Thu Sep 8 05:08:32 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 05:08:32 2022 ] Training epoch: 60 +[ Thu Sep 8 05:08:32 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:15:11 2022 ] Mean training loss: 0.1003. +[ Thu Sep 8 05:15:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:15:11 2022 ] Eval epoch: 60 +[ Thu Sep 8 05:21:58 2022 ] Epoch 60 Curr Acc: (27848/50919)54.69% +[ Thu Sep 8 05:21:58 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 05:21:58 2022 ] Training epoch: 61 +[ Thu Sep 8 05:21:58 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:28:37 2022 ] Mean training loss: 0.0900. +[ Thu Sep 8 05:28:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:28:37 2022 ] Eval epoch: 61 +[ Thu Sep 8 05:35:24 2022 ] Epoch 61 Curr Acc: (28017/50919)55.02% +[ Thu Sep 8 05:35:24 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 05:35:24 2022 ] Training epoch: 62 +[ Thu Sep 8 05:35:24 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:42:03 2022 ] Mean training loss: 0.0846. +[ Thu Sep 8 05:42:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:42:03 2022 ] Eval epoch: 62 +[ Thu Sep 8 05:48:51 2022 ] Epoch 62 Curr Acc: (27292/50919)53.60% +[ Thu Sep 8 05:48:51 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 05:48:51 2022 ] Training epoch: 63 +[ Thu Sep 8 05:48:51 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:55:29 2022 ] Mean training loss: 0.0817. +[ Thu Sep 8 05:55:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:55:29 2022 ] Eval epoch: 63 +[ Thu Sep 8 06:02:16 2022 ] Epoch 63 Curr Acc: (27251/50919)53.52% +[ Thu Sep 8 06:02:16 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 06:02:16 2022 ] Training epoch: 64 +[ Thu Sep 8 06:02:16 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:08:55 2022 ] Mean training loss: 0.0857. 
+[ Thu Sep 8 06:08:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:08:55 2022 ] Eval epoch: 64 +[ Thu Sep 8 06:15:42 2022 ] Epoch 64 Curr Acc: (27321/50919)53.66% +[ Thu Sep 8 06:15:42 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 06:15:42 2022 ] Training epoch: 65 +[ Thu Sep 8 06:15:42 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:22:21 2022 ] Mean training loss: 0.0840. +[ Thu Sep 8 06:22:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:22:21 2022 ] Eval epoch: 65 +[ Thu Sep 8 06:29:08 2022 ] Epoch 65 Curr Acc: (27135/50919)53.29% +[ Thu Sep 8 06:29:08 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 06:29:08 2022 ] Training epoch: 66 +[ Thu Sep 8 06:29:08 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:35:46 2022 ] Mean training loss: 0.0902. +[ Thu Sep 8 06:35:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:35:46 2022 ] Eval epoch: 66 +[ Thu Sep 8 06:42:33 2022 ] Epoch 66 Curr Acc: (27501/50919)54.01% +[ Thu Sep 8 06:42:33 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 06:42:33 2022 ] Training epoch: 67 +[ Thu Sep 8 06:42:33 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:49:10 2022 ] Mean training loss: 0.0792. +[ Thu Sep 8 06:49:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:49:10 2022 ] Eval epoch: 67 +[ Thu Sep 8 06:55:57 2022 ] Epoch 67 Curr Acc: (26786/50919)52.61% +[ Thu Sep 8 06:55:57 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 06:55:57 2022 ] Training epoch: 68 +[ Thu Sep 8 06:55:57 2022 ] Learning rate: 0.015 +[ Thu Sep 8 07:02:36 2022 ] Mean training loss: 0.0829. +[ Thu Sep 8 07:02:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:02:36 2022 ] Eval epoch: 68 +[ Thu Sep 8 07:09:23 2022 ] Epoch 68 Curr Acc: (27327/50919)53.67% +[ Thu Sep 8 07:09:23 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 07:09:23 2022 ] Training epoch: 69 +[ Thu Sep 8 07:09:23 2022 ] Learning rate: 0.015 +[ Thu Sep 8 07:16:02 2022 ] Mean training loss: 0.0866. 
+[ Thu Sep 8 07:16:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:16:02 2022 ] Eval epoch: 69 +[ Thu Sep 8 07:22:49 2022 ] Epoch 69 Curr Acc: (27766/50919)54.53% +[ Thu Sep 8 07:22:49 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 07:22:49 2022 ] Training epoch: 70 +[ Thu Sep 8 07:22:49 2022 ] Learning rate: 0.015 +[ Thu Sep 8 07:29:28 2022 ] Mean training loss: 0.0811. +[ Thu Sep 8 07:29:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:29:28 2022 ] Eval epoch: 70 +[ Thu Sep 8 07:36:14 2022 ] Epoch 70 Curr Acc: (26193/50919)51.44% +[ Thu Sep 8 07:36:14 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 07:36:14 2022 ] Training epoch: 71 +[ Thu Sep 8 07:36:14 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:42:53 2022 ] Mean training loss: 0.0408. +[ Thu Sep 8 07:42:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:42:53 2022 ] Eval epoch: 71 +[ Thu Sep 8 07:49:40 2022 ] Epoch 71 Curr Acc: (28004/50919)55.00% +[ Thu Sep 8 07:49:40 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 07:49:40 2022 ] Training epoch: 72 +[ Thu Sep 8 07:49:40 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:56:18 2022 ] Mean training loss: 0.0294. +[ Thu Sep 8 07:56:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:56:18 2022 ] Eval epoch: 72 +[ Thu Sep 8 08:03:04 2022 ] Epoch 72 Curr Acc: (28081/50919)55.15% +[ Thu Sep 8 08:03:04 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 08:03:04 2022 ] Training epoch: 73 +[ Thu Sep 8 08:03:04 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:09:43 2022 ] Mean training loss: 0.0240. 
+[ Thu Sep 8 08:09:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:09:43 2022 ] Eval epoch: 73 +[ Thu Sep 8 08:16:29 2022 ] Epoch 73 Curr Acc: (28268/50919)55.52% +[ Thu Sep 8 08:16:29 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 08:16:29 2022 ] Training epoch: 74 +[ Thu Sep 8 08:16:29 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:23:07 2022 ] Mean training loss: 0.0197. +[ Thu Sep 8 08:23:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:23:07 2022 ] Eval epoch: 74 +[ Thu Sep 8 08:29:54 2022 ] Epoch 74 Curr Acc: (27956/50919)54.90% +[ Thu Sep 8 08:29:54 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 08:29:54 2022 ] Training epoch: 75 +[ Thu Sep 8 08:29:54 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:36:32 2022 ] Mean training loss: 0.0185. +[ Thu Sep 8 08:36:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:36:32 2022 ] Eval epoch: 75 +[ Thu Sep 8 08:43:19 2022 ] Epoch 75 Curr Acc: (28479/50919)55.93% +[ Thu Sep 8 08:43:19 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 08:43:19 2022 ] Training epoch: 76 +[ Thu Sep 8 08:43:19 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:49:58 2022 ] Mean training loss: 0.0187. +[ Thu Sep 8 08:49:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:49:58 2022 ] Eval epoch: 76 +[ Thu Sep 8 08:56:45 2022 ] Epoch 76 Curr Acc: (27736/50919)54.47% +[ Thu Sep 8 08:56:45 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 08:56:45 2022 ] Training epoch: 77 +[ Thu Sep 8 08:56:45 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:03:23 2022 ] Mean training loss: 0.0174. 
+[ Thu Sep 8 09:03:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:03:23 2022 ] Eval epoch: 77 +[ Thu Sep 8 09:10:10 2022 ] Epoch 77 Curr Acc: (28292/50919)55.56% +[ Thu Sep 8 09:10:10 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 09:10:10 2022 ] Training epoch: 78 +[ Thu Sep 8 09:10:10 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:16:49 2022 ] Mean training loss: 0.0167. +[ Thu Sep 8 09:16:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:16:50 2022 ] Eval epoch: 78 +[ Thu Sep 8 09:23:37 2022 ] Epoch 78 Curr Acc: (28095/50919)55.18% +[ Thu Sep 8 09:23:37 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 09:23:37 2022 ] Training epoch: 79 +[ Thu Sep 8 09:23:37 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:30:16 2022 ] Mean training loss: 0.0166. +[ Thu Sep 8 09:30:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:30:16 2022 ] Eval epoch: 79 +[ Thu Sep 8 09:37:03 2022 ] Epoch 79 Curr Acc: (27909/50919)54.81% +[ Thu Sep 8 09:37:03 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 09:37:03 2022 ] Training epoch: 80 +[ Thu Sep 8 09:37:03 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:43:41 2022 ] Mean training loss: 0.0145. +[ Thu Sep 8 09:43:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:43:41 2022 ] Eval epoch: 80 +[ Thu Sep 8 09:50:28 2022 ] Epoch 80 Curr Acc: (27822/50919)54.64% +[ Thu Sep 8 09:50:28 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 09:50:28 2022 ] Training epoch: 81 +[ Thu Sep 8 09:50:28 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:57:07 2022 ] Mean training loss: 0.0152. 
+[ Thu Sep 8 09:57:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:57:07 2022 ] Eval epoch: 81 +[ Thu Sep 8 10:03:54 2022 ] Epoch 81 Curr Acc: (28262/50919)55.50% +[ Thu Sep 8 10:03:54 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 10:03:54 2022 ] Training epoch: 82 +[ Thu Sep 8 10:03:54 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:10:33 2022 ] Mean training loss: 0.0138. +[ Thu Sep 8 10:10:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:10:33 2022 ] Eval epoch: 82 +[ Thu Sep 8 10:17:21 2022 ] Epoch 82 Curr Acc: (28551/50919)56.07% +[ Thu Sep 8 10:17:21 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 10:17:21 2022 ] Training epoch: 83 +[ Thu Sep 8 10:17:21 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:24:00 2022 ] Mean training loss: 0.0138. +[ Thu Sep 8 10:24:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:24:00 2022 ] Eval epoch: 83 +[ Thu Sep 8 10:30:48 2022 ] Epoch 83 Curr Acc: (28096/50919)55.18% +[ Thu Sep 8 10:30:48 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 10:30:48 2022 ] Training epoch: 84 +[ Thu Sep 8 10:30:48 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:37:27 2022 ] Mean training loss: 0.0137. +[ Thu Sep 8 10:37:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:37:27 2022 ] Eval epoch: 84 +[ Thu Sep 8 10:44:14 2022 ] Epoch 84 Curr Acc: (27911/50919)54.81% +[ Thu Sep 8 10:44:14 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 10:44:14 2022 ] Training epoch: 85 +[ Thu Sep 8 10:44:14 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:50:53 2022 ] Mean training loss: 0.0149. 
+[ Thu Sep 8 10:50:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:50:53 2022 ] Eval epoch: 85 +[ Thu Sep 8 10:57:41 2022 ] Epoch 85 Curr Acc: (28660/50919)56.29% +[ Thu Sep 8 10:57:41 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 10:57:41 2022 ] Training epoch: 86 +[ Thu Sep 8 10:57:41 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:04:18 2022 ] Mean training loss: 0.0134. +[ Thu Sep 8 11:04:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:04:18 2022 ] Eval epoch: 86 +[ Thu Sep 8 11:11:05 2022 ] Epoch 86 Curr Acc: (28392/50919)55.76% +[ Thu Sep 8 11:11:05 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 11:11:05 2022 ] Training epoch: 87 +[ Thu Sep 8 11:11:05 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:17:44 2022 ] Mean training loss: 0.0134. +[ Thu Sep 8 11:17:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:17:44 2022 ] Eval epoch: 87 +[ Thu Sep 8 11:24:32 2022 ] Epoch 87 Curr Acc: (28551/50919)56.07% +[ Thu Sep 8 11:24:32 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 11:24:32 2022 ] Training epoch: 88 +[ Thu Sep 8 11:24:32 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:31:12 2022 ] Mean training loss: 0.0114. +[ Thu Sep 8 11:31:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:31:12 2022 ] Eval epoch: 88 +[ Thu Sep 8 11:38:00 2022 ] Epoch 88 Curr Acc: (28432/50919)55.84% +[ Thu Sep 8 11:38:00 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 11:38:00 2022 ] Training epoch: 89 +[ Thu Sep 8 11:38:00 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:44:39 2022 ] Mean training loss: 0.0120. 
+[ Thu Sep 8 11:44:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:44:39 2022 ] Eval epoch: 89 +[ Thu Sep 8 11:51:27 2022 ] Epoch 89 Curr Acc: (28606/50919)56.18% +[ Thu Sep 8 11:51:27 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 11:51:27 2022 ] Training epoch: 90 +[ Thu Sep 8 11:51:27 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:58:07 2022 ] Mean training loss: 0.0120. +[ Thu Sep 8 11:58:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:58:07 2022 ] Eval epoch: 90 +[ Thu Sep 8 12:04:55 2022 ] Epoch 90 Curr Acc: (28496/50919)55.96% +[ Thu Sep 8 12:04:55 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 12:04:55 2022 ] Training epoch: 91 +[ Thu Sep 8 12:04:55 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:11:36 2022 ] Mean training loss: 0.0114. +[ Thu Sep 8 12:11:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:11:36 2022 ] Eval epoch: 91 +[ Thu Sep 8 12:18:24 2022 ] Epoch 91 Curr Acc: (28335/50919)55.65% +[ Thu Sep 8 12:18:24 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 12:18:24 2022 ] Training epoch: 92 +[ Thu Sep 8 12:18:24 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:25:03 2022 ] Mean training loss: 0.0117. +[ Thu Sep 8 12:25:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:25:03 2022 ] Eval epoch: 92 +[ Thu Sep 8 12:31:52 2022 ] Epoch 92 Curr Acc: (28666/50919)56.30% +[ Thu Sep 8 12:31:52 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 12:31:52 2022 ] Training epoch: 93 +[ Thu Sep 8 12:31:52 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:38:30 2022 ] Mean training loss: 0.0121. 
+[ Thu Sep 8 12:38:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:38:31 2022 ] Eval epoch: 93 +[ Thu Sep 8 12:45:19 2022 ] Epoch 93 Curr Acc: (28146/50919)55.28% +[ Thu Sep 8 12:45:19 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 12:45:19 2022 ] Training epoch: 94 +[ Thu Sep 8 12:45:19 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:51:58 2022 ] Mean training loss: 0.0111. +[ Thu Sep 8 12:51:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:51:58 2022 ] Eval epoch: 94 +[ Thu Sep 8 12:58:47 2022 ] Epoch 94 Curr Acc: (28741/50919)56.44% +[ Thu Sep 8 12:58:47 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 12:58:47 2022 ] Training epoch: 95 +[ Thu Sep 8 12:58:47 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:05:26 2022 ] Mean training loss: 0.0116. +[ Thu Sep 8 13:05:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:05:26 2022 ] Eval epoch: 95 +[ Thu Sep 8 13:12:14 2022 ] Epoch 95 Curr Acc: (28011/50919)55.01% +[ Thu Sep 8 13:12:14 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 13:12:14 2022 ] Training epoch: 96 +[ Thu Sep 8 13:12:14 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:18:54 2022 ] Mean training loss: 0.0116. +[ Thu Sep 8 13:18:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:18:54 2022 ] Eval epoch: 96 +[ Thu Sep 8 13:25:42 2022 ] Epoch 96 Curr Acc: (27446/50919)53.90% +[ Thu Sep 8 13:25:42 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 13:25:42 2022 ] Training epoch: 97 +[ Thu Sep 8 13:25:42 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:32:23 2022 ] Mean training loss: 0.0113. 
+[ Thu Sep 8 13:32:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:32:23 2022 ] Eval epoch: 97 +[ Thu Sep 8 13:39:11 2022 ] Epoch 97 Curr Acc: (28261/50919)55.50% +[ Thu Sep 8 13:39:11 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 13:39:11 2022 ] Training epoch: 98 +[ Thu Sep 8 13:39:11 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:45:52 2022 ] Mean training loss: 0.0113. +[ Thu Sep 8 13:45:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:45:52 2022 ] Eval epoch: 98 +[ Thu Sep 8 13:52:39 2022 ] Epoch 98 Curr Acc: (28415/50919)55.80% +[ Thu Sep 8 13:52:39 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 13:52:39 2022 ] Training epoch: 99 +[ Thu Sep 8 13:52:39 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:59:17 2022 ] Mean training loss: 0.0111. +[ Thu Sep 8 13:59:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:59:17 2022 ] Eval epoch: 99 +[ Thu Sep 8 14:06:04 2022 ] Epoch 99 Curr Acc: (28017/50919)55.02% +[ Thu Sep 8 14:06:04 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 14:06:04 2022 ] Training epoch: 100 +[ Thu Sep 8 14:06:04 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:12:42 2022 ] Mean training loss: 0.0104. +[ Thu Sep 8 14:12:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:12:42 2022 ] Eval epoch: 100 +[ Thu Sep 8 14:19:29 2022 ] Epoch 100 Curr Acc: (28442/50919)55.86% +[ Thu Sep 8 14:19:29 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 14:19:29 2022 ] Training epoch: 101 +[ Thu Sep 8 14:19:29 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:26:07 2022 ] Mean training loss: 0.0109. 
+[ Thu Sep 8 14:26:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:26:07 2022 ] Eval epoch: 101 +[ Thu Sep 8 14:32:54 2022 ] Epoch 101 Curr Acc: (28236/50919)55.45% +[ Thu Sep 8 14:32:54 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 14:32:54 2022 ] Training epoch: 102 +[ Thu Sep 8 14:32:54 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:39:33 2022 ] Mean training loss: 0.0116. +[ Thu Sep 8 14:39:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:39:33 2022 ] Eval epoch: 102 +[ Thu Sep 8 14:46:20 2022 ] Epoch 102 Curr Acc: (28441/50919)55.86% +[ Thu Sep 8 14:46:20 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 14:46:20 2022 ] Training epoch: 103 +[ Thu Sep 8 14:46:20 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:52:58 2022 ] Mean training loss: 0.0122. +[ Thu Sep 8 14:52:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:52:58 2022 ] Eval epoch: 103 +[ Thu Sep 8 14:59:45 2022 ] Epoch 103 Curr Acc: (28322/50919)55.62% +[ Thu Sep 8 14:59:45 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 14:59:45 2022 ] Training epoch: 104 +[ Thu Sep 8 14:59:45 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:06:24 2022 ] Mean training loss: 0.0116. +[ Thu Sep 8 15:06:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:06:24 2022 ] Eval epoch: 104 +[ Thu Sep 8 15:13:11 2022 ] Epoch 104 Curr Acc: (28575/50919)56.12% +[ Thu Sep 8 15:13:11 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 15:13:11 2022 ] Training epoch: 105 +[ Thu Sep 8 15:13:11 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:19:49 2022 ] Mean training loss: 0.0110. 
+[ Thu Sep 8 15:19:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:19:49 2022 ] Eval epoch: 105 +[ Thu Sep 8 15:26:35 2022 ] Epoch 105 Curr Acc: (28136/50919)55.26% +[ Thu Sep 8 15:26:35 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 15:26:35 2022 ] Training epoch: 106 +[ Thu Sep 8 15:26:35 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:33:14 2022 ] Mean training loss: 0.0110. +[ Thu Sep 8 15:33:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:33:14 2022 ] Eval epoch: 106 +[ Thu Sep 8 15:40:00 2022 ] Epoch 106 Curr Acc: (28410/50919)55.79% +[ Thu Sep 8 15:40:00 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 15:40:00 2022 ] Training epoch: 107 +[ Thu Sep 8 15:40:00 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:46:37 2022 ] Mean training loss: 0.0112. +[ Thu Sep 8 15:46:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:46:37 2022 ] Eval epoch: 107 +[ Thu Sep 8 15:53:24 2022 ] Epoch 107 Curr Acc: (28708/50919)56.38% +[ Thu Sep 8 15:53:24 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 15:53:24 2022 ] Training epoch: 108 +[ Thu Sep 8 15:53:24 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 16:00:02 2022 ] Mean training loss: 0.0112. +[ Thu Sep 8 16:00:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 16:00:02 2022 ] Eval epoch: 108 +[ Thu Sep 8 16:06:49 2022 ] Epoch 108 Curr Acc: (28446/50919)55.87% +[ Thu Sep 8 16:06:49 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 16:06:49 2022 ] Training epoch: 109 +[ Thu Sep 8 16:06:49 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 16:13:28 2022 ] Mean training loss: 0.0108. 
+[ Thu Sep 8 16:13:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 16:13:28 2022 ] Eval epoch: 109 +[ Thu Sep 8 16:20:14 2022 ] Epoch 109 Curr Acc: (28427/50919)55.83% +[ Thu Sep 8 16:20:14 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 16:20:14 2022 ] Training epoch: 110 +[ Thu Sep 8 16:20:14 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 16:26:53 2022 ] Mean training loss: 0.0121. +[ Thu Sep 8 16:26:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 16:26:53 2022 ] Eval epoch: 110 +[ Thu Sep 8 16:33:39 2022 ] Epoch 110 Curr Acc: (28147/50919)55.28% +[ Thu Sep 8 16:33:39 2022 ] Epoch 54 Best Acc 56.52% +[ Thu Sep 8 16:33:39 2022 ] epoch: 54, best accuracy: 0.5651917751723325 +[ Thu Sep 8 16:33:39 2022 ] Experiment: ./work_dir/ntu120/xsub_bm +[ Thu Sep 8 16:33:39 2022 ] # generator parameters: 2.922995 M. +[ Thu Sep 8 16:33:39 2022 ] Load weights from ./runs/ntu120/xsub_bm/runs-53-105300.pt. +[ Thu Sep 8 16:33:39 2022 ] Eval epoch: 1 +[ Thu Sep 8 16:40:24 2022 ] Epoch 1 Curr Acc: (28779/50919)56.52% +[ Thu Sep 8 16:40:24 2022 ] Epoch 54 Best Acc 56.52% diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions import 
Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/config.yaml b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9b88ac969a75379472d2ef0e16d198f08bd336f9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu120/xsub_j.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + - 
448 + - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 120 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu120/xsub_j/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu120/xsub_j diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..ba165ac1a257fd2214292910e87a73bc71a0f8a1 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:970fb96c694e69cc7b3de27dda5a9160f196839288f4f24f73d3500159078621 +size 29946137 diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/log.txt b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..ee544f864f446dc9621b8599b681f3b9fffe6706 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/log.txt @@ -0,0 +1,631 @@ +[ Wed Sep 7 
21:34:48 2022 ] # generator parameters: 2.922995 M. +[ Wed Sep 7 21:34:48 2022 ] Parameters: +{'work_dir': './work_dir/ntu120/xsub_j', 'model_saved_name': './runs/ntu120/xsub_j/runs', 'config': 'config/ntu120/xsub_j.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Wed Sep 7 21:34:48 2022 ] Training epoch: 1 +[ Wed Sep 7 21:34:48 2022 ] Learning rate: 0.015 +[ Wed Sep 
7 21:41:24 2022 ] Mean training loss: 3.4110. +[ Wed Sep 7 21:41:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:41:24 2022 ] Training epoch: 2 +[ Wed Sep 7 21:41:24 2022 ] Learning rate: 0.03 +[ Wed Sep 7 21:47:59 2022 ] Mean training loss: 2.4547. +[ Wed Sep 7 21:47:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:47:59 2022 ] Training epoch: 3 +[ Wed Sep 7 21:47:59 2022 ] Learning rate: 0.045 +[ Wed Sep 7 21:54:36 2022 ] Mean training loss: 1.9976. +[ Wed Sep 7 21:54:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:54:36 2022 ] Training epoch: 4 +[ Wed Sep 7 21:54:36 2022 ] Learning rate: 0.06 +[ Wed Sep 7 22:01:13 2022 ] Mean training loss: 1.7330. +[ Wed Sep 7 22:01:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:01:13 2022 ] Training epoch: 5 +[ Wed Sep 7 22:01:13 2022 ] Learning rate: 0.075 +[ Wed Sep 7 22:07:49 2022 ] Mean training loss: 1.5684. +[ Wed Sep 7 22:07:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:07:49 2022 ] Training epoch: 6 +[ Wed Sep 7 22:07:49 2022 ] Learning rate: 0.09 +[ Wed Sep 7 22:14:24 2022 ] Mean training loss: 1.4553. +[ Wed Sep 7 22:14:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:14:24 2022 ] Training epoch: 7 +[ Wed Sep 7 22:14:24 2022 ] Learning rate: 0.10500000000000001 +[ Wed Sep 7 22:21:00 2022 ] Mean training loss: 1.4066. +[ Wed Sep 7 22:21:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:21:00 2022 ] Training epoch: 8 +[ Wed Sep 7 22:21:00 2022 ] Learning rate: 0.12 +[ Wed Sep 7 22:27:36 2022 ] Mean training loss: 1.3621. +[ Wed Sep 7 22:27:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:27:36 2022 ] Training epoch: 9 +[ Wed Sep 7 22:27:36 2022 ] Learning rate: 0.13499999999999998 +[ Wed Sep 7 22:34:12 2022 ] Mean training loss: 1.3254. 
+[ Wed Sep 7 22:34:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:34:12 2022 ] Training epoch: 10 +[ Wed Sep 7 22:34:12 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:40:48 2022 ] Mean training loss: 1.3225. +[ Wed Sep 7 22:40:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:40:48 2022 ] Training epoch: 11 +[ Wed Sep 7 22:40:48 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:47:25 2022 ] Mean training loss: 1.2530. +[ Wed Sep 7 22:47:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:47:25 2022 ] Training epoch: 12 +[ Wed Sep 7 22:47:25 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:53:59 2022 ] Mean training loss: 1.1999. +[ Wed Sep 7 22:53:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:53:59 2022 ] Training epoch: 13 +[ Wed Sep 7 22:53:59 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:00:34 2022 ] Mean training loss: 1.1760. +[ Wed Sep 7 23:00:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:00:34 2022 ] Training epoch: 14 +[ Wed Sep 7 23:00:34 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:07:09 2022 ] Mean training loss: 1.1365. +[ Wed Sep 7 23:07:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:07:09 2022 ] Training epoch: 15 +[ Wed Sep 7 23:07:09 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:13:44 2022 ] Mean training loss: 1.1145. +[ Wed Sep 7 23:13:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:13:44 2022 ] Training epoch: 16 +[ Wed Sep 7 23:13:44 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:20:21 2022 ] Mean training loss: 1.0964. +[ Wed Sep 7 23:20:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:20:21 2022 ] Training epoch: 17 +[ Wed Sep 7 23:20:21 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:26:58 2022 ] Mean training loss: 1.0684. 
+[ Wed Sep 7 23:26:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:26:58 2022 ] Training epoch: 18 +[ Wed Sep 7 23:26:58 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:33:34 2022 ] Mean training loss: 1.0638. +[ Wed Sep 7 23:33:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:33:34 2022 ] Training epoch: 19 +[ Wed Sep 7 23:33:34 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:40:10 2022 ] Mean training loss: 1.0479. +[ Wed Sep 7 23:40:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:40:10 2022 ] Training epoch: 20 +[ Wed Sep 7 23:40:10 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:46:45 2022 ] Mean training loss: 1.0294. +[ Wed Sep 7 23:46:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:46:45 2022 ] Training epoch: 21 +[ Wed Sep 7 23:46:45 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:53:21 2022 ] Mean training loss: 1.0288. +[ Wed Sep 7 23:53:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:53:21 2022 ] Training epoch: 22 +[ Wed Sep 7 23:53:21 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:59:56 2022 ] Mean training loss: 1.0129. +[ Wed Sep 7 23:59:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:59:56 2022 ] Training epoch: 23 +[ Wed Sep 7 23:59:56 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:06:33 2022 ] Mean training loss: 1.0015. +[ Thu Sep 8 00:06:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:06:33 2022 ] Training epoch: 24 +[ Thu Sep 8 00:06:33 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:13:08 2022 ] Mean training loss: 1.0029. +[ Thu Sep 8 00:13:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:13:08 2022 ] Training epoch: 25 +[ Thu Sep 8 00:13:08 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:19:44 2022 ] Mean training loss: 0.9837. 
+[ Thu Sep 8 00:19:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:19:44 2022 ] Training epoch: 26 +[ Thu Sep 8 00:19:44 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:26:20 2022 ] Mean training loss: 0.9914. +[ Thu Sep 8 00:26:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:26:20 2022 ] Training epoch: 27 +[ Thu Sep 8 00:26:20 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:32:56 2022 ] Mean training loss: 0.9702. +[ Thu Sep 8 00:32:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:32:56 2022 ] Training epoch: 28 +[ Thu Sep 8 00:32:56 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:39:32 2022 ] Mean training loss: 0.9798. +[ Thu Sep 8 00:39:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:39:32 2022 ] Training epoch: 29 +[ Thu Sep 8 00:39:32 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:46:08 2022 ] Mean training loss: 0.9441. +[ Thu Sep 8 00:46:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:46:08 2022 ] Training epoch: 30 +[ Thu Sep 8 00:46:08 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:52:43 2022 ] Mean training loss: 0.9694. +[ Thu Sep 8 00:52:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:52:43 2022 ] Training epoch: 31 +[ Thu Sep 8 00:52:43 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:59:18 2022 ] Mean training loss: 0.9698. +[ Thu Sep 8 00:59:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:59:18 2022 ] Training epoch: 32 +[ Thu Sep 8 00:59:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:05:53 2022 ] Mean training loss: 0.9586. +[ Thu Sep 8 01:05:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:05:53 2022 ] Training epoch: 33 +[ Thu Sep 8 01:05:53 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:12:28 2022 ] Mean training loss: 0.9430. 
+[ Thu Sep 8 01:12:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:12:28 2022 ] Training epoch: 34 +[ Thu Sep 8 01:12:28 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:19:05 2022 ] Mean training loss: 0.9436. +[ Thu Sep 8 01:19:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:19:05 2022 ] Training epoch: 35 +[ Thu Sep 8 01:19:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:25:42 2022 ] Mean training loss: 0.9375. +[ Thu Sep 8 01:25:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:25:42 2022 ] Training epoch: 36 +[ Thu Sep 8 01:25:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:32:18 2022 ] Mean training loss: 0.9467. +[ Thu Sep 8 01:32:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:32:18 2022 ] Training epoch: 37 +[ Thu Sep 8 01:32:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:38:54 2022 ] Mean training loss: 0.9447. +[ Thu Sep 8 01:38:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:38:54 2022 ] Training epoch: 38 +[ Thu Sep 8 01:38:54 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:45:30 2022 ] Mean training loss: 0.9448. +[ Thu Sep 8 01:45:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:45:30 2022 ] Training epoch: 39 +[ Thu Sep 8 01:45:30 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:52:06 2022 ] Mean training loss: 0.9296. +[ Thu Sep 8 01:52:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:52:06 2022 ] Training epoch: 40 +[ Thu Sep 8 01:52:06 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:58:40 2022 ] Mean training loss: 0.9398. +[ Thu Sep 8 01:58:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:58:40 2022 ] Training epoch: 41 +[ Thu Sep 8 01:58:40 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:05:14 2022 ] Mean training loss: 0.9259. 
+[ Thu Sep 8 02:05:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:05:14 2022 ] Training epoch: 42 +[ Thu Sep 8 02:05:14 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:11:49 2022 ] Mean training loss: 0.9358. +[ Thu Sep 8 02:11:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:11:49 2022 ] Training epoch: 43 +[ Thu Sep 8 02:11:49 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:18:24 2022 ] Mean training loss: 0.9339. +[ Thu Sep 8 02:18:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:18:24 2022 ] Training epoch: 44 +[ Thu Sep 8 02:18:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:25:00 2022 ] Mean training loss: 0.9247. +[ Thu Sep 8 02:25:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:25:00 2022 ] Training epoch: 45 +[ Thu Sep 8 02:25:00 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:31:37 2022 ] Mean training loss: 0.9417. +[ Thu Sep 8 02:31:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:31:37 2022 ] Training epoch: 46 +[ Thu Sep 8 02:31:37 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:38:12 2022 ] Mean training loss: 0.9303. +[ Thu Sep 8 02:38:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:38:12 2022 ] Training epoch: 47 +[ Thu Sep 8 02:38:12 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:44:47 2022 ] Mean training loss: 0.9310. +[ Thu Sep 8 02:44:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:44:47 2022 ] Training epoch: 48 +[ Thu Sep 8 02:44:47 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:51:23 2022 ] Mean training loss: 0.9202. +[ Thu Sep 8 02:51:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:51:23 2022 ] Training epoch: 49 +[ Thu Sep 8 02:51:23 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:57:59 2022 ] Mean training loss: 0.9208. 
+[ Thu Sep 8 02:57:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:57:59 2022 ] Training epoch: 50 +[ Thu Sep 8 02:57:59 2022 ] Learning rate: 0.15 +[ Thu Sep 8 03:04:35 2022 ] Mean training loss: 0.9195. +[ Thu Sep 8 03:04:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:04:35 2022 ] Training epoch: 51 +[ Thu Sep 8 03:04:35 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:11:11 2022 ] Mean training loss: 0.4928. +[ Thu Sep 8 03:11:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:11:11 2022 ] Eval epoch: 51 +[ Thu Sep 8 03:17:55 2022 ] Epoch 51 Curr Acc: (27550/50919)54.11% +[ Thu Sep 8 03:17:55 2022 ] Epoch 51 Best Acc 54.11% +[ Thu Sep 8 03:17:55 2022 ] Training epoch: 52 +[ Thu Sep 8 03:17:55 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:24:31 2022 ] Mean training loss: 0.3719. +[ Thu Sep 8 03:24:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:24:31 2022 ] Eval epoch: 52 +[ Thu Sep 8 03:31:14 2022 ] Epoch 52 Curr Acc: (28520/50919)56.01% +[ Thu Sep 8 03:31:14 2022 ] Epoch 52 Best Acc 56.01% +[ Thu Sep 8 03:31:14 2022 ] Training epoch: 53 +[ Thu Sep 8 03:31:14 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:37:51 2022 ] Mean training loss: 0.3150. +[ Thu Sep 8 03:37:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:37:51 2022 ] Eval epoch: 53 +[ Thu Sep 8 03:44:35 2022 ] Epoch 53 Curr Acc: (28941/50919)56.84% +[ Thu Sep 8 03:44:35 2022 ] Epoch 53 Best Acc 56.84% +[ Thu Sep 8 03:44:35 2022 ] Training epoch: 54 +[ Thu Sep 8 03:44:35 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:51:10 2022 ] Mean training loss: 0.2692. 
+[ Thu Sep 8 03:51:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:51:10 2022 ] Eval epoch: 54 +[ Thu Sep 8 03:57:54 2022 ] Epoch 54 Curr Acc: (29213/50919)57.37% +[ Thu Sep 8 03:57:54 2022 ] Epoch 54 Best Acc 57.37% +[ Thu Sep 8 03:57:54 2022 ] Training epoch: 55 +[ Thu Sep 8 03:57:54 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:04:30 2022 ] Mean training loss: 0.2441. +[ Thu Sep 8 04:04:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:04:30 2022 ] Eval epoch: 55 +[ Thu Sep 8 04:11:13 2022 ] Epoch 55 Curr Acc: (28870/50919)56.70% +[ Thu Sep 8 04:11:13 2022 ] Epoch 54 Best Acc 57.37% +[ Thu Sep 8 04:11:13 2022 ] Training epoch: 56 +[ Thu Sep 8 04:11:13 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:17:49 2022 ] Mean training loss: 0.2166. +[ Thu Sep 8 04:17:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:17:49 2022 ] Eval epoch: 56 +[ Thu Sep 8 04:24:32 2022 ] Epoch 56 Curr Acc: (29086/50919)57.12% +[ Thu Sep 8 04:24:32 2022 ] Epoch 54 Best Acc 57.37% +[ Thu Sep 8 04:24:32 2022 ] Training epoch: 57 +[ Thu Sep 8 04:24:32 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:31:09 2022 ] Mean training loss: 0.1935. +[ Thu Sep 8 04:31:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:31:09 2022 ] Eval epoch: 57 +[ Thu Sep 8 04:37:52 2022 ] Epoch 57 Curr Acc: (28702/50919)56.37% +[ Thu Sep 8 04:37:52 2022 ] Epoch 54 Best Acc 57.37% +[ Thu Sep 8 04:37:52 2022 ] Training epoch: 58 +[ Thu Sep 8 04:37:52 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:44:29 2022 ] Mean training loss: 0.1774. +[ Thu Sep 8 04:44:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:44:29 2022 ] Eval epoch: 58 +[ Thu Sep 8 04:51:12 2022 ] Epoch 58 Curr Acc: (29216/50919)57.38% +[ Thu Sep 8 04:51:12 2022 ] Epoch 58 Best Acc 57.38% +[ Thu Sep 8 04:51:12 2022 ] Training epoch: 59 +[ Thu Sep 8 04:51:12 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:57:49 2022 ] Mean training loss: 0.1613. 
+[ Thu Sep 8 04:57:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:57:49 2022 ] Eval epoch: 59 +[ Thu Sep 8 05:04:32 2022 ] Epoch 59 Curr Acc: (29268/50919)57.48% +[ Thu Sep 8 05:04:32 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 05:04:32 2022 ] Training epoch: 60 +[ Thu Sep 8 05:04:32 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:11:08 2022 ] Mean training loss: 0.1465. +[ Thu Sep 8 05:11:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:11:08 2022 ] Eval epoch: 60 +[ Thu Sep 8 05:17:51 2022 ] Epoch 60 Curr Acc: (27961/50919)54.91% +[ Thu Sep 8 05:17:51 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 05:17:51 2022 ] Training epoch: 61 +[ Thu Sep 8 05:17:51 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:24:28 2022 ] Mean training loss: 0.1338. +[ Thu Sep 8 05:24:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:24:28 2022 ] Eval epoch: 61 +[ Thu Sep 8 05:31:11 2022 ] Epoch 61 Curr Acc: (28617/50919)56.20% +[ Thu Sep 8 05:31:11 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 05:31:11 2022 ] Training epoch: 62 +[ Thu Sep 8 05:31:11 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:37:47 2022 ] Mean training loss: 0.1262. +[ Thu Sep 8 05:37:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:37:47 2022 ] Eval epoch: 62 +[ Thu Sep 8 05:44:30 2022 ] Epoch 62 Curr Acc: (28582/50919)56.13% +[ Thu Sep 8 05:44:30 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 05:44:30 2022 ] Training epoch: 63 +[ Thu Sep 8 05:44:30 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:51:07 2022 ] Mean training loss: 0.1247. +[ Thu Sep 8 05:51:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:51:07 2022 ] Eval epoch: 63 +[ Thu Sep 8 05:57:50 2022 ] Epoch 63 Curr Acc: (28114/50919)55.21% +[ Thu Sep 8 05:57:50 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 05:57:50 2022 ] Training epoch: 64 +[ Thu Sep 8 05:57:50 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:04:26 2022 ] Mean training loss: 0.1191. 
+[ Thu Sep 8 06:04:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:04:26 2022 ] Eval epoch: 64 +[ Thu Sep 8 06:11:09 2022 ] Epoch 64 Curr Acc: (28186/50919)55.35% +[ Thu Sep 8 06:11:09 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 06:11:09 2022 ] Training epoch: 65 +[ Thu Sep 8 06:11:09 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:17:45 2022 ] Mean training loss: 0.1178. +[ Thu Sep 8 06:17:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:17:45 2022 ] Eval epoch: 65 +[ Thu Sep 8 06:24:28 2022 ] Epoch 65 Curr Acc: (27422/50919)53.85% +[ Thu Sep 8 06:24:28 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 06:24:28 2022 ] Training epoch: 66 +[ Thu Sep 8 06:24:28 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:31:04 2022 ] Mean training loss: 0.1247. +[ Thu Sep 8 06:31:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:31:04 2022 ] Eval epoch: 66 +[ Thu Sep 8 06:37:47 2022 ] Epoch 66 Curr Acc: (27001/50919)53.03% +[ Thu Sep 8 06:37:47 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 06:37:47 2022 ] Training epoch: 67 +[ Thu Sep 8 06:37:47 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:44:23 2022 ] Mean training loss: 0.1185. +[ Thu Sep 8 06:44:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:44:24 2022 ] Eval epoch: 67 +[ Thu Sep 8 06:51:06 2022 ] Epoch 67 Curr Acc: (27867/50919)54.73% +[ Thu Sep 8 06:51:06 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 06:51:06 2022 ] Training epoch: 68 +[ Thu Sep 8 06:51:06 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:57:43 2022 ] Mean training loss: 0.1110. +[ Thu Sep 8 06:57:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:57:43 2022 ] Eval epoch: 68 +[ Thu Sep 8 07:04:26 2022 ] Epoch 68 Curr Acc: (28220/50919)55.42% +[ Thu Sep 8 07:04:26 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 07:04:26 2022 ] Training epoch: 69 +[ Thu Sep 8 07:04:26 2022 ] Learning rate: 0.015 +[ Thu Sep 8 07:11:01 2022 ] Mean training loss: 0.1122. 
+[ Thu Sep 8 07:11:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:11:01 2022 ] Eval epoch: 69 +[ Thu Sep 8 07:17:44 2022 ] Epoch 69 Curr Acc: (27883/50919)54.76% +[ Thu Sep 8 07:17:44 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 07:17:44 2022 ] Training epoch: 70 +[ Thu Sep 8 07:17:44 2022 ] Learning rate: 0.015 +[ Thu Sep 8 07:24:21 2022 ] Mean training loss: 0.1229. +[ Thu Sep 8 07:24:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:24:21 2022 ] Eval epoch: 70 +[ Thu Sep 8 07:31:04 2022 ] Epoch 70 Curr Acc: (27158/50919)53.34% +[ Thu Sep 8 07:31:04 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 07:31:04 2022 ] Training epoch: 71 +[ Thu Sep 8 07:31:04 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:37:41 2022 ] Mean training loss: 0.0659. +[ Thu Sep 8 07:37:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:37:41 2022 ] Eval epoch: 71 +[ Thu Sep 8 07:44:24 2022 ] Epoch 71 Curr Acc: (28399/50919)55.77% +[ Thu Sep 8 07:44:24 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 07:44:24 2022 ] Training epoch: 72 +[ Thu Sep 8 07:44:24 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:50:58 2022 ] Mean training loss: 0.0415. +[ Thu Sep 8 07:50:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:50:58 2022 ] Eval epoch: 72 +[ Thu Sep 8 07:57:41 2022 ] Epoch 72 Curr Acc: (28925/50919)56.81% +[ Thu Sep 8 07:57:41 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 07:57:41 2022 ] Training epoch: 73 +[ Thu Sep 8 07:57:41 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:04:17 2022 ] Mean training loss: 0.0350. 
+[ Thu Sep 8 08:04:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:04:17 2022 ] Eval epoch: 73 +[ Thu Sep 8 08:11:00 2022 ] Epoch 73 Curr Acc: (28805/50919)56.57% +[ Thu Sep 8 08:11:00 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 08:11:00 2022 ] Training epoch: 74 +[ Thu Sep 8 08:11:00 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:17:37 2022 ] Mean training loss: 0.0290. +[ Thu Sep 8 08:17:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:17:37 2022 ] Eval epoch: 74 +[ Thu Sep 8 08:24:20 2022 ] Epoch 74 Curr Acc: (29052/50919)57.06% +[ Thu Sep 8 08:24:20 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 08:24:20 2022 ] Training epoch: 75 +[ Thu Sep 8 08:24:20 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:30:56 2022 ] Mean training loss: 0.0276. +[ Thu Sep 8 08:30:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:30:56 2022 ] Eval epoch: 75 +[ Thu Sep 8 08:37:39 2022 ] Epoch 75 Curr Acc: (28863/50919)56.68% +[ Thu Sep 8 08:37:39 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 08:37:39 2022 ] Training epoch: 76 +[ Thu Sep 8 08:37:39 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:44:16 2022 ] Mean training loss: 0.0257. +[ Thu Sep 8 08:44:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:44:16 2022 ] Eval epoch: 76 +[ Thu Sep 8 08:50:58 2022 ] Epoch 76 Curr Acc: (28568/50919)56.10% +[ Thu Sep 8 08:50:58 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 08:50:58 2022 ] Training epoch: 77 +[ Thu Sep 8 08:50:58 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:57:35 2022 ] Mean training loss: 0.0261. 
+[ Thu Sep 8 08:57:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:57:35 2022 ] Eval epoch: 77 +[ Thu Sep 8 09:04:18 2022 ] Epoch 77 Curr Acc: (28749/50919)56.46% +[ Thu Sep 8 09:04:18 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 09:04:18 2022 ] Training epoch: 78 +[ Thu Sep 8 09:04:18 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:10:55 2022 ] Mean training loss: 0.0242. +[ Thu Sep 8 09:10:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:10:55 2022 ] Eval epoch: 78 +[ Thu Sep 8 09:17:38 2022 ] Epoch 78 Curr Acc: (28752/50919)56.47% +[ Thu Sep 8 09:17:38 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 09:17:38 2022 ] Training epoch: 79 +[ Thu Sep 8 09:17:38 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:24:15 2022 ] Mean training loss: 0.0220. +[ Thu Sep 8 09:24:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:24:15 2022 ] Eval epoch: 79 +[ Thu Sep 8 09:30:58 2022 ] Epoch 79 Curr Acc: (28827/50919)56.61% +[ Thu Sep 8 09:30:58 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 09:30:58 2022 ] Training epoch: 80 +[ Thu Sep 8 09:30:58 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:37:34 2022 ] Mean training loss: 0.0207. +[ Thu Sep 8 09:37:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:37:34 2022 ] Eval epoch: 80 +[ Thu Sep 8 09:44:16 2022 ] Epoch 80 Curr Acc: (28801/50919)56.56% +[ Thu Sep 8 09:44:16 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 09:44:17 2022 ] Training epoch: 81 +[ Thu Sep 8 09:44:17 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:50:54 2022 ] Mean training loss: 0.0207. 
+[ Thu Sep 8 09:50:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:50:54 2022 ] Eval epoch: 81 +[ Thu Sep 8 09:57:37 2022 ] Epoch 81 Curr Acc: (28660/50919)56.29% +[ Thu Sep 8 09:57:37 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 09:57:37 2022 ] Training epoch: 82 +[ Thu Sep 8 09:57:37 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:04:13 2022 ] Mean training loss: 0.0191. +[ Thu Sep 8 10:04:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:04:13 2022 ] Eval epoch: 82 +[ Thu Sep 8 10:10:56 2022 ] Epoch 82 Curr Acc: (29050/50919)57.05% +[ Thu Sep 8 10:10:56 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 10:10:56 2022 ] Training epoch: 83 +[ Thu Sep 8 10:10:56 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:17:32 2022 ] Mean training loss: 0.0185. +[ Thu Sep 8 10:17:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:17:33 2022 ] Eval epoch: 83 +[ Thu Sep 8 10:24:16 2022 ] Epoch 83 Curr Acc: (28827/50919)56.61% +[ Thu Sep 8 10:24:16 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 10:24:16 2022 ] Training epoch: 84 +[ Thu Sep 8 10:24:16 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:30:52 2022 ] Mean training loss: 0.0185. +[ Thu Sep 8 10:30:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:30:52 2022 ] Eval epoch: 84 +[ Thu Sep 8 10:37:35 2022 ] Epoch 84 Curr Acc: (29072/50919)57.09% +[ Thu Sep 8 10:37:35 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 10:37:35 2022 ] Training epoch: 85 +[ Thu Sep 8 10:37:35 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:44:12 2022 ] Mean training loss: 0.0185. 
+[ Thu Sep 8 10:44:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:44:12 2022 ] Eval epoch: 85 +[ Thu Sep 8 10:50:55 2022 ] Epoch 85 Curr Acc: (28934/50919)56.82% +[ Thu Sep 8 10:50:55 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 10:50:55 2022 ] Training epoch: 86 +[ Thu Sep 8 10:50:55 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:57:33 2022 ] Mean training loss: 0.0179. +[ Thu Sep 8 10:57:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:57:33 2022 ] Eval epoch: 86 +[ Thu Sep 8 11:04:16 2022 ] Epoch 86 Curr Acc: (29233/50919)57.41% +[ Thu Sep 8 11:04:16 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 11:04:16 2022 ] Training epoch: 87 +[ Thu Sep 8 11:04:16 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:10:53 2022 ] Mean training loss: 0.0172. +[ Thu Sep 8 11:10:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:10:53 2022 ] Eval epoch: 87 +[ Thu Sep 8 11:17:36 2022 ] Epoch 87 Curr Acc: (29174/50919)57.29% +[ Thu Sep 8 11:17:36 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 11:17:36 2022 ] Training epoch: 88 +[ Thu Sep 8 11:17:36 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:24:13 2022 ] Mean training loss: 0.0166. +[ Thu Sep 8 11:24:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:24:13 2022 ] Eval epoch: 88 +[ Thu Sep 8 11:30:56 2022 ] Epoch 88 Curr Acc: (28578/50919)56.12% +[ Thu Sep 8 11:30:56 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 11:30:56 2022 ] Training epoch: 89 +[ Thu Sep 8 11:30:56 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:37:33 2022 ] Mean training loss: 0.0164. 
+[ Thu Sep 8 11:37:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:37:33 2022 ] Eval epoch: 89 +[ Thu Sep 8 11:44:16 2022 ] Epoch 89 Curr Acc: (29062/50919)57.07% +[ Thu Sep 8 11:44:16 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 11:44:16 2022 ] Training epoch: 90 +[ Thu Sep 8 11:44:16 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:50:52 2022 ] Mean training loss: 0.0162. +[ Thu Sep 8 11:50:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:50:52 2022 ] Eval epoch: 90 +[ Thu Sep 8 11:57:36 2022 ] Epoch 90 Curr Acc: (28627/50919)56.22% +[ Thu Sep 8 11:57:36 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 11:57:36 2022 ] Training epoch: 91 +[ Thu Sep 8 11:57:36 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:04:11 2022 ] Mean training loss: 0.0158. +[ Thu Sep 8 12:04:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:04:11 2022 ] Eval epoch: 91 +[ Thu Sep 8 12:10:55 2022 ] Epoch 91 Curr Acc: (29127/50919)57.20% +[ Thu Sep 8 12:10:55 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 12:10:55 2022 ] Training epoch: 92 +[ Thu Sep 8 12:10:55 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:17:32 2022 ] Mean training loss: 0.0150. +[ Thu Sep 8 12:17:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:17:32 2022 ] Eval epoch: 92 +[ Thu Sep 8 12:24:15 2022 ] Epoch 92 Curr Acc: (28910/50919)56.78% +[ Thu Sep 8 12:24:15 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 12:24:15 2022 ] Training epoch: 93 +[ Thu Sep 8 12:24:15 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:30:52 2022 ] Mean training loss: 0.0155. 
+[ Thu Sep 8 12:30:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:30:52 2022 ] Eval epoch: 93 +[ Thu Sep 8 12:37:35 2022 ] Epoch 93 Curr Acc: (29038/50919)57.03% +[ Thu Sep 8 12:37:35 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 12:37:35 2022 ] Training epoch: 94 +[ Thu Sep 8 12:37:35 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:44:12 2022 ] Mean training loss: 0.0147. +[ Thu Sep 8 12:44:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:44:12 2022 ] Eval epoch: 94 +[ Thu Sep 8 12:50:56 2022 ] Epoch 94 Curr Acc: (28978/50919)56.91% +[ Thu Sep 8 12:50:56 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 12:50:56 2022 ] Training epoch: 95 +[ Thu Sep 8 12:50:56 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:57:31 2022 ] Mean training loss: 0.0147. +[ Thu Sep 8 12:57:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:57:31 2022 ] Eval epoch: 95 +[ Thu Sep 8 13:04:14 2022 ] Epoch 95 Curr Acc: (29125/50919)57.20% +[ Thu Sep 8 13:04:14 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 13:04:14 2022 ] Training epoch: 96 +[ Thu Sep 8 13:04:14 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:10:51 2022 ] Mean training loss: 0.0153. +[ Thu Sep 8 13:10:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:10:51 2022 ] Eval epoch: 96 +[ Thu Sep 8 13:17:34 2022 ] Epoch 96 Curr Acc: (28496/50919)55.96% +[ Thu Sep 8 13:17:34 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 13:17:34 2022 ] Training epoch: 97 +[ Thu Sep 8 13:17:34 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:24:11 2022 ] Mean training loss: 0.0149. 
+[ Thu Sep 8 13:24:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:24:11 2022 ] Eval epoch: 97 +[ Thu Sep 8 13:30:54 2022 ] Epoch 97 Curr Acc: (29098/50919)57.15% +[ Thu Sep 8 13:30:54 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 13:30:54 2022 ] Training epoch: 98 +[ Thu Sep 8 13:30:54 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:37:31 2022 ] Mean training loss: 0.0138. +[ Thu Sep 8 13:37:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:37:31 2022 ] Eval epoch: 98 +[ Thu Sep 8 13:44:14 2022 ] Epoch 98 Curr Acc: (28776/50919)56.51% +[ Thu Sep 8 13:44:14 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 13:44:15 2022 ] Training epoch: 99 +[ Thu Sep 8 13:44:15 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:50:51 2022 ] Mean training loss: 0.0146. +[ Thu Sep 8 13:50:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:50:51 2022 ] Eval epoch: 99 +[ Thu Sep 8 13:57:34 2022 ] Epoch 99 Curr Acc: (28686/50919)56.34% +[ Thu Sep 8 13:57:34 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 13:57:34 2022 ] Training epoch: 100 +[ Thu Sep 8 13:57:34 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:04:11 2022 ] Mean training loss: 0.0147. +[ Thu Sep 8 14:04:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:04:11 2022 ] Eval epoch: 100 +[ Thu Sep 8 14:10:55 2022 ] Epoch 100 Curr Acc: (29201/50919)57.35% +[ Thu Sep 8 14:10:55 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 14:10:55 2022 ] Training epoch: 101 +[ Thu Sep 8 14:10:55 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:17:32 2022 ] Mean training loss: 0.0139. 
+[ Thu Sep 8 14:17:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:17:32 2022 ] Eval epoch: 101 +[ Thu Sep 8 14:24:15 2022 ] Epoch 101 Curr Acc: (28936/50919)56.83% +[ Thu Sep 8 14:24:15 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 14:24:15 2022 ] Training epoch: 102 +[ Thu Sep 8 14:24:15 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:30:51 2022 ] Mean training loss: 0.0150. +[ Thu Sep 8 14:30:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:30:52 2022 ] Eval epoch: 102 +[ Thu Sep 8 14:37:35 2022 ] Epoch 102 Curr Acc: (28802/50919)56.56% +[ Thu Sep 8 14:37:35 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 14:37:35 2022 ] Training epoch: 103 +[ Thu Sep 8 14:37:35 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:44:13 2022 ] Mean training loss: 0.0158. +[ Thu Sep 8 14:44:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:44:13 2022 ] Eval epoch: 103 +[ Thu Sep 8 14:50:56 2022 ] Epoch 103 Curr Acc: (28958/50919)56.87% +[ Thu Sep 8 14:50:56 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 14:50:56 2022 ] Training epoch: 104 +[ Thu Sep 8 14:50:56 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:57:32 2022 ] Mean training loss: 0.0145. +[ Thu Sep 8 14:57:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:57:32 2022 ] Eval epoch: 104 +[ Thu Sep 8 15:04:15 2022 ] Epoch 104 Curr Acc: (29202/50919)57.35% +[ Thu Sep 8 15:04:15 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 15:04:15 2022 ] Training epoch: 105 +[ Thu Sep 8 15:04:15 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:10:51 2022 ] Mean training loss: 0.0143. 
+[ Thu Sep 8 15:10:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:10:51 2022 ] Eval epoch: 105 +[ Thu Sep 8 15:17:35 2022 ] Epoch 105 Curr Acc: (28959/50919)56.87% +[ Thu Sep 8 15:17:35 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 15:17:35 2022 ] Training epoch: 106 +[ Thu Sep 8 15:17:35 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:24:10 2022 ] Mean training loss: 0.0137. +[ Thu Sep 8 15:24:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:24:10 2022 ] Eval epoch: 106 +[ Thu Sep 8 15:30:53 2022 ] Epoch 106 Curr Acc: (29126/50919)57.20% +[ Thu Sep 8 15:30:53 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 15:30:53 2022 ] Training epoch: 107 +[ Thu Sep 8 15:30:53 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:37:29 2022 ] Mean training loss: 0.0153. +[ Thu Sep 8 15:37:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:37:29 2022 ] Eval epoch: 107 +[ Thu Sep 8 15:44:13 2022 ] Epoch 107 Curr Acc: (29225/50919)57.40% +[ Thu Sep 8 15:44:13 2022 ] Epoch 59 Best Acc 57.48% +[ Thu Sep 8 15:44:13 2022 ] Training epoch: 108 +[ Thu Sep 8 15:44:13 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:50:49 2022 ] Mean training loss: 0.0148. +[ Thu Sep 8 15:50:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:50:49 2022 ] Eval epoch: 108 +[ Thu Sep 8 15:57:32 2022 ] Epoch 108 Curr Acc: (29271/50919)57.49% +[ Thu Sep 8 15:57:32 2022 ] Epoch 108 Best Acc 57.49% +[ Thu Sep 8 15:57:32 2022 ] Training epoch: 109 +[ Thu Sep 8 15:57:32 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 16:04:07 2022 ] Mean training loss: 0.0138. 
+[ Thu Sep 8 16:04:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 16:04:07 2022 ] Eval epoch: 109 +[ Thu Sep 8 16:10:51 2022 ] Epoch 109 Curr Acc: (28907/50919)56.77% +[ Thu Sep 8 16:10:51 2022 ] Epoch 108 Best Acc 57.49% +[ Thu Sep 8 16:10:51 2022 ] Training epoch: 110 +[ Thu Sep 8 16:10:51 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 16:17:26 2022 ] Mean training loss: 0.0146. +[ Thu Sep 8 16:17:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 16:17:26 2022 ] Eval epoch: 110 +[ Thu Sep 8 16:24:10 2022 ] Epoch 110 Curr Acc: (28645/50919)56.26% +[ Thu Sep 8 16:24:10 2022 ] Epoch 108 Best Acc 57.49% +[ Thu Sep 8 16:24:10 2022 ] epoch: 108, best accuracy: 0.5748541801685029 +[ Thu Sep 8 16:24:10 2022 ] Experiment: ./work_dir/ntu120/xsub_j +[ Thu Sep 8 16:24:10 2022 ] # generator parameters: 2.922995 M. +[ Thu Sep 8 16:24:10 2022 ] Load weights from ./runs/ntu120/xsub_j/runs-107-210600.pt. +[ Thu Sep 8 16:24:10 2022 ] Eval epoch: 1 +[ Thu Sep 8 16:30:53 2022 ] Epoch 1 Curr Acc: (29271/50919)57.49% +[ Thu Sep 8 16:30:53 2022 ] Epoch 108 Best Acc 57.49% diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions 
import Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/config.yaml b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..54ee75f5626f6dff07bbab8eb6bc67001cafe76f --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu120/xsub_jm.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + 
- 448 + - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 120 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu120/xsub_jm/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint_motion.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint_motion.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu120/xsub_jm diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..445e47876b15e338e6ddfdd4675efe99645d3efa --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17ffc874015dadb8c222a343c24de209c0fea9259c1445b38a61d0d35bd29ef7 +size 29946137 diff --git a/ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/log.txt b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..dd63a822203df0a493b49ec27ed69dd40329c107 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/log.txt @@ -0,0 
+1,631 @@ +[ Wed Sep 7 21:34:56 2022 ] # generator parameters: 2.922995 M. +[ Wed Sep 7 21:34:57 2022 ] Parameters: +{'work_dir': './work_dir/ntu120/xsub_jm', 'model_saved_name': './runs/ntu120/xsub_jm/runs', 'config': 'config/ntu120/xsub_jm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Wed Sep 7 21:34:57 2022 ] Training epoch: 1 +[ Wed Sep 7 21:34:57 
2022 ] Learning rate: 0.015 +[ Wed Sep 7 21:41:33 2022 ] Mean training loss: 3.5505. +[ Wed Sep 7 21:41:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:41:33 2022 ] Training epoch: 2 +[ Wed Sep 7 21:41:33 2022 ] Learning rate: 0.03 +[ Wed Sep 7 21:48:10 2022 ] Mean training loss: 2.5463. +[ Wed Sep 7 21:48:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:48:10 2022 ] Training epoch: 3 +[ Wed Sep 7 21:48:10 2022 ] Learning rate: 0.045 +[ Wed Sep 7 21:54:48 2022 ] Mean training loss: 1.9928. +[ Wed Sep 7 21:54:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 21:54:48 2022 ] Training epoch: 4 +[ Wed Sep 7 21:54:48 2022 ] Learning rate: 0.06 +[ Wed Sep 7 22:01:25 2022 ] Mean training loss: 1.6971. +[ Wed Sep 7 22:01:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:01:25 2022 ] Training epoch: 5 +[ Wed Sep 7 22:01:25 2022 ] Learning rate: 0.075 +[ Wed Sep 7 22:08:02 2022 ] Mean training loss: 1.5555. +[ Wed Sep 7 22:08:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:08:02 2022 ] Training epoch: 6 +[ Wed Sep 7 22:08:02 2022 ] Learning rate: 0.09 +[ Wed Sep 7 22:14:38 2022 ] Mean training loss: 1.4570. +[ Wed Sep 7 22:14:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:14:38 2022 ] Training epoch: 7 +[ Wed Sep 7 22:14:38 2022 ] Learning rate: 0.10500000000000001 +[ Wed Sep 7 22:21:16 2022 ] Mean training loss: 1.3999. +[ Wed Sep 7 22:21:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:21:16 2022 ] Training epoch: 8 +[ Wed Sep 7 22:21:16 2022 ] Learning rate: 0.12 +[ Wed Sep 7 22:27:54 2022 ] Mean training loss: 1.3780. +[ Wed Sep 7 22:27:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:27:54 2022 ] Training epoch: 9 +[ Wed Sep 7 22:27:54 2022 ] Learning rate: 0.13499999999999998 +[ Wed Sep 7 22:34:31 2022 ] Mean training loss: 1.3462. 
+[ Wed Sep 7 22:34:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:34:31 2022 ] Training epoch: 10 +[ Wed Sep 7 22:34:31 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:41:08 2022 ] Mean training loss: 1.3458. +[ Wed Sep 7 22:41:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:41:08 2022 ] Training epoch: 11 +[ Wed Sep 7 22:41:08 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:47:45 2022 ] Mean training loss: 1.2974. +[ Wed Sep 7 22:47:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:47:45 2022 ] Training epoch: 12 +[ Wed Sep 7 22:47:45 2022 ] Learning rate: 0.15 +[ Wed Sep 7 22:54:22 2022 ] Mean training loss: 1.2424. +[ Wed Sep 7 22:54:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 22:54:22 2022 ] Training epoch: 13 +[ Wed Sep 7 22:54:22 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:00:59 2022 ] Mean training loss: 1.2103. +[ Wed Sep 7 23:00:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:00:59 2022 ] Training epoch: 14 +[ Wed Sep 7 23:00:59 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:07:37 2022 ] Mean training loss: 1.1999. +[ Wed Sep 7 23:07:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:07:37 2022 ] Training epoch: 15 +[ Wed Sep 7 23:07:37 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:14:14 2022 ] Mean training loss: 1.1591. +[ Wed Sep 7 23:14:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:14:14 2022 ] Training epoch: 16 +[ Wed Sep 7 23:14:14 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:20:51 2022 ] Mean training loss: 1.1549. +[ Wed Sep 7 23:20:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:20:51 2022 ] Training epoch: 17 +[ Wed Sep 7 23:20:51 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:27:28 2022 ] Mean training loss: 1.1448. 
+[ Wed Sep 7 23:27:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:27:28 2022 ] Training epoch: 18 +[ Wed Sep 7 23:27:28 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:34:05 2022 ] Mean training loss: 1.1075. +[ Wed Sep 7 23:34:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:34:05 2022 ] Training epoch: 19 +[ Wed Sep 7 23:34:05 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:40:40 2022 ] Mean training loss: 1.1084. +[ Wed Sep 7 23:40:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:40:40 2022 ] Training epoch: 20 +[ Wed Sep 7 23:40:40 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:47:16 2022 ] Mean training loss: 1.0991. +[ Wed Sep 7 23:47:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:47:16 2022 ] Training epoch: 21 +[ Wed Sep 7 23:47:16 2022 ] Learning rate: 0.15 +[ Wed Sep 7 23:53:54 2022 ] Mean training loss: 1.0842. +[ Wed Sep 7 23:53:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Wed Sep 7 23:53:54 2022 ] Training epoch: 22 +[ Wed Sep 7 23:53:54 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:00:32 2022 ] Mean training loss: 1.0714. +[ Thu Sep 8 00:00:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:00:32 2022 ] Training epoch: 23 +[ Thu Sep 8 00:00:32 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:07:10 2022 ] Mean training loss: 1.0559. +[ Thu Sep 8 00:07:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:07:10 2022 ] Training epoch: 24 +[ Thu Sep 8 00:07:10 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:13:46 2022 ] Mean training loss: 1.0471. +[ Thu Sep 8 00:13:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:13:46 2022 ] Training epoch: 25 +[ Thu Sep 8 00:13:46 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:20:23 2022 ] Mean training loss: 1.0418. 
+[ Thu Sep 8 00:20:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:20:23 2022 ] Training epoch: 26 +[ Thu Sep 8 00:20:23 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:27:00 2022 ] Mean training loss: 1.0377. +[ Thu Sep 8 00:27:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:27:00 2022 ] Training epoch: 27 +[ Thu Sep 8 00:27:00 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:33:37 2022 ] Mean training loss: 1.0365. +[ Thu Sep 8 00:33:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:33:37 2022 ] Training epoch: 28 +[ Thu Sep 8 00:33:37 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:40:14 2022 ] Mean training loss: 1.0270. +[ Thu Sep 8 00:40:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:40:14 2022 ] Training epoch: 29 +[ Thu Sep 8 00:40:14 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:46:52 2022 ] Mean training loss: 1.0189. +[ Thu Sep 8 00:46:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:46:52 2022 ] Training epoch: 30 +[ Thu Sep 8 00:46:52 2022 ] Learning rate: 0.15 +[ Thu Sep 8 00:53:28 2022 ] Mean training loss: 1.0129. +[ Thu Sep 8 00:53:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 00:53:28 2022 ] Training epoch: 31 +[ Thu Sep 8 00:53:28 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:00:05 2022 ] Mean training loss: 1.0095. +[ Thu Sep 8 01:00:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:00:05 2022 ] Training epoch: 32 +[ Thu Sep 8 01:00:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:06:41 2022 ] Mean training loss: 1.0034. +[ Thu Sep 8 01:06:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:06:41 2022 ] Training epoch: 33 +[ Thu Sep 8 01:06:41 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:13:16 2022 ] Mean training loss: 1.0090. 
+[ Thu Sep 8 01:13:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:13:16 2022 ] Training epoch: 34 +[ Thu Sep 8 01:13:16 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:19:53 2022 ] Mean training loss: 0.9960. +[ Thu Sep 8 01:19:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:19:53 2022 ] Training epoch: 35 +[ Thu Sep 8 01:19:53 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:26:30 2022 ] Mean training loss: 1.0126. +[ Thu Sep 8 01:26:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:26:30 2022 ] Training epoch: 36 +[ Thu Sep 8 01:26:30 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:33:08 2022 ] Mean training loss: 0.9907. +[ Thu Sep 8 01:33:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:33:08 2022 ] Training epoch: 37 +[ Thu Sep 8 01:33:08 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:39:47 2022 ] Mean training loss: 0.9857. +[ Thu Sep 8 01:39:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:39:47 2022 ] Training epoch: 38 +[ Thu Sep 8 01:39:47 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:46:26 2022 ] Mean training loss: 0.9833. +[ Thu Sep 8 01:46:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:46:26 2022 ] Training epoch: 39 +[ Thu Sep 8 01:46:26 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:53:05 2022 ] Mean training loss: 0.9808. +[ Thu Sep 8 01:53:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:53:05 2022 ] Training epoch: 40 +[ Thu Sep 8 01:53:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 01:59:44 2022 ] Mean training loss: 0.9796. +[ Thu Sep 8 01:59:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 01:59:44 2022 ] Training epoch: 41 +[ Thu Sep 8 01:59:44 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:06:23 2022 ] Mean training loss: 0.9781. 
+[ Thu Sep 8 02:06:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:06:23 2022 ] Training epoch: 42 +[ Thu Sep 8 02:06:23 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:13:02 2022 ] Mean training loss: 0.9829. +[ Thu Sep 8 02:13:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:13:02 2022 ] Training epoch: 43 +[ Thu Sep 8 02:13:02 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:19:40 2022 ] Mean training loss: 0.9875. +[ Thu Sep 8 02:19:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:19:40 2022 ] Training epoch: 44 +[ Thu Sep 8 02:19:40 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:26:17 2022 ] Mean training loss: 0.9765. +[ Thu Sep 8 02:26:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:26:17 2022 ] Training epoch: 45 +[ Thu Sep 8 02:26:17 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:32:55 2022 ] Mean training loss: 0.9734. +[ Thu Sep 8 02:32:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:32:55 2022 ] Training epoch: 46 +[ Thu Sep 8 02:32:55 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:39:33 2022 ] Mean training loss: 0.9846. +[ Thu Sep 8 02:39:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:39:33 2022 ] Training epoch: 47 +[ Thu Sep 8 02:39:33 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:46:11 2022 ] Mean training loss: 0.9679. +[ Thu Sep 8 02:46:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:46:11 2022 ] Training epoch: 48 +[ Thu Sep 8 02:46:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:52:48 2022 ] Mean training loss: 0.9770. +[ Thu Sep 8 02:52:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:52:48 2022 ] Training epoch: 49 +[ Thu Sep 8 02:52:48 2022 ] Learning rate: 0.15 +[ Thu Sep 8 02:59:25 2022 ] Mean training loss: 0.9773. 
+[ Thu Sep 8 02:59:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 02:59:25 2022 ] Training epoch: 50 +[ Thu Sep 8 02:59:25 2022 ] Learning rate: 0.15 +[ Thu Sep 8 03:06:03 2022 ] Mean training loss: 0.9652. +[ Thu Sep 8 03:06:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:06:03 2022 ] Training epoch: 51 +[ Thu Sep 8 03:06:03 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:12:39 2022 ] Mean training loss: 0.5024. +[ Thu Sep 8 03:12:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:12:39 2022 ] Eval epoch: 51 +[ Thu Sep 8 03:19:30 2022 ] Epoch 51 Curr Acc: (26427/50919)51.90% +[ Thu Sep 8 03:19:30 2022 ] Epoch 51 Best Acc 51.90% +[ Thu Sep 8 03:19:30 2022 ] Training epoch: 52 +[ Thu Sep 8 03:19:30 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:26:05 2022 ] Mean training loss: 0.3750. +[ Thu Sep 8 03:26:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:26:05 2022 ] Eval epoch: 52 +[ Thu Sep 8 03:32:48 2022 ] Epoch 52 Curr Acc: (27217/50919)53.45% +[ Thu Sep 8 03:32:48 2022 ] Epoch 52 Best Acc 53.45% +[ Thu Sep 8 03:32:48 2022 ] Training epoch: 53 +[ Thu Sep 8 03:32:48 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:39:23 2022 ] Mean training loss: 0.3166. +[ Thu Sep 8 03:39:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:39:24 2022 ] Eval epoch: 53 +[ Thu Sep 8 03:46:06 2022 ] Epoch 53 Curr Acc: (27711/50919)54.42% +[ Thu Sep 8 03:46:06 2022 ] Epoch 53 Best Acc 54.42% +[ Thu Sep 8 03:46:06 2022 ] Training epoch: 54 +[ Thu Sep 8 03:46:06 2022 ] Learning rate: 0.015 +[ Thu Sep 8 03:52:42 2022 ] Mean training loss: 0.2698. 
+[ Thu Sep 8 03:52:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 03:52:43 2022 ] Eval epoch: 54 +[ Thu Sep 8 03:59:25 2022 ] Epoch 54 Curr Acc: (27776/50919)54.55% +[ Thu Sep 8 03:59:25 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 03:59:25 2022 ] Training epoch: 55 +[ Thu Sep 8 03:59:25 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:06:01 2022 ] Mean training loss: 0.2366. +[ Thu Sep 8 04:06:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:06:02 2022 ] Eval epoch: 55 +[ Thu Sep 8 04:12:44 2022 ] Epoch 55 Curr Acc: (27484/50919)53.98% +[ Thu Sep 8 04:12:44 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 04:12:44 2022 ] Training epoch: 56 +[ Thu Sep 8 04:12:44 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:19:21 2022 ] Mean training loss: 0.2079. +[ Thu Sep 8 04:19:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:19:21 2022 ] Eval epoch: 56 +[ Thu Sep 8 04:26:03 2022 ] Epoch 56 Curr Acc: (26107/50919)51.27% +[ Thu Sep 8 04:26:03 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 04:26:03 2022 ] Training epoch: 57 +[ Thu Sep 8 04:26:03 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:32:38 2022 ] Mean training loss: 0.1758. +[ Thu Sep 8 04:32:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:32:38 2022 ] Eval epoch: 57 +[ Thu Sep 8 04:39:21 2022 ] Epoch 57 Curr Acc: (27395/50919)53.80% +[ Thu Sep 8 04:39:21 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 04:39:21 2022 ] Training epoch: 58 +[ Thu Sep 8 04:39:21 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:45:55 2022 ] Mean training loss: 0.1576. +[ Thu Sep 8 04:45:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:45:55 2022 ] Eval epoch: 58 +[ Thu Sep 8 04:52:38 2022 ] Epoch 58 Curr Acc: (26128/50919)51.31% +[ Thu Sep 8 04:52:38 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 04:52:38 2022 ] Training epoch: 59 +[ Thu Sep 8 04:52:38 2022 ] Learning rate: 0.015 +[ Thu Sep 8 04:59:12 2022 ] Mean training loss: 0.1435. 
+[ Thu Sep 8 04:59:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 04:59:12 2022 ] Eval epoch: 59 +[ Thu Sep 8 05:05:55 2022 ] Epoch 59 Curr Acc: (27495/50919)54.00% +[ Thu Sep 8 05:05:55 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 05:05:55 2022 ] Training epoch: 60 +[ Thu Sep 8 05:05:55 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:12:31 2022 ] Mean training loss: 0.1249. +[ Thu Sep 8 05:12:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:12:31 2022 ] Eval epoch: 60 +[ Thu Sep 8 05:19:14 2022 ] Epoch 60 Curr Acc: (26375/50919)51.80% +[ Thu Sep 8 05:19:14 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 05:19:14 2022 ] Training epoch: 61 +[ Thu Sep 8 05:19:14 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:25:49 2022 ] Mean training loss: 0.1140. +[ Thu Sep 8 05:25:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:25:49 2022 ] Eval epoch: 61 +[ Thu Sep 8 05:32:32 2022 ] Epoch 61 Curr Acc: (26778/50919)52.59% +[ Thu Sep 8 05:32:32 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 05:32:32 2022 ] Training epoch: 62 +[ Thu Sep 8 05:32:32 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:39:08 2022 ] Mean training loss: 0.1064. +[ Thu Sep 8 05:39:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:39:08 2022 ] Eval epoch: 62 +[ Thu Sep 8 05:45:51 2022 ] Epoch 62 Curr Acc: (27000/50919)53.03% +[ Thu Sep 8 05:45:51 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 05:45:51 2022 ] Training epoch: 63 +[ Thu Sep 8 05:45:51 2022 ] Learning rate: 0.015 +[ Thu Sep 8 05:52:26 2022 ] Mean training loss: 0.1195. +[ Thu Sep 8 05:52:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 05:52:26 2022 ] Eval epoch: 63 +[ Thu Sep 8 05:59:09 2022 ] Epoch 63 Curr Acc: (26677/50919)52.39% +[ Thu Sep 8 05:59:09 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 05:59:09 2022 ] Training epoch: 64 +[ Thu Sep 8 05:59:09 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:05:45 2022 ] Mean training loss: 0.1119. 
+[ Thu Sep 8 06:05:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:05:45 2022 ] Eval epoch: 64 +[ Thu Sep 8 06:12:28 2022 ] Epoch 64 Curr Acc: (25515/50919)50.11% +[ Thu Sep 8 06:12:28 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 06:12:28 2022 ] Training epoch: 65 +[ Thu Sep 8 06:12:28 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:19:01 2022 ] Mean training loss: 0.1071. +[ Thu Sep 8 06:19:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:19:01 2022 ] Eval epoch: 65 +[ Thu Sep 8 06:25:43 2022 ] Epoch 65 Curr Acc: (26535/50919)52.11% +[ Thu Sep 8 06:25:43 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 06:25:44 2022 ] Training epoch: 66 +[ Thu Sep 8 06:25:44 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:32:19 2022 ] Mean training loss: 0.1063. +[ Thu Sep 8 06:32:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:32:19 2022 ] Eval epoch: 66 +[ Thu Sep 8 06:39:01 2022 ] Epoch 66 Curr Acc: (26613/50919)52.27% +[ Thu Sep 8 06:39:01 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 06:39:01 2022 ] Training epoch: 67 +[ Thu Sep 8 06:39:01 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:45:37 2022 ] Mean training loss: 0.1039. +[ Thu Sep 8 06:45:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:45:37 2022 ] Eval epoch: 67 +[ Thu Sep 8 06:52:20 2022 ] Epoch 67 Curr Acc: (25165/50919)49.42% +[ Thu Sep 8 06:52:20 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 06:52:20 2022 ] Training epoch: 68 +[ Thu Sep 8 06:52:20 2022 ] Learning rate: 0.015 +[ Thu Sep 8 06:58:55 2022 ] Mean training loss: 0.1104. +[ Thu Sep 8 06:58:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 06:58:55 2022 ] Eval epoch: 68 +[ Thu Sep 8 07:05:38 2022 ] Epoch 68 Curr Acc: (26270/50919)51.59% +[ Thu Sep 8 07:05:38 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 07:05:38 2022 ] Training epoch: 69 +[ Thu Sep 8 07:05:38 2022 ] Learning rate: 0.015 +[ Thu Sep 8 07:12:13 2022 ] Mean training loss: 0.0960. 
+[ Thu Sep 8 07:12:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:12:13 2022 ] Eval epoch: 69 +[ Thu Sep 8 07:18:55 2022 ] Epoch 69 Curr Acc: (26027/50919)51.11% +[ Thu Sep 8 07:18:55 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 07:18:55 2022 ] Training epoch: 70 +[ Thu Sep 8 07:18:55 2022 ] Learning rate: 0.015 +[ Thu Sep 8 07:25:31 2022 ] Mean training loss: 0.1008. +[ Thu Sep 8 07:25:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:25:31 2022 ] Eval epoch: 70 +[ Thu Sep 8 07:32:13 2022 ] Epoch 70 Curr Acc: (25435/50919)49.95% +[ Thu Sep 8 07:32:13 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 07:32:13 2022 ] Training epoch: 71 +[ Thu Sep 8 07:32:13 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:38:48 2022 ] Mean training loss: 0.0533. +[ Thu Sep 8 07:38:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:38:48 2022 ] Eval epoch: 71 +[ Thu Sep 8 07:45:31 2022 ] Epoch 71 Curr Acc: (27067/50919)53.16% +[ Thu Sep 8 07:45:31 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 07:45:31 2022 ] Training epoch: 72 +[ Thu Sep 8 07:45:31 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 07:52:07 2022 ] Mean training loss: 0.0343. +[ Thu Sep 8 07:52:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 07:52:07 2022 ] Eval epoch: 72 +[ Thu Sep 8 07:58:50 2022 ] Epoch 72 Curr Acc: (27244/50919)53.50% +[ Thu Sep 8 07:58:50 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 07:58:50 2022 ] Training epoch: 73 +[ Thu Sep 8 07:58:50 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:05:26 2022 ] Mean training loss: 0.0275. 
+[ Thu Sep 8 08:05:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:05:26 2022 ] Eval epoch: 73 +[ Thu Sep 8 08:12:08 2022 ] Epoch 73 Curr Acc: (27387/50919)53.79% +[ Thu Sep 8 08:12:08 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 08:12:08 2022 ] Training epoch: 74 +[ Thu Sep 8 08:12:08 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:18:44 2022 ] Mean training loss: 0.0236. +[ Thu Sep 8 08:18:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:18:44 2022 ] Eval epoch: 74 +[ Thu Sep 8 08:25:27 2022 ] Epoch 74 Curr Acc: (26749/50919)52.53% +[ Thu Sep 8 08:25:27 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 08:25:27 2022 ] Training epoch: 75 +[ Thu Sep 8 08:25:27 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:32:03 2022 ] Mean training loss: 0.0219. +[ Thu Sep 8 08:32:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:32:03 2022 ] Eval epoch: 75 +[ Thu Sep 8 08:38:45 2022 ] Epoch 75 Curr Acc: (27462/50919)53.93% +[ Thu Sep 8 08:38:45 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 08:38:45 2022 ] Training epoch: 76 +[ Thu Sep 8 08:38:45 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:45:21 2022 ] Mean training loss: 0.0225. +[ Thu Sep 8 08:45:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:45:21 2022 ] Eval epoch: 76 +[ Thu Sep 8 08:52:04 2022 ] Epoch 76 Curr Acc: (26647/50919)52.33% +[ Thu Sep 8 08:52:04 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 08:52:04 2022 ] Training epoch: 77 +[ Thu Sep 8 08:52:04 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 08:58:40 2022 ] Mean training loss: 0.0201. 
+[ Thu Sep 8 08:58:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 08:58:40 2022 ] Eval epoch: 77 +[ Thu Sep 8 09:05:22 2022 ] Epoch 77 Curr Acc: (27279/50919)53.57% +[ Thu Sep 8 09:05:22 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 09:05:22 2022 ] Training epoch: 78 +[ Thu Sep 8 09:05:22 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:11:58 2022 ] Mean training loss: 0.0191. +[ Thu Sep 8 09:11:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:11:58 2022 ] Eval epoch: 78 +[ Thu Sep 8 09:18:40 2022 ] Epoch 78 Curr Acc: (27376/50919)53.76% +[ Thu Sep 8 09:18:40 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 09:18:41 2022 ] Training epoch: 79 +[ Thu Sep 8 09:18:41 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:25:16 2022 ] Mean training loss: 0.0169. +[ Thu Sep 8 09:25:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:25:16 2022 ] Eval epoch: 79 +[ Thu Sep 8 09:31:59 2022 ] Epoch 79 Curr Acc: (27226/50919)53.47% +[ Thu Sep 8 09:31:59 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 09:31:59 2022 ] Training epoch: 80 +[ Thu Sep 8 09:31:59 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:38:34 2022 ] Mean training loss: 0.0154. +[ Thu Sep 8 09:38:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:38:34 2022 ] Eval epoch: 80 +[ Thu Sep 8 09:45:16 2022 ] Epoch 80 Curr Acc: (26624/50919)52.29% +[ Thu Sep 8 09:45:16 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 09:45:16 2022 ] Training epoch: 81 +[ Thu Sep 8 09:45:16 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 09:51:52 2022 ] Mean training loss: 0.0160. 
+[ Thu Sep 8 09:51:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 09:51:52 2022 ] Eval epoch: 81 +[ Thu Sep 8 09:58:34 2022 ] Epoch 81 Curr Acc: (27010/50919)53.05% +[ Thu Sep 8 09:58:34 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 09:58:34 2022 ] Training epoch: 82 +[ Thu Sep 8 09:58:34 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:05:09 2022 ] Mean training loss: 0.0151. +[ Thu Sep 8 10:05:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:05:09 2022 ] Eval epoch: 82 +[ Thu Sep 8 10:11:52 2022 ] Epoch 82 Curr Acc: (27009/50919)53.04% +[ Thu Sep 8 10:11:52 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 10:11:52 2022 ] Training epoch: 83 +[ Thu Sep 8 10:11:52 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:18:27 2022 ] Mean training loss: 0.0148. +[ Thu Sep 8 10:18:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:18:27 2022 ] Eval epoch: 83 +[ Thu Sep 8 10:25:10 2022 ] Epoch 83 Curr Acc: (27217/50919)53.45% +[ Thu Sep 8 10:25:10 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 10:25:10 2022 ] Training epoch: 84 +[ Thu Sep 8 10:25:10 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:31:44 2022 ] Mean training loss: 0.0149. +[ Thu Sep 8 10:31:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:31:44 2022 ] Eval epoch: 84 +[ Thu Sep 8 10:38:27 2022 ] Epoch 84 Curr Acc: (27020/50919)53.06% +[ Thu Sep 8 10:38:27 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 10:38:27 2022 ] Training epoch: 85 +[ Thu Sep 8 10:38:27 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:45:01 2022 ] Mean training loss: 0.0148. 
+[ Thu Sep 8 10:45:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:45:01 2022 ] Eval epoch: 85 +[ Thu Sep 8 10:51:44 2022 ] Epoch 85 Curr Acc: (27510/50919)54.03% +[ Thu Sep 8 10:51:44 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 10:51:44 2022 ] Training epoch: 86 +[ Thu Sep 8 10:51:44 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 10:58:19 2022 ] Mean training loss: 0.0138. +[ Thu Sep 8 10:58:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 10:58:19 2022 ] Eval epoch: 86 +[ Thu Sep 8 11:05:02 2022 ] Epoch 86 Curr Acc: (27559/50919)54.12% +[ Thu Sep 8 11:05:02 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 11:05:02 2022 ] Training epoch: 87 +[ Thu Sep 8 11:05:02 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:11:38 2022 ] Mean training loss: 0.0150. +[ Thu Sep 8 11:11:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:11:38 2022 ] Eval epoch: 87 +[ Thu Sep 8 11:18:21 2022 ] Epoch 87 Curr Acc: (27731/50919)54.46% +[ Thu Sep 8 11:18:21 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 11:18:21 2022 ] Training epoch: 88 +[ Thu Sep 8 11:18:21 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:24:56 2022 ] Mean training loss: 0.0126. +[ Thu Sep 8 11:24:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:24:56 2022 ] Eval epoch: 88 +[ Thu Sep 8 11:31:39 2022 ] Epoch 88 Curr Acc: (27229/50919)53.48% +[ Thu Sep 8 11:31:39 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 11:31:39 2022 ] Training epoch: 89 +[ Thu Sep 8 11:31:39 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:38:15 2022 ] Mean training loss: 0.0139. 
+[ Thu Sep 8 11:38:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:38:15 2022 ] Eval epoch: 89 +[ Thu Sep 8 11:44:58 2022 ] Epoch 89 Curr Acc: (27607/50919)54.22% +[ Thu Sep 8 11:44:58 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 11:44:58 2022 ] Training epoch: 90 +[ Thu Sep 8 11:44:58 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 11:51:34 2022 ] Mean training loss: 0.0133. +[ Thu Sep 8 11:51:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 11:51:34 2022 ] Eval epoch: 90 +[ Thu Sep 8 11:58:17 2022 ] Epoch 90 Curr Acc: (27307/50919)53.63% +[ Thu Sep 8 11:58:17 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 11:58:17 2022 ] Training epoch: 91 +[ Thu Sep 8 11:58:17 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:04:54 2022 ] Mean training loss: 0.0128. +[ Thu Sep 8 12:04:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:04:54 2022 ] Eval epoch: 91 +[ Thu Sep 8 12:11:37 2022 ] Epoch 91 Curr Acc: (27251/50919)53.52% +[ Thu Sep 8 12:11:37 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 12:11:37 2022 ] Training epoch: 92 +[ Thu Sep 8 12:11:37 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:18:13 2022 ] Mean training loss: 0.0131. +[ Thu Sep 8 12:18:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:18:13 2022 ] Eval epoch: 92 +[ Thu Sep 8 12:24:56 2022 ] Epoch 92 Curr Acc: (27446/50919)53.90% +[ Thu Sep 8 12:24:56 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 12:24:56 2022 ] Training epoch: 93 +[ Thu Sep 8 12:24:56 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:31:32 2022 ] Mean training loss: 0.0135. 
+[ Thu Sep 8 12:31:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:31:32 2022 ] Eval epoch: 93 +[ Thu Sep 8 12:38:16 2022 ] Epoch 93 Curr Acc: (26946/50919)52.92% +[ Thu Sep 8 12:38:16 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 12:38:16 2022 ] Training epoch: 94 +[ Thu Sep 8 12:38:16 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:44:51 2022 ] Mean training loss: 0.0133. +[ Thu Sep 8 12:44:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:44:51 2022 ] Eval epoch: 94 +[ Thu Sep 8 12:51:34 2022 ] Epoch 94 Curr Acc: (27578/50919)54.16% +[ Thu Sep 8 12:51:34 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 12:51:35 2022 ] Training epoch: 95 +[ Thu Sep 8 12:51:35 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 12:58:10 2022 ] Mean training loss: 0.0123. +[ Thu Sep 8 12:58:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 12:58:10 2022 ] Eval epoch: 95 +[ Thu Sep 8 13:04:53 2022 ] Epoch 95 Curr Acc: (26810/50919)52.65% +[ Thu Sep 8 13:04:53 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 13:04:53 2022 ] Training epoch: 96 +[ Thu Sep 8 13:04:53 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:11:27 2022 ] Mean training loss: 0.0134. +[ Thu Sep 8 13:11:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:11:27 2022 ] Eval epoch: 96 +[ Thu Sep 8 13:18:09 2022 ] Epoch 96 Curr Acc: (25210/50919)49.51% +[ Thu Sep 8 13:18:09 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 13:18:09 2022 ] Training epoch: 97 +[ Thu Sep 8 13:18:09 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:24:44 2022 ] Mean training loss: 0.0124. 
+[ Thu Sep 8 13:24:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:24:44 2022 ] Eval epoch: 97 +[ Thu Sep 8 13:31:27 2022 ] Epoch 97 Curr Acc: (27151/50919)53.32% +[ Thu Sep 8 13:31:27 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 13:31:27 2022 ] Training epoch: 98 +[ Thu Sep 8 13:31:27 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:38:02 2022 ] Mean training loss: 0.0127. +[ Thu Sep 8 13:38:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:38:02 2022 ] Eval epoch: 98 +[ Thu Sep 8 13:44:45 2022 ] Epoch 98 Curr Acc: (27226/50919)53.47% +[ Thu Sep 8 13:44:45 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 13:44:45 2022 ] Training epoch: 99 +[ Thu Sep 8 13:44:45 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 13:51:18 2022 ] Mean training loss: 0.0125. +[ Thu Sep 8 13:51:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 13:51:18 2022 ] Eval epoch: 99 +[ Thu Sep 8 13:58:01 2022 ] Epoch 99 Curr Acc: (26979/50919)52.98% +[ Thu Sep 8 13:58:01 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 13:58:01 2022 ] Training epoch: 100 +[ Thu Sep 8 13:58:01 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:04:34 2022 ] Mean training loss: 0.0120. +[ Thu Sep 8 14:04:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:04:35 2022 ] Eval epoch: 100 +[ Thu Sep 8 14:11:17 2022 ] Epoch 100 Curr Acc: (27486/50919)53.98% +[ Thu Sep 8 14:11:17 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 14:11:17 2022 ] Training epoch: 101 +[ Thu Sep 8 14:11:17 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:17:50 2022 ] Mean training loss: 0.0120. 
+[ Thu Sep 8 14:17:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:17:50 2022 ] Eval epoch: 101 +[ Thu Sep 8 14:24:33 2022 ] Epoch 101 Curr Acc: (27276/50919)53.57% +[ Thu Sep 8 14:24:33 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 14:24:33 2022 ] Training epoch: 102 +[ Thu Sep 8 14:24:33 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:31:06 2022 ] Mean training loss: 0.0124. +[ Thu Sep 8 14:31:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:31:06 2022 ] Eval epoch: 102 +[ Thu Sep 8 14:37:49 2022 ] Epoch 102 Curr Acc: (27455/50919)53.92% +[ Thu Sep 8 14:37:49 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 14:37:49 2022 ] Training epoch: 103 +[ Thu Sep 8 14:37:49 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:44:24 2022 ] Mean training loss: 0.0122. +[ Thu Sep 8 14:44:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:44:24 2022 ] Eval epoch: 103 +[ Thu Sep 8 14:51:06 2022 ] Epoch 103 Curr Acc: (27485/50919)53.98% +[ Thu Sep 8 14:51:06 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 14:51:06 2022 ] Training epoch: 104 +[ Thu Sep 8 14:51:06 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 14:57:41 2022 ] Mean training loss: 0.0123. +[ Thu Sep 8 14:57:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 14:57:41 2022 ] Eval epoch: 104 +[ Thu Sep 8 15:04:23 2022 ] Epoch 104 Curr Acc: (27552/50919)54.11% +[ Thu Sep 8 15:04:23 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 15:04:23 2022 ] Training epoch: 105 +[ Thu Sep 8 15:04:23 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:10:58 2022 ] Mean training loss: 0.0117. 
+[ Thu Sep 8 15:10:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:10:58 2022 ] Eval epoch: 105 +[ Thu Sep 8 15:17:41 2022 ] Epoch 105 Curr Acc: (27374/50919)53.76% +[ Thu Sep 8 15:17:41 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 15:17:41 2022 ] Training epoch: 106 +[ Thu Sep 8 15:17:41 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:24:15 2022 ] Mean training loss: 0.0133. +[ Thu Sep 8 15:24:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:24:15 2022 ] Eval epoch: 106 +[ Thu Sep 8 15:30:57 2022 ] Epoch 106 Curr Acc: (27505/50919)54.02% +[ Thu Sep 8 15:30:57 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 15:30:57 2022 ] Training epoch: 107 +[ Thu Sep 8 15:30:57 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:37:32 2022 ] Mean training loss: 0.0120. +[ Thu Sep 8 15:37:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:37:32 2022 ] Eval epoch: 107 +[ Thu Sep 8 15:44:15 2022 ] Epoch 107 Curr Acc: (27720/50919)54.44% +[ Thu Sep 8 15:44:15 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 15:44:15 2022 ] Training epoch: 108 +[ Thu Sep 8 15:44:15 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 15:50:48 2022 ] Mean training loss: 0.0119. +[ Thu Sep 8 15:50:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 15:50:48 2022 ] Eval epoch: 108 +[ Thu Sep 8 15:57:30 2022 ] Epoch 108 Curr Acc: (27437/50919)53.88% +[ Thu Sep 8 15:57:30 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 15:57:30 2022 ] Training epoch: 109 +[ Thu Sep 8 15:57:30 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 16:04:04 2022 ] Mean training loss: 0.0114. 
+[ Thu Sep 8 16:04:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 16:04:04 2022 ] Eval epoch: 109 +[ Thu Sep 8 16:10:47 2022 ] Epoch 109 Curr Acc: (27373/50919)53.76% +[ Thu Sep 8 16:10:47 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 16:10:47 2022 ] Training epoch: 110 +[ Thu Sep 8 16:10:47 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 16:17:21 2022 ] Mean training loss: 0.0129. +[ Thu Sep 8 16:17:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 16:17:21 2022 ] Eval epoch: 110 +[ Thu Sep 8 16:24:03 2022 ] Epoch 110 Curr Acc: (27047/50919)53.12% +[ Thu Sep 8 16:24:03 2022 ] Epoch 54 Best Acc 54.55% +[ Thu Sep 8 16:24:03 2022 ] epoch: 54, best accuracy: 0.5454938235236356 +[ Thu Sep 8 16:24:03 2022 ] Experiment: ./work_dir/ntu120/xsub_jm +[ Thu Sep 8 16:24:04 2022 ] # generator parameters: 2.922995 M. +[ Thu Sep 8 16:24:04 2022 ] Load weights from ./runs/ntu120/xsub_jm/runs-53-105300.pt. +[ Thu Sep 8 16:24:04 2022 ] Eval epoch: 1 +[ Thu Sep 8 16:30:46 2022 ] Epoch 1 Curr Acc: (27776/50919)54.55% +[ Thu Sep 8 16:30:46 2022 ] Epoch 54 Best Acc 54.55% diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions import 
Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/config.yaml b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..00345647aafccff62dc8f5cacb5130fb2afc157c --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu/xsub_b.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + - 448 + - 
true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 60 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu/xsub_b/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu/xsub_b diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..09d6278f0941e46660d377954e072401c6200fa2 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c48c593e2b823f8bd9f72279376db1fd09671dcb5378ac83302708c76e5c8882 +size 4979902 diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/log.txt b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..6829b1cf4f58ebf58d8b7aa81580a81445d528b7 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/log.txt @@ -0,0 +1,631 @@ +[ Thu Sep 8 17:07:44 2022 ] # generator 
parameters: 2.896055 M. +[ Thu Sep 8 17:07:44 2022 ] Parameters: +{'work_dir': './work_dir/ntu/xsub_b', 'model_saved_name': './runs/ntu/xsub_b/runs', 'config': 'config/ntu/xsub_b.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Thu Sep 8 17:07:44 2022 ] Training epoch: 1 +[ Thu Sep 8 17:07:44 2022 ] Learning rate: 0.015 +[ Thu Sep 8 17:11:06 2022 ] Mean training loss: 3.1116. 
+[ Thu Sep 8 17:11:06 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 17:11:06 2022 ] Training epoch: 2 +[ Thu Sep 8 17:11:06 2022 ] Learning rate: 0.03 +[ Thu Sep 8 17:14:28 2022 ] Mean training loss: 2.2707. +[ Thu Sep 8 17:14:28 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:14:28 2022 ] Training epoch: 3 +[ Thu Sep 8 17:14:28 2022 ] Learning rate: 0.045 +[ Thu Sep 8 17:17:49 2022 ] Mean training loss: 1.8360. +[ Thu Sep 8 17:17:49 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:17:49 2022 ] Training epoch: 4 +[ Thu Sep 8 17:17:49 2022 ] Learning rate: 0.06 +[ Thu Sep 8 17:21:11 2022 ] Mean training loss: 1.6257. +[ Thu Sep 8 17:21:11 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:21:11 2022 ] Training epoch: 5 +[ Thu Sep 8 17:21:11 2022 ] Learning rate: 0.075 +[ Thu Sep 8 17:24:32 2022 ] Mean training loss: 1.4463. +[ Thu Sep 8 17:24:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:24:32 2022 ] Training epoch: 6 +[ Thu Sep 8 17:24:32 2022 ] Learning rate: 0.09 +[ Thu Sep 8 17:27:53 2022 ] Mean training loss: 1.3583. +[ Thu Sep 8 17:27:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:27:53 2022 ] Training epoch: 7 +[ Thu Sep 8 17:27:53 2022 ] Learning rate: 0.10500000000000001 +[ Thu Sep 8 17:31:14 2022 ] Mean training loss: 1.2543. +[ Thu Sep 8 17:31:14 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:31:14 2022 ] Training epoch: 8 +[ Thu Sep 8 17:31:14 2022 ] Learning rate: 0.12 +[ Thu Sep 8 17:34:35 2022 ] Mean training loss: 1.1767. +[ Thu Sep 8 17:34:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:34:35 2022 ] Training epoch: 9 +[ Thu Sep 8 17:34:35 2022 ] Learning rate: 0.13499999999999998 +[ Thu Sep 8 17:37:56 2022 ] Mean training loss: 1.1567. 
+[ Thu Sep 8 17:37:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:37:56 2022 ] Training epoch: 10 +[ Thu Sep 8 17:37:56 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:41:17 2022 ] Mean training loss: 1.1013. +[ Thu Sep 8 17:41:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:41:17 2022 ] Training epoch: 11 +[ Thu Sep 8 17:41:17 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:44:38 2022 ] Mean training loss: 1.0563. +[ Thu Sep 8 17:44:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:44:38 2022 ] Training epoch: 12 +[ Thu Sep 8 17:44:38 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:48:00 2022 ] Mean training loss: 1.0204. +[ Thu Sep 8 17:48:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:48:00 2022 ] Training epoch: 13 +[ Thu Sep 8 17:48:00 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:51:21 2022 ] Mean training loss: 0.9678. +[ Thu Sep 8 17:51:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:51:21 2022 ] Training epoch: 14 +[ Thu Sep 8 17:51:21 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:54:42 2022 ] Mean training loss: 0.9507. +[ Thu Sep 8 17:54:42 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:54:42 2022 ] Training epoch: 15 +[ Thu Sep 8 17:54:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:58:03 2022 ] Mean training loss: 0.9140. +[ Thu Sep 8 17:58:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:58:03 2022 ] Training epoch: 16 +[ Thu Sep 8 17:58:03 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:01:22 2022 ] Mean training loss: 0.8949. +[ Thu Sep 8 18:01:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:01:22 2022 ] Training epoch: 17 +[ Thu Sep 8 18:01:22 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:04:42 2022 ] Mean training loss: 0.8668. 
+[ Thu Sep 8 18:04:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:04:42 2022 ] Training epoch: 18 +[ Thu Sep 8 18:04:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:08:03 2022 ] Mean training loss: 0.8622. +[ Thu Sep 8 18:08:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:08:03 2022 ] Training epoch: 19 +[ Thu Sep 8 18:08:03 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:11:22 2022 ] Mean training loss: 0.8276. +[ Thu Sep 8 18:11:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:11:22 2022 ] Training epoch: 20 +[ Thu Sep 8 18:11:22 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:14:43 2022 ] Mean training loss: 0.8280. +[ Thu Sep 8 18:14:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:14:43 2022 ] Training epoch: 21 +[ Thu Sep 8 18:14:43 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:18:04 2022 ] Mean training loss: 0.8205. +[ Thu Sep 8 18:18:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:18:04 2022 ] Training epoch: 22 +[ Thu Sep 8 18:18:04 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:21:26 2022 ] Mean training loss: 0.8259. +[ Thu Sep 8 18:21:26 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:21:26 2022 ] Training epoch: 23 +[ Thu Sep 8 18:21:26 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:24:47 2022 ] Mean training loss: 0.8165. +[ Thu Sep 8 18:24:47 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:24:47 2022 ] Training epoch: 24 +[ Thu Sep 8 18:24:47 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:28:09 2022 ] Mean training loss: 0.7833. +[ Thu Sep 8 18:28:09 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:28:09 2022 ] Training epoch: 25 +[ Thu Sep 8 18:28:09 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:31:32 2022 ] Mean training loss: 0.7724. 
+[ Thu Sep 8 18:31:32 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:31:32 2022 ] Training epoch: 26 +[ Thu Sep 8 18:31:32 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:34:53 2022 ] Mean training loss: 0.7551. +[ Thu Sep 8 18:34:53 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:34:53 2022 ] Training epoch: 27 +[ Thu Sep 8 18:34:53 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:38:13 2022 ] Mean training loss: 0.7380. +[ Thu Sep 8 18:38:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:38:13 2022 ] Training epoch: 28 +[ Thu Sep 8 18:38:13 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:41:33 2022 ] Mean training loss: 0.7460. +[ Thu Sep 8 18:41:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:41:33 2022 ] Training epoch: 29 +[ Thu Sep 8 18:41:33 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:44:54 2022 ] Mean training loss: 0.7339. +[ Thu Sep 8 18:44:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:44:54 2022 ] Training epoch: 30 +[ Thu Sep 8 18:44:54 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:48:15 2022 ] Mean training loss: 0.7232. +[ Thu Sep 8 18:48:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:48:15 2022 ] Training epoch: 31 +[ Thu Sep 8 18:48:15 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:51:37 2022 ] Mean training loss: 0.7467. +[ Thu Sep 8 18:51:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:51:37 2022 ] Training epoch: 32 +[ Thu Sep 8 18:51:37 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:54:58 2022 ] Mean training loss: 0.7282. +[ Thu Sep 8 18:54:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:54:58 2022 ] Training epoch: 33 +[ Thu Sep 8 18:54:58 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:58:19 2022 ] Mean training loss: 0.7132. 
+[ Thu Sep 8 18:58:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:58:19 2022 ] Training epoch: 34 +[ Thu Sep 8 18:58:19 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:01:40 2022 ] Mean training loss: 0.7012. +[ Thu Sep 8 19:01:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:01:40 2022 ] Training epoch: 35 +[ Thu Sep 8 19:01:40 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:05:01 2022 ] Mean training loss: 0.7128. +[ Thu Sep 8 19:05:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:05:01 2022 ] Training epoch: 36 +[ Thu Sep 8 19:05:01 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:08:22 2022 ] Mean training loss: 0.6946. +[ Thu Sep 8 19:08:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:08:22 2022 ] Training epoch: 37 +[ Thu Sep 8 19:08:22 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:11:43 2022 ] Mean training loss: 0.7103. +[ Thu Sep 8 19:11:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:11:43 2022 ] Training epoch: 38 +[ Thu Sep 8 19:11:43 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:15:05 2022 ] Mean training loss: 0.6890. +[ Thu Sep 8 19:15:05 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:15:05 2022 ] Training epoch: 39 +[ Thu Sep 8 19:15:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:18:24 2022 ] Mean training loss: 0.6835. +[ Thu Sep 8 19:18:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:18:24 2022 ] Training epoch: 40 +[ Thu Sep 8 19:18:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:21:45 2022 ] Mean training loss: 0.6812. +[ Thu Sep 8 19:21:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:21:45 2022 ] Training epoch: 41 +[ Thu Sep 8 19:21:45 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:25:04 2022 ] Mean training loss: 0.6613. 
+[ Thu Sep 8 19:25:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:25:04 2022 ] Training epoch: 42 +[ Thu Sep 8 19:25:04 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:28:24 2022 ] Mean training loss: 0.6751. +[ Thu Sep 8 19:28:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:28:24 2022 ] Training epoch: 43 +[ Thu Sep 8 19:28:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:31:45 2022 ] Mean training loss: 0.6819. +[ Thu Sep 8 19:31:45 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:31:45 2022 ] Training epoch: 44 +[ Thu Sep 8 19:31:45 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:35:06 2022 ] Mean training loss: 0.6711. +[ Thu Sep 8 19:35:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:35:06 2022 ] Training epoch: 45 +[ Thu Sep 8 19:35:06 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:38:26 2022 ] Mean training loss: 0.6602. +[ Thu Sep 8 19:38:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:38:26 2022 ] Training epoch: 46 +[ Thu Sep 8 19:38:26 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:41:48 2022 ] Mean training loss: 0.6408. +[ Thu Sep 8 19:41:48 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:41:48 2022 ] Training epoch: 47 +[ Thu Sep 8 19:41:48 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:45:10 2022 ] Mean training loss: 0.6448. +[ Thu Sep 8 19:45:10 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:45:10 2022 ] Training epoch: 48 +[ Thu Sep 8 19:45:10 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:48:33 2022 ] Mean training loss: 0.6538. +[ Thu Sep 8 19:48:33 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:48:33 2022 ] Training epoch: 49 +[ Thu Sep 8 19:48:33 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:51:55 2022 ] Mean training loss: 0.6332. 
+[ Thu Sep 8 19:51:55 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:51:55 2022 ] Training epoch: 50 +[ Thu Sep 8 19:51:55 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:55:17 2022 ] Mean training loss: 0.6572. +[ Thu Sep 8 19:55:17 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:55:17 2022 ] Training epoch: 51 +[ Thu Sep 8 19:55:17 2022 ] Learning rate: 0.015 +[ Thu Sep 8 19:58:39 2022 ] Mean training loss: 0.3288. +[ Thu Sep 8 19:58:39 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:58:39 2022 ] Eval epoch: 51 +[ Thu Sep 8 20:00:51 2022 ] Epoch 51 Curr Acc: (11075/16487)67.17% +[ Thu Sep 8 20:00:51 2022 ] Epoch 51 Best Acc 67.17% +[ Thu Sep 8 20:00:51 2022 ] Training epoch: 52 +[ Thu Sep 8 20:00:51 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:04:13 2022 ] Mean training loss: 0.2253. +[ Thu Sep 8 20:04:13 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:04:13 2022 ] Eval epoch: 52 +[ Thu Sep 8 20:06:26 2022 ] Epoch 52 Curr Acc: (11423/16487)69.28% +[ Thu Sep 8 20:06:26 2022 ] Epoch 52 Best Acc 69.28% +[ Thu Sep 8 20:06:26 2022 ] Training epoch: 53 +[ Thu Sep 8 20:06:26 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:09:47 2022 ] Mean training loss: 0.1931. +[ Thu Sep 8 20:09:47 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:09:47 2022 ] Eval epoch: 53 +[ Thu Sep 8 20:12:00 2022 ] Epoch 53 Curr Acc: (11412/16487)69.22% +[ Thu Sep 8 20:12:00 2022 ] Epoch 52 Best Acc 69.28% +[ Thu Sep 8 20:12:00 2022 ] Training epoch: 54 +[ Thu Sep 8 20:12:00 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:15:21 2022 ] Mean training loss: 0.1663. 
+[ Thu Sep 8 20:15:21 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:15:21 2022 ] Eval epoch: 54 +[ Thu Sep 8 20:17:33 2022 ] Epoch 54 Curr Acc: (10686/16487)64.81% +[ Thu Sep 8 20:17:33 2022 ] Epoch 52 Best Acc 69.28% +[ Thu Sep 8 20:17:33 2022 ] Training epoch: 55 +[ Thu Sep 8 20:17:33 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:20:55 2022 ] Mean training loss: 0.1412. +[ Thu Sep 8 20:20:55 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:20:55 2022 ] Eval epoch: 55 +[ Thu Sep 8 20:23:07 2022 ] Epoch 55 Curr Acc: (11452/16487)69.46% +[ Thu Sep 8 20:23:07 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 20:23:07 2022 ] Training epoch: 56 +[ Thu Sep 8 20:23:07 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:26:29 2022 ] Mean training loss: 0.1234. +[ Thu Sep 8 20:26:29 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:26:29 2022 ] Eval epoch: 56 +[ Thu Sep 8 20:28:41 2022 ] Epoch 56 Curr Acc: (10908/16487)66.16% +[ Thu Sep 8 20:28:41 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 20:28:41 2022 ] Training epoch: 57 +[ Thu Sep 8 20:28:41 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:32:04 2022 ] Mean training loss: 0.1122. +[ Thu Sep 8 20:32:04 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:32:04 2022 ] Eval epoch: 57 +[ Thu Sep 8 20:34:16 2022 ] Epoch 57 Curr Acc: (11402/16487)69.16% +[ Thu Sep 8 20:34:16 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 20:34:16 2022 ] Training epoch: 58 +[ Thu Sep 8 20:34:16 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:37:37 2022 ] Mean training loss: 0.0942. +[ Thu Sep 8 20:37:37 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:37:37 2022 ] Eval epoch: 58 +[ Thu Sep 8 20:39:49 2022 ] Epoch 58 Curr Acc: (11409/16487)69.20% +[ Thu Sep 8 20:39:49 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 20:39:49 2022 ] Training epoch: 59 +[ Thu Sep 8 20:39:49 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:43:11 2022 ] Mean training loss: 0.0820. 
+[ Thu Sep 8 20:43:11 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:43:11 2022 ] Eval epoch: 59 +[ Thu Sep 8 20:45:23 2022 ] Epoch 59 Curr Acc: (11000/16487)66.72% +[ Thu Sep 8 20:45:23 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 20:45:23 2022 ] Training epoch: 60 +[ Thu Sep 8 20:45:23 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:48:44 2022 ] Mean training loss: 0.0771. +[ Thu Sep 8 20:48:44 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:48:44 2022 ] Eval epoch: 60 +[ Thu Sep 8 20:50:56 2022 ] Epoch 60 Curr Acc: (11084/16487)67.23% +[ Thu Sep 8 20:50:56 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 20:50:56 2022 ] Training epoch: 61 +[ Thu Sep 8 20:50:56 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:54:18 2022 ] Mean training loss: 0.0672. +[ Thu Sep 8 20:54:18 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:54:18 2022 ] Eval epoch: 61 +[ Thu Sep 8 20:56:30 2022 ] Epoch 61 Curr Acc: (11192/16487)67.88% +[ Thu Sep 8 20:56:30 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 20:56:30 2022 ] Training epoch: 62 +[ Thu Sep 8 20:56:30 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:59:52 2022 ] Mean training loss: 0.0621. +[ Thu Sep 8 20:59:52 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:59:52 2022 ] Eval epoch: 62 +[ Thu Sep 8 21:02:04 2022 ] Epoch 62 Curr Acc: (10289/16487)62.41% +[ Thu Sep 8 21:02:04 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 21:02:04 2022 ] Training epoch: 63 +[ Thu Sep 8 21:02:04 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:05:25 2022 ] Mean training loss: 0.0567. +[ Thu Sep 8 21:05:25 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:05:25 2022 ] Eval epoch: 63 +[ Thu Sep 8 21:07:37 2022 ] Epoch 63 Curr Acc: (10353/16487)62.79% +[ Thu Sep 8 21:07:37 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 21:07:37 2022 ] Training epoch: 64 +[ Thu Sep 8 21:07:37 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:10:59 2022 ] Mean training loss: 0.0561. 
+[ Thu Sep 8 21:10:59 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:10:59 2022 ] Eval epoch: 64 +[ Thu Sep 8 21:13:11 2022 ] Epoch 64 Curr Acc: (9955/16487)60.38% +[ Thu Sep 8 21:13:11 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 21:13:11 2022 ] Training epoch: 65 +[ Thu Sep 8 21:13:11 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:16:32 2022 ] Mean training loss: 0.0475. +[ Thu Sep 8 21:16:32 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:16:32 2022 ] Eval epoch: 65 +[ Thu Sep 8 21:18:45 2022 ] Epoch 65 Curr Acc: (11008/16487)66.77% +[ Thu Sep 8 21:18:45 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 21:18:45 2022 ] Training epoch: 66 +[ Thu Sep 8 21:18:45 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:22:06 2022 ] Mean training loss: 0.0453. +[ Thu Sep 8 21:22:06 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:22:06 2022 ] Eval epoch: 66 +[ Thu Sep 8 21:24:18 2022 ] Epoch 66 Curr Acc: (11337/16487)68.76% +[ Thu Sep 8 21:24:18 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 21:24:18 2022 ] Training epoch: 67 +[ Thu Sep 8 21:24:18 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:27:40 2022 ] Mean training loss: 0.0457. +[ Thu Sep 8 21:27:40 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:27:40 2022 ] Eval epoch: 67 +[ Thu Sep 8 21:29:53 2022 ] Epoch 67 Curr Acc: (10638/16487)64.52% +[ Thu Sep 8 21:29:53 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 21:29:53 2022 ] Training epoch: 68 +[ Thu Sep 8 21:29:53 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:33:14 2022 ] Mean training loss: 0.0494. +[ Thu Sep 8 21:33:14 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:33:14 2022 ] Eval epoch: 68 +[ Thu Sep 8 21:35:26 2022 ] Epoch 68 Curr Acc: (11151/16487)67.64% +[ Thu Sep 8 21:35:26 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 21:35:26 2022 ] Training epoch: 69 +[ Thu Sep 8 21:35:26 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:38:47 2022 ] Mean training loss: 0.0378. 
+[ Thu Sep 8 21:38:47 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:38:48 2022 ] Eval epoch: 69 +[ Thu Sep 8 21:41:00 2022 ] Epoch 69 Curr Acc: (11160/16487)67.69% +[ Thu Sep 8 21:41:00 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 21:41:00 2022 ] Training epoch: 70 +[ Thu Sep 8 21:41:00 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:44:21 2022 ] Mean training loss: 0.0363. +[ Thu Sep 8 21:44:21 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:44:21 2022 ] Eval epoch: 70 +[ Thu Sep 8 21:46:34 2022 ] Epoch 70 Curr Acc: (10514/16487)63.77% +[ Thu Sep 8 21:46:34 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 21:46:34 2022 ] Training epoch: 71 +[ Thu Sep 8 21:46:34 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 21:49:55 2022 ] Mean training loss: 0.0281. +[ Thu Sep 8 21:49:55 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:49:55 2022 ] Eval epoch: 71 +[ Thu Sep 8 21:52:07 2022 ] Epoch 71 Curr Acc: (11203/16487)67.95% +[ Thu Sep 8 21:52:07 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 21:52:07 2022 ] Training epoch: 72 +[ Thu Sep 8 21:52:07 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 21:55:28 2022 ] Mean training loss: 0.0247. +[ Thu Sep 8 21:55:28 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:55:28 2022 ] Eval epoch: 72 +[ Thu Sep 8 21:57:40 2022 ] Epoch 72 Curr Acc: (10075/16487)61.11% +[ Thu Sep 8 21:57:40 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 21:57:40 2022 ] Training epoch: 73 +[ Thu Sep 8 21:57:40 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:01:01 2022 ] Mean training loss: 0.0245. 
+[ Thu Sep 8 22:01:01 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:01:01 2022 ] Eval epoch: 73 +[ Thu Sep 8 22:03:14 2022 ] Epoch 73 Curr Acc: (10729/16487)65.08% +[ Thu Sep 8 22:03:14 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 22:03:14 2022 ] Training epoch: 74 +[ Thu Sep 8 22:03:14 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:06:35 2022 ] Mean training loss: 0.0211. +[ Thu Sep 8 22:06:35 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:06:35 2022 ] Eval epoch: 74 +[ Thu Sep 8 22:08:47 2022 ] Epoch 74 Curr Acc: (10541/16487)63.94% +[ Thu Sep 8 22:08:47 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 22:08:47 2022 ] Training epoch: 75 +[ Thu Sep 8 22:08:47 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:12:08 2022 ] Mean training loss: 0.0194. +[ Thu Sep 8 22:12:08 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:12:08 2022 ] Eval epoch: 75 +[ Thu Sep 8 22:14:20 2022 ] Epoch 75 Curr Acc: (9983/16487)60.55% +[ Thu Sep 8 22:14:20 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 22:14:20 2022 ] Training epoch: 76 +[ Thu Sep 8 22:14:20 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:17:41 2022 ] Mean training loss: 0.0206. +[ Thu Sep 8 22:17:41 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:17:42 2022 ] Eval epoch: 76 +[ Thu Sep 8 22:19:54 2022 ] Epoch 76 Curr Acc: (11450/16487)69.45% +[ Thu Sep 8 22:19:54 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 22:19:54 2022 ] Training epoch: 77 +[ Thu Sep 8 22:19:54 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:23:15 2022 ] Mean training loss: 0.0160. 
+[ Thu Sep 8 22:23:15 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:23:15 2022 ] Eval epoch: 77 +[ Thu Sep 8 22:25:27 2022 ] Epoch 77 Curr Acc: (9984/16487)60.56% +[ Thu Sep 8 22:25:27 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 22:25:27 2022 ] Training epoch: 78 +[ Thu Sep 8 22:25:27 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:28:48 2022 ] Mean training loss: 0.0171. +[ Thu Sep 8 22:28:48 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:28:48 2022 ] Eval epoch: 78 +[ Thu Sep 8 22:31:01 2022 ] Epoch 78 Curr Acc: (10708/16487)64.95% +[ Thu Sep 8 22:31:01 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 22:31:01 2022 ] Training epoch: 79 +[ Thu Sep 8 22:31:01 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:34:21 2022 ] Mean training loss: 0.0179. +[ Thu Sep 8 22:34:21 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:34:21 2022 ] Eval epoch: 79 +[ Thu Sep 8 22:36:34 2022 ] Epoch 79 Curr Acc: (10084/16487)61.16% +[ Thu Sep 8 22:36:34 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 22:36:34 2022 ] Training epoch: 80 +[ Thu Sep 8 22:36:34 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:39:54 2022 ] Mean training loss: 0.0162. +[ Thu Sep 8 22:39:54 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:39:55 2022 ] Eval epoch: 80 +[ Thu Sep 8 22:42:07 2022 ] Epoch 80 Curr Acc: (10999/16487)66.71% +[ Thu Sep 8 22:42:07 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 22:42:07 2022 ] Training epoch: 81 +[ Thu Sep 8 22:42:07 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:45:28 2022 ] Mean training loss: 0.0190. 
+[ Thu Sep 8 22:45:28 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:45:28 2022 ] Eval epoch: 81 +[ Thu Sep 8 22:47:40 2022 ] Epoch 81 Curr Acc: (11188/16487)67.86% +[ Thu Sep 8 22:47:40 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 22:47:40 2022 ] Training epoch: 82 +[ Thu Sep 8 22:47:40 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:51:01 2022 ] Mean training loss: 0.0153. +[ Thu Sep 8 22:51:01 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:51:01 2022 ] Eval epoch: 82 +[ Thu Sep 8 22:53:13 2022 ] Epoch 82 Curr Acc: (9668/16487)58.64% +[ Thu Sep 8 22:53:13 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 22:53:13 2022 ] Training epoch: 83 +[ Thu Sep 8 22:53:13 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:56:35 2022 ] Mean training loss: 0.0170. +[ Thu Sep 8 22:56:35 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:56:35 2022 ] Eval epoch: 83 +[ Thu Sep 8 22:58:47 2022 ] Epoch 83 Curr Acc: (11192/16487)67.88% +[ Thu Sep 8 22:58:47 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 22:58:47 2022 ] Training epoch: 84 +[ Thu Sep 8 22:58:47 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:02:09 2022 ] Mean training loss: 0.0166. +[ Thu Sep 8 23:02:09 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:02:09 2022 ] Eval epoch: 84 +[ Thu Sep 8 23:04:21 2022 ] Epoch 84 Curr Acc: (10297/16487)62.46% +[ Thu Sep 8 23:04:21 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 23:04:21 2022 ] Training epoch: 85 +[ Thu Sep 8 23:04:21 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:07:43 2022 ] Mean training loss: 0.0164. 
+[ Thu Sep 8 23:07:43 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:07:43 2022 ] Eval epoch: 85 +[ Thu Sep 8 23:09:56 2022 ] Epoch 85 Curr Acc: (11152/16487)67.64% +[ Thu Sep 8 23:09:56 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 23:09:56 2022 ] Training epoch: 86 +[ Thu Sep 8 23:09:56 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:13:17 2022 ] Mean training loss: 0.0149. +[ Thu Sep 8 23:13:17 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:13:17 2022 ] Eval epoch: 86 +[ Thu Sep 8 23:15:29 2022 ] Epoch 86 Curr Acc: (10591/16487)64.24% +[ Thu Sep 8 23:15:29 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 23:15:29 2022 ] Training epoch: 87 +[ Thu Sep 8 23:15:29 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:18:51 2022 ] Mean training loss: 0.0143. +[ Thu Sep 8 23:18:51 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:18:51 2022 ] Eval epoch: 87 +[ Thu Sep 8 23:21:03 2022 ] Epoch 87 Curr Acc: (11437/16487)69.37% +[ Thu Sep 8 23:21:03 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 23:21:03 2022 ] Training epoch: 88 +[ Thu Sep 8 23:21:03 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:24:24 2022 ] Mean training loss: 0.0148. +[ Thu Sep 8 23:24:24 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:24:24 2022 ] Eval epoch: 88 +[ Thu Sep 8 23:26:37 2022 ] Epoch 88 Curr Acc: (11322/16487)68.67% +[ Thu Sep 8 23:26:37 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 23:26:37 2022 ] Training epoch: 89 +[ Thu Sep 8 23:26:37 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:29:57 2022 ] Mean training loss: 0.0160. 
+[ Thu Sep 8 23:29:57 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:29:58 2022 ] Eval epoch: 89 +[ Thu Sep 8 23:32:10 2022 ] Epoch 89 Curr Acc: (11235/16487)68.14% +[ Thu Sep 8 23:32:10 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 23:32:10 2022 ] Training epoch: 90 +[ Thu Sep 8 23:32:10 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:35:31 2022 ] Mean training loss: 0.0160. +[ Thu Sep 8 23:35:31 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:35:31 2022 ] Eval epoch: 90 +[ Thu Sep 8 23:37:43 2022 ] Epoch 90 Curr Acc: (11088/16487)67.25% +[ Thu Sep 8 23:37:43 2022 ] Epoch 55 Best Acc 69.46% +[ Thu Sep 8 23:37:43 2022 ] Training epoch: 91 +[ Thu Sep 8 23:37:43 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:41:05 2022 ] Mean training loss: 0.0152. +[ Thu Sep 8 23:41:05 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:41:05 2022 ] Eval epoch: 91 +[ Thu Sep 8 23:43:17 2022 ] Epoch 91 Curr Acc: (11576/16487)70.21% +[ Thu Sep 8 23:43:17 2022 ] Epoch 91 Best Acc 70.21% +[ Thu Sep 8 23:43:17 2022 ] Training epoch: 92 +[ Thu Sep 8 23:43:17 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:46:38 2022 ] Mean training loss: 0.0156. +[ Thu Sep 8 23:46:38 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:46:38 2022 ] Eval epoch: 92 +[ Thu Sep 8 23:48:51 2022 ] Epoch 92 Curr Acc: (9849/16487)59.74% +[ Thu Sep 8 23:48:51 2022 ] Epoch 91 Best Acc 70.21% +[ Thu Sep 8 23:48:51 2022 ] Training epoch: 93 +[ Thu Sep 8 23:48:51 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:52:12 2022 ] Mean training loss: 0.0135. 
+[ Thu Sep 8 23:52:12 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:52:12 2022 ] Eval epoch: 93 +[ Thu Sep 8 23:54:24 2022 ] Epoch 93 Curr Acc: (11271/16487)68.36% +[ Thu Sep 8 23:54:24 2022 ] Epoch 91 Best Acc 70.21% +[ Thu Sep 8 23:54:24 2022 ] Training epoch: 94 +[ Thu Sep 8 23:54:24 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:57:46 2022 ] Mean training loss: 0.0134. +[ Thu Sep 8 23:57:46 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:57:46 2022 ] Eval epoch: 94 +[ Thu Sep 8 23:59:59 2022 ] Epoch 94 Curr Acc: (10530/16487)63.87% +[ Thu Sep 8 23:59:59 2022 ] Epoch 91 Best Acc 70.21% +[ Thu Sep 8 23:59:59 2022 ] Training epoch: 95 +[ Thu Sep 8 23:59:59 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:03:20 2022 ] Mean training loss: 0.0135. +[ Fri Sep 9 00:03:20 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:03:20 2022 ] Eval epoch: 95 +[ Fri Sep 9 00:05:33 2022 ] Epoch 95 Curr Acc: (10177/16487)61.73% +[ Fri Sep 9 00:05:33 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 00:05:33 2022 ] Training epoch: 96 +[ Fri Sep 9 00:05:33 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:08:54 2022 ] Mean training loss: 0.0145. +[ Fri Sep 9 00:08:54 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:08:54 2022 ] Eval epoch: 96 +[ Fri Sep 9 00:11:06 2022 ] Epoch 96 Curr Acc: (11439/16487)69.38% +[ Fri Sep 9 00:11:06 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 00:11:06 2022 ] Training epoch: 97 +[ Fri Sep 9 00:11:06 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:14:28 2022 ] Mean training loss: 0.0128. 
+[ Fri Sep 9 00:14:28 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:14:28 2022 ] Eval epoch: 97 +[ Fri Sep 9 00:16:40 2022 ] Epoch 97 Curr Acc: (10205/16487)61.90% +[ Fri Sep 9 00:16:40 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 00:16:40 2022 ] Training epoch: 98 +[ Fri Sep 9 00:16:40 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:20:01 2022 ] Mean training loss: 0.0156. +[ Fri Sep 9 00:20:01 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:20:01 2022 ] Eval epoch: 98 +[ Fri Sep 9 00:22:13 2022 ] Epoch 98 Curr Acc: (11479/16487)69.62% +[ Fri Sep 9 00:22:13 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 00:22:13 2022 ] Training epoch: 99 +[ Fri Sep 9 00:22:13 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:25:35 2022 ] Mean training loss: 0.0147. +[ Fri Sep 9 00:25:35 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:25:35 2022 ] Eval epoch: 99 +[ Fri Sep 9 00:27:47 2022 ] Epoch 99 Curr Acc: (9911/16487)60.11% +[ Fri Sep 9 00:27:47 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 00:27:47 2022 ] Training epoch: 100 +[ Fri Sep 9 00:27:47 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:31:08 2022 ] Mean training loss: 0.0143. +[ Fri Sep 9 00:31:08 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:31:08 2022 ] Eval epoch: 100 +[ Fri Sep 9 00:33:20 2022 ] Epoch 100 Curr Acc: (11292/16487)68.49% +[ Fri Sep 9 00:33:20 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 00:33:21 2022 ] Training epoch: 101 +[ Fri Sep 9 00:33:21 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:36:41 2022 ] Mean training loss: 0.0164. 
+[ Fri Sep 9 00:36:41 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:36:41 2022 ] Eval epoch: 101 +[ Fri Sep 9 00:38:54 2022 ] Epoch 101 Curr Acc: (11442/16487)69.40% +[ Fri Sep 9 00:38:54 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 00:38:54 2022 ] Training epoch: 102 +[ Fri Sep 9 00:38:54 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:42:15 2022 ] Mean training loss: 0.0159. +[ Fri Sep 9 00:42:15 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:42:15 2022 ] Eval epoch: 102 +[ Fri Sep 9 00:44:27 2022 ] Epoch 102 Curr Acc: (10232/16487)62.06% +[ Fri Sep 9 00:44:27 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 00:44:27 2022 ] Training epoch: 103 +[ Fri Sep 9 00:44:27 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:47:48 2022 ] Mean training loss: 0.0156. +[ Fri Sep 9 00:47:48 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:47:48 2022 ] Eval epoch: 103 +[ Fri Sep 9 00:50:01 2022 ] Epoch 103 Curr Acc: (10424/16487)63.23% +[ Fri Sep 9 00:50:01 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 00:50:01 2022 ] Training epoch: 104 +[ Fri Sep 9 00:50:01 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:53:22 2022 ] Mean training loss: 0.0147. +[ Fri Sep 9 00:53:22 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:53:22 2022 ] Eval epoch: 104 +[ Fri Sep 9 00:55:34 2022 ] Epoch 104 Curr Acc: (10890/16487)66.05% +[ Fri Sep 9 00:55:34 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 00:55:34 2022 ] Training epoch: 105 +[ Fri Sep 9 00:55:34 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:58:55 2022 ] Mean training loss: 0.0152. 
+[ Fri Sep 9 00:58:55 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:58:55 2022 ] Eval epoch: 105 +[ Fri Sep 9 01:01:07 2022 ] Epoch 105 Curr Acc: (10402/16487)63.09% +[ Fri Sep 9 01:01:07 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 01:01:07 2022 ] Training epoch: 106 +[ Fri Sep 9 01:01:07 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:04:29 2022 ] Mean training loss: 0.0138. +[ Fri Sep 9 01:04:29 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 01:04:29 2022 ] Eval epoch: 106 +[ Fri Sep 9 01:06:42 2022 ] Epoch 106 Curr Acc: (11271/16487)68.36% +[ Fri Sep 9 01:06:42 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 01:06:42 2022 ] Training epoch: 107 +[ Fri Sep 9 01:06:42 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:10:03 2022 ] Mean training loss: 0.0148. +[ Fri Sep 9 01:10:03 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 01:10:03 2022 ] Eval epoch: 107 +[ Fri Sep 9 01:12:16 2022 ] Epoch 107 Curr Acc: (9807/16487)59.48% +[ Fri Sep 9 01:12:16 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 01:12:16 2022 ] Training epoch: 108 +[ Fri Sep 9 01:12:16 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:15:37 2022 ] Mean training loss: 0.0147. +[ Fri Sep 9 01:15:37 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 01:15:37 2022 ] Eval epoch: 108 +[ Fri Sep 9 01:17:49 2022 ] Epoch 108 Curr Acc: (10299/16487)62.47% +[ Fri Sep 9 01:17:49 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 01:17:49 2022 ] Training epoch: 109 +[ Fri Sep 9 01:17:49 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:21:11 2022 ] Mean training loss: 0.0153. 
+[ Fri Sep 9 01:21:11 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 01:21:11 2022 ] Eval epoch: 109 +[ Fri Sep 9 01:23:23 2022 ] Epoch 109 Curr Acc: (9934/16487)60.25% +[ Fri Sep 9 01:23:23 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 01:23:23 2022 ] Training epoch: 110 +[ Fri Sep 9 01:23:23 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:26:44 2022 ] Mean training loss: 0.0146. +[ Fri Sep 9 01:26:44 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 01:26:44 2022 ] Eval epoch: 110 +[ Fri Sep 9 01:28:56 2022 ] Epoch 110 Curr Acc: (10218/16487)61.98% +[ Fri Sep 9 01:28:56 2022 ] Epoch 91 Best Acc 70.21% +[ Fri Sep 9 01:28:56 2022 ] epoch: 91, best accuracy: 0.7021289500818827 +[ Fri Sep 9 01:28:56 2022 ] Experiment: ./work_dir/ntu/xsub_b +[ Fri Sep 9 01:28:57 2022 ] # generator parameters: 2.896055 M. +[ Fri Sep 9 01:28:57 2022 ] Load weights from ./runs/ntu/xsub_b/runs-90-89726.pt. +[ Fri Sep 9 01:28:57 2022 ] Eval epoch: 1 +[ Fri Sep 9 01:31:09 2022 ] Epoch 1 Curr Acc: (11576/16487)70.21% +[ Fri Sep 9 01:31:09 2022 ] Epoch 91 Best Acc 70.21% diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions import 
Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/config.yaml b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6d138c376a969499be1b3b7d0c89a3fc1f797504 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu/xsub_bm.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + - 448 
+ - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 60 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu/xsub_bm/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone_motion.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone_motion.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu/xsub_bm diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..b2ec88afbe563888d7dca533c530c81fc14e2de9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:921660534cf8af04cc285195b7bc526ec1ef4809c15360c66fde68d2b45f53e7 +size 4979902 diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/log.txt b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..aa68312be32322a3e6e17b67420f05ef1e59549f --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/log.txt @@ -0,0 +1,631 @@ +[ Thu Sep 8 17:07:44 
2022 ] # generator parameters: 2.896055 M. +[ Thu Sep 8 17:07:44 2022 ] Parameters: +{'work_dir': './work_dir/ntu/xsub_bm', 'model_saved_name': './runs/ntu/xsub_bm/runs', 'config': 'config/ntu/xsub_bm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Thu Sep 8 17:07:44 2022 ] Training epoch: 1 +[ Thu Sep 8 17:07:44 2022 ] Learning rate: 0.015 +[ Thu Sep 8 17:11:09 2022 
] Mean training loss: 3.2652. +[ Thu Sep 8 17:11:09 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 17:11:09 2022 ] Training epoch: 2 +[ Thu Sep 8 17:11:09 2022 ] Learning rate: 0.03 +[ Thu Sep 8 17:14:33 2022 ] Mean training loss: 2.4220. +[ Thu Sep 8 17:14:33 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:14:33 2022 ] Training epoch: 3 +[ Thu Sep 8 17:14:33 2022 ] Learning rate: 0.045 +[ Thu Sep 8 17:17:56 2022 ] Mean training loss: 2.0056. +[ Thu Sep 8 17:17:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:17:56 2022 ] Training epoch: 4 +[ Thu Sep 8 17:17:56 2022 ] Learning rate: 0.06 +[ Thu Sep 8 17:21:19 2022 ] Mean training loss: 1.6940. +[ Thu Sep 8 17:21:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:21:19 2022 ] Training epoch: 5 +[ Thu Sep 8 17:21:19 2022 ] Learning rate: 0.075 +[ Thu Sep 8 17:24:42 2022 ] Mean training loss: 1.5027. +[ Thu Sep 8 17:24:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:24:42 2022 ] Training epoch: 6 +[ Thu Sep 8 17:24:42 2022 ] Learning rate: 0.09 +[ Thu Sep 8 17:28:05 2022 ] Mean training loss: 1.3904. +[ Thu Sep 8 17:28:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:28:05 2022 ] Training epoch: 7 +[ Thu Sep 8 17:28:05 2022 ] Learning rate: 0.10500000000000001 +[ Thu Sep 8 17:31:28 2022 ] Mean training loss: 1.3177. +[ Thu Sep 8 17:31:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:31:28 2022 ] Training epoch: 8 +[ Thu Sep 8 17:31:28 2022 ] Learning rate: 0.12 +[ Thu Sep 8 17:34:51 2022 ] Mean training loss: 1.2143. +[ Thu Sep 8 17:34:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:34:51 2022 ] Training epoch: 9 +[ Thu Sep 8 17:34:51 2022 ] Learning rate: 0.13499999999999998 +[ Thu Sep 8 17:38:13 2022 ] Mean training loss: 1.1919. 
+[ Thu Sep 8 17:38:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:38:13 2022 ] Training epoch: 10 +[ Thu Sep 8 17:38:13 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:41:35 2022 ] Mean training loss: 1.1271. +[ Thu Sep 8 17:41:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:41:35 2022 ] Training epoch: 11 +[ Thu Sep 8 17:41:35 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:44:58 2022 ] Mean training loss: 1.0812. +[ Thu Sep 8 17:44:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:44:58 2022 ] Training epoch: 12 +[ Thu Sep 8 17:44:58 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:48:21 2022 ] Mean training loss: 1.0247. +[ Thu Sep 8 17:48:21 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:48:21 2022 ] Training epoch: 13 +[ Thu Sep 8 17:48:21 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:51:44 2022 ] Mean training loss: 0.9885. +[ Thu Sep 8 17:51:44 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:51:44 2022 ] Training epoch: 14 +[ Thu Sep 8 17:51:44 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:55:07 2022 ] Mean training loss: 0.9666. +[ Thu Sep 8 17:55:07 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:55:07 2022 ] Training epoch: 15 +[ Thu Sep 8 17:55:07 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:58:29 2022 ] Mean training loss: 0.9116. +[ Thu Sep 8 17:58:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:58:29 2022 ] Training epoch: 16 +[ Thu Sep 8 17:58:29 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:01:52 2022 ] Mean training loss: 0.9041. +[ Thu Sep 8 18:01:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:01:52 2022 ] Training epoch: 17 +[ Thu Sep 8 18:01:52 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:05:15 2022 ] Mean training loss: 0.8950. 
+[ Thu Sep 8 18:05:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:05:15 2022 ] Training epoch: 18 +[ Thu Sep 8 18:05:15 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:08:37 2022 ] Mean training loss: 0.8623. +[ Thu Sep 8 18:08:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:08:37 2022 ] Training epoch: 19 +[ Thu Sep 8 18:08:37 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:12:01 2022 ] Mean training loss: 0.8450. +[ Thu Sep 8 18:12:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:12:01 2022 ] Training epoch: 20 +[ Thu Sep 8 18:12:01 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:15:24 2022 ] Mean training loss: 0.8082. +[ Thu Sep 8 18:15:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:15:24 2022 ] Training epoch: 21 +[ Thu Sep 8 18:15:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:18:47 2022 ] Mean training loss: 0.7992. +[ Thu Sep 8 18:18:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:18:47 2022 ] Training epoch: 22 +[ Thu Sep 8 18:18:47 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:22:09 2022 ] Mean training loss: 0.7911. +[ Thu Sep 8 18:22:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:22:09 2022 ] Training epoch: 23 +[ Thu Sep 8 18:22:09 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:25:32 2022 ] Mean training loss: 0.7590. +[ Thu Sep 8 18:25:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:25:32 2022 ] Training epoch: 24 +[ Thu Sep 8 18:25:32 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:28:54 2022 ] Mean training loss: 0.7569. +[ Thu Sep 8 18:28:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:28:54 2022 ] Training epoch: 25 +[ Thu Sep 8 18:28:54 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:32:16 2022 ] Mean training loss: 0.7495. 
+[ Thu Sep 8 18:32:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:32:16 2022 ] Training epoch: 26 +[ Thu Sep 8 18:32:16 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:35:39 2022 ] Mean training loss: 0.7439. +[ Thu Sep 8 18:35:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:35:39 2022 ] Training epoch: 27 +[ Thu Sep 8 18:35:39 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:39:01 2022 ] Mean training loss: 0.7216. +[ Thu Sep 8 18:39:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:39:01 2022 ] Training epoch: 28 +[ Thu Sep 8 18:39:01 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:42:24 2022 ] Mean training loss: 0.7199. +[ Thu Sep 8 18:42:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:42:24 2022 ] Training epoch: 29 +[ Thu Sep 8 18:42:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:45:48 2022 ] Mean training loss: 0.6933. +[ Thu Sep 8 18:45:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:45:48 2022 ] Training epoch: 30 +[ Thu Sep 8 18:45:48 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:49:11 2022 ] Mean training loss: 0.6866. +[ Thu Sep 8 18:49:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:49:11 2022 ] Training epoch: 31 +[ Thu Sep 8 18:49:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:52:34 2022 ] Mean training loss: 0.6778. +[ Thu Sep 8 18:52:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:52:34 2022 ] Training epoch: 32 +[ Thu Sep 8 18:52:34 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:55:57 2022 ] Mean training loss: 0.6950. +[ Thu Sep 8 18:55:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:55:57 2022 ] Training epoch: 33 +[ Thu Sep 8 18:55:57 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:59:20 2022 ] Mean training loss: 0.6740. 
+[ Thu Sep 8 18:59:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:59:20 2022 ] Training epoch: 34 +[ Thu Sep 8 18:59:20 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:02:44 2022 ] Mean training loss: 0.6846. +[ Thu Sep 8 19:02:44 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:02:44 2022 ] Training epoch: 35 +[ Thu Sep 8 19:02:44 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:06:08 2022 ] Mean training loss: 0.6776. +[ Thu Sep 8 19:06:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:06:08 2022 ] Training epoch: 36 +[ Thu Sep 8 19:06:08 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:09:31 2022 ] Mean training loss: 0.6309. +[ Thu Sep 8 19:09:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:09:31 2022 ] Training epoch: 37 +[ Thu Sep 8 19:09:31 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:12:55 2022 ] Mean training loss: 0.6563. +[ Thu Sep 8 19:12:55 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:12:55 2022 ] Training epoch: 38 +[ Thu Sep 8 19:12:55 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:16:17 2022 ] Mean training loss: 0.6415. +[ Thu Sep 8 19:16:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:16:17 2022 ] Training epoch: 39 +[ Thu Sep 8 19:16:17 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:19:40 2022 ] Mean training loss: 0.6491. +[ Thu Sep 8 19:19:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:19:40 2022 ] Training epoch: 40 +[ Thu Sep 8 19:19:40 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:23:02 2022 ] Mean training loss: 0.6362. +[ Thu Sep 8 19:23:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:23:02 2022 ] Training epoch: 41 +[ Thu Sep 8 19:23:02 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:26:25 2022 ] Mean training loss: 0.6469. 
+[ Thu Sep 8 19:26:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:26:25 2022 ] Training epoch: 42 +[ Thu Sep 8 19:26:25 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:29:48 2022 ] Mean training loss: 0.6334. +[ Thu Sep 8 19:29:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:29:48 2022 ] Training epoch: 43 +[ Thu Sep 8 19:29:48 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:33:11 2022 ] Mean training loss: 0.6247. +[ Thu Sep 8 19:33:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:33:11 2022 ] Training epoch: 44 +[ Thu Sep 8 19:33:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:36:35 2022 ] Mean training loss: 0.6268. +[ Thu Sep 8 19:36:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:36:35 2022 ] Training epoch: 45 +[ Thu Sep 8 19:36:35 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:39:58 2022 ] Mean training loss: 0.6221. +[ Thu Sep 8 19:39:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:39:58 2022 ] Training epoch: 46 +[ Thu Sep 8 19:39:58 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:43:21 2022 ] Mean training loss: 0.5956. +[ Thu Sep 8 19:43:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:43:21 2022 ] Training epoch: 47 +[ Thu Sep 8 19:43:21 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:46:44 2022 ] Mean training loss: 0.6218. +[ Thu Sep 8 19:46:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:46:44 2022 ] Training epoch: 48 +[ Thu Sep 8 19:46:44 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:50:06 2022 ] Mean training loss: 0.6134. +[ Thu Sep 8 19:50:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:50:06 2022 ] Training epoch: 49 +[ Thu Sep 8 19:50:06 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:53:29 2022 ] Mean training loss: 0.6152. 
+[ Thu Sep 8 19:53:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:53:29 2022 ] Training epoch: 50 +[ Thu Sep 8 19:53:29 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:56:52 2022 ] Mean training loss: 0.6170. +[ Thu Sep 8 19:56:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:56:52 2022 ] Training epoch: 51 +[ Thu Sep 8 19:56:52 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:00:15 2022 ] Mean training loss: 0.2832. +[ Thu Sep 8 20:00:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:00:15 2022 ] Eval epoch: 51 +[ Thu Sep 8 20:02:29 2022 ] Epoch 51 Curr Acc: (10756/16487)65.24% +[ Thu Sep 8 20:02:29 2022 ] Epoch 51 Best Acc 65.24% +[ Thu Sep 8 20:02:29 2022 ] Training epoch: 52 +[ Thu Sep 8 20:02:29 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:05:52 2022 ] Mean training loss: 0.1790. +[ Thu Sep 8 20:05:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:05:52 2022 ] Eval epoch: 52 +[ Thu Sep 8 20:08:05 2022 ] Epoch 52 Curr Acc: (10904/16487)66.14% +[ Thu Sep 8 20:08:05 2022 ] Epoch 52 Best Acc 66.14% +[ Thu Sep 8 20:08:05 2022 ] Training epoch: 53 +[ Thu Sep 8 20:08:05 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:11:29 2022 ] Mean training loss: 0.1376. +[ Thu Sep 8 20:11:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:11:29 2022 ] Eval epoch: 53 +[ Thu Sep 8 20:13:42 2022 ] Epoch 53 Curr Acc: (10856/16487)65.85% +[ Thu Sep 8 20:13:42 2022 ] Epoch 52 Best Acc 66.14% +[ Thu Sep 8 20:13:42 2022 ] Training epoch: 54 +[ Thu Sep 8 20:13:42 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:17:06 2022 ] Mean training loss: 0.1071. 
+[ Thu Sep 8 20:17:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:17:06 2022 ] Eval epoch: 54 +[ Thu Sep 8 20:19:19 2022 ] Epoch 54 Curr Acc: (10995/16487)66.69% +[ Thu Sep 8 20:19:19 2022 ] Epoch 54 Best Acc 66.69% +[ Thu Sep 8 20:19:19 2022 ] Training epoch: 55 +[ Thu Sep 8 20:19:19 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:22:42 2022 ] Mean training loss: 0.0883. +[ Thu Sep 8 20:22:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:22:42 2022 ] Eval epoch: 55 +[ Thu Sep 8 20:24:56 2022 ] Epoch 55 Curr Acc: (11004/16487)66.74% +[ Thu Sep 8 20:24:56 2022 ] Epoch 55 Best Acc 66.74% +[ Thu Sep 8 20:24:56 2022 ] Training epoch: 56 +[ Thu Sep 8 20:24:56 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:28:18 2022 ] Mean training loss: 0.0733. +[ Thu Sep 8 20:28:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:28:18 2022 ] Eval epoch: 56 +[ Thu Sep 8 20:30:32 2022 ] Epoch 56 Curr Acc: (10605/16487)64.32% +[ Thu Sep 8 20:30:32 2022 ] Epoch 55 Best Acc 66.74% +[ Thu Sep 8 20:30:32 2022 ] Training epoch: 57 +[ Thu Sep 8 20:30:32 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:33:56 2022 ] Mean training loss: 0.0621. +[ Thu Sep 8 20:33:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:33:56 2022 ] Eval epoch: 57 +[ Thu Sep 8 20:36:09 2022 ] Epoch 57 Curr Acc: (10745/16487)65.17% +[ Thu Sep 8 20:36:09 2022 ] Epoch 55 Best Acc 66.74% +[ Thu Sep 8 20:36:09 2022 ] Training epoch: 58 +[ Thu Sep 8 20:36:09 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:39:32 2022 ] Mean training loss: 0.0508. +[ Thu Sep 8 20:39:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:39:32 2022 ] Eval epoch: 58 +[ Thu Sep 8 20:41:45 2022 ] Epoch 58 Curr Acc: (11035/16487)66.93% +[ Thu Sep 8 20:41:45 2022 ] Epoch 58 Best Acc 66.93% +[ Thu Sep 8 20:41:45 2022 ] Training epoch: 59 +[ Thu Sep 8 20:41:45 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:45:08 2022 ] Mean training loss: 0.0404. 
+[ Thu Sep 8 20:45:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:45:08 2022 ] Eval epoch: 59 +[ Thu Sep 8 20:47:21 2022 ] Epoch 59 Curr Acc: (10968/16487)66.53% +[ Thu Sep 8 20:47:21 2022 ] Epoch 58 Best Acc 66.93% +[ Thu Sep 8 20:47:21 2022 ] Training epoch: 60 +[ Thu Sep 8 20:47:21 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:50:44 2022 ] Mean training loss: 0.0337. +[ Thu Sep 8 20:50:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:50:44 2022 ] Eval epoch: 60 +[ Thu Sep 8 20:52:57 2022 ] Epoch 60 Curr Acc: (10982/16487)66.61% +[ Thu Sep 8 20:52:57 2022 ] Epoch 58 Best Acc 66.93% +[ Thu Sep 8 20:52:57 2022 ] Training epoch: 61 +[ Thu Sep 8 20:52:57 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:56:21 2022 ] Mean training loss: 0.0337. +[ Thu Sep 8 20:56:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:56:21 2022 ] Eval epoch: 61 +[ Thu Sep 8 20:58:34 2022 ] Epoch 61 Curr Acc: (10856/16487)65.85% +[ Thu Sep 8 20:58:34 2022 ] Epoch 58 Best Acc 66.93% +[ Thu Sep 8 20:58:34 2022 ] Training epoch: 62 +[ Thu Sep 8 20:58:34 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:01:57 2022 ] Mean training loss: 0.0259. +[ Thu Sep 8 21:01:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:01:57 2022 ] Eval epoch: 62 +[ Thu Sep 8 21:04:10 2022 ] Epoch 62 Curr Acc: (10751/16487)65.21% +[ Thu Sep 8 21:04:10 2022 ] Epoch 58 Best Acc 66.93% +[ Thu Sep 8 21:04:10 2022 ] Training epoch: 63 +[ Thu Sep 8 21:04:10 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:07:33 2022 ] Mean training loss: 0.0283. +[ Thu Sep 8 21:07:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:07:33 2022 ] Eval epoch: 63 +[ Thu Sep 8 21:09:46 2022 ] Epoch 63 Curr Acc: (10894/16487)66.08% +[ Thu Sep 8 21:09:46 2022 ] Epoch 58 Best Acc 66.93% +[ Thu Sep 8 21:09:46 2022 ] Training epoch: 64 +[ Thu Sep 8 21:09:46 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:13:09 2022 ] Mean training loss: 0.0221. 
+[ Thu Sep 8 21:13:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:13:09 2022 ] Eval epoch: 64 +[ Thu Sep 8 21:15:22 2022 ] Epoch 64 Curr Acc: (10604/16487)64.32% +[ Thu Sep 8 21:15:22 2022 ] Epoch 58 Best Acc 66.93% +[ Thu Sep 8 21:15:22 2022 ] Training epoch: 65 +[ Thu Sep 8 21:15:22 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:18:45 2022 ] Mean training loss: 0.0232. +[ Thu Sep 8 21:18:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:18:45 2022 ] Eval epoch: 65 +[ Thu Sep 8 21:20:59 2022 ] Epoch 65 Curr Acc: (11056/16487)67.06% +[ Thu Sep 8 21:20:59 2022 ] Epoch 65 Best Acc 67.06% +[ Thu Sep 8 21:20:59 2022 ] Training epoch: 66 +[ Thu Sep 8 21:20:59 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:24:21 2022 ] Mean training loss: 0.0244. +[ Thu Sep 8 21:24:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:24:21 2022 ] Eval epoch: 66 +[ Thu Sep 8 21:26:35 2022 ] Epoch 66 Curr Acc: (11123/16487)67.47% +[ Thu Sep 8 21:26:35 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 21:26:35 2022 ] Training epoch: 67 +[ Thu Sep 8 21:26:35 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:29:57 2022 ] Mean training loss: 0.0222. +[ Thu Sep 8 21:29:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:29:57 2022 ] Eval epoch: 67 +[ Thu Sep 8 21:32:11 2022 ] Epoch 67 Curr Acc: (10701/16487)64.91% +[ Thu Sep 8 21:32:11 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 21:32:11 2022 ] Training epoch: 68 +[ Thu Sep 8 21:32:11 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:35:34 2022 ] Mean training loss: 0.0180. +[ Thu Sep 8 21:35:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:35:34 2022 ] Eval epoch: 68 +[ Thu Sep 8 21:37:47 2022 ] Epoch 68 Curr Acc: (11027/16487)66.88% +[ Thu Sep 8 21:37:47 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 21:37:47 2022 ] Training epoch: 69 +[ Thu Sep 8 21:37:47 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:41:09 2022 ] Mean training loss: 0.0232. 
+[ Thu Sep 8 21:41:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:41:10 2022 ] Eval epoch: 69 +[ Thu Sep 8 21:43:23 2022 ] Epoch 69 Curr Acc: (9883/16487)59.94% +[ Thu Sep 8 21:43:23 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 21:43:23 2022 ] Training epoch: 70 +[ Thu Sep 8 21:43:23 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:46:46 2022 ] Mean training loss: 0.0215. +[ Thu Sep 8 21:46:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:46:46 2022 ] Eval epoch: 70 +[ Thu Sep 8 21:48:59 2022 ] Epoch 70 Curr Acc: (10901/16487)66.12% +[ Thu Sep 8 21:48:59 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 21:48:59 2022 ] Training epoch: 71 +[ Thu Sep 8 21:48:59 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 21:52:21 2022 ] Mean training loss: 0.0157. +[ Thu Sep 8 21:52:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:52:21 2022 ] Eval epoch: 71 +[ Thu Sep 8 21:54:35 2022 ] Epoch 71 Curr Acc: (10940/16487)66.36% +[ Thu Sep 8 21:54:35 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 21:54:35 2022 ] Training epoch: 72 +[ Thu Sep 8 21:54:35 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 21:57:57 2022 ] Mean training loss: 0.0144. +[ Thu Sep 8 21:57:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:57:57 2022 ] Eval epoch: 72 +[ Thu Sep 8 22:00:10 2022 ] Epoch 72 Curr Acc: (10979/16487)66.59% +[ Thu Sep 8 22:00:10 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 22:00:10 2022 ] Training epoch: 73 +[ Thu Sep 8 22:00:10 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:03:32 2022 ] Mean training loss: 0.0136. 
+[ Thu Sep 8 22:03:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:03:32 2022 ] Eval epoch: 73 +[ Thu Sep 8 22:05:46 2022 ] Epoch 73 Curr Acc: (11004/16487)66.74% +[ Thu Sep 8 22:05:46 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 22:05:46 2022 ] Training epoch: 74 +[ Thu Sep 8 22:05:46 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:09:08 2022 ] Mean training loss: 0.0132. +[ Thu Sep 8 22:09:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:09:08 2022 ] Eval epoch: 74 +[ Thu Sep 8 22:11:22 2022 ] Epoch 74 Curr Acc: (10837/16487)65.73% +[ Thu Sep 8 22:11:22 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 22:11:22 2022 ] Training epoch: 75 +[ Thu Sep 8 22:11:22 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:14:44 2022 ] Mean training loss: 0.0111. +[ Thu Sep 8 22:14:44 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 22:14:44 2022 ] Eval epoch: 75 +[ Thu Sep 8 22:16:57 2022 ] Epoch 75 Curr Acc: (11048/16487)67.01% +[ Thu Sep 8 22:16:57 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 22:16:57 2022 ] Training epoch: 76 +[ Thu Sep 8 22:16:57 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:20:20 2022 ] Mean training loss: 0.0130. +[ Thu Sep 8 22:20:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:20:20 2022 ] Eval epoch: 76 +[ Thu Sep 8 22:22:34 2022 ] Epoch 76 Curr Acc: (10994/16487)66.68% +[ Thu Sep 8 22:22:34 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 22:22:34 2022 ] Training epoch: 77 +[ Thu Sep 8 22:22:34 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:25:57 2022 ] Mean training loss: 0.0118. 
+[ Thu Sep 8 22:25:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:25:57 2022 ] Eval epoch: 77 +[ Thu Sep 8 22:28:10 2022 ] Epoch 77 Curr Acc: (10816/16487)65.60% +[ Thu Sep 8 22:28:10 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 22:28:10 2022 ] Training epoch: 78 +[ Thu Sep 8 22:28:10 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:31:32 2022 ] Mean training loss: 0.0122. +[ Thu Sep 8 22:31:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:31:33 2022 ] Eval epoch: 78 +[ Thu Sep 8 22:33:46 2022 ] Epoch 78 Curr Acc: (11105/16487)67.36% +[ Thu Sep 8 22:33:46 2022 ] Epoch 66 Best Acc 67.47% +[ Thu Sep 8 22:33:46 2022 ] Training epoch: 79 +[ Thu Sep 8 22:33:46 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:37:08 2022 ] Mean training loss: 0.0126. +[ Thu Sep 8 22:37:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:37:08 2022 ] Eval epoch: 79 +[ Thu Sep 8 22:39:21 2022 ] Epoch 79 Curr Acc: (11131/16487)67.51% +[ Thu Sep 8 22:39:21 2022 ] Epoch 79 Best Acc 67.51% +[ Thu Sep 8 22:39:21 2022 ] Training epoch: 80 +[ Thu Sep 8 22:39:21 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:42:44 2022 ] Mean training loss: 0.0109. +[ Thu Sep 8 22:42:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:42:44 2022 ] Eval epoch: 80 +[ Thu Sep 8 22:44:57 2022 ] Epoch 80 Curr Acc: (10881/16487)66.00% +[ Thu Sep 8 22:44:57 2022 ] Epoch 79 Best Acc 67.51% +[ Thu Sep 8 22:44:57 2022 ] Training epoch: 81 +[ Thu Sep 8 22:44:57 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:48:20 2022 ] Mean training loss: 0.0097. 
+[ Thu Sep 8 22:48:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:48:20 2022 ] Eval epoch: 81 +[ Thu Sep 8 22:50:33 2022 ] Epoch 81 Curr Acc: (10938/16487)66.34% +[ Thu Sep 8 22:50:33 2022 ] Epoch 79 Best Acc 67.51% +[ Thu Sep 8 22:50:33 2022 ] Training epoch: 82 +[ Thu Sep 8 22:50:33 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:53:56 2022 ] Mean training loss: 0.0108. +[ Thu Sep 8 22:53:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:53:56 2022 ] Eval epoch: 82 +[ Thu Sep 8 22:56:09 2022 ] Epoch 82 Curr Acc: (10571/16487)64.12% +[ Thu Sep 8 22:56:09 2022 ] Epoch 79 Best Acc 67.51% +[ Thu Sep 8 22:56:09 2022 ] Training epoch: 83 +[ Thu Sep 8 22:56:09 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:59:32 2022 ] Mean training loss: 0.0117. +[ Thu Sep 8 22:59:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:59:32 2022 ] Eval epoch: 83 +[ Thu Sep 8 23:01:45 2022 ] Epoch 83 Curr Acc: (10818/16487)65.62% +[ Thu Sep 8 23:01:45 2022 ] Epoch 79 Best Acc 67.51% +[ Thu Sep 8 23:01:45 2022 ] Training epoch: 84 +[ Thu Sep 8 23:01:45 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:05:07 2022 ] Mean training loss: 0.0100. +[ Thu Sep 8 23:05:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:05:07 2022 ] Eval epoch: 84 +[ Thu Sep 8 23:07:20 2022 ] Epoch 84 Curr Acc: (11069/16487)67.14% +[ Thu Sep 8 23:07:20 2022 ] Epoch 79 Best Acc 67.51% +[ Thu Sep 8 23:07:20 2022 ] Training epoch: 85 +[ Thu Sep 8 23:07:20 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:10:42 2022 ] Mean training loss: 0.0100. 
+[ Thu Sep 8 23:10:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:10:42 2022 ] Eval epoch: 85 +[ Thu Sep 8 23:12:55 2022 ] Epoch 85 Curr Acc: (10897/16487)66.09% +[ Thu Sep 8 23:12:55 2022 ] Epoch 79 Best Acc 67.51% +[ Thu Sep 8 23:12:55 2022 ] Training epoch: 86 +[ Thu Sep 8 23:12:55 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:16:18 2022 ] Mean training loss: 0.0105. +[ Thu Sep 8 23:16:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:16:18 2022 ] Eval epoch: 86 +[ Thu Sep 8 23:18:31 2022 ] Epoch 86 Curr Acc: (10906/16487)66.15% +[ Thu Sep 8 23:18:31 2022 ] Epoch 79 Best Acc 67.51% +[ Thu Sep 8 23:18:31 2022 ] Training epoch: 87 +[ Thu Sep 8 23:18:31 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:21:54 2022 ] Mean training loss: 0.0098. +[ Thu Sep 8 23:21:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:21:54 2022 ] Eval epoch: 87 +[ Thu Sep 8 23:24:07 2022 ] Epoch 87 Curr Acc: (11064/16487)67.11% +[ Thu Sep 8 23:24:07 2022 ] Epoch 79 Best Acc 67.51% +[ Thu Sep 8 23:24:07 2022 ] Training epoch: 88 +[ Thu Sep 8 23:24:07 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:27:30 2022 ] Mean training loss: 0.0088. +[ Thu Sep 8 23:27:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:27:30 2022 ] Eval epoch: 88 +[ Thu Sep 8 23:29:43 2022 ] Epoch 88 Curr Acc: (10900/16487)66.11% +[ Thu Sep 8 23:29:43 2022 ] Epoch 79 Best Acc 67.51% +[ Thu Sep 8 23:29:43 2022 ] Training epoch: 89 +[ Thu Sep 8 23:29:43 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:33:06 2022 ] Mean training loss: 0.0093. 
+[ Thu Sep 8 23:33:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:33:06 2022 ] Eval epoch: 89 +[ Thu Sep 8 23:35:20 2022 ] Epoch 89 Curr Acc: (11142/16487)67.58% +[ Thu Sep 8 23:35:20 2022 ] Epoch 89 Best Acc 67.58% +[ Thu Sep 8 23:35:20 2022 ] Training epoch: 90 +[ Thu Sep 8 23:35:20 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:38:42 2022 ] Mean training loss: 0.0088. +[ Thu Sep 8 23:38:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:38:43 2022 ] Eval epoch: 90 +[ Thu Sep 8 23:40:56 2022 ] Epoch 90 Curr Acc: (10824/16487)65.65% +[ Thu Sep 8 23:40:56 2022 ] Epoch 89 Best Acc 67.58% +[ Thu Sep 8 23:40:56 2022 ] Training epoch: 91 +[ Thu Sep 8 23:40:56 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:44:18 2022 ] Mean training loss: 0.0098. +[ Thu Sep 8 23:44:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:44:18 2022 ] Eval epoch: 91 +[ Thu Sep 8 23:46:32 2022 ] Epoch 91 Curr Acc: (10916/16487)66.21% +[ Thu Sep 8 23:46:32 2022 ] Epoch 89 Best Acc 67.58% +[ Thu Sep 8 23:46:32 2022 ] Training epoch: 92 +[ Thu Sep 8 23:46:32 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:49:55 2022 ] Mean training loss: 0.0099. +[ Thu Sep 8 23:49:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:49:55 2022 ] Eval epoch: 92 +[ Thu Sep 8 23:52:08 2022 ] Epoch 92 Curr Acc: (11208/16487)67.98% +[ Thu Sep 8 23:52:08 2022 ] Epoch 92 Best Acc 67.98% +[ Thu Sep 8 23:52:08 2022 ] Training epoch: 93 +[ Thu Sep 8 23:52:08 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:55:31 2022 ] Mean training loss: 0.0100. 
+[ Thu Sep 8 23:55:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:55:31 2022 ] Eval epoch: 93 +[ Thu Sep 8 23:57:44 2022 ] Epoch 93 Curr Acc: (10882/16487)66.00% +[ Thu Sep 8 23:57:44 2022 ] Epoch 92 Best Acc 67.98% +[ Thu Sep 8 23:57:44 2022 ] Training epoch: 94 +[ Thu Sep 8 23:57:44 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:01:07 2022 ] Mean training loss: 0.0093. +[ Fri Sep 9 00:01:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:01:07 2022 ] Eval epoch: 94 +[ Fri Sep 9 00:03:21 2022 ] Epoch 94 Curr Acc: (11084/16487)67.23% +[ Fri Sep 9 00:03:21 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 00:03:21 2022 ] Training epoch: 95 +[ Fri Sep 9 00:03:21 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:06:44 2022 ] Mean training loss: 0.0097. +[ Fri Sep 9 00:06:44 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 00:06:44 2022 ] Eval epoch: 95 +[ Fri Sep 9 00:08:57 2022 ] Epoch 95 Curr Acc: (11150/16487)67.63% +[ Fri Sep 9 00:08:57 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 00:08:57 2022 ] Training epoch: 96 +[ Fri Sep 9 00:08:57 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:12:20 2022 ] Mean training loss: 0.0093. +[ Fri Sep 9 00:12:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:12:20 2022 ] Eval epoch: 96 +[ Fri Sep 9 00:14:33 2022 ] Epoch 96 Curr Acc: (11081/16487)67.21% +[ Fri Sep 9 00:14:33 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 00:14:33 2022 ] Training epoch: 97 +[ Fri Sep 9 00:14:33 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:17:56 2022 ] Mean training loss: 0.0097. 
+[ Fri Sep 9 00:17:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:17:56 2022 ] Eval epoch: 97 +[ Fri Sep 9 00:20:09 2022 ] Epoch 97 Curr Acc: (10968/16487)66.53% +[ Fri Sep 9 00:20:09 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 00:20:09 2022 ] Training epoch: 98 +[ Fri Sep 9 00:20:09 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:23:32 2022 ] Mean training loss: 0.0097. +[ Fri Sep 9 00:23:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:23:32 2022 ] Eval epoch: 98 +[ Fri Sep 9 00:25:46 2022 ] Epoch 98 Curr Acc: (11041/16487)66.97% +[ Fri Sep 9 00:25:46 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 00:25:46 2022 ] Training epoch: 99 +[ Fri Sep 9 00:25:46 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:29:09 2022 ] Mean training loss: 0.0093. +[ Fri Sep 9 00:29:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:29:09 2022 ] Eval epoch: 99 +[ Fri Sep 9 00:31:22 2022 ] Epoch 99 Curr Acc: (11002/16487)66.73% +[ Fri Sep 9 00:31:22 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 00:31:22 2022 ] Training epoch: 100 +[ Fri Sep 9 00:31:22 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:34:45 2022 ] Mean training loss: 0.0097. +[ Fri Sep 9 00:34:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:34:45 2022 ] Eval epoch: 100 +[ Fri Sep 9 00:36:59 2022 ] Epoch 100 Curr Acc: (11154/16487)67.65% +[ Fri Sep 9 00:36:59 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 00:36:59 2022 ] Training epoch: 101 +[ Fri Sep 9 00:36:59 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:40:22 2022 ] Mean training loss: 0.0089. 
+[ Fri Sep 9 00:40:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:40:22 2022 ] Eval epoch: 101 +[ Fri Sep 9 00:42:35 2022 ] Epoch 101 Curr Acc: (10980/16487)66.60% +[ Fri Sep 9 00:42:35 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 00:42:35 2022 ] Training epoch: 102 +[ Fri Sep 9 00:42:35 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:45:59 2022 ] Mean training loss: 0.0097. +[ Fri Sep 9 00:45:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:45:59 2022 ] Eval epoch: 102 +[ Fri Sep 9 00:48:12 2022 ] Epoch 102 Curr Acc: (10838/16487)65.74% +[ Fri Sep 9 00:48:12 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 00:48:12 2022 ] Training epoch: 103 +[ Fri Sep 9 00:48:12 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:51:35 2022 ] Mean training loss: 0.0100. +[ Fri Sep 9 00:51:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:51:35 2022 ] Eval epoch: 103 +[ Fri Sep 9 00:53:48 2022 ] Epoch 103 Curr Acc: (10786/16487)65.42% +[ Fri Sep 9 00:53:48 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 00:53:48 2022 ] Training epoch: 104 +[ Fri Sep 9 00:53:48 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:57:11 2022 ] Mean training loss: 0.0086. +[ Fri Sep 9 00:57:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:57:11 2022 ] Eval epoch: 104 +[ Fri Sep 9 00:59:24 2022 ] Epoch 104 Curr Acc: (10835/16487)65.72% +[ Fri Sep 9 00:59:24 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 00:59:24 2022 ] Training epoch: 105 +[ Fri Sep 9 00:59:24 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:02:47 2022 ] Mean training loss: 0.0086. 
+[ Fri Sep 9 01:02:47 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 01:02:47 2022 ] Eval epoch: 105 +[ Fri Sep 9 01:05:00 2022 ] Epoch 105 Curr Acc: (10931/16487)66.30% +[ Fri Sep 9 01:05:00 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 01:05:00 2022 ] Training epoch: 106 +[ Fri Sep 9 01:05:00 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:08:23 2022 ] Mean training loss: 0.0101. +[ Fri Sep 9 01:08:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:08:23 2022 ] Eval epoch: 106 +[ Fri Sep 9 01:10:36 2022 ] Epoch 106 Curr Acc: (10672/16487)64.73% +[ Fri Sep 9 01:10:36 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 01:10:36 2022 ] Training epoch: 107 +[ Fri Sep 9 01:10:36 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:13:59 2022 ] Mean training loss: 0.0087. +[ Fri Sep 9 01:13:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:13:59 2022 ] Eval epoch: 107 +[ Fri Sep 9 01:16:12 2022 ] Epoch 107 Curr Acc: (10891/16487)66.06% +[ Fri Sep 9 01:16:12 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 01:16:12 2022 ] Training epoch: 108 +[ Fri Sep 9 01:16:12 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:19:35 2022 ] Mean training loss: 0.0089. +[ Fri Sep 9 01:19:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:19:35 2022 ] Eval epoch: 108 +[ Fri Sep 9 01:21:48 2022 ] Epoch 108 Curr Acc: (10792/16487)65.46% +[ Fri Sep 9 01:21:48 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 01:21:48 2022 ] Training epoch: 109 +[ Fri Sep 9 01:21:48 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:25:10 2022 ] Mean training loss: 0.0100. 
+[ Fri Sep 9 01:25:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:25:11 2022 ] Eval epoch: 109 +[ Fri Sep 9 01:27:24 2022 ] Epoch 109 Curr Acc: (10863/16487)65.89% +[ Fri Sep 9 01:27:24 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 01:27:24 2022 ] Training epoch: 110 +[ Fri Sep 9 01:27:24 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:30:46 2022 ] Mean training loss: 0.0086. +[ Fri Sep 9 01:30:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:30:46 2022 ] Eval epoch: 110 +[ Fri Sep 9 01:32:59 2022 ] Epoch 110 Curr Acc: (10705/16487)64.93% +[ Fri Sep 9 01:32:59 2022 ] Epoch 92 Best Acc 67.98% +[ Fri Sep 9 01:32:59 2022 ] epoch: 92, best accuracy: 0.679808333838782 +[ Fri Sep 9 01:32:59 2022 ] Experiment: ./work_dir/ntu/xsub_bm +[ Fri Sep 9 01:33:00 2022 ] # generator parameters: 2.896055 M. +[ Fri Sep 9 01:33:00 2022 ] Load weights from ./runs/ntu/xsub_bm/runs-91-90712.pt. +[ Fri Sep 9 01:33:00 2022 ] Eval epoch: 1 +[ Fri Sep 9 01:35:12 2022 ] Epoch 1 Curr Acc: (11208/16487)67.98% +[ Fri Sep 9 01:35:12 2022 ] Epoch 92 Best Acc 67.98% diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions import 
Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/config.yaml b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..afe6464e60e09ab9a062dec455e6b6f59ab7eeba --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu/xsub_j.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + - 448 + - 
true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 60 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu/xsub_j/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu/xsub_j diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..2a2a78b3dff11d9ae5dfb8c973f8c8cef3a0e3fb --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:67269cd275552d91bf25ddf60f2b6ff09b6b67c78439c39e1f342ee49ca819ec +size 4979902 diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/log.txt b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..7d9084aa20d3adb6e6a961ffba279459637e59dd --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/log.txt @@ -0,0 +1,631 @@ +[ Thu Sep 8 17:07:45 2022 ] # generator 
parameters: 2.896055 M. +[ Thu Sep 8 17:07:45 2022 ] Parameters: +{'work_dir': './work_dir/ntu/xsub_j', 'model_saved_name': './runs/ntu/xsub_j/runs', 'config': 'config/ntu/xsub_j.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Thu Sep 8 17:07:45 2022 ] Training epoch: 1 +[ Thu Sep 8 17:07:45 2022 ] Learning rate: 0.015 +[ Thu Sep 8 17:11:08 2022 ] Mean training loss: 3.0195. 
+[ Thu Sep 8 17:11:08 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 17:11:08 2022 ] Training epoch: 2 +[ Thu Sep 8 17:11:08 2022 ] Learning rate: 0.03 +[ Thu Sep 8 17:14:31 2022 ] Mean training loss: 2.1926. +[ Thu Sep 8 17:14:31 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 17:14:31 2022 ] Training epoch: 3 +[ Thu Sep 8 17:14:31 2022 ] Learning rate: 0.045 +[ Thu Sep 8 17:17:52 2022 ] Mean training loss: 1.8503. +[ Thu Sep 8 17:17:52 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 17:17:52 2022 ] Training epoch: 4 +[ Thu Sep 8 17:17:52 2022 ] Learning rate: 0.06 +[ Thu Sep 8 17:21:14 2022 ] Mean training loss: 1.6219. +[ Thu Sep 8 17:21:14 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:21:14 2022 ] Training epoch: 5 +[ Thu Sep 8 17:21:14 2022 ] Learning rate: 0.075 +[ Thu Sep 8 17:24:35 2022 ] Mean training loss: 1.4539. +[ Thu Sep 8 17:24:35 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:24:35 2022 ] Training epoch: 6 +[ Thu Sep 8 17:24:35 2022 ] Learning rate: 0.09 +[ Thu Sep 8 17:27:58 2022 ] Mean training loss: 1.3453. +[ Thu Sep 8 17:27:58 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:27:58 2022 ] Training epoch: 7 +[ Thu Sep 8 17:27:58 2022 ] Learning rate: 0.10500000000000001 +[ Thu Sep 8 17:31:19 2022 ] Mean training loss: 1.2608. +[ Thu Sep 8 17:31:19 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:31:19 2022 ] Training epoch: 8 +[ Thu Sep 8 17:31:19 2022 ] Learning rate: 0.12 +[ Thu Sep 8 17:34:41 2022 ] Mean training loss: 1.2075. +[ Thu Sep 8 17:34:41 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 17:34:41 2022 ] Training epoch: 9 +[ Thu Sep 8 17:34:41 2022 ] Learning rate: 0.13499999999999998 +[ Thu Sep 8 17:38:03 2022 ] Mean training loss: 1.1737. 
+[ Thu Sep 8 17:38:03 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:38:03 2022 ] Training epoch: 10 +[ Thu Sep 8 17:38:03 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:41:25 2022 ] Mean training loss: 1.1203. +[ Thu Sep 8 17:41:25 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:41:25 2022 ] Training epoch: 11 +[ Thu Sep 8 17:41:25 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:44:48 2022 ] Mean training loss: 1.0656. +[ Thu Sep 8 17:44:48 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 17:44:48 2022 ] Training epoch: 12 +[ Thu Sep 8 17:44:48 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:48:09 2022 ] Mean training loss: 1.0205. +[ Thu Sep 8 17:48:09 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:48:09 2022 ] Training epoch: 13 +[ Thu Sep 8 17:48:09 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:51:31 2022 ] Mean training loss: 0.9889. +[ Thu Sep 8 17:51:31 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:51:31 2022 ] Training epoch: 14 +[ Thu Sep 8 17:51:31 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:54:53 2022 ] Mean training loss: 0.9602. +[ Thu Sep 8 17:54:53 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 17:54:53 2022 ] Training epoch: 15 +[ Thu Sep 8 17:54:53 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:58:15 2022 ] Mean training loss: 0.9204. +[ Thu Sep 8 17:58:15 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:58:15 2022 ] Training epoch: 16 +[ Thu Sep 8 17:58:15 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:01:37 2022 ] Mean training loss: 0.8923. +[ Thu Sep 8 18:01:37 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 18:01:37 2022 ] Training epoch: 17 +[ Thu Sep 8 18:01:37 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:04:59 2022 ] Mean training loss: 0.8704. 
+[ Thu Sep 8 18:04:59 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 18:04:59 2022 ] Training epoch: 18 +[ Thu Sep 8 18:04:59 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:08:21 2022 ] Mean training loss: 0.8575. +[ Thu Sep 8 18:08:21 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:08:21 2022 ] Training epoch: 19 +[ Thu Sep 8 18:08:21 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:11:42 2022 ] Mean training loss: 0.8183. +[ Thu Sep 8 18:11:42 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:11:42 2022 ] Training epoch: 20 +[ Thu Sep 8 18:11:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:15:04 2022 ] Mean training loss: 0.8045. +[ Thu Sep 8 18:15:04 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:15:04 2022 ] Training epoch: 21 +[ Thu Sep 8 18:15:04 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:18:26 2022 ] Mean training loss: 0.8075. +[ Thu Sep 8 18:18:26 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 18:18:26 2022 ] Training epoch: 22 +[ Thu Sep 8 18:18:26 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:21:49 2022 ] Mean training loss: 0.7965. +[ Thu Sep 8 18:21:49 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 18:21:49 2022 ] Training epoch: 23 +[ Thu Sep 8 18:21:49 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:25:11 2022 ] Mean training loss: 0.7809. +[ Thu Sep 8 18:25:11 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 18:25:11 2022 ] Training epoch: 24 +[ Thu Sep 8 18:25:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:28:32 2022 ] Mean training loss: 0.7482. +[ Thu Sep 8 18:28:32 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:28:32 2022 ] Training epoch: 25 +[ Thu Sep 8 18:28:32 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:31:54 2022 ] Mean training loss: 0.7497. 
+[ Thu Sep 8 18:31:54 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:31:54 2022 ] Training epoch: 26 +[ Thu Sep 8 18:31:54 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:35:16 2022 ] Mean training loss: 0.7401. +[ Thu Sep 8 18:35:16 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 18:35:16 2022 ] Training epoch: 27 +[ Thu Sep 8 18:35:16 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:38:38 2022 ] Mean training loss: 0.7310. +[ Thu Sep 8 18:38:38 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 18:38:38 2022 ] Training epoch: 28 +[ Thu Sep 8 18:38:38 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:42:00 2022 ] Mean training loss: 0.7214. +[ Thu Sep 8 18:42:00 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:42:00 2022 ] Training epoch: 29 +[ Thu Sep 8 18:42:00 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:45:22 2022 ] Mean training loss: 0.6995. +[ Thu Sep 8 18:45:22 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 18:45:22 2022 ] Training epoch: 30 +[ Thu Sep 8 18:45:22 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:48:44 2022 ] Mean training loss: 0.6931. +[ Thu Sep 8 18:48:44 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 18:48:44 2022 ] Training epoch: 31 +[ Thu Sep 8 18:48:44 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:52:06 2022 ] Mean training loss: 0.6972. +[ Thu Sep 8 18:52:06 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:52:06 2022 ] Training epoch: 32 +[ Thu Sep 8 18:52:06 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:55:28 2022 ] Mean training loss: 0.6870. +[ Thu Sep 8 18:55:28 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 18:55:28 2022 ] Training epoch: 33 +[ Thu Sep 8 18:55:28 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:58:49 2022 ] Mean training loss: 0.6716. 
+[ Thu Sep 8 18:58:49 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 18:58:49 2022 ] Training epoch: 34 +[ Thu Sep 8 18:58:49 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:02:11 2022 ] Mean training loss: 0.6840. +[ Thu Sep 8 19:02:11 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:02:11 2022 ] Training epoch: 35 +[ Thu Sep 8 19:02:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:05:33 2022 ] Mean training loss: 0.6804. +[ Thu Sep 8 19:05:33 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:05:33 2022 ] Training epoch: 36 +[ Thu Sep 8 19:05:33 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:08:55 2022 ] Mean training loss: 0.6658. +[ Thu Sep 8 19:08:55 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:08:55 2022 ] Training epoch: 37 +[ Thu Sep 8 19:08:55 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:12:17 2022 ] Mean training loss: 0.6654. +[ Thu Sep 8 19:12:17 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:12:17 2022 ] Training epoch: 38 +[ Thu Sep 8 19:12:17 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:15:38 2022 ] Mean training loss: 0.6568. +[ Thu Sep 8 19:15:38 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:15:38 2022 ] Training epoch: 39 +[ Thu Sep 8 19:15:38 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:19:00 2022 ] Mean training loss: 0.6439. +[ Thu Sep 8 19:19:00 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:19:00 2022 ] Training epoch: 40 +[ Thu Sep 8 19:19:00 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:22:21 2022 ] Mean training loss: 0.6398. +[ Thu Sep 8 19:22:21 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:22:21 2022 ] Training epoch: 41 +[ Thu Sep 8 19:22:21 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:25:42 2022 ] Mean training loss: 0.6464. 
+[ Thu Sep 8 19:25:42 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:25:42 2022 ] Training epoch: 42 +[ Thu Sep 8 19:25:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:29:02 2022 ] Mean training loss: 0.6393. +[ Thu Sep 8 19:29:02 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:29:02 2022 ] Training epoch: 43 +[ Thu Sep 8 19:29:02 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:32:24 2022 ] Mean training loss: 0.6244. +[ Thu Sep 8 19:32:24 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:32:24 2022 ] Training epoch: 44 +[ Thu Sep 8 19:32:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:35:44 2022 ] Mean training loss: 0.6277. +[ Thu Sep 8 19:35:44 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:35:44 2022 ] Training epoch: 45 +[ Thu Sep 8 19:35:44 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:39:05 2022 ] Mean training loss: 0.6040. +[ Thu Sep 8 19:39:05 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:39:05 2022 ] Training epoch: 46 +[ Thu Sep 8 19:39:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:42:27 2022 ] Mean training loss: 0.6229. +[ Thu Sep 8 19:42:27 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:42:27 2022 ] Training epoch: 47 +[ Thu Sep 8 19:42:27 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:45:49 2022 ] Mean training loss: 0.6233. +[ Thu Sep 8 19:45:49 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:45:49 2022 ] Training epoch: 48 +[ Thu Sep 8 19:45:49 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:49:11 2022 ] Mean training loss: 0.6187. +[ Thu Sep 8 19:49:11 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:49:11 2022 ] Training epoch: 49 +[ Thu Sep 8 19:49:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:52:33 2022 ] Mean training loss: 0.6209. 
+[ Thu Sep 8 19:52:33 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:52:33 2022 ] Training epoch: 50 +[ Thu Sep 8 19:52:33 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:55:55 2022 ] Mean training loss: 0.6215. +[ Thu Sep 8 19:55:55 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:55:55 2022 ] Training epoch: 51 +[ Thu Sep 8 19:55:55 2022 ] Learning rate: 0.015 +[ Thu Sep 8 19:59:17 2022 ] Mean training loss: 0.3007. +[ Thu Sep 8 19:59:17 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:59:17 2022 ] Eval epoch: 51 +[ Thu Sep 8 20:01:29 2022 ] Epoch 51 Curr Acc: (10902/16487)66.12% +[ Thu Sep 8 20:01:29 2022 ] Epoch 51 Best Acc 66.12% +[ Thu Sep 8 20:01:29 2022 ] Training epoch: 52 +[ Thu Sep 8 20:01:29 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:04:51 2022 ] Mean training loss: 0.2042. +[ Thu Sep 8 20:04:51 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:04:51 2022 ] Eval epoch: 52 +[ Thu Sep 8 20:07:04 2022 ] Epoch 52 Curr Acc: (11412/16487)69.22% +[ Thu Sep 8 20:07:04 2022 ] Epoch 52 Best Acc 69.22% +[ Thu Sep 8 20:07:04 2022 ] Training epoch: 53 +[ Thu Sep 8 20:07:04 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:10:26 2022 ] Mean training loss: 0.1695. +[ Thu Sep 8 20:10:26 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:10:26 2022 ] Eval epoch: 53 +[ Thu Sep 8 20:12:38 2022 ] Epoch 53 Curr Acc: (11367/16487)68.95% +[ Thu Sep 8 20:12:38 2022 ] Epoch 52 Best Acc 69.22% +[ Thu Sep 8 20:12:38 2022 ] Training epoch: 54 +[ Thu Sep 8 20:12:38 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:16:00 2022 ] Mean training loss: 0.1382. 
+[ Thu Sep 8 20:16:00 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:16:00 2022 ] Eval epoch: 54 +[ Thu Sep 8 20:18:13 2022 ] Epoch 54 Curr Acc: (11563/16487)70.13% +[ Thu Sep 8 20:18:13 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 20:18:13 2022 ] Training epoch: 55 +[ Thu Sep 8 20:18:13 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:21:35 2022 ] Mean training loss: 0.1144. +[ Thu Sep 8 20:21:35 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:21:35 2022 ] Eval epoch: 55 +[ Thu Sep 8 20:23:47 2022 ] Epoch 55 Curr Acc: (11322/16487)68.67% +[ Thu Sep 8 20:23:47 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 20:23:47 2022 ] Training epoch: 56 +[ Thu Sep 8 20:23:47 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:27:09 2022 ] Mean training loss: 0.1028. +[ Thu Sep 8 20:27:09 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:27:09 2022 ] Eval epoch: 56 +[ Thu Sep 8 20:29:22 2022 ] Epoch 56 Curr Acc: (11107/16487)67.37% +[ Thu Sep 8 20:29:22 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 20:29:22 2022 ] Training epoch: 57 +[ Thu Sep 8 20:29:22 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:32:44 2022 ] Mean training loss: 0.0895. +[ Thu Sep 8 20:32:44 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:32:44 2022 ] Eval epoch: 57 +[ Thu Sep 8 20:34:56 2022 ] Epoch 57 Curr Acc: (11374/16487)68.99% +[ Thu Sep 8 20:34:56 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 20:34:56 2022 ] Training epoch: 58 +[ Thu Sep 8 20:34:56 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:38:18 2022 ] Mean training loss: 0.0792. +[ Thu Sep 8 20:38:18 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:38:18 2022 ] Eval epoch: 58 +[ Thu Sep 8 20:40:31 2022 ] Epoch 58 Curr Acc: (11445/16487)69.42% +[ Thu Sep 8 20:40:31 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 20:40:31 2022 ] Training epoch: 59 +[ Thu Sep 8 20:40:31 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:43:52 2022 ] Mean training loss: 0.0660. 
+[ Thu Sep 8 20:43:52 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:43:52 2022 ] Eval epoch: 59 +[ Thu Sep 8 20:46:04 2022 ] Epoch 59 Curr Acc: (11110/16487)67.39% +[ Thu Sep 8 20:46:04 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 20:46:04 2022 ] Training epoch: 60 +[ Thu Sep 8 20:46:04 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:49:26 2022 ] Mean training loss: 0.0554. +[ Thu Sep 8 20:49:26 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:49:26 2022 ] Eval epoch: 60 +[ Thu Sep 8 20:51:38 2022 ] Epoch 60 Curr Acc: (10888/16487)66.04% +[ Thu Sep 8 20:51:38 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 20:51:38 2022 ] Training epoch: 61 +[ Thu Sep 8 20:51:38 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:55:00 2022 ] Mean training loss: 0.0540. +[ Thu Sep 8 20:55:00 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 20:55:00 2022 ] Eval epoch: 61 +[ Thu Sep 8 20:57:13 2022 ] Epoch 61 Curr Acc: (11119/16487)67.44% +[ Thu Sep 8 20:57:13 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 20:57:13 2022 ] Training epoch: 62 +[ Thu Sep 8 20:57:13 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:00:34 2022 ] Mean training loss: 0.0477. +[ Thu Sep 8 21:00:34 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:00:34 2022 ] Eval epoch: 62 +[ Thu Sep 8 21:02:47 2022 ] Epoch 62 Curr Acc: (11069/16487)67.14% +[ Thu Sep 8 21:02:47 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 21:02:47 2022 ] Training epoch: 63 +[ Thu Sep 8 21:02:47 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:06:09 2022 ] Mean training loss: 0.0419. +[ Thu Sep 8 21:06:09 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:06:09 2022 ] Eval epoch: 63 +[ Thu Sep 8 21:08:21 2022 ] Epoch 63 Curr Acc: (10941/16487)66.36% +[ Thu Sep 8 21:08:21 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 21:08:21 2022 ] Training epoch: 64 +[ Thu Sep 8 21:08:21 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:11:43 2022 ] Mean training loss: 0.0414. 
+[ Thu Sep 8 21:11:43 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:11:43 2022 ] Eval epoch: 64 +[ Thu Sep 8 21:13:56 2022 ] Epoch 64 Curr Acc: (11043/16487)66.98% +[ Thu Sep 8 21:13:56 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 21:13:56 2022 ] Training epoch: 65 +[ Thu Sep 8 21:13:56 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:17:17 2022 ] Mean training loss: 0.0403. +[ Thu Sep 8 21:17:17 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 21:17:17 2022 ] Eval epoch: 65 +[ Thu Sep 8 21:19:30 2022 ] Epoch 65 Curr Acc: (11271/16487)68.36% +[ Thu Sep 8 21:19:30 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 21:19:30 2022 ] Training epoch: 66 +[ Thu Sep 8 21:19:30 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:22:52 2022 ] Mean training loss: 0.0337. +[ Thu Sep 8 21:22:52 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:22:52 2022 ] Eval epoch: 66 +[ Thu Sep 8 21:25:04 2022 ] Epoch 66 Curr Acc: (11390/16487)69.08% +[ Thu Sep 8 21:25:04 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 21:25:04 2022 ] Training epoch: 67 +[ Thu Sep 8 21:25:04 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:28:26 2022 ] Mean training loss: 0.0312. +[ Thu Sep 8 21:28:26 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 21:28:26 2022 ] Eval epoch: 67 +[ Thu Sep 8 21:30:38 2022 ] Epoch 67 Curr Acc: (11022/16487)66.85% +[ Thu Sep 8 21:30:38 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 21:30:38 2022 ] Training epoch: 68 +[ Thu Sep 8 21:30:38 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:34:00 2022 ] Mean training loss: 0.0294. +[ Thu Sep 8 21:34:00 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 21:34:00 2022 ] Eval epoch: 68 +[ Thu Sep 8 21:36:13 2022 ] Epoch 68 Curr Acc: (11086/16487)67.24% +[ Thu Sep 8 21:36:13 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 21:36:13 2022 ] Training epoch: 69 +[ Thu Sep 8 21:36:13 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:39:34 2022 ] Mean training loss: 0.0278. 
+[ Thu Sep 8 21:39:34 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:39:34 2022 ] Eval epoch: 69 +[ Thu Sep 8 21:41:47 2022 ] Epoch 69 Curr Acc: (11356/16487)68.88% +[ Thu Sep 8 21:41:47 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 21:41:47 2022 ] Training epoch: 70 +[ Thu Sep 8 21:41:47 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:45:09 2022 ] Mean training loss: 0.0266. +[ Thu Sep 8 21:45:09 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 21:45:09 2022 ] Eval epoch: 70 +[ Thu Sep 8 21:47:21 2022 ] Epoch 70 Curr Acc: (11207/16487)67.97% +[ Thu Sep 8 21:47:21 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 21:47:21 2022 ] Training epoch: 71 +[ Thu Sep 8 21:47:21 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 21:50:43 2022 ] Mean training loss: 0.0207. +[ Thu Sep 8 21:50:43 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 21:50:43 2022 ] Eval epoch: 71 +[ Thu Sep 8 21:52:55 2022 ] Epoch 71 Curr Acc: (11328/16487)68.71% +[ Thu Sep 8 21:52:55 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 21:52:55 2022 ] Training epoch: 72 +[ Thu Sep 8 21:52:55 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 21:56:18 2022 ] Mean training loss: 0.0185. +[ Thu Sep 8 21:56:18 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 21:56:18 2022 ] Eval epoch: 72 +[ Thu Sep 8 21:58:30 2022 ] Epoch 72 Curr Acc: (11160/16487)67.69% +[ Thu Sep 8 21:58:30 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 21:58:30 2022 ] Training epoch: 73 +[ Thu Sep 8 21:58:30 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:01:51 2022 ] Mean training loss: 0.0202. 
+[ Thu Sep 8 22:01:51 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:01:51 2022 ] Eval epoch: 73 +[ Thu Sep 8 22:04:04 2022 ] Epoch 73 Curr Acc: (11321/16487)68.67% +[ Thu Sep 8 22:04:04 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 22:04:04 2022 ] Training epoch: 74 +[ Thu Sep 8 22:04:04 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:07:25 2022 ] Mean training loss: 0.0174. +[ Thu Sep 8 22:07:25 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:07:26 2022 ] Eval epoch: 74 +[ Thu Sep 8 22:09:38 2022 ] Epoch 74 Curr Acc: (11098/16487)67.31% +[ Thu Sep 8 22:09:38 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 22:09:38 2022 ] Training epoch: 75 +[ Thu Sep 8 22:09:38 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:13:00 2022 ] Mean training loss: 0.0149. +[ Thu Sep 8 22:13:00 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:13:00 2022 ] Eval epoch: 75 +[ Thu Sep 8 22:15:12 2022 ] Epoch 75 Curr Acc: (11172/16487)67.76% +[ Thu Sep 8 22:15:12 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 22:15:13 2022 ] Training epoch: 76 +[ Thu Sep 8 22:15:13 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:18:34 2022 ] Mean training loss: 0.0162. +[ Thu Sep 8 22:18:34 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:18:34 2022 ] Eval epoch: 76 +[ Thu Sep 8 22:20:47 2022 ] Epoch 76 Curr Acc: (11432/16487)69.34% +[ Thu Sep 8 22:20:47 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 22:20:47 2022 ] Training epoch: 77 +[ Thu Sep 8 22:20:47 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:24:08 2022 ] Mean training loss: 0.0139. 
+[ Thu Sep 8 22:24:08 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:24:09 2022 ] Eval epoch: 77 +[ Thu Sep 8 22:26:21 2022 ] Epoch 77 Curr Acc: (11165/16487)67.72% +[ Thu Sep 8 22:26:21 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 22:26:21 2022 ] Training epoch: 78 +[ Thu Sep 8 22:26:21 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:29:43 2022 ] Mean training loss: 0.0148. +[ Thu Sep 8 22:29:43 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:29:43 2022 ] Eval epoch: 78 +[ Thu Sep 8 22:31:55 2022 ] Epoch 78 Curr Acc: (11269/16487)68.35% +[ Thu Sep 8 22:31:55 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 22:31:55 2022 ] Training epoch: 79 +[ Thu Sep 8 22:31:55 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:35:17 2022 ] Mean training loss: 0.0151. +[ Thu Sep 8 22:35:17 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:35:17 2022 ] Eval epoch: 79 +[ Thu Sep 8 22:37:30 2022 ] Epoch 79 Curr Acc: (11329/16487)68.71% +[ Thu Sep 8 22:37:30 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 22:37:30 2022 ] Training epoch: 80 +[ Thu Sep 8 22:37:30 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:40:52 2022 ] Mean training loss: 0.0136. +[ Thu Sep 8 22:40:52 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 22:40:52 2022 ] Eval epoch: 80 +[ Thu Sep 8 22:43:04 2022 ] Epoch 80 Curr Acc: (11280/16487)68.42% +[ Thu Sep 8 22:43:04 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 22:43:04 2022 ] Training epoch: 81 +[ Thu Sep 8 22:43:04 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:46:26 2022 ] Mean training loss: 0.0149. 
+[ Thu Sep 8 22:46:26 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:46:26 2022 ] Eval epoch: 81 +[ Thu Sep 8 22:48:38 2022 ] Epoch 81 Curr Acc: (11301/16487)68.54% +[ Thu Sep 8 22:48:38 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 22:48:38 2022 ] Training epoch: 82 +[ Thu Sep 8 22:48:38 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:52:00 2022 ] Mean training loss: 0.0144. +[ Thu Sep 8 22:52:00 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 22:52:00 2022 ] Eval epoch: 82 +[ Thu Sep 8 22:54:12 2022 ] Epoch 82 Curr Acc: (10996/16487)66.69% +[ Thu Sep 8 22:54:12 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 22:54:12 2022 ] Training epoch: 83 +[ Thu Sep 8 22:54:12 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:57:34 2022 ] Mean training loss: 0.0143. +[ Thu Sep 8 22:57:34 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 22:57:34 2022 ] Eval epoch: 83 +[ Thu Sep 8 22:59:47 2022 ] Epoch 83 Curr Acc: (11310/16487)68.60% +[ Thu Sep 8 22:59:47 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 22:59:47 2022 ] Training epoch: 84 +[ Thu Sep 8 22:59:47 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:03:09 2022 ] Mean training loss: 0.0149. +[ Thu Sep 8 23:03:09 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:03:09 2022 ] Eval epoch: 84 +[ Thu Sep 8 23:05:21 2022 ] Epoch 84 Curr Acc: (11367/16487)68.95% +[ Thu Sep 8 23:05:21 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 23:05:21 2022 ] Training epoch: 85 +[ Thu Sep 8 23:05:21 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:08:43 2022 ] Mean training loss: 0.0117. 
+[ Thu Sep 8 23:08:43 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:08:43 2022 ] Eval epoch: 85 +[ Thu Sep 8 23:10:56 2022 ] Epoch 85 Curr Acc: (11345/16487)68.81% +[ Thu Sep 8 23:10:56 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 23:10:56 2022 ] Training epoch: 86 +[ Thu Sep 8 23:10:56 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:14:17 2022 ] Mean training loss: 0.0130. +[ Thu Sep 8 23:14:17 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:14:17 2022 ] Eval epoch: 86 +[ Thu Sep 8 23:16:30 2022 ] Epoch 86 Curr Acc: (11269/16487)68.35% +[ Thu Sep 8 23:16:30 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 23:16:30 2022 ] Training epoch: 87 +[ Thu Sep 8 23:16:30 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:19:51 2022 ] Mean training loss: 0.0118. +[ Thu Sep 8 23:19:51 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 23:19:51 2022 ] Eval epoch: 87 +[ Thu Sep 8 23:22:03 2022 ] Epoch 87 Curr Acc: (11509/16487)69.81% +[ Thu Sep 8 23:22:03 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 23:22:03 2022 ] Training epoch: 88 +[ Thu Sep 8 23:22:03 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:25:25 2022 ] Mean training loss: 0.0112. +[ Thu Sep 8 23:25:25 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 23:25:25 2022 ] Eval epoch: 88 +[ Thu Sep 8 23:27:38 2022 ] Epoch 88 Curr Acc: (11416/16487)69.24% +[ Thu Sep 8 23:27:38 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 23:27:38 2022 ] Training epoch: 89 +[ Thu Sep 8 23:27:38 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:31:00 2022 ] Mean training loss: 0.0118. 
+[ Thu Sep 8 23:31:00 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:31:00 2022 ] Eval epoch: 89 +[ Thu Sep 8 23:33:12 2022 ] Epoch 89 Curr Acc: (11423/16487)69.28% +[ Thu Sep 8 23:33:12 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 23:33:12 2022 ] Training epoch: 90 +[ Thu Sep 8 23:33:12 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:36:34 2022 ] Mean training loss: 0.0142. +[ Thu Sep 8 23:36:34 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:36:34 2022 ] Eval epoch: 90 +[ Thu Sep 8 23:38:46 2022 ] Epoch 90 Curr Acc: (11305/16487)68.57% +[ Thu Sep 8 23:38:46 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 23:38:46 2022 ] Training epoch: 91 +[ Thu Sep 8 23:38:46 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:42:08 2022 ] Mean training loss: 0.0120. +[ Thu Sep 8 23:42:08 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:42:08 2022 ] Eval epoch: 91 +[ Thu Sep 8 23:44:20 2022 ] Epoch 91 Curr Acc: (11478/16487)69.62% +[ Thu Sep 8 23:44:20 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 23:44:20 2022 ] Training epoch: 92 +[ Thu Sep 8 23:44:20 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:47:42 2022 ] Mean training loss: 0.0129. +[ Thu Sep 8 23:47:42 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:47:42 2022 ] Eval epoch: 92 +[ Thu Sep 8 23:49:54 2022 ] Epoch 92 Curr Acc: (11338/16487)68.77% +[ Thu Sep 8 23:49:54 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 23:49:54 2022 ] Training epoch: 93 +[ Thu Sep 8 23:49:54 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:53:16 2022 ] Mean training loss: 0.0124. 
+[ Thu Sep 8 23:53:16 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:53:16 2022 ] Eval epoch: 93 +[ Thu Sep 8 23:55:28 2022 ] Epoch 93 Curr Acc: (11374/16487)68.99% +[ Thu Sep 8 23:55:28 2022 ] Epoch 54 Best Acc 70.13% +[ Thu Sep 8 23:55:28 2022 ] Training epoch: 94 +[ Thu Sep 8 23:55:28 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:58:50 2022 ] Mean training loss: 0.0114. +[ Thu Sep 8 23:58:50 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 23:58:50 2022 ] Eval epoch: 94 +[ Fri Sep 9 00:01:03 2022 ] Epoch 94 Curr Acc: (11406/16487)69.18% +[ Fri Sep 9 00:01:03 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 00:01:03 2022 ] Training epoch: 95 +[ Fri Sep 9 00:01:03 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:04:25 2022 ] Mean training loss: 0.0121. +[ Fri Sep 9 00:04:25 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:04:25 2022 ] Eval epoch: 95 +[ Fri Sep 9 00:06:38 2022 ] Epoch 95 Curr Acc: (11247/16487)68.22% +[ Fri Sep 9 00:06:38 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 00:06:38 2022 ] Training epoch: 96 +[ Fri Sep 9 00:06:38 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:10:00 2022 ] Mean training loss: 0.0120. +[ Fri Sep 9 00:10:00 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:10:00 2022 ] Eval epoch: 96 +[ Fri Sep 9 00:12:12 2022 ] Epoch 96 Curr Acc: (11538/16487)69.98% +[ Fri Sep 9 00:12:12 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 00:12:12 2022 ] Training epoch: 97 +[ Fri Sep 9 00:12:12 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:15:34 2022 ] Mean training loss: 0.0118. 
+[ Fri Sep 9 00:15:34 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:15:34 2022 ] Eval epoch: 97 +[ Fri Sep 9 00:17:47 2022 ] Epoch 97 Curr Acc: (11263/16487)68.31% +[ Fri Sep 9 00:17:47 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 00:17:47 2022 ] Training epoch: 98 +[ Fri Sep 9 00:17:47 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:21:09 2022 ] Mean training loss: 0.0121. +[ Fri Sep 9 00:21:09 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:21:09 2022 ] Eval epoch: 98 +[ Fri Sep 9 00:23:22 2022 ] Epoch 98 Curr Acc: (11427/16487)69.31% +[ Fri Sep 9 00:23:22 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 00:23:22 2022 ] Training epoch: 99 +[ Fri Sep 9 00:23:22 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:26:44 2022 ] Mean training loss: 0.0116. +[ Fri Sep 9 00:26:44 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:26:44 2022 ] Eval epoch: 99 +[ Fri Sep 9 00:28:57 2022 ] Epoch 99 Curr Acc: (11217/16487)68.04% +[ Fri Sep 9 00:28:57 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 00:28:57 2022 ] Training epoch: 100 +[ Fri Sep 9 00:28:57 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:32:18 2022 ] Mean training loss: 0.0122. +[ Fri Sep 9 00:32:18 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:32:18 2022 ] Eval epoch: 100 +[ Fri Sep 9 00:34:31 2022 ] Epoch 100 Curr Acc: (11455/16487)69.48% +[ Fri Sep 9 00:34:31 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 00:34:31 2022 ] Training epoch: 101 +[ Fri Sep 9 00:34:31 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:37:52 2022 ] Mean training loss: 0.0125. 
+[ Fri Sep 9 00:37:52 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:37:52 2022 ] Eval epoch: 101 +[ Fri Sep 9 00:40:05 2022 ] Epoch 101 Curr Acc: (11545/16487)70.02% +[ Fri Sep 9 00:40:05 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 00:40:05 2022 ] Training epoch: 102 +[ Fri Sep 9 00:40:05 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:43:27 2022 ] Mean training loss: 0.0123. +[ Fri Sep 9 00:43:27 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:43:27 2022 ] Eval epoch: 102 +[ Fri Sep 9 00:45:39 2022 ] Epoch 102 Curr Acc: (11452/16487)69.46% +[ Fri Sep 9 00:45:39 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 00:45:39 2022 ] Training epoch: 103 +[ Fri Sep 9 00:45:39 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:49:01 2022 ] Mean training loss: 0.0130. +[ Fri Sep 9 00:49:01 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:49:01 2022 ] Eval epoch: 103 +[ Fri Sep 9 00:51:13 2022 ] Epoch 103 Curr Acc: (11380/16487)69.02% +[ Fri Sep 9 00:51:13 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 00:51:13 2022 ] Training epoch: 104 +[ Fri Sep 9 00:51:13 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:54:34 2022 ] Mean training loss: 0.0120. +[ Fri Sep 9 00:54:34 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 00:54:34 2022 ] Eval epoch: 104 +[ Fri Sep 9 00:56:47 2022 ] Epoch 104 Curr Acc: (11517/16487)69.86% +[ Fri Sep 9 00:56:47 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 00:56:47 2022 ] Training epoch: 105 +[ Fri Sep 9 00:56:47 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:00:08 2022 ] Mean training loss: 0.0142. 
+[ Fri Sep 9 01:00:08 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 01:00:08 2022 ] Eval epoch: 105 +[ Fri Sep 9 01:02:21 2022 ] Epoch 105 Curr Acc: (11195/16487)67.90% +[ Fri Sep 9 01:02:21 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 01:02:21 2022 ] Training epoch: 106 +[ Fri Sep 9 01:02:21 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:05:42 2022 ] Mean training loss: 0.0107. +[ Fri Sep 9 01:05:42 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 01:05:42 2022 ] Eval epoch: 106 +[ Fri Sep 9 01:07:55 2022 ] Epoch 106 Curr Acc: (11496/16487)69.73% +[ Fri Sep 9 01:07:55 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 01:07:55 2022 ] Training epoch: 107 +[ Fri Sep 9 01:07:55 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:11:17 2022 ] Mean training loss: 0.0116. +[ Fri Sep 9 01:11:17 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 01:11:17 2022 ] Eval epoch: 107 +[ Fri Sep 9 01:13:29 2022 ] Epoch 107 Curr Acc: (11146/16487)67.60% +[ Fri Sep 9 01:13:29 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 01:13:29 2022 ] Training epoch: 108 +[ Fri Sep 9 01:13:29 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:16:51 2022 ] Mean training loss: 0.0110. +[ Fri Sep 9 01:16:51 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 01:16:51 2022 ] Eval epoch: 108 +[ Fri Sep 9 01:19:03 2022 ] Epoch 108 Curr Acc: (11175/16487)67.78% +[ Fri Sep 9 01:19:04 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 01:19:04 2022 ] Training epoch: 109 +[ Fri Sep 9 01:19:04 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:22:26 2022 ] Mean training loss: 0.0119. 
+[ Fri Sep 9 01:22:26 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 01:22:26 2022 ] Eval epoch: 109 +[ Fri Sep 9 01:24:38 2022 ] Epoch 109 Curr Acc: (11179/16487)67.80% +[ Fri Sep 9 01:24:38 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 01:24:38 2022 ] Training epoch: 110 +[ Fri Sep 9 01:24:38 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:28:00 2022 ] Mean training loss: 0.0118. +[ Fri Sep 9 01:28:00 2022 ] Time consumption: [Data]02%, [Network]98% +[ Fri Sep 9 01:28:00 2022 ] Eval epoch: 110 +[ Fri Sep 9 01:30:12 2022 ] Epoch 110 Curr Acc: (11249/16487)68.23% +[ Fri Sep 9 01:30:12 2022 ] Epoch 54 Best Acc 70.13% +[ Fri Sep 9 01:30:12 2022 ] epoch: 54, best accuracy: 0.7013404500515558 +[ Fri Sep 9 01:30:12 2022 ] Experiment: ./work_dir/ntu/xsub_j +[ Fri Sep 9 01:30:12 2022 ] # generator parameters: 2.896055 M. +[ Fri Sep 9 01:30:12 2022 ] Load weights from ./runs/ntu/xsub_j/runs-53-53244.pt. +[ Fri Sep 9 01:30:12 2022 ] Eval epoch: 1 +[ Fri Sep 9 01:32:25 2022 ] Epoch 1 Curr Acc: (11563/16487)70.13% +[ Fri Sep 9 01:32:25 2022 ] Epoch 54 Best Acc 70.13% diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions import 
import torch
import torch.nn as nn
import torch.nn.functional as F

import numpy as np
import math

import sys
sys.path.append('../')
from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
    MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
    MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
from model.activations import Activations
from model.utils import import_class, conv_branch_init, conv_init, bn_init
from model.attentions import Attention_Layer

# Maps a block-type string from the config to the (spatial GCN layer,
# temporal TCN layer) pair used when assembling each MST_GCN_block.
__block_type__ = {
    'basic': (Basic_GCN_layer, Basic_TCN_layer),
    'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
    'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
    'ms': (MS_GCN_layer, MS_TCN_layer),
    'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
}


class Model(nn.Module):
    """Multi-scale spatial-temporal GCN for skeleton-based action recognition.

    Input tensors have shape (N, C, T, V, M): batch, channels, frames,
    joints, persons. ``forward`` returns a tuple ``(features, logits)``
    where ``features`` is the last GCN feature map (shape N*M, C', T', V)
    and ``logits`` has shape (N, num_class).

    Args:
        num_class: number of action classes.
        num_point: number of skeleton joints (V).
        num_person: maximum number of persons per sample (M).
        block_args: list of [in_channels, out_channels, residual, stride]
            describing each stacked block; the first block is always built
            with block_type='basic' and no attention.
        graph: dotted import path of the skeleton-graph class (required).
        graph_args: kwargs forwarded to the graph class.
        kernel_size: temporal kernel size shared by all blocks.
        block_type: key into __block_type__ for blocks after the first.
        atten: attention-type string, or the literal string 'None' to disable.
        **kwargs: extra layer options (act, bias, scales, ...); kwargs['act']
            is replaced in place by an Activations module.
    """

    def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type,
                 atten, **kwargs):
        super(Model, self).__init__()
        kwargs['act'] = Activations(kwargs['act'])
        # YAML configs spell "no attention" as the string 'None'.
        atten = None if atten == 'None' else atten
        if graph is None:
            # The adjacency matrix comes from the graph class, so it is mandatory.
            raise ValueError('graph (dotted class path) must be provided')
        Graph = import_class(graph)
        self.graph = Graph(**graph_args)
        A = self.graph.A

        # BatchNorm over the flattened (person * channel * joint) axis.
        self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)

        self.layers = nn.ModuleList()
        for i, block in enumerate(block_args):
            if i == 0:
                # Stem block: plain layers, no attention, regardless of config.
                self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
                                                 kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
                                                 atten=None, **kwargs))
            else:
                self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
                                                 kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
                                                 atten=atten, **kwargs))

        self.gap = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Linear(block_args[-1][1], num_class)

        # Weight initialisation: graph-conv branches get branch-aware init,
        # everything else gets the project's standard conv/bn/linear init.
        for m in self.modules():
            if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
                for mm in m.modules():
                    if isinstance(mm, nn.Conv2d):
                        conv_branch_init(mm, self.graph.A.shape[0])
                    if isinstance(mm, nn.BatchNorm2d):
                        bn_init(mm, 1)
            elif isinstance(m, nn.Conv2d):
                conv_init(m)
            elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
                bn_init(m, 1)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))

    def forward(self, x):
        """Run the network; returns (features, logits)."""
        N, C, T, V, M = x.size()

        # N C T V M --> N (M*V*C) T for the 1-D batch norm, then back to
        # (N*M) C T V so every person is processed as its own sample.
        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

        for i, layer in enumerate(self.layers):
            x = layer(x)

        features = x  # last feature map, exposed for downstream use

        # Global average pool, then average over the person axis.
        x = self.gap(x).view(N, M, -1).mean(dim=1)
        x = self.fc(x)

        return features, x


class MST_GCN_block(nn.Module):
    """One spatial-GCN + temporal-TCN stage, with optional attention on top."""

    def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
        super(MST_GCN_block, self).__init__()
        self.atten = atten
        self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
                                                   residual=residual, **kwargs)
        self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
                                                   residual=residual, **kwargs)
        if atten is not None:
            self.att = Attention_Layer(out_channels, atten, **kwargs)

    def forward(self, x):
        # GCN -> TCN, then attention only when configured.
        return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))


if __name__ == '__main__':
    import sys
    import time

    # Body-part joint groups (0-based NTU RGB+D indices) for part attention.
    parts = [
        np.array([5, 6, 7, 8, 22, 23]) - 1,     # left_arm
        np.array([9, 10, 11, 12, 24, 25]) - 1,  # right_arm
        np.array([13, 14, 15, 16]) - 1,         # left_leg
        np.array([17, 18, 19, 20]) - 1,         # right_leg
        np.array([1, 2, 3, 4, 21]) - 1          # torso
    ]

    warmup_iter = 3
    test_iter = 10
    sys.path.append('/home/chenzhan/mywork/MST-GCN/')
    from thop import profile
    basic_channels = 112
    cfgs = {
        'num_class': 2,
        'num_point': 25,
        'num_person': 1,
        'block_args': [[2, basic_channels, False, 1],
                       [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
                       [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
                       [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
        'graph': 'graph.ntu_rgb_d.Graph',
        'graph_args': {'labeling_mode': 'spatial'},
        'kernel_size': 9,
        'block_type': 'ms',
        'reduct_ratio': 2,
        'expand_ratio': 0,
        't_scale': 4,
        'layer_type': 'sep',
        'act': 'relu',
        's_scale': 4,
        'atten': 'stcja',
        'bias': True,
        'parts': parts
    }

    model = Model(**cfgs)

    N, C, T, V, M = 4, 2, 16, 25, 1
    inputs = torch.rand(N, C, T, V, M)

    for i in range(warmup_iter + test_iter):
        if i == warmup_iter:
            start_time = time.time()
        outputs = model(inputs)
    end_time = time.time()

    total_time = end_time - start_time
    print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
        total_time, test_iter, total_time/test_iter/N))

    # BUG FIX: forward() returns the tuple (features, logits); the original
    # called outputs.size() on the tuple, raising AttributeError.
    features, logits = outputs
    print(features.size(), logits.size())

    hereflops, params = profile(model, inputs=(inputs,), verbose=False)
    print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
    print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
+ - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 60 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu/xsub_jm/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint_motion.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint_motion.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu/xsub_jm diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..4de7c12f6860e9e8723af9c9c55dce14e16aeb98 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a12cdadad8f353eda1d8d0c2215e1b376b764e52df61fea5d94dc748ff426cea +size 4979902 diff --git a/ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/log.txt b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..4be8dabe26918457fbfe690d0769eba0425b47ce --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/log.txt @@ -0,0 +1,631 @@ +[ Thu Sep 8 17:07:46 
2022 ] # generator parameters: 2.896055 M. +[ Thu Sep 8 17:07:46 2022 ] Parameters: +{'work_dir': './work_dir/ntu/xsub_jm', 'model_saved_name': './runs/ntu/xsub_jm/runs', 'config': 'config/ntu/xsub_jm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Thu Sep 8 17:07:46 2022 ] Training epoch: 1 +[ Thu Sep 8 17:07:46 2022 ] Learning rate: 0.015 +[ Thu Sep 8 17:11:08 
2022 ] Mean training loss: 3.0683. +[ Thu Sep 8 17:11:08 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 17:11:08 2022 ] Training epoch: 2 +[ Thu Sep 8 17:11:08 2022 ] Learning rate: 0.03 +[ Thu Sep 8 17:14:28 2022 ] Mean training loss: 2.2041. +[ Thu Sep 8 17:14:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:14:28 2022 ] Training epoch: 3 +[ Thu Sep 8 17:14:28 2022 ] Learning rate: 0.045 +[ Thu Sep 8 17:17:48 2022 ] Mean training loss: 1.7924. +[ Thu Sep 8 17:17:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:17:48 2022 ] Training epoch: 4 +[ Thu Sep 8 17:17:48 2022 ] Learning rate: 0.06 +[ Thu Sep 8 17:21:09 2022 ] Mean training loss: 1.5479. +[ Thu Sep 8 17:21:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:21:09 2022 ] Training epoch: 5 +[ Thu Sep 8 17:21:09 2022 ] Learning rate: 0.075 +[ Thu Sep 8 17:24:29 2022 ] Mean training loss: 1.3728. +[ Thu Sep 8 17:24:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:24:29 2022 ] Training epoch: 6 +[ Thu Sep 8 17:24:29 2022 ] Learning rate: 0.09 +[ Thu Sep 8 17:27:50 2022 ] Mean training loss: 1.2656. +[ Thu Sep 8 17:27:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:27:50 2022 ] Training epoch: 7 +[ Thu Sep 8 17:27:50 2022 ] Learning rate: 0.10500000000000001 +[ Thu Sep 8 17:31:11 2022 ] Mean training loss: 1.1959. +[ Thu Sep 8 17:31:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:31:11 2022 ] Training epoch: 8 +[ Thu Sep 8 17:31:11 2022 ] Learning rate: 0.12 +[ Thu Sep 8 17:34:32 2022 ] Mean training loss: 1.1293. +[ Thu Sep 8 17:34:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:34:32 2022 ] Training epoch: 9 +[ Thu Sep 8 17:34:32 2022 ] Learning rate: 0.13499999999999998 +[ Thu Sep 8 17:37:53 2022 ] Mean training loss: 1.1130. 
+[ Thu Sep 8 17:37:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:37:53 2022 ] Training epoch: 10 +[ Thu Sep 8 17:37:53 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:41:15 2022 ] Mean training loss: 1.0630. +[ Thu Sep 8 17:41:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:41:15 2022 ] Training epoch: 11 +[ Thu Sep 8 17:41:15 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:44:36 2022 ] Mean training loss: 1.0072. +[ Thu Sep 8 17:44:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:44:36 2022 ] Training epoch: 12 +[ Thu Sep 8 17:44:36 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:47:57 2022 ] Mean training loss: 0.9853. +[ Thu Sep 8 17:47:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:47:57 2022 ] Training epoch: 13 +[ Thu Sep 8 17:47:57 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:51:18 2022 ] Mean training loss: 0.9481. +[ Thu Sep 8 17:51:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:51:18 2022 ] Training epoch: 14 +[ Thu Sep 8 17:51:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:54:39 2022 ] Mean training loss: 0.8977. +[ Thu Sep 8 17:54:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:54:39 2022 ] Training epoch: 15 +[ Thu Sep 8 17:54:39 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:57:59 2022 ] Mean training loss: 0.8794. +[ Thu Sep 8 17:57:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:57:59 2022 ] Training epoch: 16 +[ Thu Sep 8 17:57:59 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:01:20 2022 ] Mean training loss: 0.8747. +[ Thu Sep 8 18:01:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:01:20 2022 ] Training epoch: 17 +[ Thu Sep 8 18:01:20 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:04:40 2022 ] Mean training loss: 0.8299. 
+[ Thu Sep 8 18:04:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:04:40 2022 ] Training epoch: 18 +[ Thu Sep 8 18:04:40 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:08:01 2022 ] Mean training loss: 0.8331. +[ Thu Sep 8 18:08:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:08:01 2022 ] Training epoch: 19 +[ Thu Sep 8 18:08:01 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:11:22 2022 ] Mean training loss: 0.7954. +[ Thu Sep 8 18:11:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:11:22 2022 ] Training epoch: 20 +[ Thu Sep 8 18:11:22 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:14:43 2022 ] Mean training loss: 0.7783. +[ Thu Sep 8 18:14:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:14:43 2022 ] Training epoch: 21 +[ Thu Sep 8 18:14:43 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:18:05 2022 ] Mean training loss: 0.7715. +[ Thu Sep 8 18:18:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:18:05 2022 ] Training epoch: 22 +[ Thu Sep 8 18:18:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:21:26 2022 ] Mean training loss: 0.7561. +[ Thu Sep 8 18:21:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:21:26 2022 ] Training epoch: 23 +[ Thu Sep 8 18:21:26 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:24:48 2022 ] Mean training loss: 0.7335. +[ Thu Sep 8 18:24:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:24:48 2022 ] Training epoch: 24 +[ Thu Sep 8 18:24:48 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:28:09 2022 ] Mean training loss: 0.7450. +[ Thu Sep 8 18:28:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:28:09 2022 ] Training epoch: 25 +[ Thu Sep 8 18:28:09 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:31:30 2022 ] Mean training loss: 0.7210. 
+[ Thu Sep 8 18:31:30 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:31:30 2022 ] Training epoch: 26 +[ Thu Sep 8 18:31:30 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:34:49 2022 ] Mean training loss: 0.7067. +[ Thu Sep 8 18:34:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:34:49 2022 ] Training epoch: 27 +[ Thu Sep 8 18:34:49 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:38:10 2022 ] Mean training loss: 0.7015. +[ Thu Sep 8 18:38:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:38:10 2022 ] Training epoch: 28 +[ Thu Sep 8 18:38:10 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:41:30 2022 ] Mean training loss: 0.6972. +[ Thu Sep 8 18:41:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:41:30 2022 ] Training epoch: 29 +[ Thu Sep 8 18:41:30 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:44:51 2022 ] Mean training loss: 0.6854. +[ Thu Sep 8 18:44:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:44:51 2022 ] Training epoch: 30 +[ Thu Sep 8 18:44:51 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:48:11 2022 ] Mean training loss: 0.6598. +[ Thu Sep 8 18:48:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:48:11 2022 ] Training epoch: 31 +[ Thu Sep 8 18:48:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:51:32 2022 ] Mean training loss: 0.6721. +[ Thu Sep 8 18:51:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:51:32 2022 ] Training epoch: 32 +[ Thu Sep 8 18:51:32 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:54:53 2022 ] Mean training loss: 0.6566. +[ Thu Sep 8 18:54:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:54:53 2022 ] Training epoch: 33 +[ Thu Sep 8 18:54:53 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:58:14 2022 ] Mean training loss: 0.6786. 
+[ Thu Sep 8 18:58:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:58:14 2022 ] Training epoch: 34 +[ Thu Sep 8 18:58:14 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:01:35 2022 ] Mean training loss: 0.6565. +[ Thu Sep 8 19:01:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:01:35 2022 ] Training epoch: 35 +[ Thu Sep 8 19:01:35 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:04:56 2022 ] Mean training loss: 0.6696. +[ Thu Sep 8 19:04:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:04:56 2022 ] Training epoch: 36 +[ Thu Sep 8 19:04:56 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:08:17 2022 ] Mean training loss: 0.6123. +[ Thu Sep 8 19:08:17 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:08:17 2022 ] Training epoch: 37 +[ Thu Sep 8 19:08:17 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:11:37 2022 ] Mean training loss: 0.6360. +[ Thu Sep 8 19:11:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:11:37 2022 ] Training epoch: 38 +[ Thu Sep 8 19:11:37 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:14:59 2022 ] Mean training loss: 0.6435. +[ Thu Sep 8 19:14:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:14:59 2022 ] Training epoch: 39 +[ Thu Sep 8 19:14:59 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:18:20 2022 ] Mean training loss: 0.6291. +[ Thu Sep 8 19:18:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:18:20 2022 ] Training epoch: 40 +[ Thu Sep 8 19:18:20 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:21:42 2022 ] Mean training loss: 0.6281. +[ Thu Sep 8 19:21:42 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:21:42 2022 ] Training epoch: 41 +[ Thu Sep 8 19:21:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:25:03 2022 ] Mean training loss: 0.6269. 
+[ Thu Sep 8 19:25:03 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:25:03 2022 ] Training epoch: 42 +[ Thu Sep 8 19:25:03 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:28:24 2022 ] Mean training loss: 0.6014. +[ Thu Sep 8 19:28:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:28:24 2022 ] Training epoch: 43 +[ Thu Sep 8 19:28:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:31:45 2022 ] Mean training loss: 0.6220. +[ Thu Sep 8 19:31:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:31:45 2022 ] Training epoch: 44 +[ Thu Sep 8 19:31:45 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:35:05 2022 ] Mean training loss: 0.6377. +[ Thu Sep 8 19:35:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:35:05 2022 ] Training epoch: 45 +[ Thu Sep 8 19:35:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:38:26 2022 ] Mean training loss: 0.5951. +[ Thu Sep 8 19:38:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:38:26 2022 ] Training epoch: 46 +[ Thu Sep 8 19:38:26 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:41:47 2022 ] Mean training loss: 0.6152. +[ Thu Sep 8 19:41:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:41:47 2022 ] Training epoch: 47 +[ Thu Sep 8 19:41:47 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:45:07 2022 ] Mean training loss: 0.6070. +[ Thu Sep 8 19:45:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:45:07 2022 ] Training epoch: 48 +[ Thu Sep 8 19:45:07 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:48:28 2022 ] Mean training loss: 0.5967. +[ Thu Sep 8 19:48:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:48:28 2022 ] Training epoch: 49 +[ Thu Sep 8 19:48:28 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:51:49 2022 ] Mean training loss: 0.5964. 
+[ Thu Sep 8 19:51:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:51:49 2022 ] Training epoch: 50 +[ Thu Sep 8 19:51:49 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:55:10 2022 ] Mean training loss: 0.5941. +[ Thu Sep 8 19:55:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:55:10 2022 ] Training epoch: 51 +[ Thu Sep 8 19:55:10 2022 ] Learning rate: 0.015 +[ Thu Sep 8 19:58:31 2022 ] Mean training loss: 0.2782. +[ Thu Sep 8 19:58:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:58:31 2022 ] Eval epoch: 51 +[ Thu Sep 8 20:00:43 2022 ] Epoch 51 Curr Acc: (10345/16487)62.75% +[ Thu Sep 8 20:00:43 2022 ] Epoch 51 Best Acc 62.75% +[ Thu Sep 8 20:00:43 2022 ] Training epoch: 52 +[ Thu Sep 8 20:00:43 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:04:04 2022 ] Mean training loss: 0.1700. +[ Thu Sep 8 20:04:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:04:04 2022 ] Eval epoch: 52 +[ Thu Sep 8 20:06:15 2022 ] Epoch 52 Curr Acc: (10828/16487)65.68% +[ Thu Sep 8 20:06:15 2022 ] Epoch 52 Best Acc 65.68% +[ Thu Sep 8 20:06:15 2022 ] Training epoch: 53 +[ Thu Sep 8 20:06:15 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:09:36 2022 ] Mean training loss: 0.1363. +[ Thu Sep 8 20:09:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:09:36 2022 ] Eval epoch: 53 +[ Thu Sep 8 20:11:47 2022 ] Epoch 53 Curr Acc: (10646/16487)64.57% +[ Thu Sep 8 20:11:47 2022 ] Epoch 52 Best Acc 65.68% +[ Thu Sep 8 20:11:47 2022 ] Training epoch: 54 +[ Thu Sep 8 20:11:47 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:15:08 2022 ] Mean training loss: 0.1032. 
+[ Thu Sep 8 20:15:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:15:08 2022 ] Eval epoch: 54 +[ Thu Sep 8 20:17:19 2022 ] Epoch 54 Curr Acc: (10731/16487)65.09% +[ Thu Sep 8 20:17:19 2022 ] Epoch 52 Best Acc 65.68% +[ Thu Sep 8 20:17:19 2022 ] Training epoch: 55 +[ Thu Sep 8 20:17:19 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:20:40 2022 ] Mean training loss: 0.0870. +[ Thu Sep 8 20:20:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:20:40 2022 ] Eval epoch: 55 +[ Thu Sep 8 20:22:51 2022 ] Epoch 55 Curr Acc: (10940/16487)66.36% +[ Thu Sep 8 20:22:51 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 20:22:51 2022 ] Training epoch: 56 +[ Thu Sep 8 20:22:51 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:26:11 2022 ] Mean training loss: 0.0690. +[ Thu Sep 8 20:26:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:26:11 2022 ] Eval epoch: 56 +[ Thu Sep 8 20:28:22 2022 ] Epoch 56 Curr Acc: (10542/16487)63.94% +[ Thu Sep 8 20:28:22 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 20:28:22 2022 ] Training epoch: 57 +[ Thu Sep 8 20:28:22 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:31:42 2022 ] Mean training loss: 0.0608. +[ Thu Sep 8 20:31:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:31:43 2022 ] Eval epoch: 57 +[ Thu Sep 8 20:33:54 2022 ] Epoch 57 Curr Acc: (10609/16487)64.35% +[ Thu Sep 8 20:33:54 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 20:33:54 2022 ] Training epoch: 58 +[ Thu Sep 8 20:33:54 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:37:14 2022 ] Mean training loss: 0.0489. +[ Thu Sep 8 20:37:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:37:14 2022 ] Eval epoch: 58 +[ Thu Sep 8 20:39:25 2022 ] Epoch 58 Curr Acc: (10830/16487)65.69% +[ Thu Sep 8 20:39:25 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 20:39:25 2022 ] Training epoch: 59 +[ Thu Sep 8 20:39:25 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:42:45 2022 ] Mean training loss: 0.0413. 
+[ Thu Sep 8 20:42:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:42:45 2022 ] Eval epoch: 59 +[ Thu Sep 8 20:44:56 2022 ] Epoch 59 Curr Acc: (10588/16487)64.22% +[ Thu Sep 8 20:44:56 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 20:44:56 2022 ] Training epoch: 60 +[ Thu Sep 8 20:44:56 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:48:17 2022 ] Mean training loss: 0.0338. +[ Thu Sep 8 20:48:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:48:17 2022 ] Eval epoch: 60 +[ Thu Sep 8 20:50:28 2022 ] Epoch 60 Curr Acc: (10630/16487)64.48% +[ Thu Sep 8 20:50:28 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 20:50:28 2022 ] Training epoch: 61 +[ Thu Sep 8 20:50:28 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:53:48 2022 ] Mean training loss: 0.0319. +[ Thu Sep 8 20:53:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:53:48 2022 ] Eval epoch: 61 +[ Thu Sep 8 20:55:59 2022 ] Epoch 61 Curr Acc: (10636/16487)64.51% +[ Thu Sep 8 20:55:59 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 20:55:59 2022 ] Training epoch: 62 +[ Thu Sep 8 20:55:59 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:59:19 2022 ] Mean training loss: 0.0293. +[ Thu Sep 8 20:59:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:59:19 2022 ] Eval epoch: 62 +[ Thu Sep 8 21:01:31 2022 ] Epoch 62 Curr Acc: (10644/16487)64.56% +[ Thu Sep 8 21:01:31 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 21:01:31 2022 ] Training epoch: 63 +[ Thu Sep 8 21:01:31 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:04:52 2022 ] Mean training loss: 0.0260. +[ Thu Sep 8 21:04:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:04:52 2022 ] Eval epoch: 63 +[ Thu Sep 8 21:07:03 2022 ] Epoch 63 Curr Acc: (10884/16487)66.02% +[ Thu Sep 8 21:07:03 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 21:07:03 2022 ] Training epoch: 64 +[ Thu Sep 8 21:07:03 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:10:24 2022 ] Mean training loss: 0.0219. 
+[ Thu Sep 8 21:10:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:10:24 2022 ] Eval epoch: 64 +[ Thu Sep 8 21:12:35 2022 ] Epoch 64 Curr Acc: (10453/16487)63.40% +[ Thu Sep 8 21:12:35 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 21:12:35 2022 ] Training epoch: 65 +[ Thu Sep 8 21:12:35 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:15:56 2022 ] Mean training loss: 0.0255. +[ Thu Sep 8 21:15:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:15:56 2022 ] Eval epoch: 65 +[ Thu Sep 8 21:18:07 2022 ] Epoch 65 Curr Acc: (10705/16487)64.93% +[ Thu Sep 8 21:18:07 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 21:18:07 2022 ] Training epoch: 66 +[ Thu Sep 8 21:18:07 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:21:28 2022 ] Mean training loss: 0.0226. +[ Thu Sep 8 21:21:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:21:28 2022 ] Eval epoch: 66 +[ Thu Sep 8 21:23:39 2022 ] Epoch 66 Curr Acc: (10797/16487)65.49% +[ Thu Sep 8 21:23:39 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 21:23:39 2022 ] Training epoch: 67 +[ Thu Sep 8 21:23:39 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:27:00 2022 ] Mean training loss: 0.0182. +[ Thu Sep 8 21:27:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:27:00 2022 ] Eval epoch: 67 +[ Thu Sep 8 21:29:11 2022 ] Epoch 67 Curr Acc: (10685/16487)64.81% +[ Thu Sep 8 21:29:11 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 21:29:11 2022 ] Training epoch: 68 +[ Thu Sep 8 21:29:11 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:32:32 2022 ] Mean training loss: 0.0168. +[ Thu Sep 8 21:32:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:32:32 2022 ] Eval epoch: 68 +[ Thu Sep 8 21:34:43 2022 ] Epoch 68 Curr Acc: (10819/16487)65.62% +[ Thu Sep 8 21:34:43 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 21:34:43 2022 ] Training epoch: 69 +[ Thu Sep 8 21:34:43 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:38:04 2022 ] Mean training loss: 0.0169. 
+[ Thu Sep 8 21:38:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:38:04 2022 ] Eval epoch: 69 +[ Thu Sep 8 21:40:15 2022 ] Epoch 69 Curr Acc: (9777/16487)59.30% +[ Thu Sep 8 21:40:15 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 21:40:15 2022 ] Training epoch: 70 +[ Thu Sep 8 21:40:15 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:43:36 2022 ] Mean training loss: 0.0226. +[ Thu Sep 8 21:43:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:43:36 2022 ] Eval epoch: 70 +[ Thu Sep 8 21:45:47 2022 ] Epoch 70 Curr Acc: (10540/16487)63.93% +[ Thu Sep 8 21:45:47 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 21:45:47 2022 ] Training epoch: 71 +[ Thu Sep 8 21:45:47 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 21:49:08 2022 ] Mean training loss: 0.0186. +[ Thu Sep 8 21:49:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:49:08 2022 ] Eval epoch: 71 +[ Thu Sep 8 21:51:19 2022 ] Epoch 71 Curr Acc: (10906/16487)66.15% +[ Thu Sep 8 21:51:19 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 21:51:19 2022 ] Training epoch: 72 +[ Thu Sep 8 21:51:19 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 21:54:40 2022 ] Mean training loss: 0.0140. +[ Thu Sep 8 21:54:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:54:41 2022 ] Eval epoch: 72 +[ Thu Sep 8 21:56:52 2022 ] Epoch 72 Curr Acc: (10678/16487)64.77% +[ Thu Sep 8 21:56:52 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 21:56:52 2022 ] Training epoch: 73 +[ Thu Sep 8 21:56:52 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:00:12 2022 ] Mean training loss: 0.0141. 
+[ Thu Sep 8 22:00:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:00:12 2022 ] Eval epoch: 73 +[ Thu Sep 8 22:02:23 2022 ] Epoch 73 Curr Acc: (10847/16487)65.79% +[ Thu Sep 8 22:02:23 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 22:02:23 2022 ] Training epoch: 74 +[ Thu Sep 8 22:02:23 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:05:44 2022 ] Mean training loss: 0.0123. +[ Thu Sep 8 22:05:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:05:44 2022 ] Eval epoch: 74 +[ Thu Sep 8 22:07:55 2022 ] Epoch 74 Curr Acc: (10615/16487)64.38% +[ Thu Sep 8 22:07:55 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 22:07:55 2022 ] Training epoch: 75 +[ Thu Sep 8 22:07:55 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:11:15 2022 ] Mean training loss: 0.0108. +[ Thu Sep 8 22:11:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:11:15 2022 ] Eval epoch: 75 +[ Thu Sep 8 22:13:27 2022 ] Epoch 75 Curr Acc: (10935/16487)66.32% +[ Thu Sep 8 22:13:27 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 22:13:27 2022 ] Training epoch: 76 +[ Thu Sep 8 22:13:27 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:16:48 2022 ] Mean training loss: 0.0120. +[ Thu Sep 8 22:16:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:16:48 2022 ] Eval epoch: 76 +[ Thu Sep 8 22:18:59 2022 ] Epoch 76 Curr Acc: (10867/16487)65.91% +[ Thu Sep 8 22:18:59 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 22:18:59 2022 ] Training epoch: 77 +[ Thu Sep 8 22:18:59 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:22:20 2022 ] Mean training loss: 0.0102. 
+[ Thu Sep 8 22:22:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:22:20 2022 ] Eval epoch: 77 +[ Thu Sep 8 22:24:31 2022 ] Epoch 77 Curr Acc: (10603/16487)64.31% +[ Thu Sep 8 22:24:31 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 22:24:31 2022 ] Training epoch: 78 +[ Thu Sep 8 22:24:31 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:27:51 2022 ] Mean training loss: 0.0102. +[ Thu Sep 8 22:27:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:27:51 2022 ] Eval epoch: 78 +[ Thu Sep 8 22:30:03 2022 ] Epoch 78 Curr Acc: (10937/16487)66.34% +[ Thu Sep 8 22:30:03 2022 ] Epoch 55 Best Acc 66.36% +[ Thu Sep 8 22:30:03 2022 ] Training epoch: 79 +[ Thu Sep 8 22:30:03 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:33:23 2022 ] Mean training loss: 0.0114. +[ Thu Sep 8 22:33:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:33:23 2022 ] Eval epoch: 79 +[ Thu Sep 8 22:35:35 2022 ] Epoch 79 Curr Acc: (11049/16487)67.02% +[ Thu Sep 8 22:35:35 2022 ] Epoch 79 Best Acc 67.02% +[ Thu Sep 8 22:35:35 2022 ] Training epoch: 80 +[ Thu Sep 8 22:35:35 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:38:55 2022 ] Mean training loss: 0.0108. +[ Thu Sep 8 22:38:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:38:55 2022 ] Eval epoch: 80 +[ Thu Sep 8 22:41:07 2022 ] Epoch 80 Curr Acc: (10682/16487)64.79% +[ Thu Sep 8 22:41:07 2022 ] Epoch 79 Best Acc 67.02% +[ Thu Sep 8 22:41:07 2022 ] Training epoch: 81 +[ Thu Sep 8 22:41:07 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:44:27 2022 ] Mean training loss: 0.0099. 
+[ Thu Sep 8 22:44:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:44:27 2022 ] Eval epoch: 81 +[ Thu Sep 8 22:46:38 2022 ] Epoch 81 Curr Acc: (11079/16487)67.20% +[ Thu Sep 8 22:46:38 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 22:46:38 2022 ] Training epoch: 82 +[ Thu Sep 8 22:46:38 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:49:59 2022 ] Mean training loss: 0.0107. +[ Thu Sep 8 22:49:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:49:59 2022 ] Eval epoch: 82 +[ Thu Sep 8 22:52:10 2022 ] Epoch 82 Curr Acc: (10525/16487)63.84% +[ Thu Sep 8 22:52:10 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 22:52:10 2022 ] Training epoch: 83 +[ Thu Sep 8 22:52:10 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:55:30 2022 ] Mean training loss: 0.0095. +[ Thu Sep 8 22:55:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:55:30 2022 ] Eval epoch: 83 +[ Thu Sep 8 22:57:41 2022 ] Epoch 83 Curr Acc: (10832/16487)65.70% +[ Thu Sep 8 22:57:41 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 22:57:41 2022 ] Training epoch: 84 +[ Thu Sep 8 22:57:41 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:01:01 2022 ] Mean training loss: 0.0110. +[ Thu Sep 8 23:01:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:01:02 2022 ] Eval epoch: 84 +[ Thu Sep 8 23:03:13 2022 ] Epoch 84 Curr Acc: (10785/16487)65.42% +[ Thu Sep 8 23:03:13 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 23:03:13 2022 ] Training epoch: 85 +[ Thu Sep 8 23:03:13 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:06:33 2022 ] Mean training loss: 0.0099. 
+[ Thu Sep 8 23:06:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:06:33 2022 ] Eval epoch: 85 +[ Thu Sep 8 23:08:45 2022 ] Epoch 85 Curr Acc: (10775/16487)65.35% +[ Thu Sep 8 23:08:45 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 23:08:45 2022 ] Training epoch: 86 +[ Thu Sep 8 23:08:45 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:12:06 2022 ] Mean training loss: 0.0103. +[ Thu Sep 8 23:12:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:12:06 2022 ] Eval epoch: 86 +[ Thu Sep 8 23:14:17 2022 ] Epoch 86 Curr Acc: (10707/16487)64.94% +[ Thu Sep 8 23:14:17 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 23:14:17 2022 ] Training epoch: 87 +[ Thu Sep 8 23:14:17 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:17:38 2022 ] Mean training loss: 0.0099. +[ Thu Sep 8 23:17:38 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 23:17:38 2022 ] Eval epoch: 87 +[ Thu Sep 8 23:19:49 2022 ] Epoch 87 Curr Acc: (10859/16487)65.86% +[ Thu Sep 8 23:19:49 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 23:19:49 2022 ] Training epoch: 88 +[ Thu Sep 8 23:19:49 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:23:09 2022 ] Mean training loss: 0.0093. +[ Thu Sep 8 23:23:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:23:09 2022 ] Eval epoch: 88 +[ Thu Sep 8 23:25:20 2022 ] Epoch 88 Curr Acc: (10745/16487)65.17% +[ Thu Sep 8 23:25:20 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 23:25:20 2022 ] Training epoch: 89 +[ Thu Sep 8 23:25:20 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:28:40 2022 ] Mean training loss: 0.0092. 
+[ Thu Sep 8 23:28:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:28:40 2022 ] Eval epoch: 89 +[ Thu Sep 8 23:30:51 2022 ] Epoch 89 Curr Acc: (10769/16487)65.32% +[ Thu Sep 8 23:30:51 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 23:30:51 2022 ] Training epoch: 90 +[ Thu Sep 8 23:30:51 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:34:12 2022 ] Mean training loss: 0.0092. +[ Thu Sep 8 23:34:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:34:12 2022 ] Eval epoch: 90 +[ Thu Sep 8 23:36:24 2022 ] Epoch 90 Curr Acc: (10529/16487)63.86% +[ Thu Sep 8 23:36:24 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 23:36:24 2022 ] Training epoch: 91 +[ Thu Sep 8 23:36:24 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:39:45 2022 ] Mean training loss: 0.0094. +[ Thu Sep 8 23:39:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:39:45 2022 ] Eval epoch: 91 +[ Thu Sep 8 23:41:56 2022 ] Epoch 91 Curr Acc: (10627/16487)64.46% +[ Thu Sep 8 23:41:56 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 23:41:56 2022 ] Training epoch: 92 +[ Thu Sep 8 23:41:56 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:45:17 2022 ] Mean training loss: 0.0101. +[ Thu Sep 8 23:45:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:45:17 2022 ] Eval epoch: 92 +[ Thu Sep 8 23:47:28 2022 ] Epoch 92 Curr Acc: (10965/16487)66.51% +[ Thu Sep 8 23:47:28 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 23:47:28 2022 ] Training epoch: 93 +[ Thu Sep 8 23:47:28 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:50:49 2022 ] Mean training loss: 0.0089. 
+[ Thu Sep 8 23:50:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:50:49 2022 ] Eval epoch: 93 +[ Thu Sep 8 23:53:00 2022 ] Epoch 93 Curr Acc: (10688/16487)64.83% +[ Thu Sep 8 23:53:00 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 23:53:00 2022 ] Training epoch: 94 +[ Thu Sep 8 23:53:00 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:56:20 2022 ] Mean training loss: 0.0093. +[ Thu Sep 8 23:56:20 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 23:56:20 2022 ] Eval epoch: 94 +[ Thu Sep 8 23:58:31 2022 ] Epoch 94 Curr Acc: (10938/16487)66.34% +[ Thu Sep 8 23:58:31 2022 ] Epoch 81 Best Acc 67.20% +[ Thu Sep 8 23:58:31 2022 ] Training epoch: 95 +[ Thu Sep 8 23:58:31 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:01:53 2022 ] Mean training loss: 0.0093. +[ Fri Sep 9 00:01:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:01:53 2022 ] Eval epoch: 95 +[ Fri Sep 9 00:04:04 2022 ] Epoch 95 Curr Acc: (10874/16487)65.95% +[ Fri Sep 9 00:04:04 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 00:04:04 2022 ] Training epoch: 96 +[ Fri Sep 9 00:04:04 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:07:24 2022 ] Mean training loss: 0.0082. +[ Fri Sep 9 00:07:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:07:24 2022 ] Eval epoch: 96 +[ Fri Sep 9 00:09:36 2022 ] Epoch 96 Curr Acc: (10877/16487)65.97% +[ Fri Sep 9 00:09:36 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 00:09:36 2022 ] Training epoch: 97 +[ Fri Sep 9 00:09:36 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:12:55 2022 ] Mean training loss: 0.0089. 
+[ Fri Sep 9 00:12:55 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 00:12:55 2022 ] Eval epoch: 97 +[ Fri Sep 9 00:15:06 2022 ] Epoch 97 Curr Acc: (10844/16487)65.77% +[ Fri Sep 9 00:15:06 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 00:15:06 2022 ] Training epoch: 98 +[ Fri Sep 9 00:15:06 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:18:28 2022 ] Mean training loss: 0.0083. +[ Fri Sep 9 00:18:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:18:28 2022 ] Eval epoch: 98 +[ Fri Sep 9 00:20:39 2022 ] Epoch 98 Curr Acc: (10985/16487)66.63% +[ Fri Sep 9 00:20:39 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 00:20:39 2022 ] Training epoch: 99 +[ Fri Sep 9 00:20:39 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:24:01 2022 ] Mean training loss: 0.0095. +[ Fri Sep 9 00:24:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:24:01 2022 ] Eval epoch: 99 +[ Fri Sep 9 00:26:12 2022 ] Epoch 99 Curr Acc: (10930/16487)66.29% +[ Fri Sep 9 00:26:12 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 00:26:12 2022 ] Training epoch: 100 +[ Fri Sep 9 00:26:12 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:29:33 2022 ] Mean training loss: 0.0086. +[ Fri Sep 9 00:29:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:29:33 2022 ] Eval epoch: 100 +[ Fri Sep 9 00:31:44 2022 ] Epoch 100 Curr Acc: (11079/16487)67.20% +[ Fri Sep 9 00:31:44 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 00:31:44 2022 ] Training epoch: 101 +[ Fri Sep 9 00:31:44 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:35:04 2022 ] Mean training loss: 0.0087. 
+[ Fri Sep 9 00:35:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:35:04 2022 ] Eval epoch: 101 +[ Fri Sep 9 00:37:16 2022 ] Epoch 101 Curr Acc: (10870/16487)65.93% +[ Fri Sep 9 00:37:16 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 00:37:16 2022 ] Training epoch: 102 +[ Fri Sep 9 00:37:16 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:40:37 2022 ] Mean training loss: 0.0101. +[ Fri Sep 9 00:40:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:40:37 2022 ] Eval epoch: 102 +[ Fri Sep 9 00:42:48 2022 ] Epoch 102 Curr Acc: (10517/16487)63.79% +[ Fri Sep 9 00:42:48 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 00:42:48 2022 ] Training epoch: 103 +[ Fri Sep 9 00:42:48 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:46:09 2022 ] Mean training loss: 0.0102. +[ Fri Sep 9 00:46:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:46:09 2022 ] Eval epoch: 103 +[ Fri Sep 9 00:48:20 2022 ] Epoch 103 Curr Acc: (10707/16487)64.94% +[ Fri Sep 9 00:48:20 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 00:48:20 2022 ] Training epoch: 104 +[ Fri Sep 9 00:48:20 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:51:41 2022 ] Mean training loss: 0.0081. +[ Fri Sep 9 00:51:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:51:41 2022 ] Eval epoch: 104 +[ Fri Sep 9 00:53:52 2022 ] Epoch 104 Curr Acc: (10923/16487)66.25% +[ Fri Sep 9 00:53:52 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 00:53:52 2022 ] Training epoch: 105 +[ Fri Sep 9 00:53:52 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:57:13 2022 ] Mean training loss: 0.0100. 
+[ Fri Sep 9 00:57:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:57:13 2022 ] Eval epoch: 105 +[ Fri Sep 9 00:59:24 2022 ] Epoch 105 Curr Acc: (11071/16487)67.15% +[ Fri Sep 9 00:59:24 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 00:59:24 2022 ] Training epoch: 106 +[ Fri Sep 9 00:59:24 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:02:44 2022 ] Mean training loss: 0.0107. +[ Fri Sep 9 01:02:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:02:44 2022 ] Eval epoch: 106 +[ Fri Sep 9 01:04:56 2022 ] Epoch 106 Curr Acc: (10881/16487)66.00% +[ Fri Sep 9 01:04:56 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 01:04:56 2022 ] Training epoch: 107 +[ Fri Sep 9 01:04:56 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:08:17 2022 ] Mean training loss: 0.0086. +[ Fri Sep 9 01:08:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:08:17 2022 ] Eval epoch: 107 +[ Fri Sep 9 01:10:28 2022 ] Epoch 107 Curr Acc: (11012/16487)66.79% +[ Fri Sep 9 01:10:28 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 01:10:28 2022 ] Training epoch: 108 +[ Fri Sep 9 01:10:28 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:13:49 2022 ] Mean training loss: 0.0093. +[ Fri Sep 9 01:13:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:13:49 2022 ] Eval epoch: 108 +[ Fri Sep 9 01:16:00 2022 ] Epoch 108 Curr Acc: (10579/16487)64.17% +[ Fri Sep 9 01:16:00 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 01:16:00 2022 ] Training epoch: 109 +[ Fri Sep 9 01:16:00 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:19:21 2022 ] Mean training loss: 0.0093. 
+[ Fri Sep 9 01:19:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:19:21 2022 ] Eval epoch: 109 +[ Fri Sep 9 01:21:32 2022 ] Epoch 109 Curr Acc: (10492/16487)63.64% +[ Fri Sep 9 01:21:32 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 01:21:32 2022 ] Training epoch: 110 +[ Fri Sep 9 01:21:32 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:24:53 2022 ] Mean training loss: 0.0088. +[ Fri Sep 9 01:24:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:24:53 2022 ] Eval epoch: 110 +[ Fri Sep 9 01:27:04 2022 ] Epoch 110 Curr Acc: (10646/16487)64.57% +[ Fri Sep 9 01:27:04 2022 ] Epoch 81 Best Acc 67.20% +[ Fri Sep 9 01:27:04 2022 ] epoch: 81, best accuracy: 0.6719839873839996 +[ Fri Sep 9 01:27:04 2022 ] Experiment: ./work_dir/ntu/xsub_jm +[ Fri Sep 9 01:27:04 2022 ] # generator parameters: 2.896055 M. +[ Fri Sep 9 01:27:04 2022 ] Load weights from ./runs/ntu/xsub_jm/runs-80-79866.pt. +[ Fri Sep 9 01:27:04 2022 ] Eval epoch: 1 +[ Fri Sep 9 01:29:16 2022 ] Epoch 1 Curr Acc: (11079/16487)67.20% +[ Fri Sep 9 01:29:16 2022 ] Epoch 81 Best Acc 67.20% diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_b/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu60_xview/xview_b/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_b/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions import 
Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_b/config.yaml b/ckpt/Others/MST-GCN/ntu60_xview/xview_b/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..fea3b1acd00ddf672e932a22d36fc78423d47b85 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_b/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu/xview_b.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + - 
448 + - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 60 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu/xview_b/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_data_bone.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_data_bone.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu/xview_b diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_b/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu60_xview/xview_b/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..22c1a2e47eb40b61d641fa9ac21c3ed1fe7c7146 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_b/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:06ab885eed680978525867050ff38f49953575d878938632dfe8e2b7592b6690 +size 5718404 diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_b/log.txt b/ckpt/Others/MST-GCN/ntu60_xview/xview_b/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..ea0af22ba698444a7c42b46d7f67076ce46919a2 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_b/log.txt @@ -0,0 +1,631 @@ +[ Thu Sep 8 17:08:20 
2022 ] # generator parameters: 2.896055 M. +[ Thu Sep 8 17:08:20 2022 ] Parameters: +{'work_dir': './work_dir/ntu/xview_b', 'model_saved_name': './runs/ntu/xview_b/runs', 'config': 'config/ntu/xview_b.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Thu Sep 8 17:08:20 2022 ] Training epoch: 1 +[ Thu Sep 8 17:08:20 2022 ] Learning rate: 0.015 +[ Thu Sep 8 17:11:40 2022 ] Mean 
training loss: 3.1483. +[ Thu Sep 8 17:11:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:11:40 2022 ] Training epoch: 2 +[ Thu Sep 8 17:11:40 2022 ] Learning rate: 0.03 +[ Thu Sep 8 17:15:00 2022 ] Mean training loss: 2.3484. +[ Thu Sep 8 17:15:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:15:00 2022 ] Training epoch: 3 +[ Thu Sep 8 17:15:00 2022 ] Learning rate: 0.045 +[ Thu Sep 8 17:18:20 2022 ] Mean training loss: 1.9959. +[ Thu Sep 8 17:18:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:18:20 2022 ] Training epoch: 4 +[ Thu Sep 8 17:18:20 2022 ] Learning rate: 0.06 +[ Thu Sep 8 17:21:40 2022 ] Mean training loss: 1.7325. +[ Thu Sep 8 17:21:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:21:40 2022 ] Training epoch: 5 +[ Thu Sep 8 17:21:40 2022 ] Learning rate: 0.075 +[ Thu Sep 8 17:24:59 2022 ] Mean training loss: 1.5892. +[ Thu Sep 8 17:24:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:24:59 2022 ] Training epoch: 6 +[ Thu Sep 8 17:24:59 2022 ] Learning rate: 0.09 +[ Thu Sep 8 17:28:19 2022 ] Mean training loss: 1.4584. +[ Thu Sep 8 17:28:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:28:19 2022 ] Training epoch: 7 +[ Thu Sep 8 17:28:19 2022 ] Learning rate: 0.10500000000000001 +[ Thu Sep 8 17:31:38 2022 ] Mean training loss: 1.3584. +[ Thu Sep 8 17:31:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:31:38 2022 ] Training epoch: 8 +[ Thu Sep 8 17:31:38 2022 ] Learning rate: 0.12 +[ Thu Sep 8 17:34:58 2022 ] Mean training loss: 1.2929. +[ Thu Sep 8 17:34:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:34:58 2022 ] Training epoch: 9 +[ Thu Sep 8 17:34:58 2022 ] Learning rate: 0.13499999999999998 +[ Thu Sep 8 17:38:18 2022 ] Mean training loss: 1.2372. 
+[ Thu Sep 8 17:38:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:38:18 2022 ] Training epoch: 10 +[ Thu Sep 8 17:38:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:41:38 2022 ] Mean training loss: 1.2403. +[ Thu Sep 8 17:41:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:41:38 2022 ] Training epoch: 11 +[ Thu Sep 8 17:41:38 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:44:58 2022 ] Mean training loss: 1.1513. +[ Thu Sep 8 17:44:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:44:58 2022 ] Training epoch: 12 +[ Thu Sep 8 17:44:58 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:48:17 2022 ] Mean training loss: 1.1086. +[ Thu Sep 8 17:48:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:48:17 2022 ] Training epoch: 13 +[ Thu Sep 8 17:48:17 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:51:36 2022 ] Mean training loss: 1.0458. +[ Thu Sep 8 17:51:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:51:36 2022 ] Training epoch: 14 +[ Thu Sep 8 17:51:36 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:54:56 2022 ] Mean training loss: 1.0168. +[ Thu Sep 8 17:54:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:54:56 2022 ] Training epoch: 15 +[ Thu Sep 8 17:54:56 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:58:15 2022 ] Mean training loss: 0.9899. +[ Thu Sep 8 17:58:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:58:15 2022 ] Training epoch: 16 +[ Thu Sep 8 17:58:15 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:01:35 2022 ] Mean training loss: 0.9604. +[ Thu Sep 8 18:01:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:01:35 2022 ] Training epoch: 17 +[ Thu Sep 8 18:01:35 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:04:54 2022 ] Mean training loss: 0.9206. 
+[ Thu Sep 8 18:04:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:04:54 2022 ] Training epoch: 18 +[ Thu Sep 8 18:04:54 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:08:14 2022 ] Mean training loss: 0.9170. +[ Thu Sep 8 18:08:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:08:14 2022 ] Training epoch: 19 +[ Thu Sep 8 18:08:14 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:11:33 2022 ] Mean training loss: 0.8993. +[ Thu Sep 8 18:11:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:11:33 2022 ] Training epoch: 20 +[ Thu Sep 8 18:11:33 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:14:53 2022 ] Mean training loss: 0.8801. +[ Thu Sep 8 18:14:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:14:53 2022 ] Training epoch: 21 +[ Thu Sep 8 18:14:53 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:18:13 2022 ] Mean training loss: 0.8675. +[ Thu Sep 8 18:18:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:18:13 2022 ] Training epoch: 22 +[ Thu Sep 8 18:18:13 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:21:34 2022 ] Mean training loss: 0.8431. +[ Thu Sep 8 18:21:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:21:34 2022 ] Training epoch: 23 +[ Thu Sep 8 18:21:34 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:24:53 2022 ] Mean training loss: 0.8398. +[ Thu Sep 8 18:24:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:24:53 2022 ] Training epoch: 24 +[ Thu Sep 8 18:24:53 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:28:13 2022 ] Mean training loss: 0.8492. +[ Thu Sep 8 18:28:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:28:13 2022 ] Training epoch: 25 +[ Thu Sep 8 18:28:13 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:31:32 2022 ] Mean training loss: 0.8149. 
+[ Thu Sep 8 18:31:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:31:32 2022 ] Training epoch: 26 +[ Thu Sep 8 18:31:32 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:34:52 2022 ] Mean training loss: 0.8054. +[ Thu Sep 8 18:34:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:34:52 2022 ] Training epoch: 27 +[ Thu Sep 8 18:34:52 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:38:11 2022 ] Mean training loss: 0.7965. +[ Thu Sep 8 18:38:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:38:11 2022 ] Training epoch: 28 +[ Thu Sep 8 18:38:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:41:31 2022 ] Mean training loss: 0.7980. +[ Thu Sep 8 18:41:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:41:31 2022 ] Training epoch: 29 +[ Thu Sep 8 18:41:31 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:44:51 2022 ] Mean training loss: 0.7702. +[ Thu Sep 8 18:44:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:44:51 2022 ] Training epoch: 30 +[ Thu Sep 8 18:44:51 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:48:10 2022 ] Mean training loss: 0.7586. +[ Thu Sep 8 18:48:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:48:10 2022 ] Training epoch: 31 +[ Thu Sep 8 18:48:10 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:51:29 2022 ] Mean training loss: 0.7650. +[ Thu Sep 8 18:51:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:51:29 2022 ] Training epoch: 32 +[ Thu Sep 8 18:51:29 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:54:49 2022 ] Mean training loss: 0.7614. +[ Thu Sep 8 18:54:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:54:49 2022 ] Training epoch: 33 +[ Thu Sep 8 18:54:49 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:58:08 2022 ] Mean training loss: 0.7581. 
+[ Thu Sep 8 18:58:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:58:08 2022 ] Training epoch: 34 +[ Thu Sep 8 18:58:08 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:01:27 2022 ] Mean training loss: 0.7449. +[ Thu Sep 8 19:01:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:01:27 2022 ] Training epoch: 35 +[ Thu Sep 8 19:01:27 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:04:47 2022 ] Mean training loss: 0.7406. +[ Thu Sep 8 19:04:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:04:47 2022 ] Training epoch: 36 +[ Thu Sep 8 19:04:47 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:08:07 2022 ] Mean training loss: 0.7246. +[ Thu Sep 8 19:08:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:08:07 2022 ] Training epoch: 37 +[ Thu Sep 8 19:08:07 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:11:28 2022 ] Mean training loss: 0.7257. +[ Thu Sep 8 19:11:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:11:28 2022 ] Training epoch: 38 +[ Thu Sep 8 19:11:28 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:14:49 2022 ] Mean training loss: 0.7255. +[ Thu Sep 8 19:14:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:14:49 2022 ] Training epoch: 39 +[ Thu Sep 8 19:14:49 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:18:11 2022 ] Mean training loss: 0.7094. +[ Thu Sep 8 19:18:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:18:11 2022 ] Training epoch: 40 +[ Thu Sep 8 19:18:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:21:32 2022 ] Mean training loss: 0.7119. +[ Thu Sep 8 19:21:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:21:32 2022 ] Training epoch: 41 +[ Thu Sep 8 19:21:32 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:24:53 2022 ] Mean training loss: 0.7044. 
+[ Thu Sep 8 19:24:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:24:53 2022 ] Training epoch: 42 +[ Thu Sep 8 19:24:53 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:28:14 2022 ] Mean training loss: 0.7079. +[ Thu Sep 8 19:28:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:28:14 2022 ] Training epoch: 43 +[ Thu Sep 8 19:28:14 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:31:36 2022 ] Mean training loss: 0.7061. +[ Thu Sep 8 19:31:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:31:36 2022 ] Training epoch: 44 +[ Thu Sep 8 19:31:36 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:34:57 2022 ] Mean training loss: 0.6839. +[ Thu Sep 8 19:34:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:34:57 2022 ] Training epoch: 45 +[ Thu Sep 8 19:34:57 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:38:18 2022 ] Mean training loss: 0.7108. +[ Thu Sep 8 19:38:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:38:18 2022 ] Training epoch: 46 +[ Thu Sep 8 19:38:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:41:40 2022 ] Mean training loss: 0.6922. +[ Thu Sep 8 19:41:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:41:40 2022 ] Training epoch: 47 +[ Thu Sep 8 19:41:40 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:45:01 2022 ] Mean training loss: 0.6817. +[ Thu Sep 8 19:45:01 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 19:45:01 2022 ] Training epoch: 48 +[ Thu Sep 8 19:45:01 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:48:20 2022 ] Mean training loss: 0.6987. +[ Thu Sep 8 19:48:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:48:20 2022 ] Training epoch: 49 +[ Thu Sep 8 19:48:20 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:51:41 2022 ] Mean training loss: 0.6709. 
+[ Thu Sep 8 19:51:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:51:41 2022 ] Training epoch: 50 +[ Thu Sep 8 19:51:41 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:55:01 2022 ] Mean training loss: 0.6657. +[ Thu Sep 8 19:55:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:55:01 2022 ] Training epoch: 51 +[ Thu Sep 8 19:55:01 2022 ] Learning rate: 0.015 +[ Thu Sep 8 19:58:21 2022 ] Mean training loss: 0.3436. +[ Thu Sep 8 19:58:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:58:21 2022 ] Eval epoch: 51 +[ Thu Sep 8 20:00:51 2022 ] Epoch 51 Curr Acc: (13340/18932)70.46% +[ Thu Sep 8 20:00:51 2022 ] Epoch 51 Best Acc 70.46% +[ Thu Sep 8 20:00:51 2022 ] Training epoch: 52 +[ Thu Sep 8 20:00:51 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:04:11 2022 ] Mean training loss: 0.2439. +[ Thu Sep 8 20:04:11 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 20:04:11 2022 ] Eval epoch: 52 +[ Thu Sep 8 20:06:41 2022 ] Epoch 52 Curr Acc: (13542/18932)71.53% +[ Thu Sep 8 20:06:41 2022 ] Epoch 52 Best Acc 71.53% +[ Thu Sep 8 20:06:41 2022 ] Training epoch: 53 +[ Thu Sep 8 20:06:41 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:10:02 2022 ] Mean training loss: 0.2031. +[ Thu Sep 8 20:10:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:10:02 2022 ] Eval epoch: 53 +[ Thu Sep 8 20:12:32 2022 ] Epoch 53 Curr Acc: (14119/18932)74.58% +[ Thu Sep 8 20:12:32 2022 ] Epoch 53 Best Acc 74.58% +[ Thu Sep 8 20:12:32 2022 ] Training epoch: 54 +[ Thu Sep 8 20:12:32 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:15:52 2022 ] Mean training loss: 0.1679. 
+[ Thu Sep 8 20:15:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:15:52 2022 ] Eval epoch: 54 +[ Thu Sep 8 20:18:22 2022 ] Epoch 54 Curr Acc: (13930/18932)73.58% +[ Thu Sep 8 20:18:22 2022 ] Epoch 53 Best Acc 74.58% +[ Thu Sep 8 20:18:22 2022 ] Training epoch: 55 +[ Thu Sep 8 20:18:22 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:21:42 2022 ] Mean training loss: 0.1441. +[ Thu Sep 8 20:21:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:21:42 2022 ] Eval epoch: 55 +[ Thu Sep 8 20:24:12 2022 ] Epoch 55 Curr Acc: (14013/18932)74.02% +[ Thu Sep 8 20:24:12 2022 ] Epoch 53 Best Acc 74.58% +[ Thu Sep 8 20:24:12 2022 ] Training epoch: 56 +[ Thu Sep 8 20:24:12 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:27:32 2022 ] Mean training loss: 0.1333. +[ Thu Sep 8 20:27:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:27:32 2022 ] Eval epoch: 56 +[ Thu Sep 8 20:30:02 2022 ] Epoch 56 Curr Acc: (14029/18932)74.10% +[ Thu Sep 8 20:30:02 2022 ] Epoch 53 Best Acc 74.58% +[ Thu Sep 8 20:30:02 2022 ] Training epoch: 57 +[ Thu Sep 8 20:30:02 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:33:22 2022 ] Mean training loss: 0.1113. +[ Thu Sep 8 20:33:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:33:22 2022 ] Eval epoch: 57 +[ Thu Sep 8 20:35:52 2022 ] Epoch 57 Curr Acc: (13977/18932)73.83% +[ Thu Sep 8 20:35:52 2022 ] Epoch 53 Best Acc 74.58% +[ Thu Sep 8 20:35:52 2022 ] Training epoch: 58 +[ Thu Sep 8 20:35:52 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:39:12 2022 ] Mean training loss: 0.0975. +[ Thu Sep 8 20:39:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:39:12 2022 ] Eval epoch: 58 +[ Thu Sep 8 20:41:42 2022 ] Epoch 58 Curr Acc: (13873/18932)73.28% +[ Thu Sep 8 20:41:42 2022 ] Epoch 53 Best Acc 74.58% +[ Thu Sep 8 20:41:42 2022 ] Training epoch: 59 +[ Thu Sep 8 20:41:42 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:45:01 2022 ] Mean training loss: 0.0813. 
+[ Thu Sep 8 20:45:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:45:01 2022 ] Eval epoch: 59 +[ Thu Sep 8 20:47:31 2022 ] Epoch 59 Curr Acc: (13932/18932)73.59% +[ Thu Sep 8 20:47:31 2022 ] Epoch 53 Best Acc 74.58% +[ Thu Sep 8 20:47:31 2022 ] Training epoch: 60 +[ Thu Sep 8 20:47:31 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:50:51 2022 ] Mean training loss: 0.0764. +[ Thu Sep 8 20:50:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:50:51 2022 ] Eval epoch: 60 +[ Thu Sep 8 20:53:21 2022 ] Epoch 60 Curr Acc: (13969/18932)73.79% +[ Thu Sep 8 20:53:21 2022 ] Epoch 53 Best Acc 74.58% +[ Thu Sep 8 20:53:21 2022 ] Training epoch: 61 +[ Thu Sep 8 20:53:21 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:56:41 2022 ] Mean training loss: 0.0668. +[ Thu Sep 8 20:56:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:56:41 2022 ] Eval epoch: 61 +[ Thu Sep 8 20:59:12 2022 ] Epoch 61 Curr Acc: (13955/18932)73.71% +[ Thu Sep 8 20:59:12 2022 ] Epoch 53 Best Acc 74.58% +[ Thu Sep 8 20:59:12 2022 ] Training epoch: 62 +[ Thu Sep 8 20:59:12 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:02:31 2022 ] Mean training loss: 0.0569. +[ Thu Sep 8 21:02:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:02:31 2022 ] Eval epoch: 62 +[ Thu Sep 8 21:05:02 2022 ] Epoch 62 Curr Acc: (14121/18932)74.59% +[ Thu Sep 8 21:05:02 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 21:05:02 2022 ] Training epoch: 63 +[ Thu Sep 8 21:05:02 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:08:21 2022 ] Mean training loss: 0.0595. +[ Thu Sep 8 21:08:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:08:21 2022 ] Eval epoch: 63 +[ Thu Sep 8 21:10:52 2022 ] Epoch 63 Curr Acc: (13903/18932)73.44% +[ Thu Sep 8 21:10:52 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 21:10:52 2022 ] Training epoch: 64 +[ Thu Sep 8 21:10:52 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:14:11 2022 ] Mean training loss: 0.0501. 
+[ Thu Sep 8 21:14:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:14:11 2022 ] Eval epoch: 64 +[ Thu Sep 8 21:16:42 2022 ] Epoch 64 Curr Acc: (13641/18932)72.05% +[ Thu Sep 8 21:16:42 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 21:16:42 2022 ] Training epoch: 65 +[ Thu Sep 8 21:16:42 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:20:02 2022 ] Mean training loss: 0.0485. +[ Thu Sep 8 21:20:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:20:02 2022 ] Eval epoch: 65 +[ Thu Sep 8 21:22:32 2022 ] Epoch 65 Curr Acc: (14036/18932)74.14% +[ Thu Sep 8 21:22:32 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 21:22:32 2022 ] Training epoch: 66 +[ Thu Sep 8 21:22:32 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:25:52 2022 ] Mean training loss: 0.0491. +[ Thu Sep 8 21:25:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:25:52 2022 ] Eval epoch: 66 +[ Thu Sep 8 21:28:22 2022 ] Epoch 66 Curr Acc: (13744/18932)72.60% +[ Thu Sep 8 21:28:22 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 21:28:22 2022 ] Training epoch: 67 +[ Thu Sep 8 21:28:22 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:31:41 2022 ] Mean training loss: 0.0444. +[ Thu Sep 8 21:31:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:31:41 2022 ] Eval epoch: 67 +[ Thu Sep 8 21:34:11 2022 ] Epoch 67 Curr Acc: (14007/18932)73.99% +[ Thu Sep 8 21:34:11 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 21:34:11 2022 ] Training epoch: 68 +[ Thu Sep 8 21:34:11 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:37:31 2022 ] Mean training loss: 0.0429. +[ Thu Sep 8 21:37:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:37:31 2022 ] Eval epoch: 68 +[ Thu Sep 8 21:40:01 2022 ] Epoch 68 Curr Acc: (13815/18932)72.97% +[ Thu Sep 8 21:40:01 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 21:40:01 2022 ] Training epoch: 69 +[ Thu Sep 8 21:40:01 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:43:21 2022 ] Mean training loss: 0.0333. 
+[ Thu Sep 8 21:43:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:43:21 2022 ] Eval epoch: 69 +[ Thu Sep 8 21:45:51 2022 ] Epoch 69 Curr Acc: (13866/18932)73.24% +[ Thu Sep 8 21:45:51 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 21:45:51 2022 ] Training epoch: 70 +[ Thu Sep 8 21:45:51 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:49:10 2022 ] Mean training loss: 0.0378. +[ Thu Sep 8 21:49:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:49:10 2022 ] Eval epoch: 70 +[ Thu Sep 8 21:51:40 2022 ] Epoch 70 Curr Acc: (13681/18932)72.26% +[ Thu Sep 8 21:51:40 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 21:51:40 2022 ] Training epoch: 71 +[ Thu Sep 8 21:51:40 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 21:55:00 2022 ] Mean training loss: 0.0269. +[ Thu Sep 8 21:55:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:55:00 2022 ] Eval epoch: 71 +[ Thu Sep 8 21:57:30 2022 ] Epoch 71 Curr Acc: (13738/18932)72.56% +[ Thu Sep 8 21:57:30 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 21:57:30 2022 ] Training epoch: 72 +[ Thu Sep 8 21:57:30 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:00:50 2022 ] Mean training loss: 0.0207. +[ Thu Sep 8 22:00:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:00:50 2022 ] Eval epoch: 72 +[ Thu Sep 8 22:03:20 2022 ] Epoch 72 Curr Acc: (13721/18932)72.48% +[ Thu Sep 8 22:03:20 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 22:03:20 2022 ] Training epoch: 73 +[ Thu Sep 8 22:03:20 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:06:38 2022 ] Mean training loss: 0.0201. 
+[ Thu Sep 8 22:06:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:06:38 2022 ] Eval epoch: 73 +[ Thu Sep 8 22:09:09 2022 ] Epoch 73 Curr Acc: (13837/18932)73.09% +[ Thu Sep 8 22:09:09 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 22:09:09 2022 ] Training epoch: 74 +[ Thu Sep 8 22:09:09 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:12:28 2022 ] Mean training loss: 0.0198. +[ Thu Sep 8 22:12:28 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 22:12:28 2022 ] Eval epoch: 74 +[ Thu Sep 8 22:14:58 2022 ] Epoch 74 Curr Acc: (13882/18932)73.33% +[ Thu Sep 8 22:14:58 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 22:14:58 2022 ] Training epoch: 75 +[ Thu Sep 8 22:14:58 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:18:18 2022 ] Mean training loss: 0.0200. +[ Thu Sep 8 22:18:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:18:18 2022 ] Eval epoch: 75 +[ Thu Sep 8 22:20:48 2022 ] Epoch 75 Curr Acc: (13590/18932)71.78% +[ Thu Sep 8 22:20:48 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 22:20:48 2022 ] Training epoch: 76 +[ Thu Sep 8 22:20:48 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:24:07 2022 ] Mean training loss: 0.0180. +[ Thu Sep 8 22:24:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:24:07 2022 ] Eval epoch: 76 +[ Thu Sep 8 22:26:37 2022 ] Epoch 76 Curr Acc: (13718/18932)72.46% +[ Thu Sep 8 22:26:37 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 22:26:37 2022 ] Training epoch: 77 +[ Thu Sep 8 22:26:37 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:29:56 2022 ] Mean training loss: 0.0161. 
+[ Thu Sep 8 22:29:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:29:56 2022 ] Eval epoch: 77 +[ Thu Sep 8 22:32:26 2022 ] Epoch 77 Curr Acc: (13810/18932)72.95% +[ Thu Sep 8 22:32:26 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 22:32:26 2022 ] Training epoch: 78 +[ Thu Sep 8 22:32:26 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:35:44 2022 ] Mean training loss: 0.0172. +[ Thu Sep 8 22:35:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:35:44 2022 ] Eval epoch: 78 +[ Thu Sep 8 22:38:15 2022 ] Epoch 78 Curr Acc: (14042/18932)74.17% +[ Thu Sep 8 22:38:15 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 22:38:15 2022 ] Training epoch: 79 +[ Thu Sep 8 22:38:15 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:41:34 2022 ] Mean training loss: 0.0172. +[ Thu Sep 8 22:41:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:41:34 2022 ] Eval epoch: 79 +[ Thu Sep 8 22:44:05 2022 ] Epoch 79 Curr Acc: (13868/18932)73.25% +[ Thu Sep 8 22:44:05 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 22:44:05 2022 ] Training epoch: 80 +[ Thu Sep 8 22:44:05 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:47:25 2022 ] Mean training loss: 0.0148. +[ Thu Sep 8 22:47:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:47:25 2022 ] Eval epoch: 80 +[ Thu Sep 8 22:49:55 2022 ] Epoch 80 Curr Acc: (13922/18932)73.54% +[ Thu Sep 8 22:49:55 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 22:49:55 2022 ] Training epoch: 81 +[ Thu Sep 8 22:49:55 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:53:15 2022 ] Mean training loss: 0.0156. 
+[ Thu Sep 8 22:53:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:53:15 2022 ] Eval epoch: 81 +[ Thu Sep 8 22:55:46 2022 ] Epoch 81 Curr Acc: (13907/18932)73.46% +[ Thu Sep 8 22:55:46 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 22:55:46 2022 ] Training epoch: 82 +[ Thu Sep 8 22:55:46 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:59:06 2022 ] Mean training loss: 0.0140. +[ Thu Sep 8 22:59:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:59:06 2022 ] Eval epoch: 82 +[ Thu Sep 8 23:01:36 2022 ] Epoch 82 Curr Acc: (13918/18932)73.52% +[ Thu Sep 8 23:01:36 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 23:01:36 2022 ] Training epoch: 83 +[ Thu Sep 8 23:01:36 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:04:56 2022 ] Mean training loss: 0.0146. +[ Thu Sep 8 23:04:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:04:56 2022 ] Eval epoch: 83 +[ Thu Sep 8 23:07:26 2022 ] Epoch 83 Curr Acc: (13787/18932)72.82% +[ Thu Sep 8 23:07:26 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 23:07:26 2022 ] Training epoch: 84 +[ Thu Sep 8 23:07:26 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:10:46 2022 ] Mean training loss: 0.0159. +[ Thu Sep 8 23:10:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:10:46 2022 ] Eval epoch: 84 +[ Thu Sep 8 23:13:16 2022 ] Epoch 84 Curr Acc: (14027/18932)74.09% +[ Thu Sep 8 23:13:16 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 23:13:16 2022 ] Training epoch: 85 +[ Thu Sep 8 23:13:16 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:16:36 2022 ] Mean training loss: 0.0148. 
+[ Thu Sep 8 23:16:36 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 23:16:36 2022 ] Eval epoch: 85 +[ Thu Sep 8 23:19:07 2022 ] Epoch 85 Curr Acc: (13930/18932)73.58% +[ Thu Sep 8 23:19:07 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 23:19:07 2022 ] Training epoch: 86 +[ Thu Sep 8 23:19:07 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:22:27 2022 ] Mean training loss: 0.0135. +[ Thu Sep 8 23:22:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:22:27 2022 ] Eval epoch: 86 +[ Thu Sep 8 23:24:57 2022 ] Epoch 86 Curr Acc: (14004/18932)73.97% +[ Thu Sep 8 23:24:57 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 23:24:57 2022 ] Training epoch: 87 +[ Thu Sep 8 23:24:57 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:28:16 2022 ] Mean training loss: 0.0131. +[ Thu Sep 8 23:28:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:28:17 2022 ] Eval epoch: 87 +[ Thu Sep 8 23:30:47 2022 ] Epoch 87 Curr Acc: (13929/18932)73.57% +[ Thu Sep 8 23:30:47 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 23:30:47 2022 ] Training epoch: 88 +[ Thu Sep 8 23:30:47 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:34:06 2022 ] Mean training loss: 0.0137. +[ Thu Sep 8 23:34:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:34:06 2022 ] Eval epoch: 88 +[ Thu Sep 8 23:36:37 2022 ] Epoch 88 Curr Acc: (13840/18932)73.10% +[ Thu Sep 8 23:36:37 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 23:36:37 2022 ] Training epoch: 89 +[ Thu Sep 8 23:36:37 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:39:56 2022 ] Mean training loss: 0.0147. 
+[ Thu Sep 8 23:39:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:39:57 2022 ] Eval epoch: 89 +[ Thu Sep 8 23:42:27 2022 ] Epoch 89 Curr Acc: (13861/18932)73.21% +[ Thu Sep 8 23:42:27 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 23:42:27 2022 ] Training epoch: 90 +[ Thu Sep 8 23:42:27 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:45:46 2022 ] Mean training loss: 0.0133. +[ Thu Sep 8 23:45:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:45:47 2022 ] Eval epoch: 90 +[ Thu Sep 8 23:48:17 2022 ] Epoch 90 Curr Acc: (13874/18932)73.28% +[ Thu Sep 8 23:48:17 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 23:48:17 2022 ] Training epoch: 91 +[ Thu Sep 8 23:48:17 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:51:37 2022 ] Mean training loss: 0.0120. +[ Thu Sep 8 23:51:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:51:37 2022 ] Eval epoch: 91 +[ Thu Sep 8 23:54:07 2022 ] Epoch 91 Curr Acc: (13992/18932)73.91% +[ Thu Sep 8 23:54:07 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 23:54:07 2022 ] Training epoch: 92 +[ Thu Sep 8 23:54:07 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:57:27 2022 ] Mean training loss: 0.0137. +[ Thu Sep 8 23:57:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:57:27 2022 ] Eval epoch: 92 +[ Thu Sep 8 23:59:57 2022 ] Epoch 92 Curr Acc: (14099/18932)74.47% +[ Thu Sep 8 23:59:57 2022 ] Epoch 62 Best Acc 74.59% +[ Thu Sep 8 23:59:57 2022 ] Training epoch: 93 +[ Thu Sep 8 23:59:57 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:03:16 2022 ] Mean training loss: 0.0138. 
+[ Fri Sep 9 00:03:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:03:16 2022 ] Eval epoch: 93 +[ Fri Sep 9 00:05:47 2022 ] Epoch 93 Curr Acc: (13910/18932)73.47% +[ Fri Sep 9 00:05:47 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 00:05:47 2022 ] Training epoch: 94 +[ Fri Sep 9 00:05:47 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:09:05 2022 ] Mean training loss: 0.0129. +[ Fri Sep 9 00:09:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:09:05 2022 ] Eval epoch: 94 +[ Fri Sep 9 00:11:35 2022 ] Epoch 94 Curr Acc: (14005/18932)73.98% +[ Fri Sep 9 00:11:35 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 00:11:35 2022 ] Training epoch: 95 +[ Fri Sep 9 00:11:35 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:14:54 2022 ] Mean training loss: 0.0136. +[ Fri Sep 9 00:14:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:14:54 2022 ] Eval epoch: 95 +[ Fri Sep 9 00:17:24 2022 ] Epoch 95 Curr Acc: (14025/18932)74.08% +[ Fri Sep 9 00:17:24 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 00:17:24 2022 ] Training epoch: 96 +[ Fri Sep 9 00:17:24 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:20:44 2022 ] Mean training loss: 0.0128. +[ Fri Sep 9 00:20:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:20:44 2022 ] Eval epoch: 96 +[ Fri Sep 9 00:23:14 2022 ] Epoch 96 Curr Acc: (13949/18932)73.68% +[ Fri Sep 9 00:23:14 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 00:23:14 2022 ] Training epoch: 97 +[ Fri Sep 9 00:23:14 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:26:34 2022 ] Mean training loss: 0.0128. 
+[ Fri Sep 9 00:26:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:26:34 2022 ] Eval epoch: 97 +[ Fri Sep 9 00:29:04 2022 ] Epoch 97 Curr Acc: (13903/18932)73.44% +[ Fri Sep 9 00:29:04 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 00:29:04 2022 ] Training epoch: 98 +[ Fri Sep 9 00:29:04 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:32:23 2022 ] Mean training loss: 0.0127. +[ Fri Sep 9 00:32:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:32:23 2022 ] Eval epoch: 98 +[ Fri Sep 9 00:34:53 2022 ] Epoch 98 Curr Acc: (13930/18932)73.58% +[ Fri Sep 9 00:34:53 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 00:34:53 2022 ] Training epoch: 99 +[ Fri Sep 9 00:34:53 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:38:13 2022 ] Mean training loss: 0.0136. +[ Fri Sep 9 00:38:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:38:13 2022 ] Eval epoch: 99 +[ Fri Sep 9 00:40:43 2022 ] Epoch 99 Curr Acc: (13665/18932)72.18% +[ Fri Sep 9 00:40:43 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 00:40:43 2022 ] Training epoch: 100 +[ Fri Sep 9 00:40:43 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:44:03 2022 ] Mean training loss: 0.0143. +[ Fri Sep 9 00:44:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:44:03 2022 ] Eval epoch: 100 +[ Fri Sep 9 00:46:33 2022 ] Epoch 100 Curr Acc: (14032/18932)74.12% +[ Fri Sep 9 00:46:33 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 00:46:33 2022 ] Training epoch: 101 +[ Fri Sep 9 00:46:33 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:49:53 2022 ] Mean training loss: 0.0156. 
+[ Fri Sep 9 00:49:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:49:53 2022 ] Eval epoch: 101 +[ Fri Sep 9 00:52:23 2022 ] Epoch 101 Curr Acc: (13956/18932)73.72% +[ Fri Sep 9 00:52:23 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 00:52:23 2022 ] Training epoch: 102 +[ Fri Sep 9 00:52:23 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:55:43 2022 ] Mean training loss: 0.0129. +[ Fri Sep 9 00:55:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:55:43 2022 ] Eval epoch: 102 +[ Fri Sep 9 00:58:13 2022 ] Epoch 102 Curr Acc: (13923/18932)73.54% +[ Fri Sep 9 00:58:13 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 00:58:13 2022 ] Training epoch: 103 +[ Fri Sep 9 00:58:13 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:01:32 2022 ] Mean training loss: 0.0128. +[ Fri Sep 9 01:01:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:01:33 2022 ] Eval epoch: 103 +[ Fri Sep 9 01:04:03 2022 ] Epoch 103 Curr Acc: (13836/18932)73.08% +[ Fri Sep 9 01:04:03 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 01:04:03 2022 ] Training epoch: 104 +[ Fri Sep 9 01:04:03 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:07:22 2022 ] Mean training loss: 0.0136. +[ Fri Sep 9 01:07:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:07:22 2022 ] Eval epoch: 104 +[ Fri Sep 9 01:09:53 2022 ] Epoch 104 Curr Acc: (13684/18932)72.28% +[ Fri Sep 9 01:09:53 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 01:09:53 2022 ] Training epoch: 105 +[ Fri Sep 9 01:09:53 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:13:12 2022 ] Mean training loss: 0.0119. 
+[ Fri Sep 9 01:13:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:13:12 2022 ] Eval epoch: 105 +[ Fri Sep 9 01:15:42 2022 ] Epoch 105 Curr Acc: (13812/18932)72.96% +[ Fri Sep 9 01:15:42 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 01:15:42 2022 ] Training epoch: 106 +[ Fri Sep 9 01:15:42 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:19:01 2022 ] Mean training loss: 0.0149. +[ Fri Sep 9 01:19:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:19:01 2022 ] Eval epoch: 106 +[ Fri Sep 9 01:21:31 2022 ] Epoch 106 Curr Acc: (13930/18932)73.58% +[ Fri Sep 9 01:21:31 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 01:21:31 2022 ] Training epoch: 107 +[ Fri Sep 9 01:21:31 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:24:50 2022 ] Mean training loss: 0.0139. +[ Fri Sep 9 01:24:50 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 01:24:50 2022 ] Eval epoch: 107 +[ Fri Sep 9 01:27:20 2022 ] Epoch 107 Curr Acc: (13993/18932)73.91% +[ Fri Sep 9 01:27:20 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 01:27:20 2022 ] Training epoch: 108 +[ Fri Sep 9 01:27:20 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:30:39 2022 ] Mean training loss: 0.0141. +[ Fri Sep 9 01:30:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:30:40 2022 ] Eval epoch: 108 +[ Fri Sep 9 01:33:10 2022 ] Epoch 108 Curr Acc: (14017/18932)74.04% +[ Fri Sep 9 01:33:10 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 01:33:10 2022 ] Training epoch: 109 +[ Fri Sep 9 01:33:10 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:36:30 2022 ] Mean training loss: 0.0124. 
+[ Fri Sep 9 01:36:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:36:30 2022 ] Eval epoch: 109 +[ Fri Sep 9 01:39:00 2022 ] Epoch 109 Curr Acc: (14043/18932)74.18% +[ Fri Sep 9 01:39:00 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 01:39:00 2022 ] Training epoch: 110 +[ Fri Sep 9 01:39:00 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:42:20 2022 ] Mean training loss: 0.0123. +[ Fri Sep 9 01:42:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:42:20 2022 ] Eval epoch: 110 +[ Fri Sep 9 01:44:50 2022 ] Epoch 110 Curr Acc: (13970/18932)73.79% +[ Fri Sep 9 01:44:50 2022 ] Epoch 62 Best Acc 74.59% +[ Fri Sep 9 01:44:50 2022 ] epoch: 62, best accuracy: 0.7458799915487007 +[ Fri Sep 9 01:44:50 2022 ] Experiment: ./work_dir/ntu/xview_b +[ Fri Sep 9 01:44:50 2022 ] # generator parameters: 2.896055 M. +[ Fri Sep 9 01:44:50 2022 ] Load weights from ./runs/ntu/xview_b/runs-61-61132.pt. +[ Fri Sep 9 01:44:50 2022 ] Eval epoch: 1 +[ Fri Sep 9 01:47:21 2022 ] Epoch 1 Curr Acc: (14121/18932)74.59% +[ Fri Sep 9 01:47:21 2022 ] Epoch 62 Best Acc 74.59% diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_bm/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu60_xview/xview_bm/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_bm/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions import 
Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_bm/config.yaml b/ckpt/Others/MST-GCN/ntu60_xview/xview_bm/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..2d60604958ce226cc5b7a466ab4e23f95d30d5d6 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_bm/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu/xview_bm.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + 
- 448 + - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 60 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu/xview_bm/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_data_bone_motion.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_data_bone_motion.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu/xview_bm diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_bm/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu60_xview/xview_bm/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..72ff399e3ddd78de14dc72d485a19a7b24537e60 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_bm/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f51cb1e9e9e9f48a96813c1f361acf2f88ef9b33dc67a43de3eeba9434ea6e71 +size 5718404 diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_bm/log.txt b/ckpt/Others/MST-GCN/ntu60_xview/xview_bm/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..e46f3b7df7329ee2786cd6c392d8863edbd48dca --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_bm/log.txt @@ -0,0 +1,631 @@ 
+[ Thu Sep 8 17:08:26 2022 ] # generator parameters: 2.896055 M. +[ Thu Sep 8 17:08:27 2022 ] Parameters: +{'work_dir': './work_dir/ntu/xview_bm', 'model_saved_name': './runs/ntu/xview_bm/runs', 'config': 'config/ntu/xview_bm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Thu Sep 8 17:08:27 2022 ] Training epoch: 1 +[ Thu Sep 8 17:08:27 2022 ] Learning rate: 
0.015 +[ Thu Sep 8 17:11:49 2022 ] Mean training loss: 3.2108. +[ Thu Sep 8 17:11:49 2022 ] Time consumption: [Data]02%, [Network]98% +[ Thu Sep 8 17:11:49 2022 ] Training epoch: 2 +[ Thu Sep 8 17:11:49 2022 ] Learning rate: 0.03 +[ Thu Sep 8 17:15:11 2022 ] Mean training loss: 2.5280. +[ Thu Sep 8 17:15:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:15:11 2022 ] Training epoch: 3 +[ Thu Sep 8 17:15:11 2022 ] Learning rate: 0.045 +[ Thu Sep 8 17:18:33 2022 ] Mean training loss: 2.1088. +[ Thu Sep 8 17:18:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:18:33 2022 ] Training epoch: 4 +[ Thu Sep 8 17:18:33 2022 ] Learning rate: 0.06 +[ Thu Sep 8 17:21:55 2022 ] Mean training loss: 1.8071. +[ Thu Sep 8 17:21:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:21:55 2022 ] Training epoch: 5 +[ Thu Sep 8 17:21:55 2022 ] Learning rate: 0.075 +[ Thu Sep 8 17:25:16 2022 ] Mean training loss: 1.6173. +[ Thu Sep 8 17:25:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:25:16 2022 ] Training epoch: 6 +[ Thu Sep 8 17:25:16 2022 ] Learning rate: 0.09 +[ Thu Sep 8 17:28:38 2022 ] Mean training loss: 1.4668. +[ Thu Sep 8 17:28:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:28:38 2022 ] Training epoch: 7 +[ Thu Sep 8 17:28:38 2022 ] Learning rate: 0.10500000000000001 +[ Thu Sep 8 17:32:00 2022 ] Mean training loss: 1.3950. +[ Thu Sep 8 17:32:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:32:00 2022 ] Training epoch: 8 +[ Thu Sep 8 17:32:00 2022 ] Learning rate: 0.12 +[ Thu Sep 8 17:35:23 2022 ] Mean training loss: 1.3026. +[ Thu Sep 8 17:35:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:35:23 2022 ] Training epoch: 9 +[ Thu Sep 8 17:35:23 2022 ] Learning rate: 0.13499999999999998 +[ Thu Sep 8 17:38:45 2022 ] Mean training loss: 1.2720. 
+[ Thu Sep 8 17:38:45 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:38:45 2022 ] Training epoch: 10 +[ Thu Sep 8 17:38:45 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:42:06 2022 ] Mean training loss: 1.2205. +[ Thu Sep 8 17:42:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:42:06 2022 ] Training epoch: 11 +[ Thu Sep 8 17:42:06 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:45:28 2022 ] Mean training loss: 1.1465. +[ Thu Sep 8 17:45:28 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:45:28 2022 ] Training epoch: 12 +[ Thu Sep 8 17:45:28 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:48:51 2022 ] Mean training loss: 1.1062. +[ Thu Sep 8 17:48:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:48:51 2022 ] Training epoch: 13 +[ Thu Sep 8 17:48:51 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:52:13 2022 ] Mean training loss: 1.0360. +[ Thu Sep 8 17:52:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:52:13 2022 ] Training epoch: 14 +[ Thu Sep 8 17:52:13 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:55:35 2022 ] Mean training loss: 1.0269. +[ Thu Sep 8 17:55:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:55:35 2022 ] Training epoch: 15 +[ Thu Sep 8 17:55:35 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:58:56 2022 ] Mean training loss: 0.9875. +[ Thu Sep 8 17:58:56 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:58:56 2022 ] Training epoch: 16 +[ Thu Sep 8 17:58:56 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:02:18 2022 ] Mean training loss: 0.9633. +[ Thu Sep 8 18:02:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:02:18 2022 ] Training epoch: 17 +[ Thu Sep 8 18:02:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:05:40 2022 ] Mean training loss: 0.9229. 
+[ Thu Sep 8 18:05:40 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:05:40 2022 ] Training epoch: 18 +[ Thu Sep 8 18:05:40 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:09:02 2022 ] Mean training loss: 0.9080. +[ Thu Sep 8 18:09:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:09:02 2022 ] Training epoch: 19 +[ Thu Sep 8 18:09:02 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:12:24 2022 ] Mean training loss: 0.9056. +[ Thu Sep 8 18:12:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:12:24 2022 ] Training epoch: 20 +[ Thu Sep 8 18:12:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:15:45 2022 ] Mean training loss: 0.8655. +[ Thu Sep 8 18:15:45 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:15:45 2022 ] Training epoch: 21 +[ Thu Sep 8 18:15:45 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:19:07 2022 ] Mean training loss: 0.8559. +[ Thu Sep 8 18:19:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:19:07 2022 ] Training epoch: 22 +[ Thu Sep 8 18:19:07 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:22:29 2022 ] Mean training loss: 0.8353. +[ Thu Sep 8 18:22:29 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:22:29 2022 ] Training epoch: 23 +[ Thu Sep 8 18:22:29 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:25:50 2022 ] Mean training loss: 0.8104. +[ Thu Sep 8 18:25:50 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:25:50 2022 ] Training epoch: 24 +[ Thu Sep 8 18:25:50 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:29:11 2022 ] Mean training loss: 0.8355. +[ Thu Sep 8 18:29:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:29:11 2022 ] Training epoch: 25 +[ Thu Sep 8 18:29:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:32:33 2022 ] Mean training loss: 0.7994. 
+[ Thu Sep 8 18:32:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:32:33 2022 ] Training epoch: 26 +[ Thu Sep 8 18:32:33 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:35:55 2022 ] Mean training loss: 0.7868. +[ Thu Sep 8 18:35:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:35:55 2022 ] Training epoch: 27 +[ Thu Sep 8 18:35:55 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:39:17 2022 ] Mean training loss: 0.7717. +[ Thu Sep 8 18:39:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:39:17 2022 ] Training epoch: 28 +[ Thu Sep 8 18:39:17 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:42:39 2022 ] Mean training loss: 0.7697. +[ Thu Sep 8 18:42:39 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:42:39 2022 ] Training epoch: 29 +[ Thu Sep 8 18:42:39 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:46:00 2022 ] Mean training loss: 0.7481. +[ Thu Sep 8 18:46:00 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:46:00 2022 ] Training epoch: 30 +[ Thu Sep 8 18:46:00 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:49:22 2022 ] Mean training loss: 0.7593. +[ Thu Sep 8 18:49:22 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:49:22 2022 ] Training epoch: 31 +[ Thu Sep 8 18:49:22 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:52:44 2022 ] Mean training loss: 0.7358. +[ Thu Sep 8 18:52:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:52:44 2022 ] Training epoch: 32 +[ Thu Sep 8 18:52:44 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:56:05 2022 ] Mean training loss: 0.7413. +[ Thu Sep 8 18:56:05 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:56:05 2022 ] Training epoch: 33 +[ Thu Sep 8 18:56:05 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:59:27 2022 ] Mean training loss: 0.7301. 
+[ Thu Sep 8 18:59:27 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 18:59:27 2022 ] Training epoch: 34 +[ Thu Sep 8 18:59:27 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:02:48 2022 ] Mean training loss: 0.7238. +[ Thu Sep 8 19:02:48 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:02:48 2022 ] Training epoch: 35 +[ Thu Sep 8 19:02:48 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:06:10 2022 ] Mean training loss: 0.7290. +[ Thu Sep 8 19:06:10 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:06:10 2022 ] Training epoch: 36 +[ Thu Sep 8 19:06:10 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:09:32 2022 ] Mean training loss: 0.7121. +[ Thu Sep 8 19:09:32 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:09:32 2022 ] Training epoch: 37 +[ Thu Sep 8 19:09:32 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:12:53 2022 ] Mean training loss: 0.7048. +[ Thu Sep 8 19:12:53 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:12:53 2022 ] Training epoch: 38 +[ Thu Sep 8 19:12:53 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:16:15 2022 ] Mean training loss: 0.6835. +[ Thu Sep 8 19:16:15 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:16:15 2022 ] Training epoch: 39 +[ Thu Sep 8 19:16:15 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:19:36 2022 ] Mean training loss: 0.6868. +[ Thu Sep 8 19:19:36 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:19:36 2022 ] Training epoch: 40 +[ Thu Sep 8 19:19:36 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:22:58 2022 ] Mean training loss: 0.6963. +[ Thu Sep 8 19:22:58 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:22:58 2022 ] Training epoch: 41 +[ Thu Sep 8 19:22:58 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:26:20 2022 ] Mean training loss: 0.6769. 
+[ Thu Sep 8 19:26:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:26:20 2022 ] Training epoch: 42 +[ Thu Sep 8 19:26:20 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:29:41 2022 ] Mean training loss: 0.6870. +[ Thu Sep 8 19:29:41 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:29:41 2022 ] Training epoch: 43 +[ Thu Sep 8 19:29:41 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:33:03 2022 ] Mean training loss: 0.6810. +[ Thu Sep 8 19:33:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:33:03 2022 ] Training epoch: 44 +[ Thu Sep 8 19:33:03 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:36:24 2022 ] Mean training loss: 0.6523. +[ Thu Sep 8 19:36:24 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:36:24 2022 ] Training epoch: 45 +[ Thu Sep 8 19:36:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:39:46 2022 ] Mean training loss: 0.6810. +[ Thu Sep 8 19:39:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:39:46 2022 ] Training epoch: 46 +[ Thu Sep 8 19:39:46 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:43:07 2022 ] Mean training loss: 0.6648. +[ Thu Sep 8 19:43:07 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:43:07 2022 ] Training epoch: 47 +[ Thu Sep 8 19:43:07 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:46:29 2022 ] Mean training loss: 0.6393. +[ Thu Sep 8 19:46:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:46:29 2022 ] Training epoch: 48 +[ Thu Sep 8 19:46:29 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:49:51 2022 ] Mean training loss: 0.6549. +[ Thu Sep 8 19:49:51 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:49:51 2022 ] Training epoch: 49 +[ Thu Sep 8 19:49:51 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:53:12 2022 ] Mean training loss: 0.6391. 
+[ Thu Sep 8 19:53:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:53:12 2022 ] Training epoch: 50 +[ Thu Sep 8 19:53:12 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:56:34 2022 ] Mean training loss: 0.6535. +[ Thu Sep 8 19:56:34 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 19:56:34 2022 ] Training epoch: 51 +[ Thu Sep 8 19:56:34 2022 ] Learning rate: 0.015 +[ Thu Sep 8 19:59:56 2022 ] Mean training loss: 0.2922. +[ Thu Sep 8 19:59:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:59:56 2022 ] Eval epoch: 51 +[ Thu Sep 8 20:02:32 2022 ] Epoch 51 Curr Acc: (13393/18932)70.74% +[ Thu Sep 8 20:02:32 2022 ] Epoch 51 Best Acc 70.74% +[ Thu Sep 8 20:02:32 2022 ] Training epoch: 52 +[ Thu Sep 8 20:02:32 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:05:53 2022 ] Mean training loss: 0.1823. +[ Thu Sep 8 20:05:53 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 20:05:53 2022 ] Eval epoch: 52 +[ Thu Sep 8 20:08:24 2022 ] Epoch 52 Curr Acc: (13547/18932)71.56% +[ Thu Sep 8 20:08:24 2022 ] Epoch 52 Best Acc 71.56% +[ Thu Sep 8 20:08:24 2022 ] Training epoch: 53 +[ Thu Sep 8 20:08:24 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:11:45 2022 ] Mean training loss: 0.1477. +[ Thu Sep 8 20:11:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:11:45 2022 ] Eval epoch: 53 +[ Thu Sep 8 20:14:15 2022 ] Epoch 53 Curr Acc: (13403/18932)70.80% +[ Thu Sep 8 20:14:15 2022 ] Epoch 52 Best Acc 71.56% +[ Thu Sep 8 20:14:15 2022 ] Training epoch: 54 +[ Thu Sep 8 20:14:15 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:17:36 2022 ] Mean training loss: 0.1138. 
+[ Thu Sep 8 20:17:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:17:36 2022 ] Eval epoch: 54 +[ Thu Sep 8 20:20:06 2022 ] Epoch 54 Curr Acc: (13643/18932)72.06% +[ Thu Sep 8 20:20:06 2022 ] Epoch 54 Best Acc 72.06% +[ Thu Sep 8 20:20:06 2022 ] Training epoch: 55 +[ Thu Sep 8 20:20:06 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:23:27 2022 ] Mean training loss: 0.0959. +[ Thu Sep 8 20:23:27 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 20:23:27 2022 ] Eval epoch: 55 +[ Thu Sep 8 20:25:57 2022 ] Epoch 55 Curr Acc: (13598/18932)71.83% +[ Thu Sep 8 20:25:57 2022 ] Epoch 54 Best Acc 72.06% +[ Thu Sep 8 20:25:57 2022 ] Training epoch: 56 +[ Thu Sep 8 20:25:57 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:29:18 2022 ] Mean training loss: 0.0766. +[ Thu Sep 8 20:29:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:29:18 2022 ] Eval epoch: 56 +[ Thu Sep 8 20:31:48 2022 ] Epoch 56 Curr Acc: (13619/18932)71.94% +[ Thu Sep 8 20:31:48 2022 ] Epoch 54 Best Acc 72.06% +[ Thu Sep 8 20:31:48 2022 ] Training epoch: 57 +[ Thu Sep 8 20:31:48 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:35:09 2022 ] Mean training loss: 0.0665. +[ Thu Sep 8 20:35:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:35:09 2022 ] Eval epoch: 57 +[ Thu Sep 8 20:37:40 2022 ] Epoch 57 Curr Acc: (13603/18932)71.85% +[ Thu Sep 8 20:37:40 2022 ] Epoch 54 Best Acc 72.06% +[ Thu Sep 8 20:37:40 2022 ] Training epoch: 58 +[ Thu Sep 8 20:37:40 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:41:01 2022 ] Mean training loss: 0.0552. +[ Thu Sep 8 20:41:01 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 20:41:01 2022 ] Eval epoch: 58 +[ Thu Sep 8 20:43:31 2022 ] Epoch 58 Curr Acc: (13462/18932)71.11% +[ Thu Sep 8 20:43:31 2022 ] Epoch 54 Best Acc 72.06% +[ Thu Sep 8 20:43:31 2022 ] Training epoch: 59 +[ Thu Sep 8 20:43:31 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:46:52 2022 ] Mean training loss: 0.0457. 
+[ Thu Sep 8 20:46:52 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 20:46:52 2022 ] Eval epoch: 59 +[ Thu Sep 8 20:49:22 2022 ] Epoch 59 Curr Acc: (13710/18932)72.42% +[ Thu Sep 8 20:49:22 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 20:49:22 2022 ] Training epoch: 60 +[ Thu Sep 8 20:49:22 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:52:44 2022 ] Mean training loss: 0.0477. +[ Thu Sep 8 20:52:44 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 20:52:44 2022 ] Eval epoch: 60 +[ Thu Sep 8 20:55:14 2022 ] Epoch 60 Curr Acc: (13465/18932)71.12% +[ Thu Sep 8 20:55:14 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 20:55:14 2022 ] Training epoch: 61 +[ Thu Sep 8 20:55:14 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:58:36 2022 ] Mean training loss: 0.0411. +[ Thu Sep 8 20:58:36 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 20:58:36 2022 ] Eval epoch: 61 +[ Thu Sep 8 21:01:06 2022 ] Epoch 61 Curr Acc: (13580/18932)71.73% +[ Thu Sep 8 21:01:06 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 21:01:06 2022 ] Training epoch: 62 +[ Thu Sep 8 21:01:06 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:04:28 2022 ] Mean training loss: 0.0329. +[ Thu Sep 8 21:04:28 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 21:04:28 2022 ] Eval epoch: 62 +[ Thu Sep 8 21:06:58 2022 ] Epoch 62 Curr Acc: (13468/18932)71.14% +[ Thu Sep 8 21:06:58 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 21:06:58 2022 ] Training epoch: 63 +[ Thu Sep 8 21:06:58 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:10:20 2022 ] Mean training loss: 0.0294. +[ Thu Sep 8 21:10:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:10:20 2022 ] Eval epoch: 63 +[ Thu Sep 8 21:12:51 2022 ] Epoch 63 Curr Acc: (13585/18932)71.76% +[ Thu Sep 8 21:12:51 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 21:12:51 2022 ] Training epoch: 64 +[ Thu Sep 8 21:12:51 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:16:12 2022 ] Mean training loss: 0.0281. 
+[ Thu Sep 8 21:16:12 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 21:16:12 2022 ] Eval epoch: 64 +[ Thu Sep 8 21:18:43 2022 ] Epoch 64 Curr Acc: (12882/18932)68.04% +[ Thu Sep 8 21:18:43 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 21:18:43 2022 ] Training epoch: 65 +[ Thu Sep 8 21:18:43 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:22:04 2022 ] Mean training loss: 0.0256. +[ Thu Sep 8 21:22:04 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 21:22:04 2022 ] Eval epoch: 65 +[ Thu Sep 8 21:24:35 2022 ] Epoch 65 Curr Acc: (13302/18932)70.26% +[ Thu Sep 8 21:24:35 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 21:24:35 2022 ] Training epoch: 66 +[ Thu Sep 8 21:24:35 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:27:56 2022 ] Mean training loss: 0.0290. +[ Thu Sep 8 21:27:56 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 21:27:56 2022 ] Eval epoch: 66 +[ Thu Sep 8 21:30:26 2022 ] Epoch 66 Curr Acc: (13439/18932)70.99% +[ Thu Sep 8 21:30:26 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 21:30:26 2022 ] Training epoch: 67 +[ Thu Sep 8 21:30:26 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:33:47 2022 ] Mean training loss: 0.0199. +[ Thu Sep 8 21:33:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:33:47 2022 ] Eval epoch: 67 +[ Thu Sep 8 21:36:18 2022 ] Epoch 67 Curr Acc: (13345/18932)70.49% +[ Thu Sep 8 21:36:18 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 21:36:18 2022 ] Training epoch: 68 +[ Thu Sep 8 21:36:18 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:39:39 2022 ] Mean training loss: 0.0200. +[ Thu Sep 8 21:39:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:39:39 2022 ] Eval epoch: 68 +[ Thu Sep 8 21:42:09 2022 ] Epoch 68 Curr Acc: (13478/18932)71.19% +[ Thu Sep 8 21:42:09 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 21:42:09 2022 ] Training epoch: 69 +[ Thu Sep 8 21:42:09 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:45:30 2022 ] Mean training loss: 0.0164. 
+[ Thu Sep 8 21:45:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:45:30 2022 ] Eval epoch: 69 +[ Thu Sep 8 21:48:01 2022 ] Epoch 69 Curr Acc: (13368/18932)70.61% +[ Thu Sep 8 21:48:01 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 21:48:01 2022 ] Training epoch: 70 +[ Thu Sep 8 21:48:01 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:51:22 2022 ] Mean training loss: 0.0171. +[ Thu Sep 8 21:51:22 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 21:51:22 2022 ] Eval epoch: 70 +[ Thu Sep 8 21:53:52 2022 ] Epoch 70 Curr Acc: (13525/18932)71.44% +[ Thu Sep 8 21:53:52 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 21:53:52 2022 ] Training epoch: 71 +[ Thu Sep 8 21:53:52 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 21:57:13 2022 ] Mean training loss: 0.0151. +[ Thu Sep 8 21:57:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:57:13 2022 ] Eval epoch: 71 +[ Thu Sep 8 21:59:44 2022 ] Epoch 71 Curr Acc: (13634/18932)72.02% +[ Thu Sep 8 21:59:44 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 21:59:44 2022 ] Training epoch: 72 +[ Thu Sep 8 21:59:44 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:03:05 2022 ] Mean training loss: 0.0133. +[ Thu Sep 8 22:03:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:03:05 2022 ] Eval epoch: 72 +[ Thu Sep 8 22:05:35 2022 ] Epoch 72 Curr Acc: (13480/18932)71.20% +[ Thu Sep 8 22:05:35 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 22:05:35 2022 ] Training epoch: 73 +[ Thu Sep 8 22:05:35 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:08:56 2022 ] Mean training loss: 0.0125. 
+[ Thu Sep 8 22:08:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:08:56 2022 ] Eval epoch: 73 +[ Thu Sep 8 22:11:26 2022 ] Epoch 73 Curr Acc: (13563/18932)71.64% +[ Thu Sep 8 22:11:26 2022 ] Epoch 59 Best Acc 72.42% +[ Thu Sep 8 22:11:26 2022 ] Training epoch: 74 +[ Thu Sep 8 22:11:26 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:14:47 2022 ] Mean training loss: 0.0132. +[ Thu Sep 8 22:14:47 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 22:14:47 2022 ] Eval epoch: 74 +[ Thu Sep 8 22:17:18 2022 ] Epoch 74 Curr Acc: (13751/18932)72.63% +[ Thu Sep 8 22:17:18 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 22:17:18 2022 ] Training epoch: 75 +[ Thu Sep 8 22:17:18 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:20:39 2022 ] Mean training loss: 0.0116. +[ Thu Sep 8 22:20:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:20:39 2022 ] Eval epoch: 75 +[ Thu Sep 8 22:23:09 2022 ] Epoch 75 Curr Acc: (13623/18932)71.96% +[ Thu Sep 8 22:23:09 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 22:23:09 2022 ] Training epoch: 76 +[ Thu Sep 8 22:23:09 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:26:30 2022 ] Mean training loss: 0.0126. +[ Thu Sep 8 22:26:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:26:30 2022 ] Eval epoch: 76 +[ Thu Sep 8 22:29:01 2022 ] Epoch 76 Curr Acc: (13610/18932)71.89% +[ Thu Sep 8 22:29:01 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 22:29:01 2022 ] Training epoch: 77 +[ Thu Sep 8 22:29:01 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:32:21 2022 ] Mean training loss: 0.0111. 
+[ Thu Sep 8 22:32:21 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 22:32:21 2022 ] Eval epoch: 77 +[ Thu Sep 8 22:34:52 2022 ] Epoch 77 Curr Acc: (13326/18932)70.39% +[ Thu Sep 8 22:34:52 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 22:34:52 2022 ] Training epoch: 78 +[ Thu Sep 8 22:34:52 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:38:13 2022 ] Mean training loss: 0.0114. +[ Thu Sep 8 22:38:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:38:13 2022 ] Eval epoch: 78 +[ Thu Sep 8 22:40:44 2022 ] Epoch 78 Curr Acc: (13349/18932)70.51% +[ Thu Sep 8 22:40:44 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 22:40:44 2022 ] Training epoch: 79 +[ Thu Sep 8 22:40:44 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:44:05 2022 ] Mean training loss: 0.0100. +[ Thu Sep 8 22:44:05 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 22:44:05 2022 ] Eval epoch: 79 +[ Thu Sep 8 22:46:35 2022 ] Epoch 79 Curr Acc: (13672/18932)72.22% +[ Thu Sep 8 22:46:35 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 22:46:35 2022 ] Training epoch: 80 +[ Thu Sep 8 22:46:35 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:49:57 2022 ] Mean training loss: 0.0101. +[ Thu Sep 8 22:49:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:49:57 2022 ] Eval epoch: 80 +[ Thu Sep 8 22:52:27 2022 ] Epoch 80 Curr Acc: (13663/18932)72.17% +[ Thu Sep 8 22:52:27 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 22:52:27 2022 ] Training epoch: 81 +[ Thu Sep 8 22:52:27 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:55:49 2022 ] Mean training loss: 0.0093. 
+[ Thu Sep 8 22:55:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:55:49 2022 ] Eval epoch: 81 +[ Thu Sep 8 22:58:19 2022 ] Epoch 81 Curr Acc: (13272/18932)70.10% +[ Thu Sep 8 22:58:19 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 22:58:19 2022 ] Training epoch: 82 +[ Thu Sep 8 22:58:19 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:01:40 2022 ] Mean training loss: 0.0111. +[ Thu Sep 8 23:01:40 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 23:01:40 2022 ] Eval epoch: 82 +[ Thu Sep 8 23:04:11 2022 ] Epoch 82 Curr Acc: (13453/18932)71.06% +[ Thu Sep 8 23:04:11 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 23:04:11 2022 ] Training epoch: 83 +[ Thu Sep 8 23:04:11 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:07:32 2022 ] Mean training loss: 0.0093. +[ Thu Sep 8 23:07:32 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 23:07:32 2022 ] Eval epoch: 83 +[ Thu Sep 8 23:10:02 2022 ] Epoch 83 Curr Acc: (12956/18932)68.43% +[ Thu Sep 8 23:10:02 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 23:10:02 2022 ] Training epoch: 84 +[ Thu Sep 8 23:10:02 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:13:23 2022 ] Mean training loss: 0.0121. +[ Thu Sep 8 23:13:23 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 23:13:23 2022 ] Eval epoch: 84 +[ Thu Sep 8 23:15:54 2022 ] Epoch 84 Curr Acc: (13587/18932)71.77% +[ Thu Sep 8 23:15:54 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 23:15:54 2022 ] Training epoch: 85 +[ Thu Sep 8 23:15:54 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:19:15 2022 ] Mean training loss: 0.0106. 
+[ Thu Sep 8 23:19:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:19:15 2022 ] Eval epoch: 85 +[ Thu Sep 8 23:21:46 2022 ] Epoch 85 Curr Acc: (12987/18932)68.60% +[ Thu Sep 8 23:21:46 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 23:21:46 2022 ] Training epoch: 86 +[ Thu Sep 8 23:21:46 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:25:07 2022 ] Mean training loss: 0.0100. +[ Thu Sep 8 23:25:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:25:07 2022 ] Eval epoch: 86 +[ Thu Sep 8 23:27:38 2022 ] Epoch 86 Curr Acc: (13236/18932)69.91% +[ Thu Sep 8 23:27:38 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 23:27:38 2022 ] Training epoch: 87 +[ Thu Sep 8 23:27:38 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:30:59 2022 ] Mean training loss: 0.0099. +[ Thu Sep 8 23:30:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:30:59 2022 ] Eval epoch: 87 +[ Thu Sep 8 23:33:30 2022 ] Epoch 87 Curr Acc: (13431/18932)70.94% +[ Thu Sep 8 23:33:30 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 23:33:30 2022 ] Training epoch: 88 +[ Thu Sep 8 23:33:30 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:36:51 2022 ] Mean training loss: 0.0097. +[ Thu Sep 8 23:36:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:36:51 2022 ] Eval epoch: 88 +[ Thu Sep 8 23:39:21 2022 ] Epoch 88 Curr Acc: (13546/18932)71.55% +[ Thu Sep 8 23:39:21 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 23:39:21 2022 ] Training epoch: 89 +[ Thu Sep 8 23:39:21 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:42:43 2022 ] Mean training loss: 0.0090. 
+[ Thu Sep 8 23:42:43 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 23:42:43 2022 ] Eval epoch: 89 +[ Thu Sep 8 23:45:13 2022 ] Epoch 89 Curr Acc: (13651/18932)72.11% +[ Thu Sep 8 23:45:13 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 23:45:13 2022 ] Training epoch: 90 +[ Thu Sep 8 23:45:13 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:48:34 2022 ] Mean training loss: 0.0102. +[ Thu Sep 8 23:48:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:48:34 2022 ] Eval epoch: 90 +[ Thu Sep 8 23:51:05 2022 ] Epoch 90 Curr Acc: (13565/18932)71.65% +[ Thu Sep 8 23:51:05 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 23:51:05 2022 ] Training epoch: 91 +[ Thu Sep 8 23:51:05 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:54:26 2022 ] Mean training loss: 0.0087. +[ Thu Sep 8 23:54:26 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 23:54:26 2022 ] Eval epoch: 91 +[ Thu Sep 8 23:56:56 2022 ] Epoch 91 Curr Acc: (13700/18932)72.36% +[ Thu Sep 8 23:56:56 2022 ] Epoch 74 Best Acc 72.63% +[ Thu Sep 8 23:56:56 2022 ] Training epoch: 92 +[ Thu Sep 8 23:56:56 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:00:18 2022 ] Mean training loss: 0.0097. +[ Fri Sep 9 00:00:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:00:18 2022 ] Eval epoch: 92 +[ Fri Sep 9 00:02:48 2022 ] Epoch 92 Curr Acc: (13491/18932)71.26% +[ Fri Sep 9 00:02:48 2022 ] Epoch 74 Best Acc 72.63% +[ Fri Sep 9 00:02:48 2022 ] Training epoch: 93 +[ Fri Sep 9 00:02:48 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:06:09 2022 ] Mean training loss: 0.0106. 
+[ Fri Sep 9 00:06:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:06:09 2022 ] Eval epoch: 93 +[ Fri Sep 9 00:08:40 2022 ] Epoch 93 Curr Acc: (13766/18932)72.71% +[ Fri Sep 9 00:08:40 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 00:08:40 2022 ] Training epoch: 94 +[ Fri Sep 9 00:08:40 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:12:01 2022 ] Mean training loss: 0.0097. +[ Fri Sep 9 00:12:01 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 00:12:01 2022 ] Eval epoch: 94 +[ Fri Sep 9 00:14:32 2022 ] Epoch 94 Curr Acc: (13345/18932)70.49% +[ Fri Sep 9 00:14:32 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 00:14:32 2022 ] Training epoch: 95 +[ Fri Sep 9 00:14:32 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:17:53 2022 ] Mean training loss: 0.0092. +[ Fri Sep 9 00:17:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:17:53 2022 ] Eval epoch: 95 +[ Fri Sep 9 00:20:24 2022 ] Epoch 95 Curr Acc: (13638/18932)72.04% +[ Fri Sep 9 00:20:24 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 00:20:24 2022 ] Training epoch: 96 +[ Fri Sep 9 00:20:24 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:23:45 2022 ] Mean training loss: 0.0099. +[ Fri Sep 9 00:23:45 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 00:23:45 2022 ] Eval epoch: 96 +[ Fri Sep 9 00:26:15 2022 ] Epoch 96 Curr Acc: (13402/18932)70.79% +[ Fri Sep 9 00:26:15 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 00:26:15 2022 ] Training epoch: 97 +[ Fri Sep 9 00:26:15 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:29:37 2022 ] Mean training loss: 0.0094. 
+[ Fri Sep 9 00:29:37 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 00:29:37 2022 ] Eval epoch: 97 +[ Fri Sep 9 00:32:07 2022 ] Epoch 97 Curr Acc: (13691/18932)72.32% +[ Fri Sep 9 00:32:07 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 00:32:07 2022 ] Training epoch: 98 +[ Fri Sep 9 00:32:08 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:35:28 2022 ] Mean training loss: 0.0102. +[ Fri Sep 9 00:35:28 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 00:35:28 2022 ] Eval epoch: 98 +[ Fri Sep 9 00:37:59 2022 ] Epoch 98 Curr Acc: (13539/18932)71.51% +[ Fri Sep 9 00:37:59 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 00:37:59 2022 ] Training epoch: 99 +[ Fri Sep 9 00:37:59 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:41:20 2022 ] Mean training loss: 0.0085. +[ Fri Sep 9 00:41:20 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 00:41:20 2022 ] Eval epoch: 99 +[ Fri Sep 9 00:43:51 2022 ] Epoch 99 Curr Acc: (12862/18932)67.94% +[ Fri Sep 9 00:43:51 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 00:43:51 2022 ] Training epoch: 100 +[ Fri Sep 9 00:43:51 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:47:12 2022 ] Mean training loss: 0.0109. +[ Fri Sep 9 00:47:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:47:12 2022 ] Eval epoch: 100 +[ Fri Sep 9 00:49:43 2022 ] Epoch 100 Curr Acc: (13589/18932)71.78% +[ Fri Sep 9 00:49:43 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 00:49:43 2022 ] Training epoch: 101 +[ Fri Sep 9 00:49:43 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:53:04 2022 ] Mean training loss: 0.0095. 
+[ Fri Sep 9 00:53:04 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 00:53:04 2022 ] Eval epoch: 101 +[ Fri Sep 9 00:55:34 2022 ] Epoch 101 Curr Acc: (13585/18932)71.76% +[ Fri Sep 9 00:55:34 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 00:55:34 2022 ] Training epoch: 102 +[ Fri Sep 9 00:55:34 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:58:55 2022 ] Mean training loss: 0.0096. +[ Fri Sep 9 00:58:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:58:55 2022 ] Eval epoch: 102 +[ Fri Sep 9 01:01:26 2022 ] Epoch 102 Curr Acc: (13439/18932)70.99% +[ Fri Sep 9 01:01:26 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 01:01:26 2022 ] Training epoch: 103 +[ Fri Sep 9 01:01:26 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:04:47 2022 ] Mean training loss: 0.0081. +[ Fri Sep 9 01:04:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:04:47 2022 ] Eval epoch: 103 +[ Fri Sep 9 01:07:17 2022 ] Epoch 103 Curr Acc: (13417/18932)70.87% +[ Fri Sep 9 01:07:17 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 01:07:17 2022 ] Training epoch: 104 +[ Fri Sep 9 01:07:17 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:10:38 2022 ] Mean training loss: 0.0092. +[ Fri Sep 9 01:10:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:10:38 2022 ] Eval epoch: 104 +[ Fri Sep 9 01:13:08 2022 ] Epoch 104 Curr Acc: (13421/18932)70.89% +[ Fri Sep 9 01:13:08 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 01:13:08 2022 ] Training epoch: 105 +[ Fri Sep 9 01:13:08 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:16:29 2022 ] Mean training loss: 0.0086. 
+[ Fri Sep 9 01:16:29 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 01:16:29 2022 ] Eval epoch: 105 +[ Fri Sep 9 01:18:59 2022 ] Epoch 105 Curr Acc: (13473/18932)71.17% +[ Fri Sep 9 01:18:59 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 01:18:59 2022 ] Training epoch: 106 +[ Fri Sep 9 01:18:59 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:22:21 2022 ] Mean training loss: 0.0098. +[ Fri Sep 9 01:22:21 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 01:22:21 2022 ] Eval epoch: 106 +[ Fri Sep 9 01:24:51 2022 ] Epoch 106 Curr Acc: (13675/18932)72.23% +[ Fri Sep 9 01:24:51 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 01:24:51 2022 ] Training epoch: 107 +[ Fri Sep 9 01:24:51 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:28:13 2022 ] Mean training loss: 0.0089. +[ Fri Sep 9 01:28:13 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 01:28:13 2022 ] Eval epoch: 107 +[ Fri Sep 9 01:30:43 2022 ] Epoch 107 Curr Acc: (13614/18932)71.91% +[ Fri Sep 9 01:30:43 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 01:30:43 2022 ] Training epoch: 108 +[ Fri Sep 9 01:30:43 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:34:04 2022 ] Mean training loss: 0.0097. +[ Fri Sep 9 01:34:04 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 01:34:04 2022 ] Eval epoch: 108 +[ Fri Sep 9 01:36:35 2022 ] Epoch 108 Curr Acc: (13745/18932)72.60% +[ Fri Sep 9 01:36:35 2022 ] Epoch 93 Best Acc 72.71% +[ Fri Sep 9 01:36:35 2022 ] Training epoch: 109 +[ Fri Sep 9 01:36:35 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:39:56 2022 ] Mean training loss: 0.0094. 
+[ Fri Sep 9 01:39:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:39:56 2022 ] Eval epoch: 109 +[ Fri Sep 9 01:42:26 2022 ] Epoch 109 Curr Acc: (13796/18932)72.87% +[ Fri Sep 9 01:42:26 2022 ] Epoch 109 Best Acc 72.87% +[ Fri Sep 9 01:42:26 2022 ] Training epoch: 110 +[ Fri Sep 9 01:42:26 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:45:47 2022 ] Mean training loss: 0.0098. +[ Fri Sep 9 01:45:47 2022 ] Time consumption: [Data]01%, [Network]98% +[ Fri Sep 9 01:45:47 2022 ] Eval epoch: 110 +[ Fri Sep 9 01:48:18 2022 ] Epoch 110 Curr Acc: (13453/18932)71.06% +[ Fri Sep 9 01:48:18 2022 ] Epoch 109 Best Acc 72.87% +[ Fri Sep 9 01:48:18 2022 ] epoch: 109, best accuracy: 0.7287132896682865 +[ Fri Sep 9 01:48:18 2022 ] Experiment: ./work_dir/ntu/xview_bm +[ Fri Sep 9 01:48:18 2022 ] # generator parameters: 2.896055 M. +[ Fri Sep 9 01:48:18 2022 ] Load weights from ./runs/ntu/xview_bm/runs-108-107474.pt. +[ Fri Sep 9 01:48:18 2022 ] Eval epoch: 1 +[ Fri Sep 9 01:50:48 2022 ] Epoch 1 Curr Acc: (13796/18932)72.87% +[ Fri Sep 9 01:50:48 2022 ] Epoch 109 Best Acc 72.87% diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_j/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu60_xview/xview_j/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_j/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions import 
Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_j/config.yaml b/ckpt/Others/MST-GCN/ntu60_xview/xview_j/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..968d06aad5d3b33dd2f574f50de1620017598755 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_j/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu/xview_j.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + - 
448 + - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 60 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu/xview_j/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_data_joint.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_data_joint.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu/xview_j diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_j/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu60_xview/xview_j/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..8932e749fc16b50bc0c52136b9f7a884ea299064 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_j/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bd06a565afd3374406210bd601b0d8b72bba63ec3c31e5e967c8cd9cf74745c0 +size 5718404 diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_j/log.txt b/ckpt/Others/MST-GCN/ntu60_xview/xview_j/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..05886631920e54375d8174c4963b7a1aab9af561 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_j/log.txt @@ -0,0 +1,631 @@ +[ Thu Sep 8 17:08:33 
2022 ] # generator parameters: 2.896055 M. +[ Thu Sep 8 17:08:34 2022 ] Parameters: +{'work_dir': './work_dir/ntu/xview_j', 'model_saved_name': './runs/ntu/xview_j/runs', 'config': 'config/ntu/xview_j.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Thu Sep 8 17:08:34 2022 ] Training epoch: 1 +[ Thu Sep 8 17:08:34 2022 ] Learning rate: 0.015 +[ Thu Sep 8 17:11:57 2022 ] Mean 
training loss: 3.2236. +[ Thu Sep 8 17:11:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:11:57 2022 ] Training epoch: 2 +[ Thu Sep 8 17:11:57 2022 ] Learning rate: 0.03 +[ Thu Sep 8 17:15:20 2022 ] Mean training loss: 2.4631. +[ Thu Sep 8 17:15:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:15:20 2022 ] Training epoch: 3 +[ Thu Sep 8 17:15:20 2022 ] Learning rate: 0.045 +[ Thu Sep 8 17:18:43 2022 ] Mean training loss: 2.0892. +[ Thu Sep 8 17:18:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:18:43 2022 ] Training epoch: 4 +[ Thu Sep 8 17:18:43 2022 ] Learning rate: 0.06 +[ Thu Sep 8 17:22:05 2022 ] Mean training loss: 1.7947. +[ Thu Sep 8 17:22:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:22:05 2022 ] Training epoch: 5 +[ Thu Sep 8 17:22:05 2022 ] Learning rate: 0.075 +[ Thu Sep 8 17:25:27 2022 ] Mean training loss: 1.6131. +[ Thu Sep 8 17:25:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:25:27 2022 ] Training epoch: 6 +[ Thu Sep 8 17:25:27 2022 ] Learning rate: 0.09 +[ Thu Sep 8 17:28:49 2022 ] Mean training loss: 1.4667. +[ Thu Sep 8 17:28:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:28:49 2022 ] Training epoch: 7 +[ Thu Sep 8 17:28:49 2022 ] Learning rate: 0.10500000000000001 +[ Thu Sep 8 17:32:12 2022 ] Mean training loss: 1.3731. +[ Thu Sep 8 17:32:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:32:12 2022 ] Training epoch: 8 +[ Thu Sep 8 17:32:12 2022 ] Learning rate: 0.12 +[ Thu Sep 8 17:35:34 2022 ] Mean training loss: 1.3032. +[ Thu Sep 8 17:35:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:35:34 2022 ] Training epoch: 9 +[ Thu Sep 8 17:35:34 2022 ] Learning rate: 0.13499999999999998 +[ Thu Sep 8 17:38:56 2022 ] Mean training loss: 1.2545. 
+[ Thu Sep 8 17:38:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:38:56 2022 ] Training epoch: 10 +[ Thu Sep 8 17:38:56 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:42:18 2022 ] Mean training loss: 1.2348. +[ Thu Sep 8 17:42:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:42:18 2022 ] Training epoch: 11 +[ Thu Sep 8 17:42:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:45:40 2022 ] Mean training loss: 1.1505. +[ Thu Sep 8 17:45:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:45:40 2022 ] Training epoch: 12 +[ Thu Sep 8 17:45:40 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:49:02 2022 ] Mean training loss: 1.0974. +[ Thu Sep 8 17:49:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:49:02 2022 ] Training epoch: 13 +[ Thu Sep 8 17:49:02 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:52:24 2022 ] Mean training loss: 1.0536. +[ Thu Sep 8 17:52:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:52:24 2022 ] Training epoch: 14 +[ Thu Sep 8 17:52:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:55:47 2022 ] Mean training loss: 1.0029. +[ Thu Sep 8 17:55:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:55:47 2022 ] Training epoch: 15 +[ Thu Sep 8 17:55:47 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:59:10 2022 ] Mean training loss: 0.9541. +[ Thu Sep 8 17:59:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:59:10 2022 ] Training epoch: 16 +[ Thu Sep 8 17:59:10 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:02:33 2022 ] Mean training loss: 0.9493. +[ Thu Sep 8 18:02:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:02:33 2022 ] Training epoch: 17 +[ Thu Sep 8 18:02:33 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:05:56 2022 ] Mean training loss: 0.9255. 
+[ Thu Sep 8 18:05:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:05:56 2022 ] Training epoch: 18 +[ Thu Sep 8 18:05:56 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:09:18 2022 ] Mean training loss: 0.8999. +[ Thu Sep 8 18:09:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:09:18 2022 ] Training epoch: 19 +[ Thu Sep 8 18:09:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:12:40 2022 ] Mean training loss: 0.8743. +[ Thu Sep 8 18:12:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:12:40 2022 ] Training epoch: 20 +[ Thu Sep 8 18:12:40 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:16:02 2022 ] Mean training loss: 0.8682. +[ Thu Sep 8 18:16:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:16:02 2022 ] Training epoch: 21 +[ Thu Sep 8 18:16:02 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:19:25 2022 ] Mean training loss: 0.8350. +[ Thu Sep 8 18:19:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:19:25 2022 ] Training epoch: 22 +[ Thu Sep 8 18:19:25 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:22:47 2022 ] Mean training loss: 0.8195. +[ Thu Sep 8 18:22:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:22:47 2022 ] Training epoch: 23 +[ Thu Sep 8 18:22:47 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:26:09 2022 ] Mean training loss: 0.8074. +[ Thu Sep 8 18:26:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:26:09 2022 ] Training epoch: 24 +[ Thu Sep 8 18:26:09 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:29:32 2022 ] Mean training loss: 0.7901. +[ Thu Sep 8 18:29:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:29:32 2022 ] Training epoch: 25 +[ Thu Sep 8 18:29:32 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:32:54 2022 ] Mean training loss: 0.7801. 
+[ Thu Sep 8 18:32:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:32:54 2022 ] Training epoch: 26 +[ Thu Sep 8 18:32:54 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:36:15 2022 ] Mean training loss: 0.7635. +[ Thu Sep 8 18:36:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:36:15 2022 ] Training epoch: 27 +[ Thu Sep 8 18:36:15 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:39:36 2022 ] Mean training loss: 0.7732. +[ Thu Sep 8 18:39:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:39:36 2022 ] Training epoch: 28 +[ Thu Sep 8 18:39:36 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:42:57 2022 ] Mean training loss: 0.7639. +[ Thu Sep 8 18:42:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:42:57 2022 ] Training epoch: 29 +[ Thu Sep 8 18:42:57 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:46:18 2022 ] Mean training loss: 0.7230. +[ Thu Sep 8 18:46:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:46:18 2022 ] Training epoch: 30 +[ Thu Sep 8 18:46:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:49:39 2022 ] Mean training loss: 0.7327. +[ Thu Sep 8 18:49:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:49:39 2022 ] Training epoch: 31 +[ Thu Sep 8 18:49:39 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:53:00 2022 ] Mean training loss: 0.7295. +[ Thu Sep 8 18:53:00 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:53:00 2022 ] Training epoch: 32 +[ Thu Sep 8 18:53:00 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:56:21 2022 ] Mean training loss: 0.7260. +[ Thu Sep 8 18:56:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:56:21 2022 ] Training epoch: 33 +[ Thu Sep 8 18:56:21 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:59:42 2022 ] Mean training loss: 0.6975. 
+[ Thu Sep 8 18:59:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:59:42 2022 ] Training epoch: 34 +[ Thu Sep 8 18:59:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:03:03 2022 ] Mean training loss: 0.7037. +[ Thu Sep 8 19:03:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:03:03 2022 ] Training epoch: 35 +[ Thu Sep 8 19:03:03 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:06:24 2022 ] Mean training loss: 0.7090. +[ Thu Sep 8 19:06:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:06:24 2022 ] Training epoch: 36 +[ Thu Sep 8 19:06:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:09:46 2022 ] Mean training loss: 0.6900. +[ Thu Sep 8 19:09:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:09:46 2022 ] Training epoch: 37 +[ Thu Sep 8 19:09:46 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:13:07 2022 ] Mean training loss: 0.6831. +[ Thu Sep 8 19:13:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:13:07 2022 ] Training epoch: 38 +[ Thu Sep 8 19:13:07 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:16:28 2022 ] Mean training loss: 0.6805. +[ Thu Sep 8 19:16:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:16:28 2022 ] Training epoch: 39 +[ Thu Sep 8 19:16:28 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:19:49 2022 ] Mean training loss: 0.6807. +[ Thu Sep 8 19:19:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:19:49 2022 ] Training epoch: 40 +[ Thu Sep 8 19:19:49 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:23:10 2022 ] Mean training loss: 0.6900. +[ Thu Sep 8 19:23:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:23:10 2022 ] Training epoch: 41 +[ Thu Sep 8 19:23:10 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:26:31 2022 ] Mean training loss: 0.6537. 
+[ Thu Sep 8 19:26:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:26:31 2022 ] Training epoch: 42 +[ Thu Sep 8 19:26:31 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:29:52 2022 ] Mean training loss: 0.6627. +[ Thu Sep 8 19:29:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:29:52 2022 ] Training epoch: 43 +[ Thu Sep 8 19:29:52 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:33:13 2022 ] Mean training loss: 0.6491. +[ Thu Sep 8 19:33:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:33:13 2022 ] Training epoch: 44 +[ Thu Sep 8 19:33:13 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:36:34 2022 ] Mean training loss: 0.6561. +[ Thu Sep 8 19:36:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:36:34 2022 ] Training epoch: 45 +[ Thu Sep 8 19:36:34 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:39:55 2022 ] Mean training loss: 0.6447. +[ Thu Sep 8 19:39:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:39:55 2022 ] Training epoch: 46 +[ Thu Sep 8 19:39:55 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:43:17 2022 ] Mean training loss: 0.6468. +[ Thu Sep 8 19:43:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:43:17 2022 ] Training epoch: 47 +[ Thu Sep 8 19:43:17 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:46:37 2022 ] Mean training loss: 0.6137. +[ Thu Sep 8 19:46:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:46:37 2022 ] Training epoch: 48 +[ Thu Sep 8 19:46:37 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:49:57 2022 ] Mean training loss: 0.6511. +[ Thu Sep 8 19:49:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:49:57 2022 ] Training epoch: 49 +[ Thu Sep 8 19:49:57 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:53:18 2022 ] Mean training loss: 0.6319. 
+[ Thu Sep 8 19:53:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:53:18 2022 ] Training epoch: 50 +[ Thu Sep 8 19:53:18 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:56:38 2022 ] Mean training loss: 0.6398. +[ Thu Sep 8 19:56:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:56:38 2022 ] Training epoch: 51 +[ Thu Sep 8 19:56:38 2022 ] Learning rate: 0.015 +[ Thu Sep 8 19:59:59 2022 ] Mean training loss: 0.3135. +[ Thu Sep 8 19:59:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:59:59 2022 ] Eval epoch: 51 +[ Thu Sep 8 20:02:31 2022 ] Epoch 51 Curr Acc: (13516/18932)71.39% +[ Thu Sep 8 20:02:31 2022 ] Epoch 51 Best Acc 71.39% +[ Thu Sep 8 20:02:31 2022 ] Training epoch: 52 +[ Thu Sep 8 20:02:31 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:05:51 2022 ] Mean training loss: 0.2096. +[ Thu Sep 8 20:05:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:05:51 2022 ] Eval epoch: 52 +[ Thu Sep 8 20:08:22 2022 ] Epoch 52 Curr Acc: (13866/18932)73.24% +[ Thu Sep 8 20:08:22 2022 ] Epoch 52 Best Acc 73.24% +[ Thu Sep 8 20:08:22 2022 ] Training epoch: 53 +[ Thu Sep 8 20:08:22 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:11:43 2022 ] Mean training loss: 0.1805. +[ Thu Sep 8 20:11:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:11:43 2022 ] Eval epoch: 53 +[ Thu Sep 8 20:14:14 2022 ] Epoch 53 Curr Acc: (14263/18932)75.34% +[ Thu Sep 8 20:14:14 2022 ] Epoch 53 Best Acc 75.34% +[ Thu Sep 8 20:14:14 2022 ] Training epoch: 54 +[ Thu Sep 8 20:14:14 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:17:34 2022 ] Mean training loss: 0.1483. 
+[ Thu Sep 8 20:17:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:17:34 2022 ] Eval epoch: 54 +[ Thu Sep 8 20:20:05 2022 ] Epoch 54 Curr Acc: (14131/18932)74.64% +[ Thu Sep 8 20:20:05 2022 ] Epoch 53 Best Acc 75.34% +[ Thu Sep 8 20:20:05 2022 ] Training epoch: 55 +[ Thu Sep 8 20:20:05 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:23:25 2022 ] Mean training loss: 0.1304. +[ Thu Sep 8 20:23:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:23:25 2022 ] Eval epoch: 55 +[ Thu Sep 8 20:25:56 2022 ] Epoch 55 Curr Acc: (14248/18932)75.26% +[ Thu Sep 8 20:25:56 2022 ] Epoch 53 Best Acc 75.34% +[ Thu Sep 8 20:25:56 2022 ] Training epoch: 56 +[ Thu Sep 8 20:25:56 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:29:15 2022 ] Mean training loss: 0.1152. +[ Thu Sep 8 20:29:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:29:15 2022 ] Eval epoch: 56 +[ Thu Sep 8 20:31:46 2022 ] Epoch 56 Curr Acc: (14266/18932)75.35% +[ Thu Sep 8 20:31:46 2022 ] Epoch 56 Best Acc 75.35% +[ Thu Sep 8 20:31:46 2022 ] Training epoch: 57 +[ Thu Sep 8 20:31:46 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:35:06 2022 ] Mean training loss: 0.0975. +[ Thu Sep 8 20:35:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:35:06 2022 ] Eval epoch: 57 +[ Thu Sep 8 20:37:37 2022 ] Epoch 57 Curr Acc: (13798/18932)72.88% +[ Thu Sep 8 20:37:37 2022 ] Epoch 56 Best Acc 75.35% +[ Thu Sep 8 20:37:37 2022 ] Training epoch: 58 +[ Thu Sep 8 20:37:37 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:40:57 2022 ] Mean training loss: 0.0793. +[ Thu Sep 8 20:40:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:40:57 2022 ] Eval epoch: 58 +[ Thu Sep 8 20:43:28 2022 ] Epoch 58 Curr Acc: (14252/18932)75.28% +[ Thu Sep 8 20:43:28 2022 ] Epoch 56 Best Acc 75.35% +[ Thu Sep 8 20:43:28 2022 ] Training epoch: 59 +[ Thu Sep 8 20:43:28 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:46:48 2022 ] Mean training loss: 0.0713. 
+[ Thu Sep 8 20:46:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:46:48 2022 ] Eval epoch: 59 +[ Thu Sep 8 20:49:19 2022 ] Epoch 59 Curr Acc: (14324/18932)75.66% +[ Thu Sep 8 20:49:19 2022 ] Epoch 59 Best Acc 75.66% +[ Thu Sep 8 20:49:19 2022 ] Training epoch: 60 +[ Thu Sep 8 20:49:19 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:52:38 2022 ] Mean training loss: 0.0647. +[ Thu Sep 8 20:52:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:52:38 2022 ] Eval epoch: 60 +[ Thu Sep 8 20:55:09 2022 ] Epoch 60 Curr Acc: (14330/18932)75.69% +[ Thu Sep 8 20:55:09 2022 ] Epoch 60 Best Acc 75.69% +[ Thu Sep 8 20:55:09 2022 ] Training epoch: 61 +[ Thu Sep 8 20:55:09 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:58:30 2022 ] Mean training loss: 0.0594. +[ Thu Sep 8 20:58:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:58:30 2022 ] Eval epoch: 61 +[ Thu Sep 8 21:01:01 2022 ] Epoch 61 Curr Acc: (14203/18932)75.02% +[ Thu Sep 8 21:01:01 2022 ] Epoch 60 Best Acc 75.69% +[ Thu Sep 8 21:01:01 2022 ] Training epoch: 62 +[ Thu Sep 8 21:01:01 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:04:21 2022 ] Mean training loss: 0.0543. +[ Thu Sep 8 21:04:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:04:21 2022 ] Eval epoch: 62 +[ Thu Sep 8 21:06:52 2022 ] Epoch 62 Curr Acc: (14274/18932)75.40% +[ Thu Sep 8 21:06:52 2022 ] Epoch 60 Best Acc 75.69% +[ Thu Sep 8 21:06:52 2022 ] Training epoch: 63 +[ Thu Sep 8 21:06:52 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:10:12 2022 ] Mean training loss: 0.0455. +[ Thu Sep 8 21:10:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:10:12 2022 ] Eval epoch: 63 +[ Thu Sep 8 21:12:43 2022 ] Epoch 63 Curr Acc: (14159/18932)74.79% +[ Thu Sep 8 21:12:43 2022 ] Epoch 60 Best Acc 75.69% +[ Thu Sep 8 21:12:43 2022 ] Training epoch: 64 +[ Thu Sep 8 21:12:43 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:16:02 2022 ] Mean training loss: 0.0487. 
+[ Thu Sep 8 21:16:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:16:02 2022 ] Eval epoch: 64 +[ Thu Sep 8 21:18:33 2022 ] Epoch 64 Curr Acc: (13683/18932)72.27% +[ Thu Sep 8 21:18:33 2022 ] Epoch 60 Best Acc 75.69% +[ Thu Sep 8 21:18:33 2022 ] Training epoch: 65 +[ Thu Sep 8 21:18:33 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:21:53 2022 ] Mean training loss: 0.0422. +[ Thu Sep 8 21:21:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:21:53 2022 ] Eval epoch: 65 +[ Thu Sep 8 21:24:24 2022 ] Epoch 65 Curr Acc: (14517/18932)76.68% +[ Thu Sep 8 21:24:24 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 21:24:24 2022 ] Training epoch: 66 +[ Thu Sep 8 21:24:24 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:27:44 2022 ] Mean training loss: 0.0406. +[ Thu Sep 8 21:27:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:27:44 2022 ] Eval epoch: 66 +[ Thu Sep 8 21:30:15 2022 ] Epoch 66 Curr Acc: (14238/18932)75.21% +[ Thu Sep 8 21:30:15 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 21:30:15 2022 ] Training epoch: 67 +[ Thu Sep 8 21:30:15 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:33:35 2022 ] Mean training loss: 0.0323. +[ Thu Sep 8 21:33:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:33:35 2022 ] Eval epoch: 67 +[ Thu Sep 8 21:36:06 2022 ] Epoch 67 Curr Acc: (14106/18932)74.51% +[ Thu Sep 8 21:36:06 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 21:36:06 2022 ] Training epoch: 68 +[ Thu Sep 8 21:36:06 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:39:27 2022 ] Mean training loss: 0.0346. +[ Thu Sep 8 21:39:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:39:27 2022 ] Eval epoch: 68 +[ Thu Sep 8 21:41:58 2022 ] Epoch 68 Curr Acc: (14290/18932)75.48% +[ Thu Sep 8 21:41:58 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 21:41:58 2022 ] Training epoch: 69 +[ Thu Sep 8 21:41:58 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:45:19 2022 ] Mean training loss: 0.0310. 
+[ Thu Sep 8 21:45:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:45:19 2022 ] Eval epoch: 69 +[ Thu Sep 8 21:47:50 2022 ] Epoch 69 Curr Acc: (13667/18932)72.19% +[ Thu Sep 8 21:47:50 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 21:47:50 2022 ] Training epoch: 70 +[ Thu Sep 8 21:47:50 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:51:11 2022 ] Mean training loss: 0.0283. +[ Thu Sep 8 21:51:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:51:11 2022 ] Eval epoch: 70 +[ Thu Sep 8 21:53:42 2022 ] Epoch 70 Curr Acc: (14152/18932)74.75% +[ Thu Sep 8 21:53:42 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 21:53:42 2022 ] Training epoch: 71 +[ Thu Sep 8 21:53:42 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 21:57:03 2022 ] Mean training loss: 0.0219. +[ Thu Sep 8 21:57:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:57:03 2022 ] Eval epoch: 71 +[ Thu Sep 8 21:59:34 2022 ] Epoch 71 Curr Acc: (13871/18932)73.27% +[ Thu Sep 8 21:59:34 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 21:59:34 2022 ] Training epoch: 72 +[ Thu Sep 8 21:59:34 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:02:55 2022 ] Mean training loss: 0.0200. +[ Thu Sep 8 22:02:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:02:55 2022 ] Eval epoch: 72 +[ Thu Sep 8 22:05:26 2022 ] Epoch 72 Curr Acc: (13998/18932)73.94% +[ Thu Sep 8 22:05:26 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 22:05:26 2022 ] Training epoch: 73 +[ Thu Sep 8 22:05:26 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:08:47 2022 ] Mean training loss: 0.0172. 
+[ Thu Sep 8 22:08:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:08:47 2022 ] Eval epoch: 73 +[ Thu Sep 8 22:11:18 2022 ] Epoch 73 Curr Acc: (14198/18932)74.99% +[ Thu Sep 8 22:11:18 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 22:11:18 2022 ] Training epoch: 74 +[ Thu Sep 8 22:11:18 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:14:39 2022 ] Mean training loss: 0.0190. +[ Thu Sep 8 22:14:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:14:39 2022 ] Eval epoch: 74 +[ Thu Sep 8 22:17:10 2022 ] Epoch 74 Curr Acc: (14066/18932)74.30% +[ Thu Sep 8 22:17:10 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 22:17:10 2022 ] Training epoch: 75 +[ Thu Sep 8 22:17:10 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:20:31 2022 ] Mean training loss: 0.0160. +[ Thu Sep 8 22:20:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:20:31 2022 ] Eval epoch: 75 +[ Thu Sep 8 22:23:02 2022 ] Epoch 75 Curr Acc: (13939/18932)73.63% +[ Thu Sep 8 22:23:02 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 22:23:02 2022 ] Training epoch: 76 +[ Thu Sep 8 22:23:02 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:26:22 2022 ] Mean training loss: 0.0172. +[ Thu Sep 8 22:26:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:26:22 2022 ] Eval epoch: 76 +[ Thu Sep 8 22:28:53 2022 ] Epoch 76 Curr Acc: (13809/18932)72.94% +[ Thu Sep 8 22:28:53 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 22:28:53 2022 ] Training epoch: 77 +[ Thu Sep 8 22:28:53 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:32:14 2022 ] Mean training loss: 0.0166. 
+[ Thu Sep 8 22:32:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:32:14 2022 ] Eval epoch: 77 +[ Thu Sep 8 22:34:45 2022 ] Epoch 77 Curr Acc: (14071/18932)74.32% +[ Thu Sep 8 22:34:45 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 22:34:45 2022 ] Training epoch: 78 +[ Thu Sep 8 22:34:45 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:38:06 2022 ] Mean training loss: 0.0150. +[ Thu Sep 8 22:38:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:38:06 2022 ] Eval epoch: 78 +[ Thu Sep 8 22:40:37 2022 ] Epoch 78 Curr Acc: (14179/18932)74.89% +[ Thu Sep 8 22:40:37 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 22:40:37 2022 ] Training epoch: 79 +[ Thu Sep 8 22:40:37 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:43:58 2022 ] Mean training loss: 0.0171. +[ Thu Sep 8 22:43:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:43:58 2022 ] Eval epoch: 79 +[ Thu Sep 8 22:46:29 2022 ] Epoch 79 Curr Acc: (14082/18932)74.38% +[ Thu Sep 8 22:46:29 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 22:46:29 2022 ] Training epoch: 80 +[ Thu Sep 8 22:46:29 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:49:49 2022 ] Mean training loss: 0.0153. +[ Thu Sep 8 22:49:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:49:49 2022 ] Eval epoch: 80 +[ Thu Sep 8 22:52:20 2022 ] Epoch 80 Curr Acc: (14377/18932)75.94% +[ Thu Sep 8 22:52:20 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 22:52:20 2022 ] Training epoch: 81 +[ Thu Sep 8 22:52:20 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:55:41 2022 ] Mean training loss: 0.0138. 
+[ Thu Sep 8 22:55:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:55:41 2022 ] Eval epoch: 81 +[ Thu Sep 8 22:58:12 2022 ] Epoch 81 Curr Acc: (14380/18932)75.96% +[ Thu Sep 8 22:58:12 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 22:58:12 2022 ] Training epoch: 82 +[ Thu Sep 8 22:58:12 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:01:33 2022 ] Mean training loss: 0.0139. +[ Thu Sep 8 23:01:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:01:33 2022 ] Eval epoch: 82 +[ Thu Sep 8 23:04:03 2022 ] Epoch 82 Curr Acc: (13860/18932)73.21% +[ Thu Sep 8 23:04:03 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 23:04:03 2022 ] Training epoch: 83 +[ Thu Sep 8 23:04:03 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:07:24 2022 ] Mean training loss: 0.0131. +[ Thu Sep 8 23:07:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:07:24 2022 ] Eval epoch: 83 +[ Thu Sep 8 23:09:55 2022 ] Epoch 83 Curr Acc: (14194/18932)74.97% +[ Thu Sep 8 23:09:55 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 23:09:55 2022 ] Training epoch: 84 +[ Thu Sep 8 23:09:55 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:13:16 2022 ] Mean training loss: 0.0167. +[ Thu Sep 8 23:13:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:13:16 2022 ] Eval epoch: 84 +[ Thu Sep 8 23:15:47 2022 ] Epoch 84 Curr Acc: (14345/18932)75.77% +[ Thu Sep 8 23:15:47 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 23:15:47 2022 ] Training epoch: 85 +[ Thu Sep 8 23:15:47 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:19:08 2022 ] Mean training loss: 0.0150. 
+[ Thu Sep 8 23:19:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:19:08 2022 ] Eval epoch: 85 +[ Thu Sep 8 23:21:39 2022 ] Epoch 85 Curr Acc: (14421/18932)76.17% +[ Thu Sep 8 23:21:39 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 23:21:39 2022 ] Training epoch: 86 +[ Thu Sep 8 23:21:39 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:24:59 2022 ] Mean training loss: 0.0142. +[ Thu Sep 8 23:24:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:24:59 2022 ] Eval epoch: 86 +[ Thu Sep 8 23:27:30 2022 ] Epoch 86 Curr Acc: (14144/18932)74.71% +[ Thu Sep 8 23:27:30 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 23:27:30 2022 ] Training epoch: 87 +[ Thu Sep 8 23:27:30 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:30:51 2022 ] Mean training loss: 0.0148. +[ Thu Sep 8 23:30:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:30:51 2022 ] Eval epoch: 87 +[ Thu Sep 8 23:33:22 2022 ] Epoch 87 Curr Acc: (14208/18932)75.05% +[ Thu Sep 8 23:33:22 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 23:33:22 2022 ] Training epoch: 88 +[ Thu Sep 8 23:33:22 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:36:43 2022 ] Mean training loss: 0.0135. +[ Thu Sep 8 23:36:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:36:43 2022 ] Eval epoch: 88 +[ Thu Sep 8 23:39:14 2022 ] Epoch 88 Curr Acc: (14131/18932)74.64% +[ Thu Sep 8 23:39:14 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 23:39:14 2022 ] Training epoch: 89 +[ Thu Sep 8 23:39:14 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:42:35 2022 ] Mean training loss: 0.0133. 
+[ Thu Sep 8 23:42:35 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:42:35 2022 ] Eval epoch: 89 +[ Thu Sep 8 23:45:06 2022 ] Epoch 89 Curr Acc: (14084/18932)74.39% +[ Thu Sep 8 23:45:06 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 23:45:06 2022 ] Training epoch: 90 +[ Thu Sep 8 23:45:06 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:48:26 2022 ] Mean training loss: 0.0132. +[ Thu Sep 8 23:48:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:48:26 2022 ] Eval epoch: 90 +[ Thu Sep 8 23:50:57 2022 ] Epoch 90 Curr Acc: (14154/18932)74.76% +[ Thu Sep 8 23:50:57 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 23:50:57 2022 ] Training epoch: 91 +[ Thu Sep 8 23:50:57 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:54:18 2022 ] Mean training loss: 0.0122. +[ Thu Sep 8 23:54:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:54:18 2022 ] Eval epoch: 91 +[ Thu Sep 8 23:56:49 2022 ] Epoch 91 Curr Acc: (14041/18932)74.17% +[ Thu Sep 8 23:56:49 2022 ] Epoch 65 Best Acc 76.68% +[ Thu Sep 8 23:56:49 2022 ] Training epoch: 92 +[ Thu Sep 8 23:56:49 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:00:09 2022 ] Mean training loss: 0.0122. +[ Fri Sep 9 00:00:09 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:00:10 2022 ] Eval epoch: 92 +[ Fri Sep 9 00:02:41 2022 ] Epoch 92 Curr Acc: (14265/18932)75.35% +[ Fri Sep 9 00:02:41 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 00:02:41 2022 ] Training epoch: 93 +[ Fri Sep 9 00:02:41 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:06:02 2022 ] Mean training loss: 0.0119. 
+[ Fri Sep 9 00:06:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:06:02 2022 ] Eval epoch: 93 +[ Fri Sep 9 00:08:33 2022 ] Epoch 93 Curr Acc: (14222/18932)75.12% +[ Fri Sep 9 00:08:33 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 00:08:33 2022 ] Training epoch: 94 +[ Fri Sep 9 00:08:33 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:11:53 2022 ] Mean training loss: 0.0124. +[ Fri Sep 9 00:11:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:11:53 2022 ] Eval epoch: 94 +[ Fri Sep 9 00:14:25 2022 ] Epoch 94 Curr Acc: (14385/18932)75.98% +[ Fri Sep 9 00:14:25 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 00:14:25 2022 ] Training epoch: 95 +[ Fri Sep 9 00:14:25 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:17:46 2022 ] Mean training loss: 0.0126. +[ Fri Sep 9 00:17:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:17:46 2022 ] Eval epoch: 95 +[ Fri Sep 9 00:20:17 2022 ] Epoch 95 Curr Acc: (14324/18932)75.66% +[ Fri Sep 9 00:20:17 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 00:20:17 2022 ] Training epoch: 96 +[ Fri Sep 9 00:20:17 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:23:38 2022 ] Mean training loss: 0.0129. +[ Fri Sep 9 00:23:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:23:38 2022 ] Eval epoch: 96 +[ Fri Sep 9 00:26:09 2022 ] Epoch 96 Curr Acc: (14256/18932)75.30% +[ Fri Sep 9 00:26:09 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 00:26:09 2022 ] Training epoch: 97 +[ Fri Sep 9 00:26:09 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:29:30 2022 ] Mean training loss: 0.0121. 
+[ Fri Sep 9 00:29:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:29:30 2022 ] Eval epoch: 97 +[ Fri Sep 9 00:32:01 2022 ] Epoch 97 Curr Acc: (14370/18932)75.90% +[ Fri Sep 9 00:32:01 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 00:32:01 2022 ] Training epoch: 98 +[ Fri Sep 9 00:32:01 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:35:22 2022 ] Mean training loss: 0.0135. +[ Fri Sep 9 00:35:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:35:22 2022 ] Eval epoch: 98 +[ Fri Sep 9 00:37:53 2022 ] Epoch 98 Curr Acc: (14051/18932)74.22% +[ Fri Sep 9 00:37:53 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 00:37:53 2022 ] Training epoch: 99 +[ Fri Sep 9 00:37:53 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:41:14 2022 ] Mean training loss: 0.0122. +[ Fri Sep 9 00:41:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:41:14 2022 ] Eval epoch: 99 +[ Fri Sep 9 00:43:45 2022 ] Epoch 99 Curr Acc: (14095/18932)74.45% +[ Fri Sep 9 00:43:45 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 00:43:45 2022 ] Training epoch: 100 +[ Fri Sep 9 00:43:45 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:47:06 2022 ] Mean training loss: 0.0131. +[ Fri Sep 9 00:47:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:47:06 2022 ] Eval epoch: 100 +[ Fri Sep 9 00:49:37 2022 ] Epoch 100 Curr Acc: (14413/18932)76.13% +[ Fri Sep 9 00:49:37 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 00:49:37 2022 ] Training epoch: 101 +[ Fri Sep 9 00:49:37 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:52:58 2022 ] Mean training loss: 0.0128. 
+[ Fri Sep 9 00:52:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:52:58 2022 ] Eval epoch: 101 +[ Fri Sep 9 00:55:28 2022 ] Epoch 101 Curr Acc: (14202/18932)75.02% +[ Fri Sep 9 00:55:28 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 00:55:28 2022 ] Training epoch: 102 +[ Fri Sep 9 00:55:28 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:58:49 2022 ] Mean training loss: 0.0132. +[ Fri Sep 9 00:58:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:58:49 2022 ] Eval epoch: 102 +[ Fri Sep 9 01:01:20 2022 ] Epoch 102 Curr Acc: (14235/18932)75.19% +[ Fri Sep 9 01:01:20 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 01:01:20 2022 ] Training epoch: 103 +[ Fri Sep 9 01:01:20 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:04:41 2022 ] Mean training loss: 0.0117. +[ Fri Sep 9 01:04:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:04:41 2022 ] Eval epoch: 103 +[ Fri Sep 9 01:07:12 2022 ] Epoch 103 Curr Acc: (14005/18932)73.98% +[ Fri Sep 9 01:07:12 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 01:07:12 2022 ] Training epoch: 104 +[ Fri Sep 9 01:07:12 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:10:32 2022 ] Mean training loss: 0.0125. +[ Fri Sep 9 01:10:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:10:32 2022 ] Eval epoch: 104 +[ Fri Sep 9 01:13:02 2022 ] Epoch 104 Curr Acc: (14108/18932)74.52% +[ Fri Sep 9 01:13:02 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 01:13:02 2022 ] Training epoch: 105 +[ Fri Sep 9 01:13:02 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:16:22 2022 ] Mean training loss: 0.0115. 
+[ Fri Sep 9 01:16:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:16:22 2022 ] Eval epoch: 105 +[ Fri Sep 9 01:18:53 2022 ] Epoch 105 Curr Acc: (13605/18932)71.86% +[ Fri Sep 9 01:18:53 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 01:18:53 2022 ] Training epoch: 106 +[ Fri Sep 9 01:18:53 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:22:13 2022 ] Mean training loss: 0.0116. +[ Fri Sep 9 01:22:13 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:22:14 2022 ] Eval epoch: 106 +[ Fri Sep 9 01:24:44 2022 ] Epoch 106 Curr Acc: (14153/18932)74.76% +[ Fri Sep 9 01:24:44 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 01:24:44 2022 ] Training epoch: 107 +[ Fri Sep 9 01:24:44 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:28:05 2022 ] Mean training loss: 0.0120. +[ Fri Sep 9 01:28:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:28:05 2022 ] Eval epoch: 107 +[ Fri Sep 9 01:30:36 2022 ] Epoch 107 Curr Acc: (14435/18932)76.25% +[ Fri Sep 9 01:30:36 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 01:30:36 2022 ] Training epoch: 108 +[ Fri Sep 9 01:30:36 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:33:57 2022 ] Mean training loss: 0.0115. +[ Fri Sep 9 01:33:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:33:57 2022 ] Eval epoch: 108 +[ Fri Sep 9 01:36:28 2022 ] Epoch 108 Curr Acc: (14382/18932)75.97% +[ Fri Sep 9 01:36:28 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 01:36:28 2022 ] Training epoch: 109 +[ Fri Sep 9 01:36:28 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:39:48 2022 ] Mean training loss: 0.0123. 
+[ Fri Sep 9 01:39:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:39:48 2022 ] Eval epoch: 109 +[ Fri Sep 9 01:42:19 2022 ] Epoch 109 Curr Acc: (14321/18932)75.64% +[ Fri Sep 9 01:42:19 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 01:42:19 2022 ] Training epoch: 110 +[ Fri Sep 9 01:42:19 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:45:40 2022 ] Mean training loss: 0.0126. +[ Fri Sep 9 01:45:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:45:40 2022 ] Eval epoch: 110 +[ Fri Sep 9 01:48:10 2022 ] Epoch 110 Curr Acc: (14173/18932)74.86% +[ Fri Sep 9 01:48:10 2022 ] Epoch 65 Best Acc 76.68% +[ Fri Sep 9 01:48:10 2022 ] epoch: 65, best accuracy: 0.7667969575322205 +[ Fri Sep 9 01:48:10 2022 ] Experiment: ./work_dir/ntu/xview_j +[ Fri Sep 9 01:48:10 2022 ] # generator parameters: 2.896055 M. +[ Fri Sep 9 01:48:10 2022 ] Load weights from ./runs/ntu/xview_j/runs-64-64090.pt. +[ Fri Sep 9 01:48:10 2022 ] Eval epoch: 1 +[ Fri Sep 9 01:50:41 2022 ] Epoch 1 Curr Acc: (14517/18932)76.68% +[ Fri Sep 9 01:50:41 2022 ] Epoch 65 Best Acc 76.68% diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_jm/AEMST_GCN.py b/ckpt/Others/MST-GCN/ntu60_xview/xview_jm/AEMST_GCN.py new file mode 100644 index 0000000000000000000000000000000000000000..f134821ff516334960b79636ec389cf61a5715c9 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_jm/AEMST_GCN.py @@ -0,0 +1,168 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import math + +import sys +sys.path.append('../') +from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \ + MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \ + MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer +from model.activations import Activations +from model.utils import import_class, conv_branch_init, conv_init, bn_init +from model.attentions import 
Attention_Layer + +# import model.attentions + +__block_type__ = { + 'basic': (Basic_GCN_layer, Basic_TCN_layer), + 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer), + 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer), + 'ms': (MS_GCN_layer, MS_TCN_layer), + 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer), +} + + +class Model(nn.Module): + def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten, + **kwargs): + super(Model, self).__init__() + kwargs['act'] = Activations(kwargs['act']) + atten = None if atten == 'None' else atten + if graph is None: + raise ValueError() + else: + Graph = import_class(graph) + self.graph = Graph(**graph_args) + A = self.graph.A + + self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point) + + self.layers = nn.ModuleList() + + for i, block in enumerate(block_args): + if i == 0: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type='basic', + atten=None, **kwargs)) + else: + self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2], + kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type, + atten=atten, **kwargs)) + + self.gap = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(block_args[-1][1], num_class) + + for m in self.modules(): + if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer): + for mm in m.modules(): + if isinstance(mm, nn.Conv2d): + conv_branch_init(mm, self.graph.A.shape[0]) + if isinstance(mm, nn.BatchNorm2d): + bn_init(mm, 1) + elif isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + bn_init(m, 1) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, math.sqrt(2. 
/ num_class)) + + def forward(self, x): + N, C, T, V, M = x.size() + + x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T + x = self.data_bn(x) + x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V) + + for i, layer in enumerate(self.layers): + x = layer(x) + + features = x + + x = self.gap(x).view(N, M, -1).mean(dim=1) + x = self.fc(x) + + return features, x + + +class MST_GCN_block(nn.Module): + def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs): + super(MST_GCN_block, self).__init__() + self.atten = atten + self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A, + residual=residual, **kwargs) + self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride, + residual=residual, **kwargs) + if atten is not None: + self.att = Attention_Layer(out_channels, atten, **kwargs) + + def forward(self, x): + return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x)) + + +if __name__ == '__main__': + import sys + import time + + parts = [ + np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm + np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm + np.array([13, 14, 15, 16]) - 1, # left_leg + np.array([17, 18, 19, 20]) - 1, # right_leg + np.array([1, 2, 3, 4, 21]) - 1 # torso + ] + + warmup_iter = 3 + test_iter = 10 + sys.path.append('/home/chenzhan/mywork/MST-GCN/') + from thop import profile + basic_channels = 112 + cfgs = { + 'num_class': 2, + 'num_point': 25, + 'num_person': 1, + 'block_args': [[2, basic_channels, False, 1], + [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], + [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], + [basic_channels*2, 
basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]], + 'graph': 'graph.ntu_rgb_d.Graph', + 'graph_args': {'labeling_mode': 'spatial'}, + 'kernel_size': 9, + 'block_type': 'ms', + 'reduct_ratio': 2, + 'expand_ratio': 0, + 't_scale': 4, + 'layer_type': 'sep', + 'act': 'relu', + 's_scale': 4, + 'atten': 'stcja', + 'bias': True, + 'parts': parts + } + + model = Model(**cfgs) + + N, C, T, V, M = 4, 2, 16, 25, 1 + inputs = torch.rand(N, C, T, V, M) + + for i in range(warmup_iter + test_iter): + if i == warmup_iter: + start_time = time.time() + outputs = model(inputs) + end_time = time.time() + + total_time = end_time - start_time + print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format( + total_time, test_iter, total_time/test_iter/N)) + + print(outputs.size()) + + hereflops, params = profile(model, inputs=(inputs,), verbose=False) + print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N)) + print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6)) + + + + diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_jm/config.yaml b/ckpt/Others/MST-GCN/ntu60_xview/xview_jm/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b855035f1baa6e9a7bf2eff8b15f25cba2930305 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_jm/config.yaml @@ -0,0 +1,107 @@ +base_lr: 0.15 +batch_size: 8 +config: config/ntu/xview_jm.yaml +device: +- 0 +eval_interval: 5 +feeder: feeders.feeder.Feeder +ignore_weights: [] +local_rank: 0 +log_interval: 100 +model: model.AEMST_GCN.Model +model_args: + act: relu + atten: None + bias: true + block_args: + - - 3 + - 112 + - false + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 112 + - true + - 1 + - - 112 + - 224 + - true + - 2 + - - 224 + - 224 + - true + - 1 + - - 224 + - 224 + - true + - 1 + - - 224 + - 448 + - true + - 2 + - - 448 + - 448 + - true + - 1 + - - 448 + 
- 448 + - true + - 1 + block_type: ms + expand_ratio: 0 + graph: graph.ntu_rgb_d.Graph + graph_args: + labeling_mode: spatial + kernel_size: 9 + layer_type: basic + num_class: 60 + num_person: 2 + num_point: 25 + reduct_ratio: 2 + s_scale: 4 + t_scale: 4 +model_path: '' +model_saved_name: ./runs/ntu/xview_jm/runs +nesterov: true +num_epoch: 110 +num_worker: 32 +only_train_epoch: 0 +only_train_part: false +optimizer: SGD +phase: train +print_log: true +save_interval: 1 +save_score: true +seed: 1 +show_topk: +- 1 +- 5 +start_epoch: 0 +step: +- 50 +- 70 +- 90 +test_batch_size: 64 +test_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_data_joint_motion.npy + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_label.pkl +train_feeder_args: + data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_data_joint_motion.npy + debug: false + label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_label.pkl + normalization: false + random_choose: false + random_move: false + random_shift: false + window_size: -1 +warm_up_epoch: 10 +weight_decay: 0.0001 +weights: null +work_dir: ./work_dir/ntu/xview_jm diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_jm/epoch1_test_score.pkl b/ckpt/Others/MST-GCN/ntu60_xview/xview_jm/epoch1_test_score.pkl new file mode 100644 index 0000000000000000000000000000000000000000..14bd8e182ec6ab1689070c2642190b8cf8bb8b11 --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_jm/epoch1_test_score.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a0bf1d8e526c8897ce4e32c7408fdf2517962aa1ebcc5cb1939ea90d8482c40e +size 5718404 diff --git a/ckpt/Others/MST-GCN/ntu60_xview/xview_jm/log.txt b/ckpt/Others/MST-GCN/ntu60_xview/xview_jm/log.txt new file mode 100644 index 0000000000000000000000000000000000000000..e202a07678737cbb8cc78797736f4a471fa61a1f --- /dev/null +++ b/ckpt/Others/MST-GCN/ntu60_xview/xview_jm/log.txt @@ -0,0 +1,631 
@@ +[ Thu Sep 8 17:08:40 2022 ] # generator parameters: 2.896055 M. +[ Thu Sep 8 17:08:40 2022 ] Parameters: +{'work_dir': './work_dir/ntu/xview_jm', 'model_saved_name': './runs/ntu/xview_jm/runs', 'config': 'config/ntu/xview_jm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0} + +[ Thu Sep 8 17:08:40 2022 ] Training epoch: 1 +[ Thu Sep 8 17:08:40 2022 ] Learning rate: 
0.015 +[ Thu Sep 8 17:12:04 2022 ] Mean training loss: 3.0743. +[ Thu Sep 8 17:12:04 2022 ] Time consumption: [Data]01%, [Network]98% +[ Thu Sep 8 17:12:04 2022 ] Training epoch: 2 +[ Thu Sep 8 17:12:04 2022 ] Learning rate: 0.03 +[ Thu Sep 8 17:15:28 2022 ] Mean training loss: 2.2504. +[ Thu Sep 8 17:15:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:15:28 2022 ] Training epoch: 3 +[ Thu Sep 8 17:15:28 2022 ] Learning rate: 0.045 +[ Thu Sep 8 17:18:52 2022 ] Mean training loss: 1.8510. +[ Thu Sep 8 17:18:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:18:52 2022 ] Training epoch: 4 +[ Thu Sep 8 17:18:52 2022 ] Learning rate: 0.06 +[ Thu Sep 8 17:22:16 2022 ] Mean training loss: 1.6080. +[ Thu Sep 8 17:22:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:22:16 2022 ] Training epoch: 5 +[ Thu Sep 8 17:22:16 2022 ] Learning rate: 0.075 +[ Thu Sep 8 17:25:40 2022 ] Mean training loss: 1.4463. +[ Thu Sep 8 17:25:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:25:40 2022 ] Training epoch: 6 +[ Thu Sep 8 17:25:40 2022 ] Learning rate: 0.09 +[ Thu Sep 8 17:29:04 2022 ] Mean training loss: 1.3116. +[ Thu Sep 8 17:29:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:29:04 2022 ] Training epoch: 7 +[ Thu Sep 8 17:29:04 2022 ] Learning rate: 0.10500000000000001 +[ Thu Sep 8 17:32:28 2022 ] Mean training loss: 1.2506. +[ Thu Sep 8 17:32:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:32:28 2022 ] Training epoch: 8 +[ Thu Sep 8 17:32:28 2022 ] Learning rate: 0.12 +[ Thu Sep 8 17:35:52 2022 ] Mean training loss: 1.1850. +[ Thu Sep 8 17:35:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:35:52 2022 ] Training epoch: 9 +[ Thu Sep 8 17:35:52 2022 ] Learning rate: 0.13499999999999998 +[ Thu Sep 8 17:39:16 2022 ] Mean training loss: 1.1585. 
+[ Thu Sep 8 17:39:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:39:16 2022 ] Training epoch: 10 +[ Thu Sep 8 17:39:16 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:42:40 2022 ] Mean training loss: 1.1338. +[ Thu Sep 8 17:42:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:42:40 2022 ] Training epoch: 11 +[ Thu Sep 8 17:42:40 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:46:04 2022 ] Mean training loss: 1.0590. +[ Thu Sep 8 17:46:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:46:04 2022 ] Training epoch: 12 +[ Thu Sep 8 17:46:04 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:49:28 2022 ] Mean training loss: 1.0316. +[ Thu Sep 8 17:49:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:49:28 2022 ] Training epoch: 13 +[ Thu Sep 8 17:49:28 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:52:52 2022 ] Mean training loss: 0.9832. +[ Thu Sep 8 17:52:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:52:52 2022 ] Training epoch: 14 +[ Thu Sep 8 17:52:52 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:56:16 2022 ] Mean training loss: 0.9655. +[ Thu Sep 8 17:56:16 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:56:16 2022 ] Training epoch: 15 +[ Thu Sep 8 17:56:16 2022 ] Learning rate: 0.15 +[ Thu Sep 8 17:59:39 2022 ] Mean training loss: 0.9212. +[ Thu Sep 8 17:59:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 17:59:39 2022 ] Training epoch: 16 +[ Thu Sep 8 17:59:39 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:03:03 2022 ] Mean training loss: 0.9088. +[ Thu Sep 8 18:03:03 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:03:03 2022 ] Training epoch: 17 +[ Thu Sep 8 18:03:03 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:06:27 2022 ] Mean training loss: 0.8856. 
+[ Thu Sep 8 18:06:27 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:06:27 2022 ] Training epoch: 18 +[ Thu Sep 8 18:06:27 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:09:50 2022 ] Mean training loss: 0.8865. +[ Thu Sep 8 18:09:50 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:09:50 2022 ] Training epoch: 19 +[ Thu Sep 8 18:09:50 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:13:14 2022 ] Mean training loss: 0.8623. +[ Thu Sep 8 18:13:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:13:14 2022 ] Training epoch: 20 +[ Thu Sep 8 18:13:14 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:16:37 2022 ] Mean training loss: 0.8513. +[ Thu Sep 8 18:16:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:16:37 2022 ] Training epoch: 21 +[ Thu Sep 8 18:16:37 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:20:01 2022 ] Mean training loss: 0.8307. +[ Thu Sep 8 18:20:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:20:01 2022 ] Training epoch: 22 +[ Thu Sep 8 18:20:01 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:23:24 2022 ] Mean training loss: 0.8210. +[ Thu Sep 8 18:23:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:23:24 2022 ] Training epoch: 23 +[ Thu Sep 8 18:23:24 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:26:47 2022 ] Mean training loss: 0.7883. +[ Thu Sep 8 18:26:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:26:47 2022 ] Training epoch: 24 +[ Thu Sep 8 18:26:47 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:30:10 2022 ] Mean training loss: 0.7809. +[ Thu Sep 8 18:30:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:30:10 2022 ] Training epoch: 25 +[ Thu Sep 8 18:30:10 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:33:33 2022 ] Mean training loss: 0.7950. 
+[ Thu Sep 8 18:33:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:33:33 2022 ] Training epoch: 26 +[ Thu Sep 8 18:33:33 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:36:57 2022 ] Mean training loss: 0.7731. +[ Thu Sep 8 18:36:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:36:57 2022 ] Training epoch: 27 +[ Thu Sep 8 18:36:57 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:40:21 2022 ] Mean training loss: 0.7634. +[ Thu Sep 8 18:40:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:40:21 2022 ] Training epoch: 28 +[ Thu Sep 8 18:40:21 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:43:44 2022 ] Mean training loss: 0.7428. +[ Thu Sep 8 18:43:44 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:43:44 2022 ] Training epoch: 29 +[ Thu Sep 8 18:43:44 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:47:08 2022 ] Mean training loss: 0.7390. +[ Thu Sep 8 18:47:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:47:08 2022 ] Training epoch: 30 +[ Thu Sep 8 18:47:08 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:50:31 2022 ] Mean training loss: 0.7363. +[ Thu Sep 8 18:50:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:50:31 2022 ] Training epoch: 31 +[ Thu Sep 8 18:50:31 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:53:55 2022 ] Mean training loss: 0.7134. +[ Thu Sep 8 18:53:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:53:55 2022 ] Training epoch: 32 +[ Thu Sep 8 18:53:55 2022 ] Learning rate: 0.15 +[ Thu Sep 8 18:57:19 2022 ] Mean training loss: 0.7277. +[ Thu Sep 8 18:57:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 18:57:19 2022 ] Training epoch: 33 +[ Thu Sep 8 18:57:19 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:00:42 2022 ] Mean training loss: 0.6978. 
+[ Thu Sep 8 19:00:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:00:42 2022 ] Training epoch: 34 +[ Thu Sep 8 19:00:42 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:04:06 2022 ] Mean training loss: 0.7159. +[ Thu Sep 8 19:04:06 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:04:06 2022 ] Training epoch: 35 +[ Thu Sep 8 19:04:06 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:07:28 2022 ] Mean training loss: 0.7043. +[ Thu Sep 8 19:07:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:07:28 2022 ] Training epoch: 36 +[ Thu Sep 8 19:07:28 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:10:51 2022 ] Mean training loss: 0.7179. +[ Thu Sep 8 19:10:51 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:10:51 2022 ] Training epoch: 37 +[ Thu Sep 8 19:10:51 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:14:14 2022 ] Mean training loss: 0.6743. +[ Thu Sep 8 19:14:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:14:14 2022 ] Training epoch: 38 +[ Thu Sep 8 19:14:14 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:17:38 2022 ] Mean training loss: 0.6980. +[ Thu Sep 8 19:17:38 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:17:38 2022 ] Training epoch: 39 +[ Thu Sep 8 19:17:38 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:21:01 2022 ] Mean training loss: 0.6658. +[ Thu Sep 8 19:21:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:21:01 2022 ] Training epoch: 40 +[ Thu Sep 8 19:21:01 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:24:25 2022 ] Mean training loss: 0.6730. +[ Thu Sep 8 19:24:25 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:24:25 2022 ] Training epoch: 41 +[ Thu Sep 8 19:24:25 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:27:48 2022 ] Mean training loss: 0.6786. 
+[ Thu Sep 8 19:27:48 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:27:48 2022 ] Training epoch: 42 +[ Thu Sep 8 19:27:48 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:31:11 2022 ] Mean training loss: 0.6778. +[ Thu Sep 8 19:31:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:31:11 2022 ] Training epoch: 43 +[ Thu Sep 8 19:31:11 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:34:34 2022 ] Mean training loss: 0.6848. +[ Thu Sep 8 19:34:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:34:34 2022 ] Training epoch: 44 +[ Thu Sep 8 19:34:34 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:37:57 2022 ] Mean training loss: 0.6674. +[ Thu Sep 8 19:37:57 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:37:57 2022 ] Training epoch: 45 +[ Thu Sep 8 19:37:57 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:41:21 2022 ] Mean training loss: 0.6681. +[ Thu Sep 8 19:41:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:41:21 2022 ] Training epoch: 46 +[ Thu Sep 8 19:41:21 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:44:45 2022 ] Mean training loss: 0.6613. +[ Thu Sep 8 19:44:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:44:45 2022 ] Training epoch: 47 +[ Thu Sep 8 19:44:45 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:48:08 2022 ] Mean training loss: 0.6687. +[ Thu Sep 8 19:48:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:48:08 2022 ] Training epoch: 48 +[ Thu Sep 8 19:48:08 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:51:32 2022 ] Mean training loss: 0.6512. +[ Thu Sep 8 19:51:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:51:32 2022 ] Training epoch: 49 +[ Thu Sep 8 19:51:32 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:54:55 2022 ] Mean training loss: 0.6368. 
+[ Thu Sep 8 19:54:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:54:55 2022 ] Training epoch: 50 +[ Thu Sep 8 19:54:55 2022 ] Learning rate: 0.15 +[ Thu Sep 8 19:58:19 2022 ] Mean training loss: 0.6761. +[ Thu Sep 8 19:58:19 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 19:58:19 2022 ] Training epoch: 51 +[ Thu Sep 8 19:58:19 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:01:43 2022 ] Mean training loss: 0.2993. +[ Thu Sep 8 20:01:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:01:43 2022 ] Eval epoch: 51 +[ Thu Sep 8 20:04:20 2022 ] Epoch 51 Curr Acc: (13453/18932)71.06% +[ Thu Sep 8 20:04:20 2022 ] Epoch 51 Best Acc 71.06% +[ Thu Sep 8 20:04:20 2022 ] Training epoch: 52 +[ Thu Sep 8 20:04:20 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:07:42 2022 ] Mean training loss: 0.1897. +[ Thu Sep 8 20:07:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:07:42 2022 ] Eval epoch: 52 +[ Thu Sep 8 20:10:13 2022 ] Epoch 52 Curr Acc: (13630/18932)71.99% +[ Thu Sep 8 20:10:13 2022 ] Epoch 52 Best Acc 71.99% +[ Thu Sep 8 20:10:13 2022 ] Training epoch: 53 +[ Thu Sep 8 20:10:13 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:13:36 2022 ] Mean training loss: 0.1561. +[ Thu Sep 8 20:13:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:13:36 2022 ] Eval epoch: 53 +[ Thu Sep 8 20:16:07 2022 ] Epoch 53 Curr Acc: (13530/18932)71.47% +[ Thu Sep 8 20:16:07 2022 ] Epoch 52 Best Acc 71.99% +[ Thu Sep 8 20:16:07 2022 ] Training epoch: 54 +[ Thu Sep 8 20:16:07 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:19:29 2022 ] Mean training loss: 0.1210. 
+[ Thu Sep 8 20:19:29 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:19:29 2022 ] Eval epoch: 54 +[ Thu Sep 8 20:22:00 2022 ] Epoch 54 Curr Acc: (13702/18932)72.37% +[ Thu Sep 8 20:22:00 2022 ] Epoch 54 Best Acc 72.37% +[ Thu Sep 8 20:22:00 2022 ] Training epoch: 55 +[ Thu Sep 8 20:22:00 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:25:22 2022 ] Mean training loss: 0.1070. +[ Thu Sep 8 20:25:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:25:22 2022 ] Eval epoch: 55 +[ Thu Sep 8 20:27:53 2022 ] Epoch 55 Curr Acc: (13789/18932)72.83% +[ Thu Sep 8 20:27:53 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 20:27:53 2022 ] Training epoch: 56 +[ Thu Sep 8 20:27:53 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:31:15 2022 ] Mean training loss: 0.0841. +[ Thu Sep 8 20:31:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:31:15 2022 ] Eval epoch: 56 +[ Thu Sep 8 20:33:46 2022 ] Epoch 56 Curr Acc: (13674/18932)72.23% +[ Thu Sep 8 20:33:46 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 20:33:46 2022 ] Training epoch: 57 +[ Thu Sep 8 20:33:46 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:37:08 2022 ] Mean training loss: 0.0722. +[ Thu Sep 8 20:37:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:37:08 2022 ] Eval epoch: 57 +[ Thu Sep 8 20:39:39 2022 ] Epoch 57 Curr Acc: (13659/18932)72.15% +[ Thu Sep 8 20:39:39 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 20:39:39 2022 ] Training epoch: 58 +[ Thu Sep 8 20:39:39 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:43:02 2022 ] Mean training loss: 0.0622. +[ Thu Sep 8 20:43:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:43:02 2022 ] Eval epoch: 58 +[ Thu Sep 8 20:45:33 2022 ] Epoch 58 Curr Acc: (13777/18932)72.77% +[ Thu Sep 8 20:45:33 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 20:45:33 2022 ] Training epoch: 59 +[ Thu Sep 8 20:45:33 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:48:56 2022 ] Mean training loss: 0.0501. 
+[ Thu Sep 8 20:48:56 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:48:56 2022 ] Eval epoch: 59 +[ Thu Sep 8 20:51:27 2022 ] Epoch 59 Curr Acc: (13735/18932)72.55% +[ Thu Sep 8 20:51:27 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 20:51:27 2022 ] Training epoch: 60 +[ Thu Sep 8 20:51:27 2022 ] Learning rate: 0.015 +[ Thu Sep 8 20:54:49 2022 ] Mean training loss: 0.0453. +[ Thu Sep 8 20:54:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 20:54:49 2022 ] Eval epoch: 60 +[ Thu Sep 8 20:57:20 2022 ] Epoch 60 Curr Acc: (13665/18932)72.18% +[ Thu Sep 8 20:57:20 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 20:57:20 2022 ] Training epoch: 61 +[ Thu Sep 8 20:57:20 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:00:43 2022 ] Mean training loss: 0.0368. +[ Thu Sep 8 21:00:43 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:00:43 2022 ] Eval epoch: 61 +[ Thu Sep 8 21:03:14 2022 ] Epoch 61 Curr Acc: (13724/18932)72.49% +[ Thu Sep 8 21:03:14 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 21:03:14 2022 ] Training epoch: 62 +[ Thu Sep 8 21:03:14 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:06:37 2022 ] Mean training loss: 0.0351. +[ Thu Sep 8 21:06:37 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:06:37 2022 ] Eval epoch: 62 +[ Thu Sep 8 21:09:08 2022 ] Epoch 62 Curr Acc: (13694/18932)72.33% +[ Thu Sep 8 21:09:08 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 21:09:08 2022 ] Training epoch: 63 +[ Thu Sep 8 21:09:08 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:12:31 2022 ] Mean training loss: 0.0325. +[ Thu Sep 8 21:12:31 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:12:31 2022 ] Eval epoch: 63 +[ Thu Sep 8 21:15:02 2022 ] Epoch 63 Curr Acc: (13489/18932)71.25% +[ Thu Sep 8 21:15:02 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 21:15:02 2022 ] Training epoch: 64 +[ Thu Sep 8 21:15:02 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:18:24 2022 ] Mean training loss: 0.0279. 
+[ Thu Sep 8 21:18:24 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:18:25 2022 ] Eval epoch: 64 +[ Thu Sep 8 21:20:56 2022 ] Epoch 64 Curr Acc: (12592/18932)66.51% +[ Thu Sep 8 21:20:56 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 21:20:56 2022 ] Training epoch: 65 +[ Thu Sep 8 21:20:56 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:24:18 2022 ] Mean training loss: 0.0304. +[ Thu Sep 8 21:24:18 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:24:19 2022 ] Eval epoch: 65 +[ Thu Sep 8 21:26:50 2022 ] Epoch 65 Curr Acc: (13522/18932)71.42% +[ Thu Sep 8 21:26:50 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 21:26:50 2022 ] Training epoch: 66 +[ Thu Sep 8 21:26:50 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:30:12 2022 ] Mean training loss: 0.0310. +[ Thu Sep 8 21:30:12 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:30:12 2022 ] Eval epoch: 66 +[ Thu Sep 8 21:32:43 2022 ] Epoch 66 Curr Acc: (13558/18932)71.61% +[ Thu Sep 8 21:32:43 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 21:32:43 2022 ] Training epoch: 67 +[ Thu Sep 8 21:32:43 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:36:05 2022 ] Mean training loss: 0.0250. +[ Thu Sep 8 21:36:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:36:05 2022 ] Eval epoch: 67 +[ Thu Sep 8 21:38:36 2022 ] Epoch 67 Curr Acc: (13715/18932)72.44% +[ Thu Sep 8 21:38:36 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 21:38:37 2022 ] Training epoch: 68 +[ Thu Sep 8 21:38:37 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:41:59 2022 ] Mean training loss: 0.0244. +[ Thu Sep 8 21:41:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:41:59 2022 ] Eval epoch: 68 +[ Thu Sep 8 21:44:30 2022 ] Epoch 68 Curr Acc: (13722/18932)72.48% +[ Thu Sep 8 21:44:30 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 21:44:30 2022 ] Training epoch: 69 +[ Thu Sep 8 21:44:30 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:47:53 2022 ] Mean training loss: 0.0217. 
+[ Thu Sep 8 21:47:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:47:53 2022 ] Eval epoch: 69 +[ Thu Sep 8 21:50:24 2022 ] Epoch 69 Curr Acc: (13609/18932)71.88% +[ Thu Sep 8 21:50:24 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 21:50:24 2022 ] Training epoch: 70 +[ Thu Sep 8 21:50:24 2022 ] Learning rate: 0.015 +[ Thu Sep 8 21:53:47 2022 ] Mean training loss: 0.0195. +[ Thu Sep 8 21:53:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:53:47 2022 ] Eval epoch: 70 +[ Thu Sep 8 21:56:18 2022 ] Epoch 70 Curr Acc: (13618/18932)71.93% +[ Thu Sep 8 21:56:18 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 21:56:18 2022 ] Training epoch: 71 +[ Thu Sep 8 21:56:18 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 21:59:40 2022 ] Mean training loss: 0.0156. +[ Thu Sep 8 21:59:40 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 21:59:40 2022 ] Eval epoch: 71 +[ Thu Sep 8 22:02:11 2022 ] Epoch 71 Curr Acc: (13773/18932)72.75% +[ Thu Sep 8 22:02:11 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 22:02:11 2022 ] Training epoch: 72 +[ Thu Sep 8 22:02:11 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:05:34 2022 ] Mean training loss: 0.0127. +[ Thu Sep 8 22:05:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:05:34 2022 ] Eval epoch: 72 +[ Thu Sep 8 22:08:05 2022 ] Epoch 72 Curr Acc: (13673/18932)72.22% +[ Thu Sep 8 22:08:05 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 22:08:05 2022 ] Training epoch: 73 +[ Thu Sep 8 22:08:05 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:11:28 2022 ] Mean training loss: 0.0124. 
+[ Thu Sep 8 22:11:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:11:28 2022 ] Eval epoch: 73 +[ Thu Sep 8 22:13:59 2022 ] Epoch 73 Curr Acc: (13763/18932)72.70% +[ Thu Sep 8 22:13:59 2022 ] Epoch 55 Best Acc 72.83% +[ Thu Sep 8 22:13:59 2022 ] Training epoch: 74 +[ Thu Sep 8 22:13:59 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:17:21 2022 ] Mean training loss: 0.0125. +[ Thu Sep 8 22:17:21 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:17:21 2022 ] Eval epoch: 74 +[ Thu Sep 8 22:19:52 2022 ] Epoch 74 Curr Acc: (13828/18932)73.04% +[ Thu Sep 8 22:19:52 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 22:19:52 2022 ] Training epoch: 75 +[ Thu Sep 8 22:19:52 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:23:15 2022 ] Mean training loss: 0.0116. +[ Thu Sep 8 22:23:15 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:23:15 2022 ] Eval epoch: 75 +[ Thu Sep 8 22:25:46 2022 ] Epoch 75 Curr Acc: (13781/18932)72.79% +[ Thu Sep 8 22:25:46 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 22:25:46 2022 ] Training epoch: 76 +[ Thu Sep 8 22:25:46 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:29:08 2022 ] Mean training loss: 0.0124. +[ Thu Sep 8 22:29:08 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:29:08 2022 ] Eval epoch: 76 +[ Thu Sep 8 22:31:39 2022 ] Epoch 76 Curr Acc: (13711/18932)72.42% +[ Thu Sep 8 22:31:39 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 22:31:39 2022 ] Training epoch: 77 +[ Thu Sep 8 22:31:39 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:35:02 2022 ] Mean training loss: 0.0117. 
+[ Thu Sep 8 22:35:02 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:35:02 2022 ] Eval epoch: 77 +[ Thu Sep 8 22:37:33 2022 ] Epoch 77 Curr Acc: (13787/18932)72.82% +[ Thu Sep 8 22:37:33 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 22:37:33 2022 ] Training epoch: 78 +[ Thu Sep 8 22:37:33 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:40:55 2022 ] Mean training loss: 0.0106. +[ Thu Sep 8 22:40:55 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:40:55 2022 ] Eval epoch: 78 +[ Thu Sep 8 22:43:26 2022 ] Epoch 78 Curr Acc: (13663/18932)72.17% +[ Thu Sep 8 22:43:26 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 22:43:26 2022 ] Training epoch: 79 +[ Thu Sep 8 22:43:26 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:46:49 2022 ] Mean training loss: 0.0099. +[ Thu Sep 8 22:46:49 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:46:49 2022 ] Eval epoch: 79 +[ Thu Sep 8 22:49:20 2022 ] Epoch 79 Curr Acc: (13589/18932)71.78% +[ Thu Sep 8 22:49:20 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 22:49:20 2022 ] Training epoch: 80 +[ Thu Sep 8 22:49:20 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:52:42 2022 ] Mean training loss: 0.0107. +[ Thu Sep 8 22:52:42 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:52:42 2022 ] Eval epoch: 80 +[ Thu Sep 8 22:55:13 2022 ] Epoch 80 Curr Acc: (13709/18932)72.41% +[ Thu Sep 8 22:55:13 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 22:55:14 2022 ] Training epoch: 81 +[ Thu Sep 8 22:55:14 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 22:58:36 2022 ] Mean training loss: 0.0097. 
+[ Thu Sep 8 22:58:36 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 22:58:36 2022 ] Eval epoch: 81 +[ Thu Sep 8 23:01:07 2022 ] Epoch 81 Curr Acc: (12847/18932)67.86% +[ Thu Sep 8 23:01:07 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 23:01:07 2022 ] Training epoch: 82 +[ Thu Sep 8 23:01:07 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:04:30 2022 ] Mean training loss: 0.0098. +[ Thu Sep 8 23:04:30 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:04:30 2022 ] Eval epoch: 82 +[ Thu Sep 8 23:07:01 2022 ] Epoch 82 Curr Acc: (13667/18932)72.19% +[ Thu Sep 8 23:07:01 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 23:07:01 2022 ] Training epoch: 83 +[ Thu Sep 8 23:07:01 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:10:23 2022 ] Mean training loss: 0.0091. +[ Thu Sep 8 23:10:23 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:10:23 2022 ] Eval epoch: 83 +[ Thu Sep 8 23:12:54 2022 ] Epoch 83 Curr Acc: (12600/18932)66.55% +[ Thu Sep 8 23:12:54 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 23:12:54 2022 ] Training epoch: 84 +[ Thu Sep 8 23:12:54 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:16:17 2022 ] Mean training loss: 0.0112. +[ Thu Sep 8 23:16:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:16:17 2022 ] Eval epoch: 84 +[ Thu Sep 8 23:18:48 2022 ] Epoch 84 Curr Acc: (13746/18932)72.61% +[ Thu Sep 8 23:18:48 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 23:18:48 2022 ] Training epoch: 85 +[ Thu Sep 8 23:18:48 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:22:11 2022 ] Mean training loss: 0.0122. 
+[ Thu Sep 8 23:22:11 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:22:11 2022 ] Eval epoch: 85 +[ Thu Sep 8 23:24:42 2022 ] Epoch 85 Curr Acc: (12834/18932)67.79% +[ Thu Sep 8 23:24:42 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 23:24:42 2022 ] Training epoch: 86 +[ Thu Sep 8 23:24:42 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:28:05 2022 ] Mean training loss: 0.0110. +[ Thu Sep 8 23:28:05 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:28:05 2022 ] Eval epoch: 86 +[ Thu Sep 8 23:30:36 2022 ] Epoch 86 Curr Acc: (13702/18932)72.37% +[ Thu Sep 8 23:30:36 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 23:30:36 2022 ] Training epoch: 87 +[ Thu Sep 8 23:30:36 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:33:59 2022 ] Mean training loss: 0.0105. +[ Thu Sep 8 23:33:59 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:33:59 2022 ] Eval epoch: 87 +[ Thu Sep 8 23:36:30 2022 ] Epoch 87 Curr Acc: (13796/18932)72.87% +[ Thu Sep 8 23:36:30 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 23:36:30 2022 ] Training epoch: 88 +[ Thu Sep 8 23:36:30 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:39:53 2022 ] Mean training loss: 0.0091. +[ Thu Sep 8 23:39:53 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:39:53 2022 ] Eval epoch: 88 +[ Thu Sep 8 23:42:24 2022 ] Epoch 88 Curr Acc: (13778/18932)72.78% +[ Thu Sep 8 23:42:24 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 23:42:24 2022 ] Training epoch: 89 +[ Thu Sep 8 23:42:24 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:45:46 2022 ] Mean training loss: 0.0087. 
+[ Thu Sep 8 23:45:46 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:45:47 2022 ] Eval epoch: 89 +[ Thu Sep 8 23:48:18 2022 ] Epoch 89 Curr Acc: (13708/18932)72.41% +[ Thu Sep 8 23:48:18 2022 ] Epoch 74 Best Acc 73.04% +[ Thu Sep 8 23:48:18 2022 ] Training epoch: 90 +[ Thu Sep 8 23:48:18 2022 ] Learning rate: 0.0015000000000000002 +[ Thu Sep 8 23:51:41 2022 ] Mean training loss: 0.0105. +[ Thu Sep 8 23:51:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:51:41 2022 ] Eval epoch: 90 +[ Thu Sep 8 23:54:12 2022 ] Epoch 90 Curr Acc: (13832/18932)73.06% +[ Thu Sep 8 23:54:12 2022 ] Epoch 90 Best Acc 73.06% +[ Thu Sep 8 23:54:12 2022 ] Training epoch: 91 +[ Thu Sep 8 23:54:12 2022 ] Learning rate: 0.00015000000000000004 +[ Thu Sep 8 23:57:34 2022 ] Mean training loss: 0.0093. +[ Thu Sep 8 23:57:34 2022 ] Time consumption: [Data]01%, [Network]99% +[ Thu Sep 8 23:57:35 2022 ] Eval epoch: 91 +[ Fri Sep 9 00:00:06 2022 ] Epoch 91 Curr Acc: (13636/18932)72.03% +[ Fri Sep 9 00:00:06 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 00:00:06 2022 ] Training epoch: 92 +[ Fri Sep 9 00:00:06 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:03:28 2022 ] Mean training loss: 0.0102. +[ Fri Sep 9 00:03:28 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:03:28 2022 ] Eval epoch: 92 +[ Fri Sep 9 00:05:59 2022 ] Epoch 92 Curr Acc: (13718/18932)72.46% +[ Fri Sep 9 00:05:59 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 00:05:59 2022 ] Training epoch: 93 +[ Fri Sep 9 00:05:59 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:09:22 2022 ] Mean training loss: 0.0099. 
+[ Fri Sep 9 00:09:22 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:09:22 2022 ] Eval epoch: 93 +[ Fri Sep 9 00:11:54 2022 ] Epoch 93 Curr Acc: (13814/18932)72.97% +[ Fri Sep 9 00:11:54 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 00:11:54 2022 ] Training epoch: 94 +[ Fri Sep 9 00:11:54 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:15:17 2022 ] Mean training loss: 0.0090. +[ Fri Sep 9 00:15:17 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:15:17 2022 ] Eval epoch: 94 +[ Fri Sep 9 00:17:48 2022 ] Epoch 94 Curr Acc: (13773/18932)72.75% +[ Fri Sep 9 00:17:48 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 00:17:48 2022 ] Training epoch: 95 +[ Fri Sep 9 00:17:48 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:21:10 2022 ] Mean training loss: 0.0091. +[ Fri Sep 9 00:21:10 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:21:10 2022 ] Eval epoch: 95 +[ Fri Sep 9 00:23:41 2022 ] Epoch 95 Curr Acc: (13715/18932)72.44% +[ Fri Sep 9 00:23:41 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 00:23:41 2022 ] Training epoch: 96 +[ Fri Sep 9 00:23:41 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:27:04 2022 ] Mean training loss: 0.0088. +[ Fri Sep 9 00:27:04 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:27:04 2022 ] Eval epoch: 96 +[ Fri Sep 9 00:29:35 2022 ] Epoch 96 Curr Acc: (13690/18932)72.31% +[ Fri Sep 9 00:29:35 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 00:29:35 2022 ] Training epoch: 97 +[ Fri Sep 9 00:29:35 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:32:58 2022 ] Mean training loss: 0.0081. 
+[ Fri Sep 9 00:32:58 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:32:58 2022 ] Eval epoch: 97 +[ Fri Sep 9 00:35:29 2022 ] Epoch 97 Curr Acc: (13672/18932)72.22% +[ Fri Sep 9 00:35:29 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 00:35:29 2022 ] Training epoch: 98 +[ Fri Sep 9 00:35:29 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:38:52 2022 ] Mean training loss: 0.0101. +[ Fri Sep 9 00:38:52 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:38:52 2022 ] Eval epoch: 98 +[ Fri Sep 9 00:41:23 2022 ] Epoch 98 Curr Acc: (13782/18932)72.80% +[ Fri Sep 9 00:41:23 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 00:41:23 2022 ] Training epoch: 99 +[ Fri Sep 9 00:41:23 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:44:45 2022 ] Mean training loss: 0.0097. +[ Fri Sep 9 00:44:45 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:44:45 2022 ] Eval epoch: 99 +[ Fri Sep 9 00:47:16 2022 ] Epoch 99 Curr Acc: (12581/18932)66.45% +[ Fri Sep 9 00:47:16 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 00:47:16 2022 ] Training epoch: 100 +[ Fri Sep 9 00:47:16 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:50:39 2022 ] Mean training loss: 0.0111. +[ Fri Sep 9 00:50:39 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:50:39 2022 ] Eval epoch: 100 +[ Fri Sep 9 00:53:10 2022 ] Epoch 100 Curr Acc: (13734/18932)72.54% +[ Fri Sep 9 00:53:10 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 00:53:10 2022 ] Training epoch: 101 +[ Fri Sep 9 00:53:10 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 00:56:32 2022 ] Mean training loss: 0.0104. 
+[ Fri Sep 9 00:56:32 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 00:56:32 2022 ] Eval epoch: 101 +[ Fri Sep 9 00:59:03 2022 ] Epoch 101 Curr Acc: (13797/18932)72.88% +[ Fri Sep 9 00:59:03 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 00:59:03 2022 ] Training epoch: 102 +[ Fri Sep 9 00:59:03 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:02:26 2022 ] Mean training loss: 0.0099. +[ Fri Sep 9 01:02:26 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:02:26 2022 ] Eval epoch: 102 +[ Fri Sep 9 01:04:57 2022 ] Epoch 102 Curr Acc: (13223/18932)69.84% +[ Fri Sep 9 01:04:57 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 01:04:57 2022 ] Training epoch: 103 +[ Fri Sep 9 01:04:57 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:08:20 2022 ] Mean training loss: 0.0080. +[ Fri Sep 9 01:08:20 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:08:20 2022 ] Eval epoch: 103 +[ Fri Sep 9 01:10:51 2022 ] Epoch 103 Curr Acc: (13668/18932)72.20% +[ Fri Sep 9 01:10:51 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 01:10:51 2022 ] Training epoch: 104 +[ Fri Sep 9 01:10:51 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:14:14 2022 ] Mean training loss: 0.0090. +[ Fri Sep 9 01:14:14 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:14:14 2022 ] Eval epoch: 104 +[ Fri Sep 9 01:16:45 2022 ] Epoch 104 Curr Acc: (13464/18932)71.12% +[ Fri Sep 9 01:16:45 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 01:16:45 2022 ] Training epoch: 105 +[ Fri Sep 9 01:16:45 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:20:07 2022 ] Mean training loss: 0.0097. 
+[ Fri Sep 9 01:20:07 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:20:07 2022 ] Eval epoch: 105 +[ Fri Sep 9 01:22:38 2022 ] Epoch 105 Curr Acc: (13599/18932)71.83% +[ Fri Sep 9 01:22:38 2022 ] Epoch 90 Best Acc 73.06% +[ Fri Sep 9 01:22:38 2022 ] Training epoch: 106 +[ Fri Sep 9 01:22:38 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:26:01 2022 ] Mean training loss: 0.0113. +[ Fri Sep 9 01:26:01 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:26:01 2022 ] Eval epoch: 106 +[ Fri Sep 9 01:28:32 2022 ] Epoch 106 Curr Acc: (13852/18932)73.17% +[ Fri Sep 9 01:28:32 2022 ] Epoch 106 Best Acc 73.17% +[ Fri Sep 9 01:28:32 2022 ] Training epoch: 107 +[ Fri Sep 9 01:28:32 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:31:54 2022 ] Mean training loss: 0.0095. +[ Fri Sep 9 01:31:54 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:31:54 2022 ] Eval epoch: 107 +[ Fri Sep 9 01:34:25 2022 ] Epoch 107 Curr Acc: (13672/18932)72.22% +[ Fri Sep 9 01:34:25 2022 ] Epoch 106 Best Acc 73.17% +[ Fri Sep 9 01:34:25 2022 ] Training epoch: 108 +[ Fri Sep 9 01:34:25 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:37:47 2022 ] Mean training loss: 0.0106. +[ Fri Sep 9 01:37:47 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:37:47 2022 ] Eval epoch: 108 +[ Fri Sep 9 01:40:18 2022 ] Epoch 108 Curr Acc: (13743/18932)72.59% +[ Fri Sep 9 01:40:18 2022 ] Epoch 106 Best Acc 73.17% +[ Fri Sep 9 01:40:18 2022 ] Training epoch: 109 +[ Fri Sep 9 01:40:18 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:43:41 2022 ] Mean training loss: 0.0092. 
+[ Fri Sep 9 01:43:41 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:43:41 2022 ] Eval epoch: 109 +[ Fri Sep 9 01:46:11 2022 ] Epoch 109 Curr Acc: (13849/18932)73.15% +[ Fri Sep 9 01:46:11 2022 ] Epoch 106 Best Acc 73.17% +[ Fri Sep 9 01:46:11 2022 ] Training epoch: 110 +[ Fri Sep 9 01:46:11 2022 ] Learning rate: 0.00015000000000000004 +[ Fri Sep 9 01:49:33 2022 ] Mean training loss: 0.0087. +[ Fri Sep 9 01:49:33 2022 ] Time consumption: [Data]01%, [Network]99% +[ Fri Sep 9 01:49:33 2022 ] Eval epoch: 110 +[ Fri Sep 9 01:52:04 2022 ] Epoch 110 Curr Acc: (13667/18932)72.19% +[ Fri Sep 9 01:52:04 2022 ] Epoch 106 Best Acc 73.17% +[ Fri Sep 9 01:52:04 2022 ] epoch: 106, best accuracy: 0.7316712444538348 +[ Fri Sep 9 01:52:04 2022 ] Experiment: ./work_dir/ntu/xview_jm +[ Fri Sep 9 01:52:04 2022 ] # generator parameters: 2.896055 M. +[ Fri Sep 9 01:52:04 2022 ] Load weights from ./runs/ntu/xview_jm/runs-105-104516.pt. +[ Fri Sep 9 01:52:04 2022 ] Eval epoch: 1 +[ Fri Sep 9 01:54:35 2022 ] Epoch 1 Curr Acc: (13852/18932)73.17% +[ Fri Sep 9 01:54:35 2022 ] Epoch 106 Best Acc 73.17%