20240929_151820.log.json
{"env_info": "sys.platform: linux\nPython: 3.10.12 (main, Jun 11 2023, 05:26:28) [GCC 11.4.0]\nCUDA available: True\nGPU 0: NVIDIA RTX 2000 Ada Generation\nCUDA_HOME: /usr/local/cuda\nNVCC: Cuda compilation tools, release 11.8, V11.8.89\nGCC: x86_64-linux-gnu-gcc (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nPyTorch: 2.0.1+cu118\nPyTorch compiling details: PyTorch built with:\n - GCC 9.3\n - C++ Version: 201703\n - Intel(R) oneAPI Math Kernel Library Version 2022.2-Product Build 20220804 for Intel(R) 64 architecture applications\n - Intel(R) MKL-DNN v2.7.3 (Git Hash 6dbeffbae1f23cbbeae17adb7b5b13f1f37c080e)\n - OpenMP 201511 (a.k.a. OpenMP 4.5)\n - LAPACK is enabled (usually provided by MKL)\n - NNPACK is enabled\n - CPU capability usage: AVX2\n - CUDA Runtime 11.8\n - NVCC architecture flags: -gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_90,code=sm_90\n - CuDNN 8.7\n - Magma 2.6.1\n - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.8, CUDNN_VERSION=8.7.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -Wno-deprecated -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wunused-local-typedefs -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-error=deprecated-declarations -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=redundant-decls -Wno-error=old-style-cast -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Werror=cast-function-type -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_DISABLE_GPU_ASSERTS=ON, TORCH_VERSION=2.0.1, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=1, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF, \n\nTorchVision: 0.15.2+cu118\nOpenCV: 4.10.0\nMMCV: 1.5.0\nMMCV Compiler: GCC 11.4\nMMCV CUDA Compiler: 11.8\npyskl: 0.1.0+274a397", "seed": 1690257781, "config_name": "config_slow.py", "work_dir": "test_slow_mp_val"}
{"mode": "train", "epoch": 1, "iter": 20, "lr": 0.39931, "memory": 2132, "data_time": 0.14263, "top1_acc": 0.18438, "top5_acc": 0.825, "loss_cls": 1.81591, "loss": 1.81591, "grad_norm": 0.23148, "time": 0.36543}
{"mode": "train", "epoch": 2, "iter": 20, "lr": 0.39545, "memory": 2132, "data_time": 0.10976, "top1_acc": 0.19062, "top5_acc": 0.84062, "loss_cls": 1.85754, "loss": 1.85754, "grad_norm": 0.25214, "time": 0.30926}
{"mode": "train", "epoch": 3, "iter": 20, "lr": 0.38824, "memory": 2132, "data_time": 0.11059, "top1_acc": 0.20312, "top5_acc": 0.84062, "loss_cls": 1.8586, "loss": 1.8586, "grad_norm": 0.25119, "time": 0.30975}
{"mode": "train", "epoch": 4, "iter": 20, "lr": 0.3778, "memory": 2132, "data_time": 0.11014, "top1_acc": 0.20625, "top5_acc": 0.84062, "loss_cls": 1.85676, "loss": 1.85676, "grad_norm": 0.25048, "time": 0.30918}
{"mode": "train", "epoch": 5, "iter": 20, "lr": 0.36433, "memory": 2132, "data_time": 0.1121, "top1_acc": 0.20312, "top5_acc": 0.85625, "loss_cls": 1.85799, "loss": 1.85799, "grad_norm": 0.25179, "time": 0.31155}
{"mode": "train", "epoch": 6, "iter": 20, "lr": 0.34804, "memory": 2132, "data_time": 0.11014, "top1_acc": 0.19375, "top5_acc": 0.84688, "loss_cls": 1.86055, "loss": 1.86055, "grad_norm": 0.25398, "time": 0.30914}
{"mode": "train", "epoch": 7, "iter": 20, "lr": 0.32922, "memory": 2132, "data_time": 0.1104, "top1_acc": 0.1875, "top5_acc": 0.85312, "loss_cls": 1.86378, "loss": 1.86378, "grad_norm": 0.25667, "time": 0.30978}
{"mode": "train", "epoch": 8, "iter": 20, "lr": 0.30819, "memory": 2132, "data_time": 0.11129, "top1_acc": 0.1875, "top5_acc": 0.85625, "loss_cls": 1.86731, "loss": 1.86731, "grad_norm": 0.25963, "time": 0.31019}
{"mode": "train", "epoch": 9, "iter": 20, "lr": 0.28531, "memory": 2132, "data_time": 0.11038, "top1_acc": 0.1875, "top5_acc": 0.86875, "loss_cls": 1.87045, "loss": 1.87045, "grad_norm": 0.26239, "time": 0.31029}
{"mode": "train", "epoch": 10, "iter": 20, "lr": 0.26097, "memory": 2132, "data_time": 0.11249, "top1_acc": 0.1625, "top5_acc": 0.8625, "loss_cls": 1.8721, "loss": 1.8721, "grad_norm": 0.26417, "time": 0.31146}
{"mode": "train", "epoch": 11, "iter": 20, "lr": 0.23559, "memory": 2132, "data_time": 0.11, "top1_acc": 0.15937, "top5_acc": 0.85312, "loss_cls": 1.8708, "loss": 1.8708, "grad_norm": 0.26399, "time": 0.30959}
{"mode": "train", "epoch": 12, "iter": 20, "lr": 0.2096, "memory": 2132, "data_time": 0.11066, "top1_acc": 0.14062, "top5_acc": 0.83438, "loss_cls": 1.8652, "loss": 1.8652, "grad_norm": 0.26122, "time": 0.3111}
{"mode": "train", "epoch": 13, "iter": 20, "lr": 0.18344, "memory": 2132, "data_time": 0.10929, "top1_acc": 0.1375, "top5_acc": 0.82188, "loss_cls": 1.855, "loss": 1.855, "grad_norm": 0.25579, "time": 0.30874}
{"mode": "train", "epoch": 14, "iter": 20, "lr": 0.15756, "memory": 2132, "data_time": 0.10993, "top1_acc": 0.12812, "top5_acc": 0.83125, "loss_cls": 1.8418, "loss": 1.8418, "grad_norm": 0.24797, "time": 0.30947}
{"mode": "train", "epoch": 15, "iter": 20, "lr": 0.13242, "memory": 2132, "data_time": 0.11153, "top1_acc": 0.12812, "top5_acc": 0.80938, "loss_cls": 1.82856, "loss": 1.82856, "grad_norm": 0.23943, "time": 0.31102}
{"mode": "train", "epoch": 16, "iter": 20, "lr": 0.10843, "memory": 2132, "data_time": 0.11191, "top1_acc": 0.1375, "top5_acc": 0.80938, "loss_cls": 1.81778, "loss": 1.81778, "grad_norm": 0.23209, "time": 0.3108}
{"mode": "train", "epoch": 17, "iter": 20, "lr": 0.086, "memory": 2132, "data_time": 0.11007, "top1_acc": 0.12812, "top5_acc": 0.79375, "loss_cls": 1.81006, "loss": 1.81006, "grad_norm": 0.22664, "time": 0.30925}
{"mode": "train", "epoch": 18, "iter": 20, "lr": 0.06553, "memory": 2132, "data_time": 0.11029, "top1_acc": 0.125, "top5_acc": 0.79062, "loss_cls": 1.80451, "loss": 1.80451, "grad_norm": 0.22263, "time": 0.31024}
{"mode": "train", "epoch": 19, "iter": 20, "lr": 0.04735, "memory": 2132, "data_time": 0.10857, "top1_acc": 0.125, "top5_acc": 0.79062, "loss_cls": 1.80042, "loss": 1.80042, "grad_norm": 0.21963, "time": 0.30836}
{"mode": "train", "epoch": 20, "iter": 20, "lr": 0.03179, "memory": 2132, "data_time": 0.10959, "top1_acc": 0.125, "top5_acc": 0.79062, "loss_cls": 1.79738, "loss": 1.79738, "grad_norm": 0.21739, "time": 0.30909}
{"mode": "train", "epoch": 21, "iter": 20, "lr": 0.01911, "memory": 2132, "data_time": 0.11105, "top1_acc": 0.125, "top5_acc": 0.79062, "loss_cls": 1.79515, "loss": 1.79515, "grad_norm": 0.21573, "time": 0.31099}
{"mode": "train", "epoch": 22, "iter": 20, "lr": 0.00952, "memory": 2132, "data_time": 0.11014, "top1_acc": 0.11875, "top5_acc": 0.78125, "loss_cls": 1.79355, "loss": 1.79355, "grad_norm": 0.21453, "time": 0.30944}
{"mode": "train", "epoch": 23, "iter": 20, "lr": 0.00319, "memory": 2132, "data_time": 0.1106, "top1_acc": 0.11875, "top5_acc": 0.78125, "loss_cls": 1.79249, "loss": 1.79249, "grad_norm": 0.21373, "time": 0.31002}
{"mode": "train", "epoch": 24, "iter": 20, "lr": 0.00023, "memory": 2132, "data_time": 0.1095, "top1_acc": 0.11562, "top5_acc": 0.8, "loss_cls": 1.7919, "loss": 1.7919, "grad_norm": 0.21329, "time": 0.30875}