utils.py (forked from voidism/EAR)
import os
from datetime import datetime

import torch
from torch.utils.tensorboard import SummaryWriter


class Recorder:
    """Lightweight experiment logger: mirrors console output to a run
    directory under ./cache/ and streams scalars to TensorBoard."""

    def __init__(self, id, log=True, name="name"):
        self.log = log
        date = datetime.now().strftime("%y-%m-%d")
        self.dir = f"./cache/{name}-{date}-{id}"
        if self.log:
            # makedirs (rather than mkdir) also creates ./cache if it is missing
            os.makedirs(self.dir)
            self.f = open(os.path.join(self.dir, "log.txt"), "w")
            self.writer = SummaryWriter(os.path.join(self.dir, "log"), flush_secs=60)

    def write_config(self, args, models, name):
        # Dump the run name, the argument namespace, and each model's repr to
        # config.txt (when logging is enabled) and echo them to stdout.
        if self.log:
            with open(os.path.join(self.dir, "config.txt"), "w") as f:
                print(name, file=f)
                print(args, file=f)
                print(file=f)
                for x in models:
                    print(x, file=f)
                    print(file=f)
        print(args)
        print()
        for x in models:
            print(x)
            print()

    def print(self, x=None):
        # Print to stdout and, when logging, append the same line to log.txt.
        if x is not None:
            print(x, flush=True)
        else:
            print()
        if self.log:
            if x is not None:
                print(x, file=self.f)
            else:
                print(file=self.f)

    def plot(self, tag, values, step):
        # `values` is a dict mapping scalar names to values, written as a
        # TensorBoard scalar group under `tag`.
        if self.log:
            self.writer.add_scalars(tag, values, step)

    def __del__(self):
        if self.log:
            self.f.close()
            self.writer.close()

    def save(self, model, name):
        # Save the model's state_dict into the run directory under `name`.
        if self.log:
            torch.save(model.state_dict(), os.path.join(self.dir, name))
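

# --- Minimal usage sketch (not part of the original file) ---
# Shows how Recorder is typically driven from a training script: the run id,
# name, placeholder model, and dummy loss values below are made-up examples,
# not taken from the EAR codebase.
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--lr", type=float, default=1e-3)
    args = parser.parse_args()

    model = torch.nn.Linear(4, 2)  # placeholder model
    rec = Recorder(id="demo", log=True, name="example")
    rec.write_config(args, [model], name="example-run")

    for step in range(3):
        loss = 1.0 / (step + 1)  # dummy scalar
        rec.print(f"step {step}: loss={loss:.4f}")
        rec.plot("loss", {"train": loss}, step)

    rec.save(model, "model.pt")  # writes model.pt into rec.dir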