vsd_utils.py
import logging
import os
import time
import numpy as np
import torch
import random
import json
from datetime import timedelta


class LogFormatter():
    """Logging formatter that prefixes each record with its level, wall-clock time and elapsed time."""

    def __init__(self):
        self.start_time = time.time()

    def format(self, record):
        elapsed_seconds = round(record.created - self.start_time)

        prefix = "%s - %s - %s" % (
            record.levelname,
            time.strftime('%x %X'),
            timedelta(seconds=elapsed_seconds)
        )
        message = record.getMessage()
        # indent continuation lines so multi-line messages stay aligned with the prefix
        message = message.replace('\n', '\n' + ' ' * (len(prefix) + 3))
        return "%s - %s" % (prefix, message)


def create_logger(log_dir, dump=True):
    """Configure the root logger; if dump is True, also append records to <log_dir>/net_launcher_log.log."""
    filepath = os.path.join(log_dir, 'net_launcher_log.log')
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    # # Safety check
    # if os.path.exists(filepath) and opt.checkpoint == "":
    #     logging.warning("Experiment already exists!")

    # create the formatter shared by all handlers
    log_formatter = LogFormatter()

    if dump:
        # create file handler and set level to info
        file_handler = logging.FileHandler(filepath, "a")
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(log_formatter)

    # create console handler and set level to info
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(log_formatter)

    # create logger and set level to info
    logger = logging.getLogger()
    logger.handlers = []
    logger.setLevel(logging.INFO)
    logger.propagate = False
    if dump:
        logger.addHandler(file_handler)
    logger.addHandler(console_handler)

    # reset logger elapsed time
    def reset_time():
        log_formatter.start_time = time.time()
    logger.reset_time = reset_time

    logger.info('Created main log at ' + str(filepath))
    return logger


def set_initial_random_seed(random_seed):
    if random_seed != -1:
        np.random.seed(random_seed)
        torch.random.manual_seed(random_seed)
        random.seed(random_seed)
        if torch.cuda.is_available():
            torch.cuda.manual_seed_all(random_seed)


class ParamsBase():
    """Base class for experiment parameter containers (subclasses define attributes such as log_dir)."""

    def str(self):
        attrs = [item for item in self.__dir__() if not item.startswith('_')]
        s = 'Params:: ' + ''.join(['{} : {}\n'.format(at, getattr(self, at)) for at in attrs])
        return s

    def todict(self):
        attrs = [item for item in self.__dir__() if not item.startswith('_')]
        d = {at: getattr(self, at) for at in attrs}
        # drop the methods themselves, keep only the parameter attributes
        d.pop('str')
        d.pop('todict')
        d.pop('save')
        return d

    def save(self):
        # values are serialised as strings, via the str() representation above
        r = [item.split(' : ') for item in self.str().split('\n')]
        d = {i[0]: i[1] for i in r[:-1]}
        with open(self.log_dir + '/params.json', 'w') as f:
            json.dump(d, f)
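

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module): a minimal run
# showing how the utilities above would typically be combined. The subclass
# name TrainParams, the './logs' directory and the seed value 42 are made up
# for the example; only log_dir is actually required (by ParamsBase.save).
# ---------------------------------------------------------------------------
if __name__ == "__main__":

    class TrainParams(ParamsBase):
        # hypothetical parameter container for the example
        log_dir = './logs'
        batch_size = 32
        learning_rate = 1e-3

    params = TrainParams()

    # seed numpy / torch / random (pass -1 to skip seeding)
    set_initial_random_seed(42)

    # logs to the console and to ./logs/net_launcher_log.log
    logger = create_logger(params.log_dir, dump=True)
    logger.info(params.str())

    # writes ./logs/params.json with the attributes defined above
    params.save()

    # restart the elapsed-time counter shown in each log prefix
    logger.reset_time()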