monitor.py
'''
Logging service for tracking dr tree changes from the root objective,
recording every step that incrementally changes the dr tree.
'''
import os, sys, time
import json
import psutil
import scipy.sparse as sp
import numpy as np
from . import reordering
_TWO_20 = float(2 ** 20)
'''
memory utils
'''

def pdb_mem():
    # Drop into the debugger when resident memory exceeds ~7000 MB.
    from .monitor import get_current_memory
    mem = get_current_memory()
    if mem > 7000:
        import pdb; pdb.set_trace()

def get_peak_mem():
    '''
    Return peak memory use (in MB) from process start until the moment this is called.
    '''
    import resource
    rusage_denom = 1024.
    if sys.platform == 'darwin':
        # ... on OSX the output is in different units (bytes rather than kilobytes) ...
        rusage_denom = rusage_denom * rusage_denom
    mem = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / rusage_denom
    return mem

def get_current_memory():
    # Resident set size of the current process, converted to MB.
    p = psutil.Process(os.getpid())
    mem = p.memory_info()[0] / _TWO_20
    return mem
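
# Usage sketch (illustrative, not part of the original module): bracketing a
# hypothetical expensive_step() with the helpers above to see how much memory
# it costs and what the process peak is:
#
#     before = get_current_memory()
#     expensive_step()
#     print('delta: %.1f MB, peak: %.1f MB' % (get_current_memory() - before, get_peak_mem()))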
'''
Helper for Profiler
'''

def build_cache_info(k, v, info_dict):
    # Record size/sparsity stats of one cached derivative `v`, keyed by the
    # short name of the node `k` it was computed with respect to.
    if v is not None:
        issparse = sp.issparse(v)
        size = v.size
        if issparse:
            nonzero = len(v.data)
        else:
            nonzero = np.count_nonzero(v)
        info_dict[k.short_name] = {
            'sparse': issparse,
            'size': str(size),
            'nonzero': nonzero,
        }

def cache_info(ch_node):
    # Summarize every cached derivative hanging off `ch_node`.
    result = {}
    if isinstance(ch_node, reordering.Concatenate) and hasattr(ch_node, 'dr_cached') and len(ch_node.dr_cached) > 0:
        for k, v in ch_node.dr_cached.items():
            build_cache_info(k, v, result)
    elif len(ch_node._cache['drs']) > 0:
        for k, v in ch_node._cache['drs'].items():
            build_cache_info(k, v, result)
    return result
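
# For reference, cache_info returns a dict keyed by the wrt-node's short name;
# the values below are made up for illustration:
#
#     {'x': {'sparse': True, 'size': '9', 'nonzero': 3}}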

class DrWrtProfiler(object):
    base_path = os.path.abspath('profiles')

    def __init__(self, root, base_path=None):
        self.root = root.obj
        self.history = []

        ts = time.time()
        if base_path:
            self.base_path = base_path

        self.path = os.path.join(self.base_path, 'profile_%s.json' % str(ts))
        self.root_path = os.path.join(self.base_path, 'root_%s.json' % str(ts))

        # Dump the full dr tree once, at construction time.
        with open(self.root_path, 'w') as f:
            json.dump(self.dump_tree(self.root), f, indent=4)

    def dump_tree(self, node):
        if not hasattr(node, 'dterms'):
            return []

        node_dict = self.serialize_node(node, verbose=False)
        # Nodes already seen earlier in the traversal are marked as indirect
        # instead of being expanded again.
        if hasattr(node, 'visited') and node.visited:
            node_dict.update({'indirect': True})
            return node_dict

        node.visited = True
        children_list = []
        for dterm in node.dterms:
            if hasattr(node, dterm):
                child = getattr(node, dterm)
                if hasattr(child, 'dterms') or hasattr(child, 'terms'):
                    children_list.append(self.dump_tree(child))
        node_dict.update({'children': children_list})
        return node_dict

    def serialize_node(self, ch_node, verbose=True):
        node_id = id(ch_node)
        name = ch_node.short_name
        ts = time.time()
        status = ch_node._status
        mem = get_current_memory()
        node_cache_info = cache_info(ch_node)

        rec = {
            'id': str(node_id),
            'indirect': False,
        }
        if verbose:
            rec.update({
                'name': name,
                'ts': ts,
                'status': status,
                'mem': mem,
                'cache': node_cache_info,
            })
        return rec

    def show_tree(self, label):
        '''
        Show the tree from the root node.
        '''
        self.root.show_tree_cache(label)

    def record(self, ch_node):
        '''
        Record one incremental change to the dr tree.
        '''
        rec = self.serialize_node(ch_node)
        self.history.append(rec)

    def harvest(self):
        print('collecting history and dumping to file %s' % self.path)
        with open(self.path, 'w') as f:
            json.dump(self.history, f, indent=4)
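
# Minimal usage sketch (not part of the original module), showing how the
# profiler is intended to be driven. It assumes chumpy is importable, that Ch
# nodes expose short_name/_status/_cache as used above, and that the
# _RootWrapper class below is purely illustrative: DrWrtProfiler only expects
# the root node to be reachable via a `.obj` attribute.
if __name__ == '__main__':
    import chumpy as ch

    class _RootWrapper(object):
        # Hypothetical wrapper exposing the root Ch node on `.obj`.
        def __init__(self, obj):
            self.obj = obj

    x = ch.Ch(np.arange(3.0))
    objective = ch.sum((x - 1.0) ** 2)

    profiler = DrWrtProfiler(_RootWrapper(objective), base_path='.')
    objective.dr_wrt(x)           # populate the derivative cache
    profiler.record(objective)    # log one incremental snapshot
    profiler.harvest()            # write the history to the JSON profile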