-
Notifications
You must be signed in to change notification settings - Fork 1
/
executor.py
96 lines (82 loc) · 3.1 KB
/
executor.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
# profile
"""Define the behaviors of the executor."""
from __future__ import absolute_import
import numpy as np
from tensorwolf.topo import *
from tensorwolf.ops import *
import sys
import os
# Reference: dlsys-autodiff
class Executor(object):
    """Evaluates a fixed set of computation-graph nodes for given inputs."""

    def __init__(self, eval_node_list):
        """
        Parameters
        ----------
        eval_node_list: list of nodes whose values need to be computed.
        """
        self.eval_node_list = eval_node_list
        # Cache one topological ordering up front; run() replays it on
        # every call, so inputs are always computed before their consumers.
        self.topo_order = find_topo_sort(self.eval_node_list)

    def run(self, feed_dict):
        """
        Parameters
        ----------
        feed_dict: a dictionary of node->np.ndarray supplied by user.

        Returns
        -------
        A list of values for nodes in eval_node_list. NDArray or np.ndarray.
        """
        # Seed the value table with the user-fed placeholders,
        # coercing each fed value to an ndarray.
        computed = {node: np.array(value) for node, value in feed_dict.items()}
        for node in self.topo_order:
            if node not in computed:
                # Topological order guarantees every input value exists.
                args = [computed[child] for child in node.inputs]
                computed[node] = node.op.compute(node, args)
        return [computed[node] for node in self.eval_node_list]
def gradients(output_node, node_list):
    """Take gradient of output node with respect to each node in node_list.

    Reverse-mode autodiff: walk the graph in reverse topological order,
    sum each node's incoming adjoint contributions, then ask the node's
    op to propagate the summed adjoint back to its inputs.

    Parameters
    ----------
    output_node: output node that we are taking derivative of.
    node_list: list of nodes that we are taking derivative wrt.

    Returns
    -------
    A list of gradient values, one for each node in node_list respectively.
    """
    # Per-node list of adjoint contributions, seeded with
    # d(output)/d(output) == ones shaped like the output.
    adjoint_parts = {output_node: [oneslike_op(output_node)]}
    # Final (summed) adjoint node per graph node.
    adjoints = {}
    for node in reversed(find_topo_sort([output_node])):
        summed = sum_node_list(adjoint_parts[node])
        adjoints[node] = summed
        input_grads = node.op.gradient(node, summed)
        for i, child in enumerate(node.inputs):
            # Accumulate this node's contribution to each input's adjoint.
            adjoint_parts.setdefault(child, []).append(input_grads[i])
    return [adjoints[node] for node in node_list]
def sum_node_list(node_list):
    """Sum a list of graph nodes pairwise, left to right.

    The built-in sum() starts from the integer 0, which would splice a
    spurious "0 + node" node into the computation graph; accumulating
    pairwise avoids that. (Also drops the per-call functools/operator
    imports the old reduce()-based version paid on every invocation.)

    Parameters
    ----------
    node_list: non-empty list of nodes (anything supporting +).

    Returns
    -------
    A single node representing the sum of all entries.

    Raises
    ------
    TypeError: if node_list is empty (same exception type as reduce()).
    """
    if not node_list:
        raise TypeError("sum_node_list() arg is an empty sequence")
    total = node_list[0]
    for node in node_list[1:]:
        total = total + node
    return total