Skip to content

Commit

Permalink
[New Feature] Support Paddle PIR Visualization 🚀 (#1263)
Browse files Browse the repository at this point in the history
* add new_ir visualization

* refine some interface

* fix pir to kwargs
  • Loading branch information
Difers authored Sep 20, 2023
1 parent e420b8c commit 6c9757b
Show file tree
Hide file tree
Showing 7 changed files with 291 additions and 157 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ repos:
- id: check-yaml
- id: check-symlinks
- id: destroyed-symlinks
- repo: https://gitlab.com/pycqa/flake8
- repo: https://github.com/pycqa/flake8
rev: 3.8.4
hooks:
- id: flake8
Expand Down
28 changes: 28 additions & 0 deletions demo/components/pir_translate.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import paddle
from paddle import ir

from visualdl import LogWriter

# The PIR translator works on static-graph programs.
paddle.enable_static()

# Build a small static program: elementwise divide -> tanh -> conv -> bn.
main_program = paddle.static.Program()
start_program = paddle.static.Program()

with paddle.static.program_guard(main_program, start_program):
    x = paddle.static.data("x", [1, 64, 64, 8], dtype="float32")
    y = paddle.static.data("y", [1, 64, 64, 8], dtype="float32")
    activated = paddle.tanh(paddle.divide(x, y))
    conv = paddle.nn.Conv2D(8, 32, 1, bias_attr=False, data_format='NHWC')
    bn = paddle.nn.BatchNorm(32, act='relu', data_layout='NHWC')
    out = bn(conv(activated))

# Translate the legacy program desc into the new IR (PIR) representation.
newir_program = ir.translate_to_new_ir(main_program.desc)

# Log the PIR program so VisualDL can render its graph.
with LogWriter(logdir="./log/program_test/") as writer:
    writer.add_graph(
        model=newir_program,
        input_spec=[paddle.static.InputSpec([-1, 1, 28, 28], 'float32')],
        verbose=True,
        is_pir=True)
21 changes: 13 additions & 8 deletions visualdl/component/graph/exporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,19 +17,24 @@
import tempfile

from .graph_component import analyse_model
from .graph_component import analyse_pir
from .utils import create_opname_scope
from .utils import print_model


def translate_graph(model, input_spec, verbose=True):
import paddle
def translate_graph(model, input_spec, verbose=True, **kwargs):
is_pir = kwargs.get('is_pir', False)
with tempfile.TemporaryDirectory() as tmp:
model._full_name = '{}[{}]'.format(model.__class__.__name__, "model")
create_opname_scope(model)
model = paddle.jit.to_static(model, input_spec)
paddle.jit.save(model, os.path.join(tmp, 'temp'))
model_data = open(os.path.join(tmp, 'temp.pdmodel'), 'rb').read()
result = analyse_model(model_data)
if (not is_pir):
model._full_name = '{}[{}]'.format(model.__class__.__name__,
"model")
create_opname_scope(model)
model = paddle.jit.to_static(model, input_spec)
paddle.jit.save(model, os.path.join(tmp, 'temp'))
model_data = open(os.path.join(tmp, 'temp.pdmodel'), 'rb').read()
result = analyse_model(model_data)
else:
result = analyse_pir(model)
if verbose:
print_model(result)
result = json.dumps(result, indent=2)
Expand Down
109 changes: 102 additions & 7 deletions visualdl/component/graph/graph_component.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@
import pathlib
import re

from . import utils

_graph_version = '1.0.0'


Expand Down Expand Up @@ -73,9 +75,8 @@ def create_non_leaf_nodes(parent_node_name, child_node_name, all_ops,
if parent_node_name == '/': # root node
return
else:
create_non_leaf_nodes(
os.path.dirname(parent_node_name), parent_node_name, all_ops,
general_children_dict)
create_non_leaf_nodes(os.path.dirname(parent_node_name),
parent_node_name, all_ops, general_children_dict)


def construct_edges(var_name, all_ops, all_vars, all_edges):
Expand Down Expand Up @@ -298,8 +299,8 @@ def analyse_model(model_pb): # noqa: C901

all_op_names = list(all_ops.keys())
for op_name in all_op_names:
create_non_leaf_nodes(
os.path.dirname(op_name), op_name, all_ops, general_children_dict)
create_non_leaf_nodes(os.path.dirname(op_name), op_name, all_ops,
general_children_dict)

# fill all non-leaf node's 'output_nodes' 'input_nodes' 'output_vars' 'input_vars'
# post-order traverse tree
Expand Down Expand Up @@ -345,8 +346,9 @@ def analyse_model(model_pb): # noqa: C901
for src_node, to_node in all_edges.keys():
all_ops[src_node]['edge_output_nodes'].append(to_node)
all_ops[to_node]['edge_input_nodes'].append(src_node)
all_edges[(src_node, to_node)]['vars'] = list(
all_edges[(src_node, to_node)]['vars'])
all_edges[(src_node,
to_node)]['vars'] = list(all_edges[(src_node,
to_node)]['vars'])
if len(all_edges[(src_node, to_node)]['vars']) > 1:
all_edges[(src_node, to_node)]['label'] = str(
len(all_edges[(src_node, to_node)]['vars'])) + ' tensors'
Expand All @@ -361,3 +363,96 @@ def analyse_model(model_pb): # noqa: C901
'edges': list(all_edges.values())
}
return final_data


def analyse_pir(program):
    """Convert a Paddle PIR (new IR) program into VisualDL's graph dict.

    Walks ``program.global_block().ops`` twice: first to record a variable
    entry for each op's result, then to record a node entry for every op
    that has at least one operand.  Edge records are not produced yet.

    Args:
        program: a Paddle new-IR program, e.g. the result of
            ``paddle.ir.translate_to_new_ir``.

    Returns:
        dict with keys 'version', 'nodes', 'vars' and 'edges', matching
        the structure produced by :func:`analyse_model`.
    """
    from paddle.utils.unique_name import generate

    all_ops = {}
    all_vars = {}
    all_edges = {}  # TODO(Difers): add edge info in future

    # vars info: one entry per op, keyed by the generated name of the
    # op's first result.
    for op in program.global_block().ops:
        var_name = utils.gen_var_name(op.results())
        attrs = op.results()[0].get_defining_op().attrs()
        if 'place' in attrs:
            # Place objects are not json-serializable; keep their repr.
            attrs['place'] = str(attrs['place'])
        attrs['dtype'] = op.result(0).dtype.name

        all_vars[var_name] = {
            'name': var_name,
            'shape': op.result(0).shape,
            'type': op.result(0).dtype.name,
            'dtype': op.result(0).dtype.name,
            'value': [],
            'persistable': op.result(0).is_persistable,
            'attrs': attrs,
            'from_node': '',
            'to_nodes': [],
        }

    # ops info: ops without operands (pure producers such as data ops)
    # are not shown as graph nodes.
    for op in program.global_block().ops:
        # generate() must run for every op, even skipped ones, so the
        # unique-name numbering stays aligned with traversal order.
        op_name = generate(op.name())
        if op.num_operands() <= 0:
            continue

        # Bug fix: the previous code reused the ``attrs`` variable leaked
        # from the vars loop above, so every node carried the attributes
        # of the last variable.  Use this op's own attributes instead.
        op_attrs = op.attrs()
        if 'place' in op_attrs:
            op_attrs['place'] = str(op_attrs['place'])

        all_ops[op_name] = {
            'name': op_name,
            'show_name': op_name,
            'type': op.result(0).dtype.name,
            'dtype': op.result(0).dtype.name,
            'input_vars': {},
            'output_vars': {},
            'is_leaf_node': True,
            'attrs': op_attrs,
            'attr_types': op_attrs,
            'children_node': [],
            'input_nodes': [],
            'output_nodes': [],
            'edge_input_nodes': [],
            'edge_output_nodes': [],
        }

        now_var = utils.gen_var_name(op.results())
        for source in op.operands_source():
            input_name = utils.gen_var_name(source)
            all_ops[op_name]['input_vars'][input_name] = [input_name]
            all_vars[input_name]['to_nodes'].append(op_name)
        all_vars[now_var]['from_node'] = op_name
        all_ops[op_name]['output_vars'][now_var] = [now_var]

    # create the root '/' node that parents every op node.
    all_ops['/'] = {
        'name': '/',
        'show_name': '/',
        'type': '',
        'attrs': {},
        'input_vars': {},
        'output_vars': {},
        'is_leaf_node': False,
        'children_node': [node for node in all_ops if node != '/'],
    }

    # wire producer -> consumer relations recorded on the variables.
    for variable_name in all_vars:
        from_node_name = all_vars[variable_name]['from_node']
        if from_node_name == '':
            continue
        for to_node_name in all_vars[variable_name]['to_nodes']:
            if to_node_name != from_node_name:
                all_ops[from_node_name]['output_nodes'].append(to_node_name)
                all_ops[to_node_name]['input_nodes'].append(from_node_name)

    # edge info
    # TODO(Difers): add edge info in future

    final_data = {
        'version': _graph_version,
        'nodes': list(all_ops.values()),
        'vars': list(all_vars.values()),
        'edges': list(all_edges.values())
    }
    return final_data
21 changes: 21 additions & 0 deletions visualdl/component/graph/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,27 @@

_name_scope_stack = deque()

# TODO(Difers): remove it when the new IR's "name" interface is available.
var_name = {}
var_idx = [0]


def gen_var_name(ops):
    """Return a display name for the first IR value in ``ops``.

    ``ops`` may be a single value or a list of values; only the first
    entry is inspected (an empty list yields ``None``).  Names are cached
    per defining op in the module-level ``var_name`` table; values whose
    ``.name`` access raises ``ValueError`` (unnamed temporaries) are
    labelled ``tmp_var_<i>`` with a monotonically increasing counter.
    """
    if not isinstance(ops, list):
        ops = [ops]
    for value in ops:
        owner = value.get_defining_op()
        if owner not in var_name:
            try:
                label = value.name
            except ValueError:
                label = "tmp_var_" + str(var_idx[0])
                var_idx[0] += 1
            var_name[owner] = label
        return var_name[owner]


def _opname_creation_prehook(layer, inputs):
from paddle.static import name_scope
Expand Down
Loading

0 comments on commit 6c9757b

Please sign in to comment.