diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ef7474c75..ff825d21f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -11,7 +11,7 @@ repos:
       - id: check-yaml
       - id: check-symlinks
       - id: destroyed-symlinks
-  - repo: https://gitlab.com/pycqa/flake8
+  - repo: https://github.com/pycqa/flake8
     rev: 3.8.4
     hooks:
       - id: flake8
diff --git a/demo/components/pir_translate.py b/demo/components/pir_translate.py
new file mode 100644
index 000000000..6b7868116
--- /dev/null
+++ b/demo/components/pir_translate.py
@@ -0,0 +1,28 @@
+import paddle
+from paddle import ir
+
+from visualdl import LogWriter
+
+paddle.enable_static()
+
+main_program, start_program = (
+    paddle.static.Program(),
+    paddle.static.Program(),
+)
+with paddle.static.program_guard(main_program, start_program):
+    x = paddle.static.data("x", [1, 64, 64, 8], dtype="float32")
+    y = paddle.static.data("y", [1, 64, 64, 8], dtype="float32")
+    divide_out = paddle.divide(x, y)
+    tanh_out = paddle.tanh(divide_out)
+    conv2d = paddle.nn.Conv2D(8, 32, 1, bias_attr=False, data_format='NHWC')
+    batch_norm = paddle.nn.BatchNorm(32, act='relu', data_layout='NHWC')
+    out = batch_norm(conv2d(tanh_out))
+
+newir_program = ir.translate_to_new_ir(main_program.desc)
+
+with LogWriter(logdir="./log/program_test/") as writer:
+    writer.add_graph(
+        model=newir_program,
+        input_spec=[paddle.static.InputSpec([-1, 1, 28, 28], 'float32')],
+        verbose=True,
+        is_pir=True)
diff --git a/visualdl/component/graph/exporter.py b/visualdl/component/graph/exporter.py
index 541deed37..3a9c545de 100644
--- a/visualdl/component/graph/exporter.py
+++ b/visualdl/component/graph/exporter.py
@@ -17,19 +17,25 @@
 import tempfile
 
 from .graph_component import analyse_model
+from .graph_component import analyse_pir
 from .utils import create_opname_scope
 from .utils import print_model
 
 
-def translate_graph(model, input_spec, verbose=True):
+def translate_graph(model, input_spec, verbose=True, **kwargs):
     import paddle
+    is_pir = kwargs.get('is_pir', False)
     with tempfile.TemporaryDirectory() as tmp:
-        model._full_name = '{}[{}]'.format(model.__class__.__name__, "model")
-        create_opname_scope(model)
-        model = paddle.jit.to_static(model, input_spec)
-        paddle.jit.save(model, os.path.join(tmp, 'temp'))
-        model_data = open(os.path.join(tmp, 'temp.pdmodel'), 'rb').read()
-        result = analyse_model(model_data)
+        if (not is_pir):
+            model._full_name = '{}[{}]'.format(model.__class__.__name__,
+                                               "model")
+            create_opname_scope(model)
+            model = paddle.jit.to_static(model, input_spec)
+            paddle.jit.save(model, os.path.join(tmp, 'temp'))
+            model_data = open(os.path.join(tmp, 'temp.pdmodel'), 'rb').read()
+            result = analyse_model(model_data)
+        else:
+            result = analyse_pir(model)
         if verbose:
             print_model(result)
         result = json.dumps(result, indent=2)
diff --git a/visualdl/component/graph/graph_component.py b/visualdl/component/graph/graph_component.py
index be71a12b3..5e91869b3 100644
--- a/visualdl/component/graph/graph_component.py
+++ b/visualdl/component/graph/graph_component.py
@@ -17,6 +17,8 @@
 import pathlib
 import re
 
+from . import utils
+
 _graph_version = '1.0.0'
 
 
@@ -73,9 +75,8 @@ def create_non_leaf_nodes(parent_node_name, child_node_name, all_ops,
     if parent_node_name == '/':  # root node
         return
     else:
-        create_non_leaf_nodes(
-            os.path.dirname(parent_node_name), parent_node_name, all_ops,
-            general_children_dict)
+        create_non_leaf_nodes(os.path.dirname(parent_node_name),
+                              parent_node_name, all_ops, general_children_dict)
 
 
 def construct_edges(var_name, all_ops, all_vars, all_edges):
@@ -298,8 +299,8 @@ def analyse_model(model_pb):  # noqa: C901
 
     all_op_names = list(all_ops.keys())
     for op_name in all_op_names:
-        create_non_leaf_nodes(
-            os.path.dirname(op_name), op_name, all_ops, general_children_dict)
+        create_non_leaf_nodes(os.path.dirname(op_name), op_name, all_ops,
+                              general_children_dict)
 
     # fill all non-leaf node's 'output_nodes' 'input_nodes' 'output_vars' 'input_vars'
     # post-order traverse tree
@@ -345,8 +346,9 @@ def analyse_model(model_pb):  # noqa: C901
     for src_node, to_node in all_edges.keys():
         all_ops[src_node]['edge_output_nodes'].append(to_node)
         all_ops[to_node]['edge_input_nodes'].append(src_node)
-        all_edges[(src_node, to_node)]['vars'] = list(
-            all_edges[(src_node, to_node)]['vars'])
+        all_edges[(src_node,
+                   to_node)]['vars'] = list(all_edges[(src_node,
+                                                       to_node)]['vars'])
         if len(all_edges[(src_node, to_node)]['vars']) > 1:
             all_edges[(src_node, to_node)]['label'] = str(
                 len(all_edges[(src_node, to_node)]['vars'])) + ' tensors'
@@ -361,3 +363,96 @@ def analyse_model(model_pb):  # noqa: C901
         'edges': list(all_edges.values())
     }
     return final_data
+
+
+def analyse_pir(program):
+    from paddle.utils.unique_name import generate
+
+    all_ops = {}
+    all_vars = {}
+    all_edges = {}
+    # vars info
+    for op in (program.global_block().ops):
+        var_name = utils.gen_var_name(op.results())
+        all_vars[var_name] = {}
+        all_vars[var_name]['name'] = var_name
+        attrs = op.results()[0].get_defining_op().attrs()
+
+        if 'place' in attrs:
+            attrs['place'] = str(attrs['place'])
+        attrs['dtype'] = op.result(0).dtype.name
+
+        all_vars[var_name]['shape'] = op.result(0).shape
+        all_vars[var_name]['type'] = op.result(0).dtype.name
+        all_vars[var_name]['dtype'] = op.result(0).dtype.name
+
+        all_vars[var_name]['value'] = []
+        all_vars[var_name]['persistable'] = op.result(0).is_persistable
+        all_vars[var_name]['attrs'] = attrs
+        all_vars[var_name]['from_node'] = ''
+        all_vars[var_name]['to_nodes'] = []
+
+    # ops info
+    for op in (program.global_block().ops):
+        op_name = generate(op.name())
+
+        if op.num_operands() > 0:
+            all_ops[op_name] = {}
+            all_ops[op_name]['name'] = op_name
+            all_ops[op_name]['show_name'] = op_name
+            all_ops[op_name]['type'] = op.result(0).dtype.name
+            all_ops[op_name]['dtype'] = op.result(0).dtype.name
+
+            all_ops[op_name]['input_vars'] = {}
+            all_ops[op_name]['output_vars'] = {}
+
+            all_ops[op_name]['is_leaf_node'] = True
+            now_var = utils.gen_var_name(op.results())
+            for source in op.operands_source():
+                input_name = utils.gen_var_name(source)
+                all_ops[op_name]['input_vars'][input_name] = [input_name]
+                all_vars[input_name]['to_nodes'].append(op_name)
+            all_vars[now_var]['from_node'] = op_name
+            all_ops[op_name]['output_vars'][now_var] = [now_var]
+
+            all_ops[op_name]['attrs'] = attrs
+            all_ops[op_name]['attr_types'] = attrs
+            all_ops[op_name]['children_node'] = []
+            all_ops[op_name]['input_nodes'] = []
+            all_ops[op_name]['output_nodes'] = []
+            all_ops[op_name]['edge_input_nodes'] = []
+            all_ops[op_name]['edge_output_nodes'] = []
+
+    # create '/' op
+    all_ops['/'] = {}
+    all_ops['/']['name'] = '/'
+    all_ops['/']['show_name'] = '/'
+    all_ops['/']['type'] = ''
+    all_ops['/']['attrs'] = {}
+    all_ops['/']['input_vars'] = {}
+    all_ops['/']['output_vars'] = {}
+    all_ops['/']['is_leaf_node'] = False
+    all_ops['/']['children_node'] = []
+    for node in all_ops:
+        if node != '/':
+            all_ops['/']['children_node'].append(node)
+
+    for variable_name in all_vars:
+        if all_vars[variable_name]['from_node'] == '':
+            continue
+        from_node_name = all_vars[variable_name]['from_node']
+        for to_node_name in all_vars[variable_name]['to_nodes']:
+            if to_node_name != from_node_name:
+                all_ops[from_node_name]['output_nodes'].append(to_node_name)
+                all_ops[to_node_name]['input_nodes'].append(from_node_name)
+
+    # edge info
+    # TODO(Difers):add edge info in future
+
+    final_data = {
+        'version': _graph_version,
+        'nodes': list(all_ops.values()),
+        'vars': list(all_vars.values()),
+        'edges': list(all_edges.values())
+    }
+    return final_data
diff --git a/visualdl/component/graph/utils.py b/visualdl/component/graph/utils.py
index 4dd33abdf..9d1809b01 100644
--- a/visualdl/component/graph/utils.py
+++ b/visualdl/component/graph/utils.py
@@ -17,6 +17,27 @@
 
 _name_scope_stack = deque()
 
+# TODO(Difers): remove it when the new IR's "name" interface is available.
+var_name = {}
+var_idx = [0]
+
+
+def gen_var_name(ops):
+    if not isinstance(ops, list):
+        ops = [ops]
+    for op in ops:
+        var = op.get_defining_op()
+        if var in var_name:
+            return var_name[var]
+        else:
+            try:
+                name = op.name
+            except ValueError:
+                name = "tmp_var_" + str(var_idx[0])
+                var_idx[0] += 1
+            var_name[var] = name
+            return var_name[var]
+
 
 def _opname_creation_prehook(layer, inputs):
     from paddle.static import name_scope
diff --git a/visualdl/server/app.py b/visualdl/server/app.py
index 3e556bd2d..2f7b0f3b9 100644
--- a/visualdl/server/app.py
+++ b/visualdl/server/app.py
@@ -77,6 +77,8 @@ def get_locale():
         lang = args.language
         if not lang or lang not in support_language:
            lang = request.accept_languages.best_match(support_language)
+        if (not lang):
+            lang = 'zh'
         return lang
 
     signal.signal(
@@ -102,14 +104,13 @@ def append_query_string(url):
 
 
     if not args.api_only:
-        template = Template(
-            os.path.join(server_path, template_file_path),
-            PUBLIC_PATH=public_path,
-            BASE_URI=public_path,
-            API_URL=api_path,
-            TELEMETRY_ID='63a600296f8a71f576c4806376a9245b'
-            if args.telemetry else '',
-            THEME='' if args.theme is None else args.theme)
+        template = Template(os.path.join(server_path, template_file_path),
+                            PUBLIC_PATH=public_path,
+                            BASE_URI=public_path,
+                            API_URL=api_path,
+                            TELEMETRY_ID='63a600296f8a71f576c4806376a9245b'
+                            if args.telemetry else '',
+                            THEME='' if args.theme is None else args.theme)
 
         @app.route('/')
         def base():
@@ -124,8 +125,8 @@ def favicon():
 
         @app.route(public_path + '/')
         def index():
-            return redirect(
-                append_query_string(public_path + '/index'), code=302)
+            return redirect(append_query_string(public_path + '/index'),
+                            code=302)
 
         @app.route(public_path + '/<path:filename>')
         def serve_static(filename):
@@ -133,26 +134,25 @@ def serve_static(filename):
             response = template.render(
                 filename if is_not_page_request else 'index.html')
             if not is_not_page_request:
-                response.set_cookie(
-                    'vdl_lng',
-                    get_locale(),
-                    path='/',
-                    samesite='Strict',
-                    secure=False,
-                    httponly=False)
+                response.set_cookie('vdl_lng',
+                                    get_locale(),
+                                    path='/',
+                                    samesite='Strict',
+                                    secure=False,
+                                    httponly=False)
             return response
 
     @app.route(api_path + '/<path:method>', methods=["GET", "POST"])
     def serve_api(method):
         data, mimetype, headers = api_call(method, request.args)
-        return make_response(
-            Response(data, mimetype=mimetype, headers=headers))
+        return make_response(Response(data, mimetype=mimetype,
+                                      headers=headers))
 
     @app.route(api_path + '/profiler/<path:method>', methods=["GET", "POST"])
     def serve_profiler_api(method):
         data, mimetype, headers = profiler_api_call(method, request.args)
-        return make_response(
-            Response(data, mimetype=mimetype, headers=headers))
+        return make_response(Response(data, mimetype=mimetype,
+                                      headers=headers))
 
     @app.route(api_path + '/inference/<path:method>', methods=["GET", "POST"])
     def serve_inference_api(method):
@@ -160,8 +160,8 @@ def serve_inference_api(method):
             data, mimetype, headers = inference_api_call(method, request.form)
         else:
             data, mimetype, headers = inference_api_call(method, request.args)
-        return make_response(
-            Response(data, mimetype=mimetype, headers=headers))
+        return make_response(Response(data, mimetype=mimetype,
+                                      headers=headers))
 
     @app.route(api_path + '/fastdeploy/<path:method>', methods=["GET", "POST"])
     def serve_fastdeploy_api(method):
@@ -169,11 +169,11 @@ def serve_fastdeploy_api(method):
             data, mimetype, headers = fastdeploy_api_call(method, request.form)
         else:
             data, mimetype, headers = fastdeploy_api_call(method, request.args)
-        return make_response(
-            Response(data, mimetype=mimetype, headers=headers))
+        return make_response(Response(data, mimetype=mimetype,
+                                      headers=headers))
 
-    @app.route(
-        api_path + '/fastdeploy/fastdeploy_client', methods=["GET", "POST"])
+    @app.route(api_path + '/fastdeploy/fastdeploy_client',
+               methods=["GET", "POST"])
     def serve_fastdeploy_create_fastdeploy_client():
         try:
             if request.method == 'POST':
@@ -191,12 +191,11 @@ def serve_fastdeploy_create_fastdeploy_client():
                 return redirect(
                     api_path + "/fastdeploy/fastdeploy_client/app?{}".format(args),
                     code=302)
-        return redirect(
-            api_path + "/fastdeploy/fastdeploy_client/app", code=302)
+        return redirect(api_path + "/fastdeploy/fastdeploy_client/app",
+                        code=302)
 
-    @app.route(
-        api_path + "/fastdeploy/fastdeploy_client/<path:path>",
-        methods=["GET", "POST"])
+    @app.route(api_path + "/fastdeploy/fastdeploy_client/<path:path>",
+               methods=["GET", "POST"])
     def request_fastdeploy_create_fastdeploy_client_app(path: str):
         '''
         Gradio app server url interface. We route urls for gradio app to gradio server.
@@ -241,16 +240,16 @@ def request_fastdeploy_create_fastdeploy_client_app(path: str):
             request.host_url.rstrip('/') + api_path +
             '/fastdeploy/fastdeploy_client/',
             'http://localhost:{}/'.format(port))
-        resp = requests.request(
-            method=request.method,
-            url=proxy_url,
-            headers={
-                key: value
-                for (key, value) in request.headers if key != 'Host'
-            },
-            data=request.get_data(),
-            cookies=request.cookies,
-            allow_redirects=False)
+        resp = requests.request(method=request.method,
+                                url=proxy_url,
+                                headers={
+                                    key: value
+                                    for (key, value) in request.headers
+                                    if key != 'Host'
+                                },
+                                data=request.get_data(),
+                                cookies=request.cookies,
+                                allow_redirects=False)
         if path == 'app':
             content = resp.content
             if request_args and 'server_id' in request_args:
@@ -292,10 +291,9 @@ def request_fastdeploy_create_fastdeploy_client_app(path: str):
                             0).count('"label"') >= 2:
                         http_port_match = re.search(
                             '"value":\\s*"".*?"label":\\s*{}.*?}}'.format(
-                                json.dumps(
-                                    "server port",
-                                    ensure_ascii=True).replace(
-                                        '\\', '\\\\')), content)
+                                json.dumps("server port",
+                                           ensure_ascii=True).replace(
+                                               '\\', '\\\\')), content)
                     default_http_port = http_port_match.group(0)
 
                     if '"value": ""' in default_http_port:
@@ -318,10 +316,9 @@ def request_fastdeploy_create_fastdeploy_client_app(path: str):
                             0).count('"label"') >= 2:
                         metrics_port_match = re.search(
                             '"value":\\s*"".*?"label":\\s*{}.*?}}'.format(
-                                json.dumps(
-                                    "metrics port",
-                                    ensure_ascii=True).replace(
-                                        '\\', '\\\\')), content)
+                                json.dumps("metrics port",
+                                           ensure_ascii=True).replace(
+                                               '\\', '\\\\')), content)
                     default_metrics_port = metrics_port_match.group(0)
                     if '"value": ""' in default_metrics_port:
                         cur_metrics_port = default_metrics_port.replace(
@@ -343,10 +340,9 @@ def request_fastdeploy_create_fastdeploy_client_app(path: str):
                             0).count('"label"') >= 2:
                         model_name_match = re.search(
                             '"value":\\s*"".*?"label":\\s*{}.*?}}'.format(
-                                json.dumps(
-                                    "model name",
-                                    ensure_ascii=True).replace(
-                                        '\\', '\\\\')), content)
+                                json.dumps("model name",
+                                           ensure_ascii=True).replace(
+                                               '\\', '\\\\')), content)
                     default_model_name = model_name_match.group(0)
                     if '"value": ""' in default_model_name:
                         cur_model_name = default_model_name.replace(
@@ -368,10 +364,9 @@ def request_fastdeploy_create_fastdeploy_client_app(path: str):
                             0).count('"label"') >= 2:
                         model_version_match = re.search(
                             '"value":\\s*"".*?"label":\\s*{}.*?}}'.format(
-                                json.dumps(
-                                    "model version",
-                                    ensure_ascii=True).replace(
-                                        '\\', '\\\\')), content)
+                                json.dumps("model version",
+                                           ensure_ascii=True).replace(
+                                               '\\', '\\\\')), content)
                     default_model_version = model_version_match.group(0)
                     if '"value": ""' in default_model_version:
                         cur_model_version = default_model_version.replace(
@@ -436,10 +431,9 @@ def request_fastdeploy_create_fastdeploy_client_app(path: str):
                     http_port_match = re.search(
                         '"label":\\s*{}.*?"value":\\s*"".*?}}'.
                         format(
-                            json.dumps(
-                                "推理服务端口",
-                                ensure_ascii=False).replace(
-                                    '\\', '\\\\')), content)
+                            json.dumps("推理服务端口",
+                                       ensure_ascii=False).replace(
+                                           '\\', '\\\\')), content)
                     if not http_port_match or http_port_match.group(
                             0).count('"label"') >= 2:
                         http_port_match = re.search(
@@ -478,10 +472,9 @@ def request_fastdeploy_create_fastdeploy_client_app(path: str):
                     metrics_port_match = re.search(
                         '"label":\\s*{}.*?"value":\\s*"".*?}}'.
                        format(
-                            json.dumps(
-                                "性能服务端口",
-                                ensure_ascii=False).replace(
-                                    '\\', '\\\\')), content)
+                            json.dumps("性能服务端口",
+                                       ensure_ascii=False).replace(
+                                           '\\', '\\\\')), content)
                     if not metrics_port_match or metrics_port_match.group(
                             0).count('"label"') >= 2:
                         metrics_port_match = re.search(
@@ -591,13 +584,12 @@ def request_fastdeploy_create_fastdeploy_client_app(path: str):
 
     @app.route(api_path + '/component_tabs')
    def component_tabs():
-        data, mimetype, headers = get_component_tabs(
-            api_call,
-            profiler_api_call,
-            vdl_args=args,
-            request_args=request.args)
-        return make_response(
-            Response(data, mimetype=mimetype, headers=headers))
+        data, mimetype, headers = get_component_tabs(api_call,
+                                                     profiler_api_call,
+                                                     vdl_args=args,
+                                                     request_args=request.args)
+        return make_response(Response(data, mimetype=mimetype,
+                                      headers=headers))
 
     @app.route(check_live_path)
     def check_live():
diff --git a/visualdl/writer/writer.py b/visualdl/writer/writer.py
index 9e3df6aff..0bcab4def 100644
--- a/visualdl/writer/writer.py
+++ b/visualdl/writer/writer.py
@@ -40,7 +40,6 @@
 class DummyFileWriter(object):
     """A fake file writer that writes nothing to the disk.
     """
-
     def __init__(self, logdir):
         self._logdir = logdir
 
@@ -76,7 +75,6 @@ class LogWriter(object):
     The class `LogWriter` provides APIs to create record file and add records to
     it. The class updates log file asynchronously without slowing down training.
     """
-
     def __init__(self,
                  logdir=None,
                  comment='',
@@ -162,11 +160,10 @@ def add_meta(self,
             raise RuntimeError("% can't appear in tag!")
         walltime = round(time.time() * 1000) if walltime is None else walltime
         self._get_file_writer().add_record(
-            meta_data(
-                tag=tag,
-                display_name=display_name,
-                step=step,
-                walltime=walltime))
+            meta_data(tag=tag,
+                      display_name=display_name,
+                      step=step,
+                      walltime=walltime))
 
     def add_scalar(self, tag, value, step, walltime=None):
         """Add a scalar to vdl record file.
@@ -234,12 +231,11 @@ def add_image(self, tag, img, step, walltime=None, dataformats="HWC"):
             raise RuntimeError("% can't appear in tag!")
         walltime = round(time.time() * 1000) if walltime is None else walltime
         self._get_file_writer().add_record(
-            image(
-                tag=tag,
-                image_array=img,
-                step=step,
-                walltime=walltime,
-                dataformats=dataformats))
+            image(tag=tag,
+                  image_array=img,
+                  step=step,
+                  walltime=walltime,
+                  dataformats=dataformats))
 
     def add_figure(self, tag, figure, step, walltime=None):
         """Add an figure to vdl record file.
@@ -283,9 +279,10 @@ def add_text(self, tag, text_string, step=None, walltime=None):
             raise RuntimeError("% can't appear in tag!")
         walltime = round(time.time() * 1000) if walltime is None else walltime
         self._get_file_writer().add_record(
-            text(
-                tag=tag, text_string=text_string, step=step,
-                walltime=walltime))
+            text(tag=tag,
+                 text_string=text_string,
+                 step=step,
+                 walltime=walltime))
 
     def add_image_matrix(self,
                          tag,
@@ -317,14 +314,15 @@ def add_image_matrix(self,
         if '%' in tag:
             raise RuntimeError("% can't appear in tag!")
         walltime = round(time.time() * 1000) if walltime is None else walltime
-        img = merge_images(
-            imgs=imgs, dataformats=dataformats, scale=scale, rows=rows)
+        img = merge_images(imgs=imgs,
+                           dataformats=dataformats,
+                           scale=scale,
+                           rows=rows)
-        self.add_image(
-            tag=tag,
-            img=img,
-            step=step,
-            walltime=walltime,
-            dataformats=dataformats)
+        self.add_image(tag=tag,
+                       img=img,
+                       step=step,
+                       walltime=walltime,
+                       dataformats=dataformats)
 
     def add_embeddings(self,
                        tag,
@@ -414,13 +412,12 @@ def add_embeddings(self,
             step = 0
         walltime = round(time.time() * 1000) if walltime is None else walltime
         self._get_file_writer().add_record(
-            embedding(
-                tag=tag,
-                labels=metadata,
-                labels_meta=metadata_header,
-                hot_vectors=mat,
-                step=step,
-                walltime=walltime))
+            embedding(tag=tag,
+                      labels=metadata,
+                      labels_meta=metadata_header,
+                      hot_vectors=mat,
+                      step=step,
+                      walltime=walltime))
 
     def add_audio(self,
                   tag,
@@ -461,12 +458,11 @@ def add_audio(self,
         if isinstance(audio_array, list):
             audio_array = np.array(audio_array)
         self._get_file_writer().add_record(
-            audio(
-                tag=tag,
-                audio_array=audio_array,
-                sample_rate=sample_rate,
-                step=step,
-                walltime=walltime))
+            audio(tag=tag,
+                  audio_array=audio_array,
+                  sample_rate=sample_rate,
+                  step=step,
+                  walltime=walltime))
 
     def add_histogram(self, tag, values, step, walltime=None, buckets=10):
         """Add an histogram to vdl record file.
@@ -491,12 +487,11 @@ def add_histogram(self, tag, values, step, walltime=None, buckets=10):
         hist, bin_edges = np.histogram(values, bins=buckets)
         walltime = round(time.time() * 1000) if walltime is None else walltime
         self._get_file_writer().add_record(
-            histogram(
-                tag=tag,
-                hist=hist,
-                bin_edges=bin_edges,
-                step=step,
-                walltime=walltime))
+            histogram(tag=tag,
+                      hist=hist,
+                      bin_edges=bin_edges,
+                      step=step,
+                      walltime=walltime))
 
     def add_hparams(self, hparams_dict, metrics_list, walltime=None):
         """Add an histogram to vdl record file.
@@ -531,11 +526,10 @@ def add_hparams(self, hparams_dict, metrics_list, walltime=None):
 
         walltime = round(time.time() * 1000) if walltime is None else walltime
         self._get_file_writer().add_record(
-            hparam(
-                name=md5(self.file_name),
-                hparam_dict=hparams_dict,
-                metric_list=metrics_list,
-                walltime=walltime))
+            hparam(name=md5(self.file_name),
+                   hparam_dict=hparams_dict,
+                   metric_list=metrics_list,
+                   walltime=walltime))
 
     def add_pr_curve(self,
                      tag,
@@ -571,14 +565,13 @@ def add_pr_curve(self,
             raise RuntimeError("% can't appear in tag!")
         walltime = round(time.time() * 1000) if walltime is None else walltime
         self._get_file_writer().add_record(
-            pr_curve(
-                tag=tag,
-                labels=labels,
-                predictions=predictions,
-                step=step,
-                walltime=walltime,
-                num_thresholds=num_thresholds,
-                weights=weights))
+            pr_curve(tag=tag,
+                     labels=labels,
+                     predictions=predictions,
+                     step=step,
+                     walltime=walltime,
+                     num_thresholds=num_thresholds,
+                     weights=weights))
 
     def add_roc_curve(self,
                       tag,
@@ -612,16 +605,15 @@ def add_roc_curve(self,
             raise RuntimeError("% can't appear in tag!")
         walltime = round(time.time() * 1000) if walltime is None else walltime
         self._get_file_writer().add_record(
-            roc_curve(
-                tag=tag,
-                labels=labels,
-                predictions=predictions,
-                step=step,
-                walltime=walltime,
-                num_thresholds=num_thresholds,
-                weights=weights))
-
-    def add_graph(self, model, input_spec, verbose=False):
+            roc_curve(tag=tag,
+                      labels=labels,
+                      predictions=predictions,
+                      step=step,
+                      walltime=walltime,
+                      num_thresholds=num_thresholds,
+                      weights=weights))
+
+    def add_graph(self, model, input_spec, verbose=False, **kwargs):
         """
         Add a model graph to vdl graph file.
         Args:
@@ -662,7 +654,8 @@ def forward(self, inputs):
                     verbose=True)
         """
         try:
-            result = translate_graph(model, input_spec, verbose)
+            is_pir = kwargs.get('is_pir', False)
+            result = translate_graph(model, input_spec, verbose, is_pir=is_pir)
         except Exception as e:
             print("Failed to save model graph, error: {}".format(e))
             raise e