diff --git a/pm4py/algo/discovery/inductive/base_case/single_activity.py b/pm4py/algo/discovery/inductive/base_case/single_activity.py
index a23bb1c2d..3f8a74ca8 100644
--- a/pm4py/algo/discovery/inductive/base_case/single_activity.py
+++ b/pm4py/algo/discovery/inductive/base_case/single_activity.py
@@ -32,7 +32,10 @@ def holds(cls, obj=IMDataStructureUVCL, parameters: Optional[Dict[str, Any]] = N
 
     @classmethod
     def leaf(cls, obj=IMDataStructureUVCL, parameters: Optional[Dict[str, Any]] = None) -> ProcessTree:
         for t in obj.data_structure:
-            return ProcessTree(label=t[0])
+            if t:
+                return ProcessTree(label=t[0])
+            else:
+                return ProcessTree()
 
 
 class SingleActivityBaseCaseDFG(BaseCase[IMDataStructureDFG]):
diff --git a/pm4py/algo/discovery/inductive/cuts/concurrency.py b/pm4py/algo/discovery/inductive/cuts/concurrency.py
index f8d74613f..09f5d537b 100644
--- a/pm4py/algo/discovery/inductive/cuts/concurrency.py
+++ b/pm4py/algo/discovery/inductive/cuts/concurrency.py
@@ -16,18 +16,14 @@
 '''
 from abc import ABC
 from collections import Counter
-from itertools import product
 from typing import List, Collection, Any, Optional, Generic, Dict
 
-from pm4py.algo.discovery.inductive.cuts import utils as cut_util
 from pm4py.algo.discovery.inductive.cuts.abc import Cut, T
 from pm4py.algo.discovery.inductive.dtypes.im_dfg import InductiveDFG
 from pm4py.algo.discovery.inductive.dtypes.im_ds import IMDataStructureUVCL, IMDataStructureDFG
 from pm4py.objects.dfg import util as dfu
 from pm4py.objects.dfg.obj import DFG
 from pm4py.objects.process_tree.obj import Operator, ProcessTree
-from pm4py.util.compression import util as comut
-from pm4py.util.compression.dtypes import UVCL
 
 
 class ConcurrencyCut(Cut[T], ABC, Generic[T]):
@@ -41,18 +37,35 @@ def holds(cls, obj: T, parameters: Optional[Dict[str, Any]] = None) -> Optional[
         dfg = obj.dfg
         alphabet = dfu.get_vertices(dfg)
         alphabet = sorted(list(alphabet))
-        msdw = comut.msdw(obj, comut.msd(obj)) if obj is not None and type(obj) is UVCL else None
+        edges = dfu.get_edges(dfg)
+        edges = sorted(list(edges))
+
         groups = [{a} for a in alphabet]
         if len(groups) == 0:
             return None
-        edges = dfu.get_edges(dfg)
-        edges = sorted(list(edges))
-        for a, b in product(alphabet, alphabet):
-            if (a, b) not in edges or (b, a) not in edges:
-                groups = cut_util.merge_groups_based_on_activities(a, b, groups)
-            elif msdw is not None:
-                if (a in msdw and b in msdw[a]) or (b in msdw and a in msdw[b]):
-                    groups = cut_util.merge_groups_based_on_activities(a, b, groups)
+
+        cont = True
+        while cont:
+            cont = False
+            i = 0
+            while i < len(groups):
+                j = i + 1
+                while j < len(groups):
+                    for act1 in groups[i]:
+                        for act2 in groups[j]:
+                            if (act1, act2) not in edges or (act2, act1) not in edges:
+                                groups[i] = groups[i].union(groups[j])
+                                del groups[j]
+                                cont = True
+                                break
+                        if cont:
+                            break
+                    if cont:
+                        break
+                    j = j + 1
+                if cont:
+                    break
+                i = i + 1
 
         groups = list(sorted(groups, key=lambda g: len(g)))
         i = 0
diff --git a/pm4py/algo/discovery/inductive/cuts/sequence.py b/pm4py/algo/discovery/inductive/cuts/sequence.py
index 2d1e5c8b0..808b86b87 100644
--- a/pm4py/algo/discovery/inductive/cuts/sequence.py
+++ b/pm4py/algo/discovery/inductive/cuts/sequence.py
@@ -18,11 +18,9 @@
 import sys
 from abc import ABC
 from collections import Counter
-from itertools import product
 from typing import Collection, Any, List, Optional, Generic, Dict
 from typing import Tuple
 
-from pm4py.algo.discovery.inductive.cuts import utils as cut_util
 from pm4py.algo.discovery.inductive.cuts.abc import Cut
 from pm4py.algo.discovery.inductive.cuts.abc import T
 from pm4py.algo.discovery.inductive.dtypes.im_dfg import InductiveDFG
@@ -38,6 +36,28 @@ class SequenceCut(Cut[T], ABC, Generic[T]):
     def operator(cls, parameters: Optional[Dict[str, Any]] = None) -> ProcessTree:
         return ProcessTree(operator=Operator.SEQUENCE)
 
+    @staticmethod
+    def check_merge_condition(g1, g2, trans_succ):
+        for a1 in g1:
+            for a2 in g2:
+                if (a2 in trans_succ[a1] and a1 in trans_succ[a2]) or (a2 not in trans_succ[a1] and a1 not in trans_succ[a2]):
+                    return True
+        return False
+
+    @staticmethod
+    def merge_groups(groups, trans_succ):
+        i = 0
+        while i < len(groups):
+            j = i + 1
+            while j < len(groups):
+                if SequenceCut.check_merge_condition(groups[i], groups[j], trans_succ):
+                    groups[i] = groups[i].union(groups[j])
+                    del groups[j]
+                    continue
+                j = j + 1
+            i = i + 1
+        return groups
+
     @classmethod
     def holds(cls, obj: T, parameters: Optional[Dict[str, Any]] = None) -> Optional[List[Collection[Any]]]:
         '''
@@ -57,13 +77,15 @@ def holds(cls, obj: T, parameters: Optional[Dict[str, Any]] = None) -> Optional[
         groups = [{a} for a in alphabet]
         if len(groups) == 0:
             return None
-        for a, b in product(alphabet, alphabet):
-            if (b in transitive_successors[a] and a in transitive_successors[b]) or (
-                    b not in transitive_successors[a] and a not in transitive_successors[b]):
-                groups = cut_util.merge_groups_based_on_activities(a, b, groups)
+
+        old_size = None
+        while old_size != len(groups):
+            old_size = len(groups)
+            groups = SequenceCut.merge_groups(groups, transitive_successors)
 
         groups = list(sorted(groups, key=lambda g: len(
             transitive_predecessors[next(iter(g))]) + (len(alphabet) - len(transitive_successors[next(iter(g))]))))
+
         return groups if len(groups) > 1 else None
diff --git a/pm4py/algo/discovery/inductive/fall_through/empty_traces.py b/pm4py/algo/discovery/inductive/fall_through/empty_traces.py
index 64c867d1f..cdc63c902 100644
--- a/pm4py/algo/discovery/inductive/fall_through/empty_traces.py
+++ b/pm4py/algo/discovery/inductive/fall_through/empty_traces.py
@@ -33,8 +33,11 @@ def apply(cls, obj: IMDataStructureUVCL, pool=None, manager=None, parameters: Op
         if cls.holds(obj, parameters):
             data_structure = copy(obj.data_structure)
             del data_structure[()]
-            return ProcessTree(operator=Operator.XOR), [IMDataStructureUVCL(Counter()),
-                                                        IMDataStructureUVCL(data_structure)]
+            if data_structure:
+                return ProcessTree(operator=Operator.XOR), [IMDataStructureUVCL(Counter()),
+                                                            IMDataStructureUVCL(data_structure)]
+            else:
+                return ProcessTree(), []
         else:
             return None
diff --git a/pm4py/algo/discovery/inductive/variants/imf.py b/pm4py/algo/discovery/inductive/variants/imf.py
index 3f0fef7c9..b018c16b1 100644
--- a/pm4py/algo/discovery/inductive/variants/imf.py
+++ b/pm4py/algo/discovery/inductive/variants/imf.py
@@ -45,7 +45,7 @@ def apply(self, obj: IMDataStructureUVCL, parameters: Optional[Dict[str, Any]] =
         noise_threshold = exec_utils.get_param_value(IMFParameters.NOISE_THRESHOLD, parameters, 0.0)
 
         empty_traces = EmptyTracesUVCL.apply(obj, parameters)
-        if empty_traces is not None:
+        if empty_traces is not None and empty_traces[1]:
             number_original_traces = sum(y for y in obj.data_structure.values())
             number_filtered_traces = sum(y for y in empty_traces[1][1].data_structure.values())
diff --git a/pm4py/meta.py b/pm4py/meta.py
index ae5cf3b7f..3fa4345d7 100644
--- a/pm4py/meta.py
+++ b/pm4py/meta.py
@@ -16,7 +16,7 @@
 '''
 __name__ = 'pm4py'
-VERSION = '2.7.11.1'
+VERSION = '2.7.11.2'
 __version__ = VERSION
 __doc__ = 'Process mining for Python'
 __author__ = 'Fraunhofer Institute for Applied Information Technology FIT'
diff --git a/pm4py/objects/bpmn/util/reduction.py b/pm4py/objects/bpmn/util/reduction.py
index 92b4e2cf5..edbe49ad8 100644
--- a/pm4py/objects/bpmn/util/reduction.py
+++ b/pm4py/objects/bpmn/util/reduction.py
@@ -74,7 +74,7 @@ def reduce_xor_gateways(bpmn_graph, parameters=None):
                         bpmn_graph.remove_flow(flow)
                     if node in bpmn_graph.get_nodes():
                         bpmn_graph.remove_node(node)
-                    bpmn_graph.add_flow(BPMN.Flow(source_node, target_node))
+                    bpmn_graph.add_flow(BPMN.SequenceFlow(source_node, target_node))
                     break
 
     return bpmn_graph
diff --git a/pm4py/objects/conversion/bpmn/variants/to_petri_net.py b/pm4py/objects/conversion/bpmn/variants/to_petri_net.py
index 569011c2d..be51bd487 100644
--- a/pm4py/objects/conversion/bpmn/variants/to_petri_net.py
+++ b/pm4py/objects/conversion/bpmn/variants/to_petri_net.py
@@ -19,6 +19,8 @@
 
 from pm4py.objects.petri_net.utils import reduction
 from pm4py.objects.petri_net.obj import PetriNet, Marking
+from pm4py.objects.bpmn.obj import BPMN
+from pm4py.objects.petri_net.utils.petri_utils import remove_place
 from pm4py.objects.petri_net.utils.petri_utils import add_arc_from_to
 from pm4py.util import exec_utils, nx_utils
 
@@ -113,26 +115,28 @@ def apply(bpmn_graph, parameters=None):
     source_count = {}
     target_count = {}
     for flow in bpmn_graph.get_flows():
-        source = flow.get_source()
-        target = flow.get_target()
-        place = PetriNet.Place(str(flow.get_id()))
-        net.places.add(place)
-        flow_place[flow] = place
-        if source not in source_count:
-            source_count[source] = 0
-        if target not in target_count:
-            target_count[target] = 0
-        source_count[source] = source_count[source] + 1
-        target_count[target] = target_count[target] + 1
+        if isinstance(flow, BPMN.SequenceFlow):
+            source = flow.get_source()
+            target = flow.get_target()
+            place = PetriNet.Place(str(flow.get_id()))
+            net.places.add(place)
+            flow_place[flow] = place
+            if source not in source_count:
+                source_count[source] = 0
+            if target not in target_count:
+                target_count[target] = 0
+            source_count[source] = source_count[source] + 1
+            target_count[target] = target_count[target] + 1
 
     for flow in bpmn_graph.get_flows():
-        source = flow.get_source()
-        target = flow.get_target()
-        place = PetriNet.Place(str(flow.get_id()))
-        if isinstance(source, BPMN.InclusiveGateway) and source_count[source] > 1:
-            inclusive_gateway_exit.add(place.name)
-        elif isinstance(target, BPMN.InclusiveGateway) and target_count[target] > 1:
-            inclusive_gateway_entry.add(place.name)
+        if isinstance(flow, BPMN.SequenceFlow):
+            source = flow.get_source()
+            target = flow.get_target()
+            place = PetriNet.Place(str(flow.get_id()))
+            if isinstance(source, BPMN.InclusiveGateway) and source_count[source] > 1:
+                inclusive_gateway_exit.add(place.name)
+            elif isinstance(target, BPMN.InclusiveGateway) and target_count[target] > 1:
+                inclusive_gateway_entry.add(place.name)
 
     # remove possible places that are both in inclusive_gateway_exit and inclusive_gateway_entry,
     # because we do not need to add invisibles in this situation
@@ -145,77 +149,87 @@ def apply(bpmn_graph, parameters=None):
     trans_map = {}
 
     for node in bpmn_graph.get_nodes():
-        entry_place = PetriNet.Place("ent_" + str(node.get_id()))
-        net.places.add(entry_place)
-        exiting_place = PetriNet.Place("exi_" + str(node.get_id()))
-        net.places.add(exiting_place)
-        if use_id:
-            label = str(node.get_id())
-        else:
-            label = str(node.get_name()) if isinstance(node, BPMN.Task) else None
-        if not label:
-            label = None
-        transition = PetriNet.Transition(name=str(node.get_id()), label=label)
-        net.transitions.add(transition)
-        trans_map[node] = [transition]
-        add_arc_from_to(entry_place, transition, net)
-        add_arc_from_to(transition, exiting_place, net)
-
-        if isinstance(node, BPMN.ParallelGateway) or isinstance(node, BPMN.InclusiveGateway):
-            if source_count[node] > 1:
-                exiting_object = PetriNet.Transition(str(uuid.uuid4()), None)
-                net.transitions.add(exiting_object)
-                add_arc_from_to(exiting_place, exiting_object, net)
-                trans_map[node].append(exiting_object)
+        if isinstance(node, BPMN.Task) or isinstance(node, BPMN.StartEvent) or isinstance(node, BPMN.EndEvent) or \
+                isinstance(node, BPMN.ExclusiveGateway) or isinstance(node, BPMN.ParallelGateway) or \
+                isinstance(node, BPMN.InclusiveGateway):
+            if not node in source_count:
+                source_count[node] = 0
+            if not node in target_count:
+                target_count[node] = 0
+
+            entry_place = PetriNet.Place("ent_" + str(node.get_id()))
+            net.places.add(entry_place)
+            exiting_place = PetriNet.Place("exi_" + str(node.get_id()))
+            net.places.add(exiting_place)
+            if use_id:
+                label = str(node.get_id())
             else:
-                exiting_object = exiting_place
-
-            if target_count[node] > 1:
-                entering_object = PetriNet.Transition(str(uuid.uuid4()), None)
-                net.transitions.add(entering_object)
-                add_arc_from_to(entering_object, entry_place, net)
-                trans_map[node].append(entering_object)
+                label = str(node.get_name()) if isinstance(node, BPMN.Task) else None
+            if not label:
+                label = None
+            transition = PetriNet.Transition(name=str(node.get_id()), label=label)
+            net.transitions.add(transition)
+            trans_map[node] = [transition]
+            add_arc_from_to(entry_place, transition, net)
+            add_arc_from_to(transition, exiting_place, net)
+
+            if isinstance(node, BPMN.ParallelGateway) or isinstance(node, BPMN.InclusiveGateway):
+                if source_count[node] > 1:
+                    exiting_object = PetriNet.Transition(str(uuid.uuid4()), None)
+                    net.transitions.add(exiting_object)
+                    add_arc_from_to(exiting_place, exiting_object, net)
+                    trans_map[node].append(exiting_object)
+                else:
+                    exiting_object = exiting_place
+
+                if target_count[node] > 1:
+                    entering_object = PetriNet.Transition(str(uuid.uuid4()), None)
+                    net.transitions.add(entering_object)
+                    add_arc_from_to(entering_object, entry_place, net)
+                    trans_map[node].append(entering_object)
+                else:
+                    entering_object = entry_place
+                nodes_entering[node] = entering_object
+                nodes_exiting[node] = exiting_object
             else:
-                entering_object = entry_place
-            nodes_entering[node] = entering_object
-            nodes_exiting[node] = exiting_object
-        else:
-            nodes_entering[node] = entry_place
-            nodes_exiting[node] = exiting_place
-
-        if isinstance(node, BPMN.StartEvent):
-            start_transition = PetriNet.Transition(str(uuid.uuid4()), None)
-            net.transitions.add(start_transition)
-            add_arc_from_to(source_place, start_transition, net)
-            add_arc_from_to(start_transition, entry_place, net)
-            trans_map[node].append(start_transition)
-        elif isinstance(node, BPMN.EndEvent):
-            end_transition = PetriNet.Transition(str(uuid.uuid4()), None)
-            net.transitions.add(end_transition)
-            add_arc_from_to(exiting_place, end_transition, net)
-            add_arc_from_to(end_transition, sink_place, net)
-            trans_map[node].append(end_transition)
+                nodes_entering[node] = entry_place
+                nodes_exiting[node] = exiting_place
+
+            if isinstance(node, BPMN.StartEvent):
+                start_transition = PetriNet.Transition(str(uuid.uuid4()), None)
+                net.transitions.add(start_transition)
+                add_arc_from_to(source_place, start_transition, net)
+                add_arc_from_to(start_transition, entry_place, net)
+                trans_map[node].append(start_transition)
+            elif isinstance(node, BPMN.EndEvent):
+                end_transition = PetriNet.Transition(str(uuid.uuid4()), None)
+                net.transitions.add(end_transition)
+                add_arc_from_to(exiting_place, end_transition, net)
+                add_arc_from_to(end_transition, sink_place, net)
+                trans_map[node].append(end_transition)
 
     for flow in bpmn_graph.get_flows():
-        source_object = nodes_exiting[flow.get_source()]
-        target_object = nodes_entering[flow.get_target()]
-
-        if isinstance(source_object, PetriNet.Place):
-            inv1 = PetriNet.Transition(f"sfl_{flow.get_id()}", None)
-            net.transitions.add(inv1)
-            add_arc_from_to(source_object, inv1, net)
-            source_object = inv1
-            trans_map[flow.source].append(inv1)
-
-        if isinstance(target_object, PetriNet.Place):
-            inv2 = PetriNet.Transition(f"tfl_{flow.get_id()}", None)
-            net.transitions.add(inv2)
-            add_arc_from_to(inv2, target_object, net)
-            target_object = inv2
-            trans_map[flow.target].append(inv2)
-
-        add_arc_from_to(source_object, flow_place[flow], net)
-        add_arc_from_to(flow_place[flow], target_object, net)
+        if isinstance(flow, BPMN.SequenceFlow):
+            if flow.get_source() in nodes_exiting and flow.get_target() in nodes_entering:
+                source_object = nodes_exiting[flow.get_source()]
+                target_object = nodes_entering[flow.get_target()]
+
+                if isinstance(source_object, PetriNet.Place):
+                    inv1 = PetriNet.Transition(f"sfl_{flow.get_id()}", None)
+                    net.transitions.add(inv1)
+                    add_arc_from_to(source_object, inv1, net)
+                    source_object = inv1
+                    trans_map[flow.source].append(inv1)
+
+                if isinstance(target_object, PetriNet.Place):
+                    inv2 = PetriNet.Transition(f"tfl_{flow.get_id()}", None)
+                    net.transitions.add(inv2)
+                    add_arc_from_to(inv2, target_object, net)
+                    target_object = inv2
+                    trans_map[flow.target].append(inv2)
+
+                add_arc_from_to(source_object, flow_place[flow], net)
+                add_arc_from_to(flow_place[flow], target_object, net)
 
     if inclusive_gateway_exit and inclusive_gateway_entry:
         # do the following steps if there are inclusive gateways:
@@ -241,6 +255,10 @@ def apply(bpmn_graph, parameters=None):
     if enable_reduction:
         reduction.apply_simple_reduction(net)
 
+    for place in list(net.places):
+        if len(place.in_arcs) == 0 and len(place.out_arcs) == 0 and not place in im and not place in fm:
+            remove_place(net, place)
+
     if return_flow_trans_map:
         return net, im, fm, flow_place, trans_map
diff --git a/pm4py/objects/conversion/process_tree/variants/to_bpmn.py b/pm4py/objects/conversion/process_tree/variants/to_bpmn.py
index 904485007..89108801e 100644
--- a/pm4py/objects/conversion/process_tree/variants/to_bpmn.py
+++ b/pm4py/objects/conversion/process_tree/variants/to_bpmn.py
@@ -84,8 +84,8 @@ def add_xor_gateway(bpmn, counts):
     split_name = "xor_" + str(counts.num_xor_gateways) + "_split"
     join_name = "xor_" + str(counts.num_xor_gateways) + "_join"
-    split = BPMN.ExclusiveGateway(name=split_name, gateway_direction=BPMN.Gateway.Direction.DIVERGING)
-    join = BPMN.ExclusiveGateway(name=join_name, gateway_direction=BPMN.Gateway.Direction.CONVERGING)
+    split = BPMN.ExclusiveGateway(name="", gateway_direction=BPMN.Gateway.Direction.DIVERGING)
+    join = BPMN.ExclusiveGateway(name="", gateway_direction=BPMN.Gateway.Direction.CONVERGING)
     bpmn.add_node(split)
     bpmn.add_node(join)
 
@@ -98,8 +98,8 @@ def add_parallel_gateway(bpmn, counts):
     split_name = "parallel_" + str(counts.num_para_gateways) + "_split"
     join_name = "parallel_" + str(counts.num_para_gateways) + "_join"
-    split = BPMN.ParallelGateway(name=split_name, gateway_direction=BPMN.Gateway.Direction.DIVERGING)
-    join = BPMN.ParallelGateway(name=join_name, gateway_direction=BPMN.Gateway.Direction.CONVERGING)
+    split = BPMN.ParallelGateway(name="", gateway_direction=BPMN.Gateway.Direction.DIVERGING)
+    join = BPMN.ParallelGateway(name="", gateway_direction=BPMN.Gateway.Direction.CONVERGING)
     bpmn.add_node(split)
     bpmn.add_node(join)
     return bpmn, split, join, counts
@@ -111,8 +111,8 @@ def add_inclusive_gateway(bpmn, counts):
     split_name = "parallel_" + str(counts.num_para_gateways) + "_split"
     join_name = "parallel_" + str(counts.num_para_gateways) + "_join"
-    split = BPMN.InclusiveGateway(name=split_name, gateway_direction=BPMN.Gateway.Direction.DIVERGING)
-    join = BPMN.InclusiveGateway(name=join_name, gateway_direction=BPMN.Gateway.Direction.CONVERGING)
+    split = BPMN.InclusiveGateway(name="", gateway_direction=BPMN.Gateway.Direction.DIVERGING)
+    join = BPMN.InclusiveGateway(name="", gateway_direction=BPMN.Gateway.Direction.CONVERGING)
     bpmn.add_node(split)
     bpmn.add_node(join)
     return bpmn, split, join, counts
@@ -128,14 +128,14 @@ def recursively_add_tree(parent_tree, tree, bpmn, initial_event, final_event, co
         trans = tree
         if trans.label is None:
             bpmn, task, counts = add_tau_task(bpmn, counts)
-            bpmn.add_flow(BPMN.Flow(initial_event, task))
-            bpmn.add_flow(BPMN.Flow(task, final_event))
+            bpmn.add_flow(BPMN.SequenceFlow(initial_event, task))
+            bpmn.add_flow(BPMN.SequenceFlow(task, final_event))
             initial_connector = task
             final_connector = task
         else:
             bpmn, task, counts = add_task(bpmn, counts, trans.label)
-            bpmn.add_flow(BPMN.Flow(initial_event, task))
-            bpmn.add_flow(BPMN.Flow(task, final_event))
+            bpmn.add_flow(BPMN.SequenceFlow(initial_event, task))
+            bpmn.add_flow(BPMN.SequenceFlow(task, final_event))
             initial_connector = task
             final_connector = task
 
@@ -145,8 +145,8 @@ def recursively_add_tree(parent_tree, tree, bpmn, initial_event, final_event, co
             bpmn, counts, x, y = recursively_add_tree(tree, subtree, bpmn, split_gateway, join_gateway, counts,
                                                       rec_depth + 1)
-        bpmn.add_flow(BPMN.Flow(initial_event, split_gateway))
-        bpmn.add_flow(BPMN.Flow(join_gateway, final_event))
+        bpmn.add_flow(BPMN.SequenceFlow(initial_event, split_gateway))
+        bpmn.add_flow(BPMN.SequenceFlow(join_gateway, final_event))
         initial_connector = split_gateway
         final_connector = join_gateway
 
@@ -156,8 +156,8 @@ def recursively_add_tree(parent_tree, tree, bpmn, initial_event, final_event, co
            bpmn, counts, x, y = recursively_add_tree(tree, subtree, bpmn, split_gateway, join_gateway, counts,
                                                       rec_depth + 1)
-        bpmn.add_flow(BPMN.Flow(initial_event, split_gateway))
-        bpmn.add_flow(BPMN.Flow(join_gateway, final_event))
+        bpmn.add_flow(BPMN.SequenceFlow(initial_event, split_gateway))
+        bpmn.add_flow(BPMN.SequenceFlow(join_gateway, final_event))
         initial_connector = split_gateway
         final_connector = join_gateway
 
@@ -167,8 +167,8 @@ def recursively_add_tree(parent_tree, tree, bpmn, initial_event, final_event, co
            bpmn, counts, x, y = recursively_add_tree(tree, subtree, bpmn, split_gateway, join_gateway, counts,
                                                       rec_depth + 1)
-        bpmn.add_flow(BPMN.Flow(initial_event, split_gateway))
-        bpmn.add_flow(BPMN.Flow(join_gateway, final_event))
+        bpmn.add_flow(BPMN.SequenceFlow(initial_event, split_gateway))
+        bpmn.add_flow(BPMN.SequenceFlow(join_gateway, final_event))
         initial_connector = split_gateway
         final_connector = join_gateway
 
@@ -195,8 +195,8 @@ def recursively_add_tree(parent_tree, tree, bpmn, initial_event, final_event, co
         bpmn, counts, i, y = recursively_add_tree(tree, do, bpmn, join, split, counts, rec_depth + 1)
         for redo in tree_childs[1:]:
             bpmn, counts, x, y = recursively_add_tree(tree, redo, bpmn, split, join, counts, rec_depth + 1)
-        bpmn.add_flow(BPMN.Flow(initial_event, join))
-        bpmn.add_flow(BPMN.Flow(split, final_event))
+        bpmn.add_flow(BPMN.SequenceFlow(initial_event, join))
+        bpmn.add_flow(BPMN.SequenceFlow(split, final_event))
         initial_connector = join
         final_connector = split
 
@@ -217,7 +217,7 @@ def delete_tau_transitions(bpmn, counts):
                 target = out_flow.get_target()
                 bpmn.remove_flow(out_flow)
                 bpmn.remove_flow(in_flow)
-                bpmn.add_flow(BPMN.Flow(source, target))
+                bpmn.add_flow(BPMN.SequenceFlow(source, target))
         else:
             for in_flow in copy.copy(in_arcs):
                 bpmn.remove_flow(in_flow)
diff --git a/pm4py/objects/conversion/wf_net/variants/to_bpmn.py b/pm4py/objects/conversion/wf_net/variants/to_bpmn.py
index 35eb203d7..30c934ed3 100644
--- a/pm4py/objects/conversion/wf_net/variants/to_bpmn.py
+++ b/pm4py/objects/conversion/wf_net/variants/to_bpmn.py
@@ -72,23 +72,23 @@ def apply(net, im, fm, parameters=None):
             task = BPMN.Task(name=trans.label)
             bpmn_graph.add_node(task)
-            bpmn_graph.add_flow(BPMN.Flow(entering_node, task))
-            bpmn_graph.add_flow(BPMN.Flow(task, exiting_node))
+            bpmn_graph.add_flow(BPMN.SequenceFlow(entering_node, task))
+            bpmn_graph.add_flow(BPMN.SequenceFlow(task, exiting_node))
 
         entering_dictio[trans] = entering_node
         exiting_dictio[trans] = exiting_node
 
     for arc in net.arcs:
-        bpmn_graph.add_flow(BPMN.Flow(exiting_dictio[arc.source], entering_dictio[arc.target]))
+        bpmn_graph.add_flow(BPMN.SequenceFlow(exiting_dictio[arc.source], entering_dictio[arc.target]))
 
     start_node = BPMN.StartEvent(name="start", isInterrupting=True)
     end_node = BPMN.NormalEndEvent(name="end")
     bpmn_graph.add_node(start_node)
     bpmn_graph.add_node(end_node)
     for place in im:
-        bpmn_graph.add_flow(BPMN.Flow(start_node, entering_dictio[place]))
+        bpmn_graph.add_flow(BPMN.SequenceFlow(start_node, entering_dictio[place]))
     for place in fm:
-        bpmn_graph.add_flow(BPMN.Flow(exiting_dictio[place], end_node))
+        bpmn_graph.add_flow(BPMN.SequenceFlow(exiting_dictio[place], end_node))
 
     bpmn_graph = reduction.apply(bpmn_graph)
diff --git a/tests/pm_extr_test/bas_inductive_test.py b/tests/pm_extr_test/bas_inductive_test.py
index f7a3b5d14..1529e7331 100644
--- a/tests/pm_extr_test/bas_inductive_test.py
+++ b/tests/pm_extr_test/bas_inductive_test.py
@@ -3,6 +3,7 @@
 from pm4py.util import constants, pandas_utils
 
+import time
 import pm4py
 from pm4py.algo.discovery.inductive import algorithm as im_clean
 from pm4py.statistics.variants.log import get as variants_get
@@ -56,8 +57,7 @@
     if not activities.issubset(fp_tree_clean["activities"]):
         print("ALERT! activities of the tree are less than the ones in the log!")
         print(activities.difference(fp_tree_clean["activities"]))
-        print(activities.difference(fp_tree_im["activities"]))
-        input()
+        time.sleep(5)
 
     fp_conf_im_clean = pm4py.algo.conformance.footprints.variants.log_extensive.apply(fp_log, fp_tree_clean)
     fitness_im_clean = pm4py.algo.conformance.footprints.util.evaluation.fp_fitness(fp_log, fp_tree_clean,
@@ -72,8 +72,9 @@
     fitness_al_clean = alignment_based.evaluate(alignments_clean)["average_trace_fitness"]
 
     if fitness_al_clean < fitness_im_clean:
-        print("ALERT")
-        input()
+        print("ALERT", fitness_al_clean, fitness_im_clean)
+        time.sleep(5)
+        #input()
     else:
         print("OK ALIGNMENTS", fitness_al_clean)
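
Note (not part of the patch): a minimal sketch of how the touched inductive-miner code paths can be exercised through the public pm4py API, assuming the pm4py 2.7.x behavior of discover_process_tree_inductive and of the EventLog/Trace/Event classes; the activity names are arbitrary. The empty trace goes through the EmptyTracesUVCL / single-activity handling changed above, the remaining variants go through the rewritten group merging in sequence.py and concurrency.py, and a non-zero noise threshold selects the IMf variant touched in imf.py.

# illustrative sketch only; assumes the public pm4py 2.7.x API
import pm4py
from pm4py.objects.log.obj import EventLog, Trace, Event

# build a tiny event log with three variants, one of them an empty trace
log = EventLog()
for variant in (["a", "b", "c"], ["a", "c"], []):
    trace = Trace()
    for activity in variant:
        trace.append(Event({"concept:name": activity}))
    log.append(trace)

# a non-zero noise threshold routes discovery through the IMf variant
tree = pm4py.discover_process_tree_inductive(log, noise_threshold=0.2)
print(tree)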