Commit

Merge remote-tracking branch 'upstream/hotfixes' into release
fit-alessandro-berti committed Mar 22, 2024
2 parents 7652add + c9f1ebc commit df708f8
Showing 11 changed files with 198 additions and 138 deletions.
5 changes: 4 additions & 1 deletion pm4py/algo/discovery/inductive/base_case/single_activity.py
@@ -32,7 +32,10 @@ def holds(cls, obj=IMDataStructureUVCL, parameters: Optional[Dict[str, Any]] = N
@classmethod
def leaf(cls, obj=IMDataStructureUVCL, parameters: Optional[Dict[str, Any]] = None) -> ProcessTree:
for t in obj.data_structure:
return ProcessTree(label=t[0])
if t:
return ProcessTree(label=t[0])
else:
return ProcessTree()


class SingleActivityBaseCaseDFG(BaseCase[IMDataStructureDFG]):
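
Note on this hunk: the single-activity base case previously built ProcessTree(label=t[0]) unconditionally, so an empty trace would fail on t[0]; the patched leaf() returns an unlabeled (silent) leaf in that case. A minimal sketch of the new behaviour, assuming ProcessTree is importable from pm4py.objects.process_tree.obj (as in the other files of this commit) and using a plain Counter to stand in for the UVCL data structure:

from collections import Counter
from pm4py.objects.process_tree.obj import ProcessTree

def leaf_sketch(data_structure):
    # mirrors the patched leaf(): an empty trace () yields a silent leaf
    # instead of raising an IndexError on t[0]
    for t in data_structure:
        if t:
            return ProcessTree(label=t[0])
        return ProcessTree()

print(leaf_sketch(Counter({("register request",): 3})).label)  # 'register request'
print(leaf_sketch(Counter({(): 2})).label)                     # None (silent leaf)
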
39 changes: 26 additions & 13 deletions pm4py/algo/discovery/inductive/cuts/concurrency.py
@@ -16,18 +16,14 @@
'''
from abc import ABC
from collections import Counter
from itertools import product
from typing import List, Collection, Any, Optional, Generic, Dict

from pm4py.algo.discovery.inductive.cuts import utils as cut_util
from pm4py.algo.discovery.inductive.cuts.abc import Cut, T
from pm4py.algo.discovery.inductive.dtypes.im_dfg import InductiveDFG
from pm4py.algo.discovery.inductive.dtypes.im_ds import IMDataStructureUVCL, IMDataStructureDFG
from pm4py.objects.dfg import util as dfu
from pm4py.objects.dfg.obj import DFG
from pm4py.objects.process_tree.obj import Operator, ProcessTree
from pm4py.util.compression import util as comut
from pm4py.util.compression.dtypes import UVCL


class ConcurrencyCut(Cut[T], ABC, Generic[T]):
@@ -41,18 +37,35 @@ def holds(cls, obj: T, parameters: Optional[Dict[str, Any]] = None) -> Optional[
dfg = obj.dfg
alphabet = dfu.get_vertices(dfg)
alphabet = sorted(list(alphabet))
msdw = comut.msdw(obj, comut.msd(obj)) if obj is not None and type(obj) is UVCL else None
edges = dfu.get_edges(dfg)
edges = sorted(list(edges))

groups = [{a} for a in alphabet]
if len(groups) == 0:
return None
edges = dfu.get_edges(dfg)
edges = sorted(list(edges))
for a, b in product(alphabet, alphabet):
if (a, b) not in edges or (b, a) not in edges:
groups = cut_util.merge_groups_based_on_activities(a, b, groups)
elif msdw is not None:
if (a in msdw and b in msdw[a]) or (b in msdw and a in msdw[b]):
groups = cut_util.merge_groups_based_on_activities(a, b, groups)

cont = True
while cont:
cont = False
i = 0
while i < len(groups):
j = i + 1
while j < len(groups):
for act1 in groups[i]:
for act2 in groups[j]:
if (act1, act2) not in edges or (act2, act1) not in edges:
groups[i] = groups[i].union(groups[j])
del groups[j]
cont = True
break
if cont:
break
if cont:
break
j = j + 1
if cont:
break
i = i + 1

groups = list(sorted(groups, key=lambda g: len(g)))
i = 0
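
Note on this hunk: the itertools.product pass over cut_util.merge_groups_based_on_activities (and the msd-witness check) is replaced by an explicit fixpoint loop that unions two candidate groups as soon as some pair of their activities is not connected in both directions in the DFG. A self-contained sketch of the same merging logic on a plain edge set, slightly compacted from the patched loop and needing no pm4py imports:

def merge_concurrency_groups(alphabet, edges):
    # edges: pairs (a, b) of the directly-follows graph
    groups = [{a} for a in sorted(alphabet)]
    cont = True
    while cont:                      # repeat until no merge happens
        cont = False
        i = 0
        while i < len(groups):
            j = i + 1
            while j < len(groups):
                # merge when some pair is not mutually connected
                if any((a, b) not in edges or (b, a) not in edges
                       for a in groups[i] for b in groups[j]):
                    groups[i] = groups[i] | groups[j]
                    del groups[j]
                    cont = True
                else:
                    j += 1
            i += 1
    return groups

# b and c lack the edge (c, b), so they cannot be concurrent and are merged;
# a is doubly connected to both, so {a} stays a separate component
edges = {("a", "b"), ("b", "a"), ("a", "c"), ("c", "a"), ("b", "c")}
print([sorted(g) for g in merge_concurrency_groups({"a", "b", "c"}, edges)])
# -> [['a'], ['b', 'c']]
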
34 changes: 28 additions & 6 deletions pm4py/algo/discovery/inductive/cuts/sequence.py
@@ -18,11 +18,9 @@
import sys
from abc import ABC
from collections import Counter
from itertools import product
from typing import Collection, Any, List, Optional, Generic, Dict
from typing import Tuple

from pm4py.algo.discovery.inductive.cuts import utils as cut_util
from pm4py.algo.discovery.inductive.cuts.abc import Cut
from pm4py.algo.discovery.inductive.cuts.abc import T
from pm4py.algo.discovery.inductive.dtypes.im_dfg import InductiveDFG
@@ -38,6 +36,28 @@ class SequenceCut(Cut[T], ABC, Generic[T]):
def operator(cls, parameters: Optional[Dict[str, Any]] = None) -> ProcessTree:
return ProcessTree(operator=Operator.SEQUENCE)

@staticmethod
def check_merge_condition(g1, g2, trans_succ):
for a1 in g1:
for a2 in g2:
if (a2 in trans_succ[a1] and a1 in trans_succ[a2]) or (a2 not in trans_succ[a1] and a1 not in trans_succ[a2]):
return True
return False

@staticmethod
def merge_groups(groups, trans_succ):
i = 0
while i < len(groups):
j = i + 1
while j < len(groups):
if SequenceCut.check_merge_condition(groups[i], groups[j], trans_succ):
groups[i] = groups[i].union(groups[j])
del groups[j]
continue
j = j + 1
i = i + 1
return groups

@classmethod
def holds(cls, obj: T, parameters: Optional[Dict[str, Any]] = None) -> Optional[List[Collection[Any]]]:
'''
@@ -57,13 +77,15 @@ def holds(cls, obj: T, parameters: Optional[Dict[str, Any]] = None) -> Optional[
groups = [{a} for a in alphabet]
if len(groups) == 0:
return None
for a, b in product(alphabet, alphabet):
if (b in transitive_successors[a] and a in transitive_successors[b]) or (
b not in transitive_successors[a] and a not in transitive_successors[b]):
groups = cut_util.merge_groups_based_on_activities(a, b, groups)

old_size = None
while old_size != len(groups):
old_size = len(groups)
groups = SequenceCut.merge_groups(groups, transitive_successors)

groups = list(sorted(groups, key=lambda g: len(
transitive_predecessors[next(iter(g))]) + (len(alphabet) - len(transitive_successors[next(iter(g))]))))

return groups if len(groups) > 1 else None


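
Note on this hunk: check_merge_condition and merge_groups replace the former product/cut_util pass. Two groups are merged when some pair of activities across them is either mutually reachable or mutually unreachable in the transitive closure of the DFG, and holds() now repeats merge_groups until the number of groups stops shrinking. A small self-contained illustration with a hand-written transitive-successor map; the two-clause condition from the diff is compacted into a boolean equality:

def check_merge_condition(g1, g2, trans_succ):
    # merge if some pair is mutually reachable or mutually unreachable
    return any((a2 in trans_succ[a1]) == (a1 in trans_succ[a2])
               for a1 in g1 for a2 in g2)

def merge_groups(groups, trans_succ):
    i = 0
    while i < len(groups):
        j = i + 1
        while j < len(groups):
            if check_merge_condition(groups[i], groups[j], trans_succ):
                groups[i] = groups[i] | groups[j]
                del groups[j]
            else:
                j += 1
        i += 1
    return groups

# a always precedes b and c, while b and c can reach each other (a loop),
# so b and c collapse into one group and the sequence cut is <{a}, {b, c}>
trans_succ = {"a": {"b", "c"}, "b": {"c"}, "c": {"b"}}
groups = [{"a"}, {"b"}, {"c"}]
old_size = None
while old_size != len(groups):       # same fixpoint as the patched holds()
    old_size = len(groups)
    groups = merge_groups(groups, trans_succ)
print([sorted(g) for g in groups])   # -> [['a'], ['b', 'c']]
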
7 changes: 5 additions & 2 deletions pm4py/algo/discovery/inductive/fall_through/empty_traces.py
@@ -33,8 +33,11 @@ def apply(cls, obj: IMDataStructureUVCL, pool=None, manager=None, parameters: Op
if cls.holds(obj, parameters):
data_structure = copy(obj.data_structure)
del data_structure[()]
return ProcessTree(operator=Operator.XOR), [IMDataStructureUVCL(Counter()),
IMDataStructureUVCL(data_structure)]
if data_structure:
return ProcessTree(operator=Operator.XOR), [IMDataStructureUVCL(Counter()),
IMDataStructureUVCL(data_structure)]
else:
return ProcessTree(), []
else:
return None

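
Note on this hunk: EmptyTracesUVCL.apply now distinguishes a log that consists only of empty traces. Previously it always produced an XOR over an empty sub-log and the filtered log; now the all-empty case yields a bare (silent) ProcessTree with no children. A short check of both outcomes, assuming the class is importable from the module changed here and that IMDataStructureUVCL wraps a Counter of trace tuples, as it does elsewhere in this diff:

from collections import Counter
from pm4py.algo.discovery.inductive.dtypes.im_ds import IMDataStructureUVCL
from pm4py.algo.discovery.inductive.fall_through.empty_traces import EmptyTracesUVCL

# empty trace mixed with a real trace: XOR over tau and the filtered sub-log
mixed = IMDataStructureUVCL(Counter({(): 2, ("a", "b"): 3}))
tree, children = EmptyTracesUVCL.apply(mixed)
print(tree.operator, len(children))   # the XOR operator and 2 children

# log made only of empty traces: bare tree, no children to recurse on
only_empty = IMDataStructureUVCL(Counter({(): 5}))
tree, children = EmptyTracesUVCL.apply(only_empty)
print(tree.operator, children)        # None []
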
2 changes: 1 addition & 1 deletion pm4py/algo/discovery/inductive/variants/imf.py
@@ -45,7 +45,7 @@ def apply(self, obj: IMDataStructureUVCL, parameters: Optional[Dict[str, Any]] =
noise_threshold = exec_utils.get_param_value(IMFParameters.NOISE_THRESHOLD, parameters, 0.0)

empty_traces = EmptyTracesUVCL.apply(obj, parameters)
if empty_traces is not None:
if empty_traces is not None and empty_traces[1]:
number_original_traces = sum(y for y in obj.data_structure.values())
number_filtered_traces = sum(y for y in empty_traces[1][1].data_structure.values())

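
Note on this hunk: the added "and empty_traces[1]" guard follows from the empty_traces.py change above. When the log only contains empty traces, the fall-through now returns an empty child list, so dereferencing empty_traces[1][1] would raise an IndexError. A tiny sketch of the guard, with dummy tuples standing in for the two shapes the fall-through can return:

# hypothetical stand-ins for EmptyTracesUVCL.apply results
only_empty_log = ("tau-tree", [])                             # new: no children
mixed_log = ("xor-tree", ["empty-sublog", "filtered-sublog"]) # as before

for empty_traces in (None, only_empty_log, mixed_log):
    if empty_traces is not None and empty_traces[1]:
        # safe: a filtered sub-log exists at index [1][1]
        print("apply noise filtering to", empty_traces[1][1])
    else:
        print("skip noise filtering for", empty_traces)
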
2 changes: 1 addition & 1 deletion pm4py/meta.py
@@ -16,7 +16,7 @@
'''

__name__ = 'pm4py'
VERSION = '2.7.11.1'
VERSION = '2.7.11.2'
__version__ = VERSION
__doc__ = 'Process mining for Python'
__author__ = 'Fraunhofer Institute for Applied Information Technology FIT'
2 changes: 1 addition & 1 deletion pm4py/objects/bpmn/util/reduction.py
@@ -74,7 +74,7 @@ def reduce_xor_gateways(bpmn_graph, parameters=None):
bpmn_graph.remove_flow(flow)
if node in bpmn_graph.get_nodes():
bpmn_graph.remove_node(node)
bpmn_graph.add_flow(BPMN.Flow(source_node, target_node))
bpmn_graph.add_flow(BPMN.SequenceFlow(source_node, target_node))
break

return bpmn_graph
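
Note on this hunk: the reduction re-wires the graph with a concrete BPMN.SequenceFlow instead of the generic BPMN.Flow, which matters because the Petri-net converter below only considers sequence flows. A hedged sketch of inserting such a flow between two nodes; BPMN.Task(name=...) and add_node are assumptions about pm4py's BPMN object model and are not part of this diff:

from pm4py.objects.bpmn.obj import BPMN

graph = BPMN()
a, b = BPMN.Task(name="A"), BPMN.Task(name="B")
graph.add_node(a)
graph.add_node(b)
# concrete sequence flow, as now produced by reduce_xor_gateways
graph.add_flow(BPMN.SequenceFlow(a, b))
print([type(f).__name__ for f in graph.get_flows()])   # ['SequenceFlow']
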
188 changes: 103 additions & 85 deletions pm4py/objects/conversion/bpmn/variants/to_petri_net.py
@@ -19,6 +19,8 @@

from pm4py.objects.petri_net.utils import reduction
from pm4py.objects.petri_net.obj import PetriNet, Marking
from pm4py.objects.bpmn.obj import BPMN
from pm4py.objects.petri_net.utils.petri_utils import remove_place
from pm4py.objects.petri_net.utils.petri_utils import add_arc_from_to
from pm4py.util import exec_utils, nx_utils

@@ -113,26 +115,28 @@ def apply(bpmn_graph, parameters=None):
source_count = {}
target_count = {}
for flow in bpmn_graph.get_flows():
source = flow.get_source()
target = flow.get_target()
place = PetriNet.Place(str(flow.get_id()))
net.places.add(place)
flow_place[flow] = place
if source not in source_count:
source_count[source] = 0
if target not in target_count:
target_count[target] = 0
source_count[source] = source_count[source] + 1
target_count[target] = target_count[target] + 1
if isinstance(flow, BPMN.SequenceFlow):
source = flow.get_source()
target = flow.get_target()
place = PetriNet.Place(str(flow.get_id()))
net.places.add(place)
flow_place[flow] = place
if source not in source_count:
source_count[source] = 0
if target not in target_count:
target_count[target] = 0
source_count[source] = source_count[source] + 1
target_count[target] = target_count[target] + 1

for flow in bpmn_graph.get_flows():
source = flow.get_source()
target = flow.get_target()
place = PetriNet.Place(str(flow.get_id()))
if isinstance(source, BPMN.InclusiveGateway) and source_count[source] > 1:
inclusive_gateway_exit.add(place.name)
elif isinstance(target, BPMN.InclusiveGateway) and target_count[target] > 1:
inclusive_gateway_entry.add(place.name)
if isinstance(flow, BPMN.SequenceFlow):
source = flow.get_source()
target = flow.get_target()
place = PetriNet.Place(str(flow.get_id()))
if isinstance(source, BPMN.InclusiveGateway) and source_count[source] > 1:
inclusive_gateway_exit.add(place.name)
elif isinstance(target, BPMN.InclusiveGateway) and target_count[target] > 1:
inclusive_gateway_entry.add(place.name)

# remove possible places that are both in inclusive_gateway_exit and inclusive_gateway_entry,
# because we do not need to add invisibles in this situation
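
Note on the two loops above: both passes over bpmn_graph.get_flows() are now guarded by isinstance(flow, BPMN.SequenceFlow), so other flow types (for example message flows of a collaboration) no longer receive a place or source/target counters. A compact stand-alone sketch of the counting step on a mixed flow list, with toy classes standing in for the pm4py ones:

class Flow:
    def __init__(self, source, target):
        self._source, self._target = source, target
    def get_source(self):
        return self._source
    def get_target(self):
        return self._target

class SequenceFlow(Flow): pass
class MessageFlow(Flow): pass   # stands in for any non-sequence flow

flows = [SequenceFlow("A", "split"), SequenceFlow("split", "B"),
         MessageFlow("A", "other pool")]

source_count, target_count = {}, {}
for flow in flows:
    if isinstance(flow, SequenceFlow):   # mirrors the new guard
        source_count[flow.get_source()] = source_count.get(flow.get_source(), 0) + 1
        target_count[flow.get_target()] = target_count.get(flow.get_target(), 0) + 1

print(source_count)   # {'A': 1, 'split': 1} -- the message flow is ignored
print(target_count)   # {'split': 1, 'B': 1}
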
Expand All @@ -145,77 +149,87 @@ def apply(bpmn_graph, parameters=None):
trans_map = {}

for node in bpmn_graph.get_nodes():
entry_place = PetriNet.Place("ent_" + str(node.get_id()))
net.places.add(entry_place)
exiting_place = PetriNet.Place("exi_" + str(node.get_id()))
net.places.add(exiting_place)
if use_id:
label = str(node.get_id())
else:
label = str(node.get_name()) if isinstance(node, BPMN.Task) else None
if not label:
label = None
transition = PetriNet.Transition(name=str(node.get_id()), label=label)
net.transitions.add(transition)
trans_map[node] = [transition]
add_arc_from_to(entry_place, transition, net)
add_arc_from_to(transition, exiting_place, net)

if isinstance(node, BPMN.ParallelGateway) or isinstance(node, BPMN.InclusiveGateway):
if source_count[node] > 1:
exiting_object = PetriNet.Transition(str(uuid.uuid4()), None)
net.transitions.add(exiting_object)
add_arc_from_to(exiting_place, exiting_object, net)
trans_map[node].append(exiting_object)
if isinstance(node, BPMN.Task) or isinstance(node, BPMN.StartEvent) or isinstance(node, BPMN.EndEvent) or \
isinstance(node, BPMN.ExclusiveGateway) or isinstance(node, BPMN.ParallelGateway) or \
isinstance(node, BPMN.InclusiveGateway):
if not node in source_count:
source_count[node] = 0
if not node in target_count:
target_count[node] = 0

entry_place = PetriNet.Place("ent_" + str(node.get_id()))
net.places.add(entry_place)
exiting_place = PetriNet.Place("exi_" + str(node.get_id()))
net.places.add(exiting_place)
if use_id:
label = str(node.get_id())
else:
exiting_object = exiting_place

if target_count[node] > 1:
entering_object = PetriNet.Transition(str(uuid.uuid4()), None)
net.transitions.add(entering_object)
add_arc_from_to(entering_object, entry_place, net)
trans_map[node].append(entering_object)
label = str(node.get_name()) if isinstance(node, BPMN.Task) else None
if not label:
label = None
transition = PetriNet.Transition(name=str(node.get_id()), label=label)
net.transitions.add(transition)
trans_map[node] = [transition]
add_arc_from_to(entry_place, transition, net)
add_arc_from_to(transition, exiting_place, net)

if isinstance(node, BPMN.ParallelGateway) or isinstance(node, BPMN.InclusiveGateway):
if source_count[node] > 1:
exiting_object = PetriNet.Transition(str(uuid.uuid4()), None)
net.transitions.add(exiting_object)
add_arc_from_to(exiting_place, exiting_object, net)
trans_map[node].append(exiting_object)
else:
exiting_object = exiting_place

if target_count[node] > 1:
entering_object = PetriNet.Transition(str(uuid.uuid4()), None)
net.transitions.add(entering_object)
add_arc_from_to(entering_object, entry_place, net)
trans_map[node].append(entering_object)
else:
entering_object = entry_place
nodes_entering[node] = entering_object
nodes_exiting[node] = exiting_object
else:
entering_object = entry_place
nodes_entering[node] = entering_object
nodes_exiting[node] = exiting_object
else:
nodes_entering[node] = entry_place
nodes_exiting[node] = exiting_place

if isinstance(node, BPMN.StartEvent):
start_transition = PetriNet.Transition(str(uuid.uuid4()), None)
net.transitions.add(start_transition)
add_arc_from_to(source_place, start_transition, net)
add_arc_from_to(start_transition, entry_place, net)
trans_map[node].append(start_transition)
elif isinstance(node, BPMN.EndEvent):
end_transition = PetriNet.Transition(str(uuid.uuid4()), None)
net.transitions.add(end_transition)
add_arc_from_to(exiting_place, end_transition, net)
add_arc_from_to(end_transition, sink_place, net)
trans_map[node].append(end_transition)
nodes_entering[node] = entry_place
nodes_exiting[node] = exiting_place

if isinstance(node, BPMN.StartEvent):
start_transition = PetriNet.Transition(str(uuid.uuid4()), None)
net.transitions.add(start_transition)
add_arc_from_to(source_place, start_transition, net)
add_arc_from_to(start_transition, entry_place, net)
trans_map[node].append(start_transition)
elif isinstance(node, BPMN.EndEvent):
end_transition = PetriNet.Transition(str(uuid.uuid4()), None)
net.transitions.add(end_transition)
add_arc_from_to(exiting_place, end_transition, net)
add_arc_from_to(end_transition, sink_place, net)
trans_map[node].append(end_transition)

for flow in bpmn_graph.get_flows():
source_object = nodes_exiting[flow.get_source()]
target_object = nodes_entering[flow.get_target()]

if isinstance(source_object, PetriNet.Place):
inv1 = PetriNet.Transition(f"sfl_{flow.get_id()}", None)
net.transitions.add(inv1)
add_arc_from_to(source_object, inv1, net)
source_object = inv1
trans_map[flow.source].append(inv1)

if isinstance(target_object, PetriNet.Place):
inv2 = PetriNet.Transition(f"tfl_{flow.get_id()}", None)
net.transitions.add(inv2)
add_arc_from_to(inv2, target_object, net)
target_object = inv2
trans_map[flow.target].append(inv2)

add_arc_from_to(source_object, flow_place[flow], net)
add_arc_from_to(flow_place[flow], target_object, net)
if isinstance(flow, BPMN.SequenceFlow):
if flow.get_source() in nodes_exiting and flow.get_target() in nodes_entering:
source_object = nodes_exiting[flow.get_source()]
target_object = nodes_entering[flow.get_target()]

if isinstance(source_object, PetriNet.Place):
inv1 = PetriNet.Transition(f"sfl_{flow.get_id()}", None)
net.transitions.add(inv1)
add_arc_from_to(source_object, inv1, net)
source_object = inv1
trans_map[flow.source].append(inv1)

if isinstance(target_object, PetriNet.Place):
inv2 = PetriNet.Transition(f"tfl_{flow.get_id()}", None)
net.transitions.add(inv2)
add_arc_from_to(inv2, target_object, net)
target_object = inv2
trans_map[flow.target].append(inv2)

add_arc_from_to(source_object, flow_place[flow], net)
add_arc_from_to(flow_place[flow], target_object, net)

if inclusive_gateway_exit and inclusive_gateway_entry:
# do the following steps if there are inclusive gateways:
Expand All @@ -241,6 +255,10 @@ def apply(bpmn_graph, parameters=None):
if enable_reduction:
reduction.apply_simple_reduction(net)

for place in list(net.places):
if len(place.in_arcs) == 0 and len(place.out_arcs) == 0 and not place in im and not place in fm:
remove_place(net, place)

if return_flow_trans_map:
return net, im, fm, flow_place, trans_map

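
Note on the final hunk: a new cleanup pass removes places that end up without any arcs (possible now that non-sequence flows and unsupported node types are skipped), unless they belong to the initial or final marking. A small runnable check of that loop on a toy net, a sketch that uses only helpers this module already imports:

from pm4py.objects.petri_net.obj import PetriNet, Marking
from pm4py.objects.petri_net.utils.petri_utils import add_arc_from_to, remove_place

net = PetriNet("sketch")
source, sink, orphan = PetriNet.Place("source"), PetriNet.Place("sink"), PetriNet.Place("orphan")
t = PetriNet.Transition("t", "A")
for p in (source, sink, orphan):
    net.places.add(p)
net.transitions.add(t)
add_arc_from_to(source, t, net)
add_arc_from_to(t, sink, net)
im = Marking(); im[source] = 1
fm = Marking(); fm[sink] = 1

# same cleanup as the end of apply(): drop isolated places outside im/fm
for place in list(net.places):
    if len(place.in_arcs) == 0 and len(place.out_arcs) == 0 and place not in im and place not in fm:
        remove_place(net, place)

print(sorted(p.name for p in net.places))   # ['sink', 'source'] -- 'orphan' is gone
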