diff --git a/stn/config/config.py b/stn/config/config.py index 8ccaa29..ee4d1ee 100644 --- a/stn/config/config.py +++ b/stn/config/config.py @@ -1,9 +1,11 @@ -from stn.stn import STN +import copy + +from stn.methods.dsc_lp import DSC_LP +from stn.methods.fpc import get_minimal_network +from stn.methods.srea import srea from stn.pstn.pstn import PSTN +from stn.stn import STN from stn.stnu.stnu import STNU -from stn.methods.srea import srea -from stn.methods.fpc import get_minimal_network -from stn.methods.dsc_lp import DSC_LP class STNFactory(object): @@ -77,13 +79,29 @@ def srea_algorithm(stn): :param stn: stn (object) """ + archived_stn = PSTN() + stn = copy.deepcopy(stn) + archived_stn = stn.remove_old_timepoints(archived_stn) + result = srea(stn, debug=True) + if result is None: return risk_metric, dispatchable_graph = result dispatchable_graph.risk_metric = risk_metric + for i in archived_stn.nodes(): + if i != 0 and 'data' in archived_stn.nodes[i]: + dispatchable_graph.add_node(i, data=archived_stn.nodes[i]['data']) + dispatchable_graph.add_edge(i, 0, weight=archived_stn[i][0]['weight'], is_executed=True) + dispatchable_graph.add_edge(0, i, weight=archived_stn[0][i]['weight'], is_executed=True) + + if archived_stn.has_edge(i, i + 1): + dispatchable_graph.add_constraint(i, i + 1, -archived_stn[i + 1][i]['weight'], + archived_stn[i][i + 1]['weight']) + dispatchable_graph.execute_edge(i, i + 1) + return dispatchable_graph diff --git a/stn/methods/fpc.py b/stn/methods/fpc.py index f91638b..6b382d4 100644 --- a/stn/methods/fpc.py +++ b/stn/methods/fpc.py @@ -1,14 +1,12 @@ -import logging -import networkx as nx import copy +import networkx as nx """ Achieves full path consistency (fpc) by applying the Floyd Warshall algorithm to the STN""" def get_minimal_network(stn): - logger = logging.getLogger('stn.fpc') minimal_network = copy.deepcopy(stn) shortest_path_array = nx.floyd_warshall(stn) @@ -16,5 +14,3 @@ def get_minimal_network(stn): # Get minimal stn by updating 
the edges of the stn to reflect the shortest path distances minimal_network.update_edges(shortest_path_array) return minimal_network - else: - logger.debug("The minimal network is inconsistent. STP could not be solved") diff --git a/stn/methods/srea.py b/stn/methods/srea.py index 3b9e331..7247996 100644 --- a/stn/methods/srea.py +++ b/stn/methods/srea.py @@ -55,7 +55,7 @@ def setUpLP(stn, decouple): bounds = {} deltas = {} - prob = pulp.LpProblem('PSTN Robust Execution LP', pulp.LpMaximize) + prob = pulp.LpProblem('PSTN_Robust_Execution_LP', pulp.LpMaximize) for (i, j) in stn.edges(): weight = stn.get_edge_weight(i, j) @@ -103,7 +103,7 @@ def setUpLP(stn, decouple): return (bounds, deltas, prob) -def srea(inputstn, +def srea(stn, debug=False, debugLP=False, returnAlpha=True, @@ -112,7 +112,7 @@ def srea(inputstn, ub=0.999): """ Runs the SREA algorithm on an input STN - @param inputstn The STN that we are running SREA on + @param stn The STN that we are running SREA on @param debug Print optional status messages about alpha levels @param debugLP Print optional status messages about each run of the LP @param lb The starting lower bound on alpha for the binary search @@ -122,8 +122,6 @@ def srea(inputstn, or None if there is no solution """ - stn = copy.deepcopy(inputstn) - # dictionary of alphas for binary search alphas = {i: i / 1000.0 for i in range(1001)} @@ -267,6 +265,8 @@ def srea_LP(inputstn, prob.writeLP('STN.lp') pulp.LpSolverDefault.msg = 10 + pulp.LpSolverDefault.msg = 0 + # Based on https://stackoverflow.com/questions/27406858/pulp-solver-error # Sometimes pulp throws an exception instead of returning a problem with unfeasible status try: diff --git a/stn/node.py b/stn/node.py index 590c4d7..5bd5029 100644 --- a/stn/node.py +++ b/stn/node.py @@ -9,7 +9,7 @@ def __init__(self, task_id, node_type, is_executed=False, **kwargs): if isinstance(task_id, str): task_id = from_str(task_id) self.task_id = task_id - # The node can be of node_type zero_timepoint, 
start, pickup or delivery + # The node can be of node_type zero_timepoint, departure, start or finish self.node_type = node_type self.is_executed = is_executed self.action_id = kwargs.get("action_id") diff --git a/stn/pstn/pstn.py b/stn/pstn/pstn.py index e773491..289e8fb 100644 --- a/stn/pstn/pstn.py +++ b/stn/pstn/pstn.py @@ -46,7 +46,7 @@ def __init__(self): def __str__(self): to_print = "" - for (i, j, data) in self.edges.data(): + for (i, j, data) in sorted(self.edges.data()): if self.has_edge(j, i) and i < j: # Constraints with the zero timepoint if i == 0: @@ -93,7 +93,7 @@ def add_constraint(self, i, j, wji=0.0, wij=MAX_FLOAT, distribution=""): """ # The constraint is contingent if it has a probability distribution - is_contingent = distribution is not "" + is_contingent = distribution != "" super().add_constraint(i, j, wji, wij) @@ -115,9 +115,9 @@ def get_contingent_constraints(self): def add_intertimepoints_constraints(self, constraints, task): """ Adds constraints between the timepoints of a task Constraints between: - - start and pickup (contingent) - - pickup and delivery (contingent) - - delivery and next task (if any) (requirement) + - departure and start (contingent) + - start and finish (contingent) + - finish and next task (if any) (requirement) Args: constraints (list) : list of tuples that defines the pair of nodes between which a new constraint should be added Example: @@ -128,7 +128,7 @@ def add_intertimepoints_constraints(self, constraints, task): """ for (i, j) in constraints: self.logger.debug("Adding constraint: %s ", (i, j)) - if self.nodes[i]['data'].node_type == "start": + if self.nodes[i]['data'].node_type == "departure": distribution = self.get_travel_time_distribution(task) if distribution.endswith("_0.0"): # the distribution has no variation (stdev is 0) # Make the constraint a requirement constraint @@ -137,14 +137,71 @@ def add_intertimepoints_constraints(self, constraints, task): else: self.add_constraint(i, j, 
distribution=distribution) - elif self.nodes[i]['data'].node_type == "pickup": + elif self.nodes[i]['data'].node_type == "start": distribution = self.get_work_time_distribution(task) - self.add_constraint(i, j, distribution=distribution) + if distribution.endswith("_0.0"): # the distribution has no variation (stdev is 0) + # Make the constraint a requirement constraint + mean = float(distribution.split("_")[1]) + self.add_constraint(i, j, mean, mean) + else: + self.add_constraint(i, j, distribution=distribution) - elif self.nodes[i]['data'].node_type == "delivery": - # wait time between finish of one task and start of the next one. Fixed to [0, inf] + elif self.nodes[i]['data'].node_type == "finish": + # wait time between finish of one task and departure of the next one. Fixed to [0, inf] self.add_constraint(i, j) + def update_travel_time(self, task): + position = self.get_task_position(task.task_id) + departure_node_id = 2 * position + (position-2) + start_node_id = departure_node_id + 1 + distribution = self.get_travel_time_distribution(task) + + if self.has_edge(departure_node_id, start_node_id): + if distribution.endswith("_0.0"): # the distribution has no variation (stdev is 0) + # Make the constraint a requirement constraint + mean = float(distribution.split("_")[1]) + self.add_constraint(departure_node_id, start_node_id, mean, mean) + else: + self.add_constraint(departure_node_id, start_node_id, distribution=distribution) + + def update_work_time(self, task): + position = self.get_task_position(task.task_id) + departure_node_id = 2 * position + (position-2) + start_node_id = departure_node_id + 1 + finish_node_id = start_node_id + 1 + distribution = self.get_work_time_distribution(task) + + if self.has_edge(start_node_id, finish_node_id): + if distribution.endswith("_0.0"): # the distribution has no variation (stdev is 0) + # Make the constraint a requirement constraint + mean = float(distribution.split("_")[1]) + self.add_constraint(start_node_id, 
finish_node_id, mean, mean) + else: + self.add_constraint(start_node_id, finish_node_id, distribution=distribution) + + def remove_node_ids(self, node_ids, archived_stn=None): + # Assumes that the node_ids are in consecutive order from node_id 1 onwards + for i in node_ids: + if archived_stn: + # Start adding nodes after the last node_id in archived_stn + start_node_id = list(archived_stn.nodes())[-1] + if start_node_id == 0: # skip the zero_timepoint + start_node_id = 1 + archived_stn.add_node(start_node_id, data=self.nodes[i]['data']) + archived_stn.add_edge(start_node_id, 0, weight=self[i][0]['weight'], is_executed=True) + archived_stn.add_edge(0, start_node_id, weight=self[0][i]['weight'], is_executed=True) + + if self.has_edge(i, i+1): + if 'is_contingent' in self[i][i+1]: + archived_stn.add_constraint(start_node_id, start_node_id+1, -self[i+1][i]['weight'], self[i][i+1]['weight'], self[i][i+1]['distribution']) + else: + archived_stn.add_constraint(start_node_id, start_node_id+1, -self[i+1][i]['weight'], self[i][i+1]['weight']) + else: + # Add dummy node + archived_stn.add_node(start_node_id+1) + self.remove_node(i) + return archived_stn + @staticmethod def get_travel_time_distribution(task): travel_time = task.get_edge("travel_time") diff --git a/stn/stn.py b/stn/stn.py index 8e769e1..50f0fe8 100644 --- a/stn/stn.py +++ b/stn/stn.py @@ -37,7 +37,7 @@ def __init__(self): def __str__(self): to_print = "" - for (i, j, data) in self.edges.data(): + for (i, j, data) in sorted(self.edges.data()): if self.has_edge(j, i) and i < j: # Constraints with the zero timepoint if i == 0: @@ -83,12 +83,12 @@ def add_zero_timepoint(self): self.add_node(0, data=node) def get_earliest_time(self): - edges = [e for e in self.edges] + edges = [e for e in sorted(self.edges)] first_edge = edges[0] return -self[first_edge[1]][0]['weight'] def get_latest_time(self): - edges = [e for e in self.edges] + edges = [e for e in sorted(self.edges)] last_edge = edges[-1] return 
self[0][last_edge[0]]['weight'] @@ -145,9 +145,9 @@ def add_timepoint(self, id, task, node_type, **kwargs): """ A timepoint is represented by a node in the STN The node can be of node_type: - zero_timepoint: references the schedule to the origin - - start : time at which the robot starts navigating towards the pickup location - - pickup : time at which the robot arrives starts the pickup action - - delivery : time at which the robot finishes the delivery action + - departure : time at which the robot starts navigating towards the start location + - start : time at which the robot arrives at the start location + - finish : time at which the robot finishes the last action """ node = Node(task.task_id, node_type, **kwargs) self.add_node(id, data=node) @@ -155,14 +155,14 @@ def add_timepoint(self, id, task, node_type, **kwargs): def add_task(self, task, position=1): """ A task is added as 3 timepoints and 5 constraints in the STN" Timepoints: - - start - - pickup time - - delivery time + - departure + - start time + - finish time Constraints: + - earliest and latest departure times + - travel time: time to go from current position to start the position - earliest and latest start times - - travel time: time to go from current position to pickup position) - - earliest and latest pickup times - - work time: time to perform the task (time to transport an object from the pickup to the delivery location) + - work time: time to perform the task (e.g. 
time to transport an object from the start to the finish location) - earliest and latest finish times If the task is not the first in the STN, add wait time constraint @@ -171,44 +171,44 @@ def add_task(self, task, position=1): """ self.logger.info("Adding task %s in position %s", task.task_id, position) - start_node_id = 2 * position + (position-2) - pickup_node_id = start_node_id + 1 - delivery_node_id = pickup_node_id + 1 + departure_node_id = 2 * position + (position-2) + start_node_id = departure_node_id + 1 + finish_node_id = start_node_id + 1 - # Remove constraint linking start_node_id and previous node (if any) - if self.has_edge(start_node_id-1, start_node_id) and start_node_id-1 != 0: - self.logger.debug("Deleting constraint: %s => %s", start_node_id-1, start_node_id) + # Remove constraint linking departure_node_id and previous node (if any) + if self.has_edge(departure_node_id-1, departure_node_id) and departure_node_id-1 != 0: + self.logger.debug("Deleting constraint: %s => %s", departure_node_id-1, departure_node_id) - self.remove_constraint(start_node_id-1, start_node_id) + self.remove_constraint(departure_node_id-1, departure_node_id) # Displace by 3 all nodes and constraints after position mapping = {} for node_id, data in self.nodes(data=True): - if node_id >= start_node_id: + if node_id >= departure_node_id: mapping[node_id] = node_id + 3 self.logger.debug("mapping: %s ", mapping) nx.relabel_nodes(self, mapping, copy=False) # Add new timepoints - self.add_timepoint(start_node_id, task, "start") - self.add_timepoint_constraint(start_node_id, task.get_timepoint("start")) + self.add_timepoint(departure_node_id, task, "departure") + self.add_timepoint_constraint(departure_node_id, task.get_timepoint("departure")) - self.add_timepoint(pickup_node_id, task, "pickup", action_id=task.pickup_action_id) - self.add_timepoint_constraint(pickup_node_id, task.get_timepoint("pickup")) + self.add_timepoint(start_node_id, task, "start", 
action_id=task.start_action_id) + self.add_timepoint_constraint(start_node_id, task.get_timepoint("start")) - self.add_timepoint(delivery_node_id, task, "delivery", action_id=task.delivery_action_id) - self.add_timepoint_constraint(delivery_node_id, task.get_timepoint("delivery")) + self.add_timepoint(finish_node_id, task, "finish", action_id=task.finish_action_id) + self.add_timepoint_constraint(finish_node_id, task.get_timepoint("finish")) # Add constraints between new nodes - new_constraints_between = [start_node_id, pickup_node_id, delivery_node_id] + new_constraints_between = [departure_node_id, start_node_id, finish_node_id] - # Check if there is a node after the new delivery node - if self.has_node(delivery_node_id+1): - new_constraints_between.append(delivery_node_id+1) + # Check if there is a node after the new finish node + if self.has_node(finish_node_id+1): + new_constraints_between.append(finish_node_id+1) - # Check if there is a node before the new start node - if self.has_node(start_node_id-1): - new_constraints_between.insert(0, start_node_id-1) + # Check if there is a node before the new departure node + if self.has_node(departure_node_id-1): + new_constraints_between.insert(0, departure_node_id-1) self.logger.debug("New constraints between nodes: %s", new_constraints_between) @@ -220,9 +220,9 @@ def add_task(self, task, position=1): def add_intertimepoints_constraints(self, constraints, task): """ Adds constraints between the timepoints of a task Constraints between: - - start and pickup - - pickup and delivery - - delivery and start next task (if any) + - departure and start + - start and finish + - finish and departure next task (if any) Args: constraints (list) : list of tuples that defines the pair of nodes between which a new constraint should be added Example: @@ -233,44 +233,63 @@ def add_intertimepoints_constraints(self, constraints, task): """ for (i, j) in constraints: self.logger.debug("Adding constraint: %s ", (i, j)) - if 
self.nodes[i]['data'].node_type == "start": + if self.nodes[i]['data'].node_type == "departure": travel_time = self.get_travel_time(task) self.add_constraint(i, j, travel_time, travel_time) - elif self.nodes[i]['data'].node_type == "pickup": + elif self.nodes[i]['data'].node_type == "start": work_time = self.get_work_time(task) self.add_constraint(i, j, work_time, work_time) - elif self.nodes[i]['data'].node_type == "delivery": - # wait time between finish of one task and start of the next one. Fixed to [0, inf] + elif self.nodes[i]['data'].node_type == "finish": + # wait time between finish of one task and departure of the next one. Fixed to [0, inf] self.add_constraint(i, j) + def update_travel_time(self, task): + position = self.get_task_position(task.task_id) + departure_node_id = 2 * position + (position-2) + start_node_id = departure_node_id + 1 + travel_time = self.get_travel_time(task) + + if self.has_edge(departure_node_id, start_node_id): + self.add_constraint(departure_node_id, start_node_id, travel_time, travel_time) + + def update_work_time(self, task): + position = self.get_task_position(task.task_id) + departure_node_id = 2 * position + (position-2) + start_node_id = departure_node_id + 1 + finish_node_id = start_node_id + 1 + work_time = self.get_work_time(task) + + if self.has_edge(start_node_id, finish_node_id): + self.add_constraint(start_node_id, finish_node_id, work_time, work_time) + @staticmethod def get_travel_time(task): - """ Returns the mean of the travel time (time for going from current pose to pickup pose) + """ Returns the mean of the travel time (time for going from current pose to start pose) """ travel_time = task.get_edge("travel_time") return travel_time.mean @staticmethod def get_work_time(task): - """ Returns the mean of the work time (time to transport an object from the pickup to the delivery location) + """ Returns the mean of the work time (e.g. 
time to transport an object from the start to the finish location) """ work_time = task.get_edge("work_time") return work_time.mean @staticmethod - def create_timepoint_constraints(r_earliest_pickup, r_latest_pickup, travel_time, work_time): + def create_timepoint_constraints(r_earliest_start, r_latest_start, travel_time, work_time): + departure_constraint = Timepoint(name="departure", + r_earliest_time=r_earliest_start - travel_time.mean, + r_latest_time=r_latest_start - travel_time.mean) start_constraint = Timepoint(name="start", - r_earliest_time=r_earliest_pickup - travel_time.mean, - r_latest_time=r_latest_pickup - travel_time.mean) - pickup_constraint = Timepoint(name="pickup", - r_earliest_time=r_earliest_pickup, - r_latest_time=r_latest_pickup) - delivery_constraint = Timepoint(name="delivery", - r_earliest_time=r_earliest_pickup + work_time.mean, - r_latest_time=r_latest_pickup + work_time.mean) - return [start_constraint, pickup_constraint, delivery_constraint] + r_earliest_time=r_earliest_start, + r_latest_time=r_latest_start) + finish_constraint = Timepoint(name="finish", + r_earliest_time=r_earliest_start + work_time.mean, + r_latest_time=r_latest_start + work_time.mean) + return [departure_constraint, start_constraint, finish_constraint] def show_n_nodes_edges(self): """ Prints the number of nodes and edges in the stn @@ -279,74 +298,90 @@ def show_n_nodes_edges(self): self.logger.info("Edges: %s ", self.number_of_edges()) def update_task(self, task): - position = self.get_task_position(task.task_id) - start_node_id = 2 * position + (position-2) - pickup_node_id = start_node_id + 1 - delivery_node_id = pickup_node_id + 1 + departure_node_id, _ = self.get_node_by_type(task.task_id, "departure") + start_node_id, _ = self.get_node_by_type(task.task_id, "start") + finish_node_id, _ = self.get_node_by_type(task.task_id, "finish") # Adding an existing timepoint constraint updates the constraint - self.add_timepoint_constraint(start_node_id, 
task.get_timepoint("start")) - self.add_timepoint_constraint(pickup_node_id, task.get_timepoint("pickup")) - self.add_timepoint_constraint(delivery_node_id, task.get_timepoint("delivery")) + if departure_node_id: + self.add_timepoint_constraint(departure_node_id, task.get_timepoint("departure")) + if start_node_id: + self.add_timepoint_constraint(start_node_id, task.get_timepoint("start")) + if finish_node_id: + self.add_timepoint_constraint(finish_node_id, task.get_timepoint("finish")) # Add constraints between new nodes - new_constraints_between = [start_node_id, pickup_node_id, delivery_node_id] + if departure_node_id: + new_constraints_between = [departure_node_id, start_node_id, finish_node_id] + else: + new_constraints_between = [start_node_id, finish_node_id] - # Check if there is a node after the new delivery node - if self.has_node(delivery_node_id+1): - new_constraints_between.append(delivery_node_id+1) + # Check if there is a node after the new finish node + if self.has_node(finish_node_id+1): + new_constraints_between.append(finish_node_id+1) - # Check if there is a node before the new start node - if self.has_node(start_node_id-1): - new_constraints_between.insert(0, start_node_id-1) + # Check if there is a node before the new departure node + if departure_node_id and self.has_node(departure_node_id-1): + new_constraints_between.insert(0, departure_node_id-1) constraints = [((i), (i + 1)) for i in new_constraints_between[:-1]] self.add_intertimepoints_constraints(constraints, task) - def remove_task(self, position=1): + def remove_task(self, position=1, archived_stn=None): """ Removes the task from the given position""" self.logger.info("Removing task at position: %s", position) - start_node_id = 2 * position + (position-2) - pickup_node_id = start_node_id + 1 - delivery_node_id = pickup_node_id + 1 + departure_node_id = 2 * position + (position-2) + start_node_id = departure_node_id + 1 + finish_node_id = start_node_id + 1 + + node_ids = 
[departure_node_id, start_node_id, finish_node_id] new_constraints_between = list() - if self.has_node(start_node_id-1) and self.has_node(delivery_node_id+1): - new_constraints_between = [start_node_id-1, start_node_id] + if self.has_node(departure_node_id-1) and self.has_node(finish_node_id+1): + new_constraints_between = [departure_node_id-1, departure_node_id] - # Remove node and all adjacent edges - self.remove_node(start_node_id) - self.remove_node(pickup_node_id) - self.remove_node(delivery_node_id) + archived_stn = self.remove_node_ids(node_ids, archived_stn) - # Displace by -3 all nodes and constraints after position - mapping = {} - for node_id, data in self.nodes(data=True): - if node_id >= start_node_id: - mapping[node_id] = node_id - 3 - self.logger.debug("mapping: %s", mapping) - nx.relabel_nodes(self, mapping, copy=False) + self.displace_nodes(departure_node_id) if new_constraints_between: constraints = [((i), (i + 1)) for i in new_constraints_between[:-1]] self.logger.debug("Constraints: %s", constraints) for (i, j) in constraints: - if self.nodes[i]['data'].node_type == "delivery": - # wait time between finish of one task and start of the next one + if self.nodes[i]['data'].node_type == "finish": + # wait time between finish of one task and departure of the next one self.add_constraint(i, j) - def remove_node_ids(self, node_ids): + return archived_stn + + def remove_node_ids(self, node_ids, archived_stn=None): # Assumes that the node_ids are in consecutive order from node_id 1 onwards - for node_id in node_ids: - self.remove_node(node_id) + for i in node_ids: + if archived_stn: + # Start adding nodes after the last node_id in archived_stn + start_node_id = list(archived_stn.nodes())[-1] + if start_node_id == 0: # skip the zero_timepoint + start_node_id = 1 + archived_stn.add_node(start_node_id, data=self.nodes[i]['data']) + archived_stn.add_edge(start_node_id, 0, weight=self[i][0]['weight'], is_executed=True) + archived_stn.add_edge(0, 
start_node_id, weight=self[0][i]['weight'], is_executed=True) + + if self.has_edge(i, i+1): + archived_stn.add_constraint(start_node_id, start_node_id+1, -self[i+1][i]['weight'], self[i][i+1]['weight']) + else: + # Add dummy node + archived_stn.add_node(start_node_id+1) + self.remove_node(i) + return archived_stn - # Displace all remaining nodes by 3 + def displace_nodes(self, displace_after_node_id): + # Displace all remaining nodes by 3 after the given displace_after_node_id mapping = {} for node_id, data in self.nodes(data=True): - if node_id > 0: + if node_id > displace_after_node_id: mapping[node_id] = node_id - 3 nx.relabel_nodes(self, mapping, copy=False) @@ -358,10 +393,25 @@ def get_tasks(self): """ tasks = list() for i in self.nodes(): - if self.nodes[i]['data'].task_id not in tasks and self.nodes[i]['data'].node_type != 'zero_timepoint': + if 'data' in self.nodes[i] and \ + self.nodes[i]['data'].task_id not in tasks and\ + self.nodes[i]['data'].node_type != 'zero_timepoint': tasks.append(self.nodes[i]['data'].task_id) return tasks + def get_insertion_points(self, r_earliest_time): + """ Returns positions in the stn that have tasks whose latest start time are + are less or equal than the given earliest time + """ + insertion_points = list() + for i, data in self.nodes.data(): + if i == 0: # ignore ztp + continue + if data['data'].node_type == "start" and r_earliest_time <= self[0][i]['weight']: + task_position = math.ceil(i/3) + insertion_points.append(task_position) + return insertion_points + def is_consistent(self, shortest_path_array): """The STN is not consistent if it has negative cycles""" consistent = True @@ -443,7 +493,7 @@ def get_completion_time(self): completion_time = 0 task_ids = self.get_tasks() for i, task_id in enumerate(task_ids): - completion_time += self.get_time(task_id, "delivery", lower_bound=False) + completion_time += self.get_time(task_id, "finish") return completion_time @@ -460,16 +510,16 @@ def get_idle_time(self): for i, 
task_id in enumerate(task_ids): if i > 0: - r_earliest_delivery_time_previous_task = self.get_time(task_ids[i-1], "delivery") - r_earliest_start_time = self.get_time(task_ids[i], "start") - idle_time += round(r_earliest_start_time - r_earliest_delivery_time_previous_task) + r_earliest_finish_time_previous_task = self.get_time(task_ids[i-1], "finish") + r_earliest_departure_time = self.get_time(task_ids[i], "departure") + idle_time += round(r_earliest_departure_time - r_earliest_finish_time_previous_task) return idle_time def add_timepoint_constraint(self, node_id, timepoint_constraint): """ Adds the earliest and latest times to execute a timepoint (node) + Departure timepoint [earliest_departure_time, latest_departure_time] Start timepoint [earliest_start_time, latest_start_time] - Pickup timepoint [earliest_pickup_time, latest_pickup_time] - Delivery timepoint [earliest_delivery_time, lastest_delivery_time] + Finish timepoint [earliest_finish_time, latest_finish_time] """ self.add_constraint(0, node_id, timepoint_constraint.r_earliest_time, timepoint_constraint.r_latest_time) @@ -485,11 +535,11 @@ def get_next_timepoint(timepoint_name, prev_timepoint, edge_in_between): r_latest_time = prev_timepoint.r_latest_time + edge_in_between.mean return Timepoint(timepoint_name, r_earliest_time, r_latest_time) - def get_time(self, task_id, node_type='start', lower_bound=True): + def get_time(self, task_id, node_type='departure', lower_bound=True): _time = None for i, data in self.nodes.data(): - if task_id == data['data'].task_id and data['data'].node_type == node_type: + if 'data' in data and task_id == data['data'].task_id and data['data'].node_type == node_type: if lower_bound: _time = -self[i][0]['weight'] else: # upper bound @@ -497,6 +547,16 @@ def get_time(self, task_id, node_type='start', lower_bound=True): return _time + def get_times(self, task_id, node_type='departure'): + for i, data in self.nodes.data(): + if 'data' in data and task_id == data['data'].task_id 
and data['data'].node_type == node_type: + lower_bound = -self[i][0]['weight'] + upper_bound = self[0][i]['weight'] + _time = (lower_bound, upper_bound) + is_executed = data['data'].is_executed + return _time, is_executed + return None, None + def get_node_earliest_time(self, node_id): return -self[node_id][0]['weight'] @@ -513,7 +573,7 @@ def get_nodes_by_action(self, action_id): def get_nodes_by_task(self, task_id): nodes = list() - for node_id, data in self.nodes.data(): + for node_id, data in sorted(self.nodes.data()): if data['data'].task_id == task_id: node = (node_id, self.nodes[node_id]['data']) nodes.append(node) @@ -523,6 +583,7 @@ def get_node_by_type(self, task_id, node_type): for node_id, data in self.nodes.data(): if data['data'].task_id == task_id and data['data'].node_type == node_type: return node_id, self.nodes[node_id]['data'] + return None, None def set_action_id(self, node_id, action_id): self.nodes[node_id]['data'].action_id = action_id @@ -539,16 +600,16 @@ def get_task_id(self, position): Returns: (string) task id """ - start_node_id = 2 * position + (position-2) - pickup_node_id = start_node_id + 1 - delivery_node_id = pickup_node_id + 1 + departure_node_id = 2 * position + (position-2) + start_node_id = departure_node_id + 1 + finish_node_id = start_node_id + 1 - if self.has_node(start_node_id): + if self.has_node(departure_node_id): + task_id = self.nodes[departure_node_id]['data'].task_id + elif self.has_node(start_node_id): task_id = self.nodes[start_node_id]['data'].task_id - elif self.has_node(pickup_node_id): - task_id = self.nodes[pickup_node_id]['data'].task_id - elif self.has_node(delivery_node_id): - task_id = self.nodes[delivery_node_id]['data'].task_id + elif self.has_node(finish_node_id): + task_id = self.nodes[finish_node_id]['data'].task_id else: self.logger.error("There is no task in position %s", position) return @@ -557,22 +618,26 @@ def get_task_id(self, position): def get_task_position(self, task_id): for i, data in 
self.nodes.data(): + if task_id == data['data'].task_id and data['data'].node_type == 'departure': + return math.ceil(i/3) if task_id == data['data'].task_id and data['data'].node_type == 'start': return math.ceil(i/3) + if task_id == data['data'].task_id and data['data'].node_type == 'finish': + return math.ceil(i/3) - def get_earliest_task_id(self): + def get_earliest_task_id(self, node_type=None): """ Returns the id of the earliest task in the stn Returns: task_id (string) """ - # The first task in the graph is the task with the earliest start time - # The first task is in node 1, node 0 is reserved for the zero timepoint - - if self.has_node(1): - task_id = self.nodes[1]['data'].task_id - return task_id - - self.logger.debug("STN has no tasks yet") + # The first task in the graph is the task with the earliest time + # node 0 is reserved for the zero timepoint + for i, data in sorted(self.nodes.data()): + if i == 0: # ignore ztp + continue + if node_type is None or \ + (node_type is not None and data['data'].node_type == node_type): + return data['data'].task_id def get_task_nodes(self, task_id): """ Gets the nodes in the stn associated with the given task_id @@ -600,7 +665,7 @@ def get_task_node_ids(self, task_id): """ node_ids = list() - for i in self.nodes(): + for i in sorted(self.nodes()): if task_id == self.nodes[i]['data'].task_id: node_ids.append(i) @@ -650,15 +715,15 @@ def execute_edge(self, node_1, node_2): def execute_incoming_edge(self, task_id, node_type): finish_node_idx = self.get_edge_node_idx(task_id, node_type) - if node_type == "start": + if node_type == "departure": return - elif node_type == "pickup": - start_node_idx = self.get_edge_node_idx(task_id, "start") - elif node_type == "delivery": - start_node_idx = self.get_edge_node_idx(task_id, "pickup") - self.execute_edge(start_node_idx, finish_node_idx) + elif node_type == "start": + departure_node_idx = self.get_edge_node_idx(task_id, "departure") + elif node_type == "finish": + 
departure_node_idx = self.get_edge_node_idx(task_id, "start") + self.execute_edge(departure_node_idx, finish_node_idx) - def remove_old_timepoints(self): + def remove_old_timepoints(self, archived_stn=None): nodes_to_remove = list() for i in self.nodes(): node_data = self.nodes[i]['data'] @@ -667,8 +732,7 @@ def remove_old_timepoints(self): if node_data.is_executed and (self.has_edge(i, i+1) and self[i][i+1]['is_executed']): nodes_to_remove.append(i) - for node in nodes_to_remove: - self.remove_node(node) + return self.remove_node_ids(nodes_to_remove, archived_stn) def get_edge_node_idx(self, task_id, node_type): for i in self.nodes(): @@ -694,8 +758,9 @@ def to_json(self): def to_dict(self): stn = copy.deepcopy(self) - for i, data in self.nodes.data(): - stn.nodes[i]['data'] = self.nodes[i]['data'].to_dict() + for i, data in sorted(self.nodes.data()): + if 'data' in stn.nodes[i]: + stn.nodes[i]['data'] = self.nodes[i]['data'].to_dict() stn_dict = json_graph.node_link_data(stn) return stn_dict @@ -704,7 +769,11 @@ def from_json(cls, stn_json): stn = cls() dict_json = json.loads(stn_json) graph = json_graph.node_link_graph(dict_json) - stn.add_nodes_from([(i, {'data': Node.from_dict(graph.nodes[i]['data'])}) for i in graph.nodes()]) + for i in graph.nodes(): + if 'data' in graph.nodes[i]: + stn.add_node(i, data=Node.from_dict(graph.nodes[i]['data'])) + else: + stn.add_node(i) stn.add_edges_from(graph.edges(data=True)) return stn diff --git a/stn/stnu/stnu.py b/stn/stnu/stnu.py index 40a35b9..5136b18 100644 --- a/stn/stnu/stnu.py +++ b/stn/stnu/stnu.py @@ -20,7 +20,7 @@ def __init__(self): def __str__(self): to_print = "" - for (i, j, data) in self.edges.data(): + for (i, j, data) in sorted(self.edges.data()): if self.has_edge(j, i) and i < j: # Constraints with the zero timepoint if i == 0: @@ -108,7 +108,7 @@ def shrink_contingent_constraint(self, i, j, low, high): def add_intertimepoints_constraints(self, constraints, task): """ Adds constraints between the 
timepoints of a task Constraints between: - - navigation start and start (contingent) + - departure and start (contingent) - start and finish (contingent) - finish and next task (if any) (requirement) Args: @@ -121,21 +121,69 @@ def add_intertimepoints_constraints(self, constraints, task): """ for (i, j) in constraints: self.logger.debug("Adding constraint: %s ", (i, j)) - if self.nodes[i]['data'].node_type == "start": + if self.nodes[i]['data'].node_type == "departure": lower_bound, upper_bound = self.get_travel_time_bounded_duration(task) if lower_bound == upper_bound: self.add_constraint(i, j, 0, 0) else: self.add_constraint(i, j, lower_bound, upper_bound, is_contingent=True) - elif self.nodes[i]['data'].node_type == "pickup": + elif self.nodes[i]['data'].node_type == "start": lower_bound, upper_bound = self.get_work_time_bounded_duration(task) self.add_constraint(i, j, lower_bound, upper_bound, is_contingent=True) - elif self.nodes[i]['data'].node_type == "delivery": - # wait time between finish of one task and start of the next one. Fixed to [0, inf] + elif self.nodes[i]['data'].node_type == "finish": + # wait time between finish of one task and departure of the next one. 
Fixed to [0, inf] self.add_constraint(i, j, 0) + def update_travel_time(self, task): + position = self.get_task_position(task.task_id) + departure_node_id = 2 * position + (position-2) + start_node_id = departure_node_id + 1 + lower_bound, upper_bound = self.get_travel_time_bounded_duration(task) + + if self.has_edge(departure_node_id, start_node_id): + if lower_bound == upper_bound: + self.add_constraint(departure_node_id, start_node_id, 0, 0) + else: + self.add_constraint(departure_node_id, start_node_id, lower_bound, upper_bound, is_contingent=True) + + def update_work_time(self, task): + position = self.get_task_position(task.task_id) + departure_node_id = 2 * position + (position-2) + start_node_id = departure_node_id + 1 + finish_node_id = start_node_id + 1 + lower_bound, upper_bound = self.get_work_time_bounded_duration(task) + + if self.has_edge(start_node_id, finish_node_id): + if lower_bound == upper_bound: + self.add_constraint(start_node_id, finish_node_id, 0, 0) + else: + self.add_constraint(start_node_id, finish_node_id, lower_bound, upper_bound, is_contingent=True) + + def remove_node_ids(self, node_ids, archived_stn=None): + # Assumes that the node_ids are in consecutive order from node_id 1 onwards + for i in node_ids: + if archived_stn: + # Start adding nodes after the last node_id in archived_stn + start_node_id = list(archived_stn.nodes())[-1] + if start_node_id == 0: # skip the zero_timepoint + start_node_id = 1 + archived_stn.add_node(start_node_id, data=self.nodes[i]['data']) + archived_stn.add_edge(start_node_id, 0, weight=self[i][0]['weight'], is_executed=True) + archived_stn.add_edge(0, start_node_id, weight=self[0][i]['weight'], is_executed=True) + + if self.has_edge(i, i+1): + if self[i][i + 1]['is_contingent'] is True: + archived_stn.add_constraint(start_node_id, start_node_id+1, -self[i+1][i]['weight'], self[i][i+1]['weight'], self[i][i+1]['distribution']) + else: + archived_stn.add_constraint(start_node_id, start_node_id+1, 
-self[i+1][i]['weight'], self[i][i+1]['weight']) + else: + # Add dummy node + archived_stn.add_node(start_node_id+1) + self.remove_node(i) + return archived_stn + @staticmethod def get_travel_time_bounded_duration(task): """ Returns the estimated travel time as a bounded interval diff --git a/stn/task.py b/stn/task.py index da37fa1..eaad0f3 100644 --- a/stn/task.py +++ b/stn/task.py @@ -44,7 +44,7 @@ def __str__(self): class Task(AsDictMixin): - def __init__(self, task_id, timepoints, edges, pickup_action_id, delivery_action_id): + def __init__(self, task_id, timepoints, edges, start_action_id, finish_action_id): """ Constructor for the Task object @@ -52,14 +52,14 @@ def __init__(self, task_id, timepoints, edges, pickup_action_id, delivery_action task_id (UUID): An instance of an UUID object timepoints (list): list of timepoints (Timepoints) Edges (list): list of edges (Edges) - pickup_action_id (UUID): Action id of the pickup action - delivery_action_id (UUID): Action id of te delivery action + start_action_id (UUID): Action id linked to the start timepoint + finish_action_id (UUID): Action id linked to the finish timepoint """ self.task_id = task_id self.timepoints = timepoints self.edges = edges - self.pickup_action_id = pickup_action_id - self.delivery_action_id = delivery_action_id + self.start_action_id = start_action_id + self.finish_action_id = finish_action_id def __str__(self): to_print = "" @@ -70,8 +70,8 @@ def __str__(self): to_print += "\n Edges: \n" for edge in self.edges: to_print += str(edge) + "\t" - to_print += "\n Pickup action:" + str(self.pickup_action_id) - to_print += "\n Delivery action:" + str(self.delivery_action_id) + to_print += "\n Start action:" + str(self.start_action_id) + to_print += "\n Finish action:" + str(self.finish_action_id) return to_print def get_timepoint(self, timepoint_name): diff --git a/stn/utils/as_dict.py b/stn/utils/as_dict.py index 7a24687..f0fc99d 100644 --- a/stn/utils/as_dict.py +++ b/stn/utils/as_dict.py @@ 
-44,7 +44,7 @@ def to_attrs(cls, dict_repr): @classmethod def _get_value(cls, key, value): - if key in ['task_id', 'pickup_action_id', 'delivery_action_id']: + if key in ['task_id', 'start_action_id', 'finish_action_id']: return from_str(value) else: return value diff --git a/stn/utils/utils.py b/stn/utils/utils.py index 94fab8b..b7574de 100644 --- a/stn/utils/utils.py +++ b/stn/utils/utils.py @@ -24,13 +24,13 @@ def load_yaml(file): def create_task(stn, task_dict): task_id = task_dict.get("task_id") - r_earliest_pickup = task_dict.get("earliest_pickup") - r_latest_pickup = task_dict.get("latest_pickup") + r_earliest_start = task_dict.get("earliest_start") + r_latest_start = task_dict.get("latest_start") travel_time = Edge(**task_dict.get("travel_time")) work_time = Edge(**task_dict.get("work_time")) - timepoint_constraints = stn.create_timepoint_constraints(r_earliest_pickup, r_latest_pickup, travel_time, work_time) + timepoint_constraints = stn.create_timepoint_constraints(r_earliest_start, r_latest_start, travel_time, work_time) inter_timepoint_constraints = [travel_time, work_time] - pickup_action_id = generate_uuid() - delivery_action_id = generate_uuid() + start_action_id = generate_uuid() + finish_action_id = generate_uuid() - return Task(task_id, timepoint_constraints, inter_timepoint_constraints, pickup_action_id, delivery_action_id) + return Task(task_id, timepoint_constraints, inter_timepoint_constraints, start_action_id, finish_action_id) diff --git a/test/data/pstn_two_tasks.json b/test/data/pstn_two_tasks.json index 06a5f14..0901581 100644 --- a/test/data/pstn_two_tasks.json +++ b/test/data/pstn_two_tasks.json @@ -195,7 +195,7 @@ "data":{ "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", "pose":"", - "node_type":"start" + "node_type":"departure" } }, { @@ -203,7 +203,7 @@ "data":{ "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", "pose":"AMK_TDU-TGR-1_X_9.7_Y_5.6", - "node_type":"pickup" + "node_type":"start" } }, { @@ -211,7 +211,7 @@ "data":{ 
"task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", "pose":"AMK_TDU-TGR-1_X_5.82_Y_6.57", - "node_type":"delivery" + "node_type":"finish" } }, { @@ -219,7 +219,7 @@ "data":{ "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", "pose":"", - "node_type":"start" + "node_type":"departure" } }, { @@ -227,7 +227,7 @@ "data":{ "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", "pose":"AMK_TDU-TGR-1_X_14.03_Y_9.55", - "node_type":"pickup" + "node_type":"start" } }, { @@ -235,7 +235,7 @@ "data":{ "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", "pose":"AMK_TDU-TGR-1_X_15.09_Y_5.69", - "node_type":"delivery" + "node_type":"finish" } } ], diff --git a/test/data/stn_two_tasks.json b/test/data/stn_two_tasks.json index e0d0baa..dc075a4 100644 --- a/test/data/stn_two_tasks.json +++ b/test/data/stn_two_tasks.json @@ -150,7 +150,7 @@ { "data":{ "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", - "node_type":"start", + "node_type":"departure", "pose":"" }, "id":1 @@ -158,7 +158,7 @@ { "data":{ "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", - "node_type":"pickup", + "node_type":"start", "pose":"AMK_TDU-TGR-1_X_9.7_Y_5.6" }, "id":2 @@ -166,7 +166,7 @@ { "data":{ "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", - "node_type":"delivery", + "node_type":"finish", "pose":"AMK_TDU-TGR-1_X_5.82_Y_6.57" }, "id":3 @@ -174,7 +174,7 @@ { "data":{ "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", - "node_type":"start", + "node_type":"departure", "pose":"" }, "id":4 @@ -182,7 +182,7 @@ { "data":{ "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", - "node_type":"pickup", + "node_type":"start", "pose":"AMK_TDU-TGR-1_X_14.03_Y_9.55" }, "id":5 @@ -190,7 +190,7 @@ { "data":{ "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", - "node_type":"delivery", + "node_type":"finish", "pose":"AMK_TDU-TGR-1_X_15.09_Y_5.69" }, "id":6 diff --git a/test/data/stnu_two_tasks.json b/test/data/stnu_two_tasks.json index 2cc571b..4c20f3a 100644 --- a/test/data/stnu_two_tasks.json +++ b/test/data/stnu_two_tasks.json @@ -10,7 
+10,7 @@ }, { "data":{ - "node_type":"start", + "node_type":"departure", "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", "pose":"" }, @@ -18,7 +18,7 @@ }, { "data":{ - "node_type":"pickup", + "node_type":"start", "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", "pose":"AMK_TDU-TGR-1_X_9.7_Y_5.6" }, @@ -26,7 +26,7 @@ }, { "data":{ - "node_type":"delivery", + "node_type":"finish", "task_id":"0d06fb90-a76d-48b4-b64f-857b7388ab70", "pose":"AMK_TDU-TGR-1_X_5.82_Y_6.57" }, @@ -34,7 +34,7 @@ }, { "data":{ - "node_type":"start", + "node_type":"departure", "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", "pose":"" }, @@ -42,7 +42,7 @@ }, { "data":{ - "node_type":"pickup", + "node_type":"start", "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", "pose":"AMK_TDU-TGR-1_X_14.03_Y_9.55" }, @@ -50,7 +50,7 @@ }, { "data":{ - "node_type":"delivery", + "node_type":"finish", "task_id":"0616af00-ec3b-4ecd-ae62-c94a3703594c", "pose":"AMK_TDU-TGR-1_X_15.09_Y_5.69" }, diff --git a/test/data/tasks.yaml b/test/data/tasks.yaml index 8ec79ca..5ce2dda 100644 --- a/test/data/tasks.yaml +++ b/test/data/tasks.yaml @@ -1,7 +1,7 @@ 0616af00-ec3b-4ecd-ae62-c94a3703594c: task_id: 0616af00-ec3b-4ecd-ae62-c94a3703594c - earliest_pickup: 10 - latest_pickup: 20 + earliest_start: 10 + latest_start: 20 travel_time: name: "travel_time" mean: 5 @@ -12,8 +12,8 @@ variance: 0.2 207cc8da-2f0e-4538-802b-b8f3954df38d: task_id: 207cc8da-2f0e-4538-802b-b8f3954df38d - earliest_pickup: 40 - latest_pickup: 50 + earliest_start: 40 + latest_start: 50 travel_time: name: "travel_time" mean: 5 @@ -24,8 +24,8 @@ variance: 0.2 0d06fb90-a76d-48b4-b64f-857b7388ab70: task_id: 0d06fb90-a76d-48b4-b64f-857b7388ab70 - earliest_pickup: 70 - latest_pickup: 80 + earliest_start: 70 + latest_start: 80 travel_time: name: "travel_time" mean: 5