Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Replacement model outage update #215

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 11 additions & 4 deletions emf/loadflow_tool/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -504,20 +504,27 @@ def export_model(network: pypowsybl.network, opdm_object_meta, profiles=None):


def get_model_outages(network: pypowsybl.network):
    """Collect all disconnected elements (outages) from a pypowsybl network.

    Scans lines, dangling lines (tie-lines) and generators and reports every
    element that is not fully connected.

    Args:
        network: pypowsybl network instance to scan.

    Returns:
        list[dict]: one record per disconnected element with keys
        'grid_id', 'name', 'element_type' and 'country'.
    """
    outage_log = []
    report_columns = ['grid_id', 'name', 'element_type', 'country']

    # Lines: pull all attributes, then join voltage level and substation tables
    # so the substation 'country' attribute becomes available on each line row
    lines = network.get_elements(element_type=pypowsybl.network.ElementType.LINE,
                                 all_attributes=True).reset_index(names=['grid_id'])
    _voltage_levels = network.get_voltage_levels(all_attributes=True).rename(columns={"name": "voltage_level_name"})
    _substations = network.get_substations(all_attributes=True).rename(columns={"name": "substation_name"})
    lines = lines.merge(_voltage_levels, left_on='voltage_level1_id', right_index=True, suffixes=(None, '_voltage_level'))
    lines = lines.merge(_substations, left_on='substation_id', right_index=True, suffixes=(None, '_substation'))
    lines['element_type'] = 'Line'

    dlines = get_network_elements(network, pypowsybl.network.ElementType.DANGLING_LINE).reset_index(names=['grid_id'])
    dlines['element_type'] = 'Tieline'

    gens = get_network_elements(network, pypowsybl.network.ElementType.GENERATOR).reset_index(names=['grid_id'])
    gens['element_type'] = 'Generator'

    # A line is in outage when either end is open; other elements expose a single 'connected' flag
    disconnected_lines = lines[lines['connected1'].eq(False) | lines['connected2'].eq(False)]
    disconnected_dlines = dlines[dlines['connected'].eq(False)]
    disconnected_gens = gens[gens['connected'].eq(False)]

    outage_log.extend(disconnected_lines[report_columns].to_dict('records'))
    outage_log.extend(disconnected_dlines[report_columns].to_dict('records'))
    outage_log.extend(disconnected_gens[report_columns].to_dict('records'))

    return outage_log
13 changes: 11 additions & 2 deletions emf/loadflow_tool/model_merger/model_merger.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
# TODO - move this async solution to some common module
from concurrent.futures import ThreadPoolExecutor
from lxml import etree
from emf.loadflow_tool.model_merger.temporary_fixes import run_post_merge_processing, run_pre_merge_processing
from emf.loadflow_tool.model_merger.temporary_fixes import run_post_merge_processing, run_pre_merge_processing, fix_model_outages

logger = logging.getLogger(__name__)
parse_app_properties(caller_globals=globals(), path=config.paths.cgm_worker.merger)
Expand Down Expand Up @@ -72,7 +72,10 @@ def handle(self, task_object: dict, **kwargs):
"exclusion_reason": [],
"replacement": False,
"replaced_entity": [],
"replacement_reason": []}
"replacement_reason": [],
"outages_corrected": False,
"outage_fixes": [],
"outages_unmapped": []}

# Parse relevant data from Task
task = task_object
Expand Down Expand Up @@ -222,6 +225,12 @@ def handle(self, task_object: dict, **kwargs):
merged_model = merge_functions.load_model(input_models)
# TODO - run other LF if default fails

            # Crosscheck replaced model outages with latest UAP if at least one Baltic model was replaced
replaced_tso_list = [model['tso'] for model in merge_log['replaced_entity']]

if any(tso in ['LITGRID', 'AST', 'ELERING'] for tso in replaced_tso_list):
merged_model, merge_log = fix_model_outages(merged_model, replaced_tso_list, merge_log, scenario_datetime, time_horizon)

# Various fixes from igmsshvscgmssh error
if remove_non_generators_from_slack_participation:
network_pre_instance = merged_model["network"]
Expand Down
92 changes: 91 additions & 1 deletion emf/loadflow_tool/model_merger/temporary_fixes.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
import triplets
from emf.loadflow_tool.helper import create_opdm_objects
import pandas as pd
import logging

from emf.common.integrations import elastic
from emf.loadflow_tool.helper import create_opdm_objects, get_model_outages
from emf.loadflow_tool.model_merger.merge_functions import (load_opdm_data, create_sv_and_updated_ssh, fix_sv_shunts,
fix_sv_tapsteps, remove_duplicate_sv_voltages,
remove_small_islands,check_and_fix_dependencies,
Expand All @@ -9,6 +13,9 @@
set_brell_lines_to_zero_in_models_new)


logger = logging.getLogger(__name__)


def run_pre_merge_processing(input_models, merging_area):

# TODO warning logs for temp fix functions
Expand Down Expand Up @@ -60,3 +67,86 @@ def run_post_merge_processing(input_models, solved_model, task_properties, SMALL
#sv_data, ssh_data = disconnect_equipment_if_flow_sum_not_zero(cgm_sv_data=sv_data, cgm_ssh_data=ssh_data, original_data=models_as_triplets) fix implemented in pypowsybl 1.8.1

return sv_data, ssh_data


def fix_model_outages(merged_model, replaced_model_list: list, merge_log, scenario_datetime, time_horizon):
    """Align outages of replaced models with the latest Baltic outage UAP.

    Reconnects elements currently in outage in the merged model (per the
    network-config mapping) for the replaced areas, then disconnects the
    elements the latest UAP reports as being in outage at the scenario time.
    Records every applied or failed fix into merge_log.

    Args:
        merged_model: dict holding the merged pypowsybl network under 'network'.
        replaced_model_list: TSO names whose models were replaced (e.g. 'LITGRID').
        merge_log: merge report dict; keys 'outages_corrected', 'outage_fixes'
            and 'outages_unmapped' are updated in place.
        scenario_datetime: scenario timestamp used to filter active outages.
        time_horizon: merge time horizon; 'MO' selects the monthly UAP, otherwise weekly.

    Returns:
        tuple: (merged_model, merge_log) with outage fixes applied.
    """
    area_map = {"LITGRID": "Lithuania", "AST": "Latvia", "ELERING": "Estonia"}
    outage_areas = [area_map.get(item, item) for item in replaced_model_list]

    elk_service = elastic.Elastic()

    # Get outage eic-mrid mapping (stored mRIDs carry a leading underscore)
    mrid_map = elk_service.get_docs_by_query(index='config-network*', query={"match_all": {}}, size=10000)
    mrid_map['mrid'] = mrid_map['mrid'].str.lstrip('_')

    # Get latest UAP parse date
    merge_type = "Month" if time_horizon == 'MO' else "Week"

    body = {"size": 1, "query": {"bool": {"must": [{"match": {"Merge": merge_type}}]}},
            "sort": [{"reportParsedDate": {"order": "desc"}}], "fields": ["reportParsedDate"]}
    last_uap_version = elk_service.client.search(index='opc-outages-baltics*', body=body)['hits']['hits'][0]['fields']['reportParsedDate'][0]

    # Query for latest outage UAP
    uap_query = {"bool": {"must": [{"match": {"reportParsedDate": f"{last_uap_version}"}},
                                   {"match": {"Merge": merge_type}}]}}
    uap_outages = elk_service.get_docs_by_query(index='opc-outages-baltics*', query=uap_query, size=10000)
    uap_outages = uap_outages.merge(mrid_map[['eic', 'mrid']], how='left', on='eic').rename(columns={"mrid": 'grid_id'})

    # Filter outages according to model scenario date and replaced area
    filtered_outages = uap_outages[(uap_outages['start_date'] <= scenario_datetime) & (uap_outages['end_date'] >= scenario_datetime)]
    filtered_outages = filtered_outages[filtered_outages['Area'].isin(outage_areas)]

    # Outages without an mRID mapping cannot be applied to the network
    mapped_outages = filtered_outages[~filtered_outages['grid_id'].isna()]
    missing_outages = filtered_outages[filtered_outages['grid_id'].isna()]

    if not missing_outages.empty:
        logger.warning(f"Missing outage mRID(s): {missing_outages['name'].values}")

    # Get outages already applied to the model, restricted to the replaced areas
    model_outages = pd.DataFrame(get_model_outages(merged_model['network']))
    mapped_model_outages = pd.merge(model_outages, mrid_map, left_on='grid_id', right_on='mrid', how='inner')
    model_area_map = {"LITGRID": "LT", "AST": "LV", "ELERING": "EE"}
    model_outage_areas = [model_area_map.get(item, item) for item in replaced_model_list]
    filtered_model_outages = mapped_model_outages[mapped_model_outages['country'].isin(model_outage_areas)]

    logger.info("Fixing outages inside merged model:")

    # Reconnecting outages from network-config list
    for index, outage in filtered_model_outages.iterrows():
        try:
            if merged_model['network'].connect(outage['grid_id']):
                logger.info(f" {outage['name']} {outage['grid_id']} successfully reconnected")
                merge_log['outages_corrected'] = True
                merge_log.setdefault('outage_fixes', []).append(
                    {'name': outage['name'], 'grid_id': outage['grid_id'], "eic": outage['eic'], "outage_status": "connected"})
            else:
                # Exact-match lookup: str.contains would treat the mRID as a regex and
                # propagate NaN for unmapped rows, making .any() unreliable.
                # NOTE(review): checking membership in uap_outages here looks odd for a
                # reconnect ("already connected") — confirm intended semantics.
                if uap_outages['grid_id'].eq(outage['grid_id']).any():
                    logger.info(f"{outage['name']} {outage['grid_id']} is already connected")
                else:
                    logger.error(f"Failed to connect outage: {outage['name']} {outage['grid_id']}")
        except Exception as e:
            logger.error((e, outage['name']))
            merge_log.setdefault('outages_unmapped', []).append(
                {'name': outage['name'], 'grid_id': outage['grid_id'], "eic": outage['eic']})

    # Applying outages from UAP
    for index, outage in mapped_outages.iterrows():
        try:
            if merged_model['network'].disconnect(outage['grid_id']):
                logger.info(f"{outage['name']} {outage['grid_id']} successfully disconnected")
                merge_log['outages_corrected'] = True
                merge_log.setdefault('outage_fixes', []).append(
                    {'name': outage['name'], 'grid_id': outage['grid_id'], "eic": outage['eic'], "outage_status": "disconnected"})
            else:
                # Exact-match lookup (see note above on str.contains pitfalls)
                if uap_outages['grid_id'].eq(outage['grid_id']).any():
                    logger.info(f"{outage['name']} {outage['grid_id']} is already in outage")
                else:
                    logger.error(f"Failed to disconnect outage: {outage['name']} {outage['grid_id']}")
        except Exception as e:
            logger.error((e, outage['name']))
            merge_log.setdefault('outages_unmapped', []).append(
                {'name': outage['name'], 'grid_id': outage['grid_id'], "eic": outage['eic']})

    return merged_model, merge_log
Loading