Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Mr check kirchhoff first law #189

Closed
wants to merge 4 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
86 changes: 86 additions & 0 deletions emf/loadflow_tool/model_merger/merge_functions.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import zipfile
import config
from emf.loadflow_tool.helper import load_model, load_opdm_data, filename_from_metadata, attr_to_dict, export_model, parse_pypowsybl_report, get_network_elements
from emf.loadflow_tool import loadflow_settings
Expand Down Expand Up @@ -97,6 +98,37 @@ def update_FullModel_from_OpdmObject(data, opdm_object):
})


def revert_ids_back(exported_model, triplets_data, revert_ids: bool = True):
    """
    Reverts pypowsybl-generated uuids back to the original ids where applicable.

    As pypowsybl creates its own unique uuids for the cases when the originals do not match the criteria then this
    takes the naming_strategy.csv provided by the pypowsybl and reverts those ids back if it is applicable
    :param exported_model: binary object (zip archive) from pypowsybl
    :param triplets_data: profile(s) converted to triplets
    :param revert_ids: True = fix the ids in triplets_data, False = report the changed ids only
    :return: (updated) triplets data
    """
    naming_strategy = pandas.DataFrame()
    # Context manager ensures the archive handle is released (the original leaked it)
    with zipfile.ZipFile(exported_model) as contents:
        for file_name in contents.namelist():
            if 'naming_strategy' in file_name:
                naming_strategy = pandas.read_csv(filepath_or_buffer=BytesIO(contents.read(file_name)), sep=';')
                break
    # No mapping file in the export: nothing was renamed, nothing to do
    if naming_strategy.empty:
        return triplets_data
    existing_values = triplets_data.merge(naming_strategy, left_on='VALUE', right_on='CgmesUuid')
    # 'unknown' rows carry no original id to map back to
    existing_values = existing_values[existing_values['IidmId'] != 'unknown']
    if existing_values.empty:
        return triplets_data
    if not revert_ids:
        # Report-only mode: surface the count so dangling references can be investigated
        logger.error(f"Found {len(existing_values.index)} changed ids, consider dangling reference errors")
        return triplets_data
    logger.warning(f"Mapping {len(existing_values.index)} ids back")
    existing_values['VALUE'] = existing_values['IidmId']
    new_existing_values = existing_values[['ID', 'KEY', 'VALUE', 'INSTANCE_ID']]
    triplets_data = triplets.rdf_parser.update_triplet_from_triplet(triplets_data, new_existing_values)
    return triplets_data


def create_sv_and_updated_ssh(merged_model, original_models, models_as_triplets, scenario_date, time_horizon, version, merging_area, merging_entity, mas):

### SV ###
Expand All @@ -118,6 +150,8 @@ def create_sv_and_updated_ssh(merged_model, original_models, models_as_triplets,

# Load SV data
sv_data = pandas.read_RDF([exported_model])
# Fix naming
sv_data = revert_ids_back(exported_model=exported_model, triplets_data=sv_data, revert_ids=False)

# Update
sv_data.set_VALUE_at_KEY(key='label', value=filename_from_metadata(opdm_object_meta))
Expand Down Expand Up @@ -822,6 +856,58 @@ def set_brell_lines_to_zero_in_models_new(assembled_data, magic_brell_lines: dic
return assembled_data


def get_nodes_against_kirchhoff_first_law(original_models,
                                          cgm_sv_data: pandas.DataFrame = None,
                                          sv_injection_limit: float = SV_INJECTION_LIMIT,
                                          consider_sv_injection: bool = False,
                                          nodes_only: bool = True):
    """
    Gets dataframe of topological nodes in which the absolute sum of flows exceeds the limit
    :param cgm_sv_data: merged SV profile (needed to set the flows for terminals); falls back to original_models
    :param original_models: IGMs (triplets, dictionary)
    :param consider_sv_injection: whether to add the SvInjection values into the per-node sums
    :param nodes_only: if true then return unique nodes only, if false then nodes with corresponding terminals
    :param sv_injection_limit: threshold for deciding whether the node is violated by sum of flows
    """
    original_models = get_opdm_data_from_models(model_data=original_models)
    if cgm_sv_data is None:
        cgm_sv_data = original_models
    injections = pandas.DataFrame()
    power_flow = cgm_sv_data.type_tableview('SvPowerFlow')[['SvPowerFlow.Terminal',
                                                            'SvPowerFlow.p',
                                                            'SvPowerFlow.q']]
    if consider_sv_injection:
        # Best effort: the SvInjection table may be absent from the profile entirely
        column_map = {'SvInjection.TopologicalNode': 'Terminal.TopologicalNode',
                      'SvInjection.pInjection': 'SvPowerFlow.p',
                      'SvInjection.qInjection': 'SvPowerFlow.q'}
        try:
            injections = (cgm_sv_data.type_tableview('SvInjection')
                          .rename_axis('SvInjection')
                          .rename(columns=column_map)
                          .reset_index())[['Terminal.TopologicalNode', 'SvPowerFlow.p', 'SvPowerFlow.q']]
        except Exception:
            injections = pandas.DataFrame()
    # Terminals link each flow record to its topological node
    terminals = original_models.type_tableview('Terminal').rename_axis('Terminal').reset_index()
    terminals = terminals[['Terminal', 'Terminal.ConductingEquipment', 'Terminal.TopologicalNode']]

    def _numeric_sum(values):
        # Coerce to numeric first: triplets keep values as strings
        return pandas.to_numeric(values, errors='coerce').sum()

    # Sum p and q per topological node
    flows_with_nodes = power_flow.merge(terminals, left_on='SvPowerFlow.Terminal',
                                        right_on='Terminal', how='left')
    flows_summed = (flows_with_nodes.groupby('Terminal.TopologicalNode')[['SvPowerFlow.p', 'SvPowerFlow.q']]
                    .agg(_numeric_sum)
                    .rename_axis('Terminal.TopologicalNode')
                    .reset_index())
    if not injections.empty:
        flows_summed = (pandas.concat([flows_summed, injections])
                        .groupby('Terminal.TopologicalNode')
                        .sum()
                        .reset_index())
    # Keep only nodes whose absolute p or q sum violates the threshold
    p_exceeded = flows_summed['SvPowerFlow.p'].abs() > sv_injection_limit
    q_exceeded = flows_summed['SvPowerFlow.q'].abs() > sv_injection_limit
    nok_nodes = flows_summed[p_exceeded | q_exceeded][['Terminal.TopologicalNode']]
    if nodes_only:
        return nok_nodes
    try:
        nodes_with_flows = terminals.merge(flows_summed, on='Terminal.TopologicalNode', how='left')
        return nodes_with_flows.merge(nok_nodes, on='Terminal.TopologicalNode')
    except IndexError:
        return pandas.DataFrame()


if __name__ == "__main__":

from emf.common.integrations.object_storage.models import get_latest_boundary, get_latest_models_and_download
Expand Down
32 changes: 32 additions & 0 deletions emf/loadflow_tool/model_validator/validator.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
import triplets
import pandas
import uuid
import pypowsybl
import logging
import json
Expand All @@ -9,6 +12,7 @@
from emf.common.logging import custom_logger
from emf.common.config_parser import parse_app_properties
from emf.common.integrations import elastic
from emf.loadflow_tool.model_merger.merge_functions import get_nodes_against_kirchhoff_first_law, get_opdm_data_from_models, revert_ids_back

# Initialize custom logger
# custom_logger.initialize_custom_logger(extra={'worker': 'model-retriever', 'worker_uuid': str(uuid.uuid4())})
Expand All @@ -24,6 +28,11 @@ def validate_model(opdm_objects, loadflow_parameters=getattr(loadflow_settings,
# Load data
start_time = time.time()
model_data = load_model(opdm_objects=opdm_objects)
# Pre check
opdm_model_triplets = get_opdm_data_from_models(model_data=opdm_objects)
violated_nodes_pre = get_nodes_against_kirchhoff_first_law(original_models=opdm_model_triplets, consider_sv_injection=True)
kirchhoff_first_law_detected = False if violated_nodes_pre.empty else True

network = model_data["network"]

# Run all validations
Expand All @@ -50,6 +59,23 @@ def validate_model(opdm_objects, loadflow_parameters=getattr(loadflow_settings,
parameters=loadflow_parameters,
reporter=loadflow_report)

# Export sv profile and check it for Kirchhoff 1st law
export_parameters = {"iidm.export.cgmes.profiles": 'SV',
"iidm.export.cgmes.naming-strategy": "cgmes-fix-all-invalid-ids"}
bytes_object = network.save_to_binary_buffer(format="CGMES",
parameters=export_parameters)
bytes_object.name = f"{uuid.uuid4()}.zip"
# Load SV data
sv_data = pandas.read_RDF([bytes_object])
# Fix naming
sv_data = revert_ids_back(exported_model=bytes_object, triplets_data=sv_data)
# Check violations after loadflow
violated_nodes_post = get_nodes_against_kirchhoff_first_law(original_models=opdm_model_triplets,
cgm_sv_data=sv_data,
consider_sv_injection=True)
kirchhoff_first_law_detected = kirchhoff_first_law_detected or (False if violated_nodes_post.empty else True)
# End of post check

# Parsing loadflow results
# TODO move sanitization to Elastic integration
loadflow_result_dict = {}
Expand All @@ -69,6 +95,12 @@ def validate_model(opdm_objects, loadflow_parameters=getattr(loadflow_settings,
model_data["validation_duration_s"] = round(time.time() - start_time, 3)
logger.info(f"Load flow validation status: {model_valid} [duration {model_data['validation_duration_s']}s]")

# Test for kirchhoff
# Ver 1 call model invalid if they are present
# model_valid = model_valid and (not kirchhoff_first_law_detected)
# Ver 2 save it as additional parameter to elastic report
model_data["Kirchhoff_first_law_error"] = kirchhoff_first_law_detected

try:
model_data['outages'] = get_model_outages(network)
except Exception as e:
Expand Down
Loading