Skip to content

Commit

Permalink
Merge pull request #203 from Baltic-RCC/dev
Browse files Browse the repository at this point in the history
Dev release to prod
  • Loading branch information
VeikoAunapuu authored Dec 2, 2024
2 parents 634c30a + 8081a71 commit 137905e
Show file tree
Hide file tree
Showing 16 changed files with 427 additions and 80 deletions.
4 changes: 2 additions & 2 deletions Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ name = "pypi"
[packages]
pandas = "2.2.2"
numpy = "2.0.0"
pypowsybl = "1.6.1"
pypowsybl = "1.8.1"
aniso8601 = "*"
lxml = "*"
edx = "*"
Expand All @@ -26,4 +26,4 @@ tabulate = "*"
pyvis = "*"

[requires]
python_version = "3.11"
python_version = "3.11"
65 changes: 35 additions & 30 deletions Pipfile.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 4 additions & 1 deletion config/cgm_worker/validator.properties
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
[MAIN]
VALIDATOR_ELK_INDEX = emfos-igm-validation

VALIDATION_LOAD_FLOW_SETTINGS = CGM_RELAXED_2
VALIDATION_LOAD_FLOW_SETTINGS = CGM_RELAXED_2

CHECK_NON_RETAINED_SWITCHES = False
CHECK_KIRCHHOFF_FIRST_LAW = False
1 change: 1 addition & 0 deletions config/task_generator/manual_task_generator.properties
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ TASK_VERSION =
#TASK_MAS =
TASK_MERGING_ENTITY =
RUN_REPLACEMENT = True
RUN_REPLACEMENT_LOCAL = True
RUN_SCALING = False
UPLOAD_TO_OPDM = False
UPLOAD_TO_MINIO = True
Expand Down
62 changes: 50 additions & 12 deletions config/task_generator/process_conf.json
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
"tags": [],
"properties": {
"merge_type": "EU",
"merging_entity": "BALTICRSC",
"merging_entity": "BALTICRCC",
"mas": "http://www.baltic-rsc.eu/OperationalPlanning",
"included": [],
"excluded": [],
Expand Down Expand Up @@ -53,7 +53,7 @@
"tags": [],
"properties": {
"merge_type": "EU",
"merging_entity": "BALTICRSC",
"merging_entity": "BALTICRCC",
"mas": "http://www.baltic-rsc.eu/OperationalPlanning",
"included": [],
"excluded": [],
Expand Down Expand Up @@ -82,7 +82,7 @@
"tags": [],
"properties": {
"merge_type": "EU",
"merging_entity": "BALTICRSC",
"merging_entity": "BALTICRCC",
"mas": "http://www.baltic-rsc.eu/OperationalPlanning",
"included": [],
"excluded": [],
Expand Down Expand Up @@ -111,7 +111,7 @@
"tags": [],
"properties": {
"merge_type": "EU",
"merging_entity": "BALTICRSC",
"merging_entity": "BALTICRCC",
"mas": "http://www.baltic-rsc.eu/OperationalPlanning",
"included": [],
"excluded": [],
Expand Down Expand Up @@ -140,7 +140,7 @@
"tags": [],
"properties": {
"merge_type": "EU",
"merging_entity": "BALTICRSC",
"merging_entity": "BALTICRCC",
"mas": "http://www.baltic-rsc.eu/OperationalPlanning",
"included": [],
"excluded": [],
Expand Down Expand Up @@ -180,7 +180,7 @@
"tags": [],
"properties": {
"merge_type": "BA",
"merging_entity": "BALTICRSC",
"merging_entity": "BALTICRCC",
"included": [
"AST",
"PSE",
Expand Down Expand Up @@ -216,7 +216,7 @@
"tags": [],
"properties": {
"merge_type": "BA",
"merging_entity": "BALTICRSC",
"merging_entity": "BALTICRCC",
"included": [
"AST",
"PSE",
Expand Down Expand Up @@ -252,7 +252,7 @@
"tags": [],
"properties": {
"merge_type": "BA",
"merging_entity": "BALTICRSC",
"merging_entity": "BALTICRCC",
"included": [
"AST",
"PSE",
Expand Down Expand Up @@ -288,7 +288,7 @@
"tags": [],
"properties": {
"merge_type": "BA",
"merging_entity": "BALTICRSC",
"merging_entity": "BALTICRCC",
"included": [
"AST",
"PSE",
Expand Down Expand Up @@ -324,7 +324,7 @@
"tags": [],
"properties": {
"merge_type": "BA",
"merging_entity": "BALTICRSC",
"merging_entity": "BALTICRCC",
"included": [
"AST",
"PSE",
Expand All @@ -342,7 +342,45 @@
"upload_to_opdm": "False",
"upload_to_minio": "True",
"send_merge_report": "True"
}},
}
},
{
"@context": "https://example.com/schemas/run.jsonld",
"@id": "https://example.com/runs/YearAheadRMM",
"@type": "Run",
"process_id": "https://example.com/processes/RMM_CREATION",
"valid_from": "",
"valid_to": "",
"gate_open": "P1DT5H",
"gate_close": "P1DT4H",
"run_at": "0 4 1 9 *",
"time_frame": "Y-1",
"data_timestamps": "30 * * * *",
"data_resolution": "PT1H",
"tags": [],
"properties": {
"merge_type": "BA",
"merging_entity": "BALTICRCC",
"included": [
"AST",
"PSE",
"ELERING"
],
"excluded": [],
"local_import": [
"LITGRID"
],
"time_horizon": "YR",
"version": "001",
"mas": "http://www.baltic-rsc.eu/OperationalPlanning/RMM",
"replacement": "True",
"scaling": "False",
"upload_to_opdm": "False",
"upload_to_minio": "True",
"send_merge_report": "True"
}
},

{
"@context": "https://example.com/schemas/run.jsonld",
"@id": "https://example.com/runs/WeekAheadRMM",
Expand All @@ -359,7 +397,7 @@
"tags": [],
"properties": {
"merge_type": "BA",
"merging_entity": "BALTICRSC",
"merging_entity": "BALTICRCC",
"included": [
"AST",
"PSE",
Expand Down
1 change: 1 addition & 0 deletions config/task_generator/task_generator.properties
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ RMM_LOCAL_IMPORT = LITGRID
# MERGE CONFIGURATION FLAGS
RUN_REPLACEMENT_RMM = True
RUN_REPLACEMENT_CGM = False
RUN_REPLACEMENT_LOCAL = True
RUN_SCALING_RMM = False
RUN_SCALING_CGM = False
UPLOAD_TO_OPDM_RMM = False
Expand Down
35 changes: 28 additions & 7 deletions emf/common/integrations/minio_api.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import pandas as pd
import requests
from lxml import etree
import minio
Expand Down Expand Up @@ -186,6 +187,24 @@ def query_objects(self, bucket_name: str, metadata: dict = None, prefix: str = N

return result_list


def get_all_objects_name(self, bucket_name: str, prefix: str = None):
    """Return the base names of bucket objects whose file names carry model metadata.

    Lists all objects under ``prefix`` (recursively) and keeps only the final
    path component of each object name when it contains more than three
    dash-separated parts — i.e. file names that encode metadata such as
    timestamp, business type, TSO and version (e.g.
    ``20240101T0030Z-1D-TSO-001.zip``).

    :param bucket_name: name of the bucket to list
    :param prefix: optional object-key prefix to narrow the listing; None lists the whole bucket
    :return: list of matching object base names (path stripped)
    """
    objects = self.client.list_objects(bucket_name=bucket_name, prefix=prefix, recursive=True)
    list_elements = []
    for obj in objects:
        try:
            object_name = obj.object_name.split("/")[-1]
            # Only take models that have metadata encoded in the filename
            # (at least four dash-separated parts)
            if len(object_name.split('-')) > 3:
                list_elements.append(object_name)
        except Exception:
            # Narrowed from a bare except; keep the listing best-effort but
            # record which object could not be processed.
            logger.warning(f"Object name not present: {obj!r}")
    return list_elements




@renew_authentication_token
def get_latest_models_and_download(self,
time_horizon: str,
Expand Down Expand Up @@ -236,14 +255,16 @@ def get_latest_models_and_download(self,

# Filter to the latest versions of received network models for each model entity
logger.info(f"Filtering to latest model version")
additional_models_filtered = {}
list_of_name_parts = []
for model in additional_models:
parts = model.object_name.split('.')[0].split('-')
party, version = parts[-2], parts[-1]
# Check if the party already in the filtered dictionary or version is higher
if party not in additional_models_filtered or version > additional_models_filtered[party][1]:
additional_models_filtered[party] = (model, version)
additional_models_filtered = [model for model, version in additional_models_filtered.values()]
name_parts = model.object_name.split('/')[-1].split('.')[0].split('-')
name_parts.append(model.object_name)
list_of_name_parts.append(name_parts)

name_parts_df = pd.DataFrame(list_of_name_parts, columns=['timestamp', 'business_type', 'tso', 'version', 'object_name'])
name_parts_df = name_parts_df.sort_values(by=['business_type', 'version'], ascending=[True, False])
name_parts_df = name_parts_df.drop_duplicates('tso')
additional_models_filtered = [model for model in additional_models if model.object_name in name_parts_df['object_name'].values]

# Download relevant models
for model in additional_models_filtered:
Expand Down
3 changes: 0 additions & 3 deletions emf/common/integrations/rabbit.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,6 @@

parse_app_properties(globals(), config.paths.integrations.rabbit)

pika_logger = logging.getLogger("pika")
pika_logger.setLevel(logging.DEBUG)


class BlockingClient:

Expand Down
13 changes: 9 additions & 4 deletions emf/loadflow_tool/model_merger/merge_functions.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
import zipfile
from io import BytesIO

import config
from emf.loadflow_tool.helper import load_model, load_opdm_data, filename_from_metadata, attr_to_dict, export_model, parse_pypowsybl_report, get_network_elements
from emf.loadflow_tool import loadflow_settings
Expand Down Expand Up @@ -621,10 +624,12 @@ def remove_duplicate_sv_voltages(cgm_sv_data, original_data):
# Just in case convert the values to numeric
sv_voltage_values[['SvVoltage.v']] = (sv_voltage_values[['SvVoltage.v']].apply(lambda x: x.apply(Decimal)))
# Group by topological node id and by some logic take SvVoltage that will be dropped
voltages_to_discard = (sv_voltage_values.groupby(['SvVoltage.SvTopologicalNode']).
apply(lambda x: take_best_match_for_sv_voltage(input_data=x,
column_name='SvVoltage.v',
to_keep=False), include_groups=False))
voltages_to_keep = (sv_voltage_values.groupby(['SvVoltage.SvTopologicalNode']).
apply(lambda x: take_best_match_for_sv_voltage(input_data=x,
column_name='SvVoltage.v',
to_keep=True), include_groups=False))
voltages_to_discard = sv_voltage_values.merge(voltages_to_keep['ID'], on='ID', how='left', indicator=True)
voltages_to_discard = voltages_to_discard[voltages_to_discard['_merge'] == 'left_only']
if not voltages_to_discard.empty:
logger.info(f"Removing {len(voltages_to_discard.index)} duplicate voltage levels from boundary nodes")
sv_voltages_to_remove = pandas.merge(cgm_sv_data, voltages_to_discard['ID'].to_frame(), on='ID')
Expand Down
Loading

0 comments on commit 137905e

Please sign in to comment.