Skip to content

Commit

Permalink
first version of pyenergyplus migration passing integration tests
Browse files Browse the repository at this point in the history
  • Loading branch information
TShapinsky committed Jan 6, 2025
1 parent 2698801 commit 0f98b0b
Show file tree
Hide file tree
Showing 69 changed files with 1,876 additions and 576,019 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,8 @@ repos:
- id: requirements-txt-fixer
- id: mixed-line-ending
args: ["--fix=auto"]
- repo: https://github.com/pre-commit/mirrors-autopep8
rev: v2.0.1
- repo: https://github.com/hhatto/autopep8
rev: v2.3.1
hooks:
- id: autopep8
args:
Expand Down
7 changes: 7 additions & 0 deletions alfalfa_web/server/api-v2.js
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,13 @@ router.get("/runs/:runId/time", async (req, res, next) => {
.catch(next);
});

// GET /runs/:runId/log — respond with the run's recent log text.
// Delegates to the API layer; any rejection is forwarded to the Express
// error-handling middleware via next().
router.get("/runs/:runId/log", async (req, res, next) => {
  try {
    const log = await api.getRunLog(req.run);
    res.json({ payload: { log } });
  } catch (err) {
    next(err);
  }
});

router.get("/runs/:runId/points", (req, res, next) => {
api
.getPointsByRun(req.run)
Expand Down
8 changes: 7 additions & 1 deletion alfalfa_web/server/api.js
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,11 @@ class AlfalfaAPI {
return await getHashValue(this.redis, run.ref_id, "sim_time");
};

/**
 * Fetch the tail of a run's log from Redis and return it as one string.
 *
 * Reads the last `lineCount` entries of the Redis list `run:<ref_id>:log`
 * (lRange with negative indices addresses the list from the end) and joins
 * them with newlines.
 *
 * @param {object} run - Run document; only `run.ref_id` is read here.
 * @param {number} [lineCount=100] - Number of trailing log lines to return.
 *   Previously hard-coded to 100; parameterized for callers that need more
 *   or less context, default preserves the original behavior.
 * @returns {Promise<string>} Newline-joined log tail ("" when the list is empty).
 */
getRunLog = async (run, lineCount = 100) => {
  const logLines = await this.redis.lRange(`run:${run.ref_id}:log`, -lineCount, -1);
  return logLines.join("\n");
};

/**
 * Return all point documents belonging to a run.
 *
 * Queries the points collection by the run's Mongo `_id` and materializes
 * the cursor. `cursor.toArray()` already returns a Promise, so the previous
 * `Promise.resolve(...)` wrapper was redundant and has been removed — the
 * resolved value callers receive is unchanged.
 *
 * @param {object} run - Run document; only `run._id` is read here.
 * @returns {Promise<Array<object>>} Array of point documents (empty if none).
 */
getPointsByRun = async (run) => {
  const pointsCursor = this.points.find({ run: run._id });
  return pointsCursor.toArray();
};
Expand Down Expand Up @@ -126,7 +131,8 @@ class AlfalfaAPI {
const pointDict = {
id: point.ref_id,
name: point.name,
type: point.point_type
type: point.point_type,
units: point.units
};
return pointDict;
};
Expand Down
7 changes: 1 addition & 6 deletions alfalfa_worker/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM ghcr.io/nrel/alfalfa-dependencies:openstudio_3.8 AS base
FROM ghcr.io/nrel/alfalfa-dependencies:prepare_080 AS base

ENV HOME=/alfalfa

Expand All @@ -21,11 +21,6 @@ ENV PYTHONPATH="${HOME}:${PYTHONPATH}"

COPY ./alfalfa_worker ${HOME}/alfalfa_worker

RUN pip3.8 install virtualenv \
&& pip3.8 install \
scipy \
symfit

COPY ./alfalfa_worker /alfalfa/alfalfa_worker

COPY ./deploy /alfalfa/deploy
Expand Down
8 changes: 8 additions & 0 deletions alfalfa_worker/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,22 @@
import os
import sys
import traceback
from logging import StreamHandler, basicConfig
from pathlib import Path

# Determine which worker to load based on the QUEUE.
# This may be temporary for now, not sure on how else
# to determine which worker gets launched
from alfalfa_worker.dispatcher import Dispatcher
from alfalfa_worker.lib.constants import DATETIME_FORMAT

if __name__ == '__main__':

basicConfig(level=os.environ.get("LOGLEVEL", "INFO"),
handlers=[StreamHandler(sys.stdout)],
format='%(asctime)s - %(name)s - %(levelname)s: %(message)s',
datefmt=DATETIME_FORMAT)

try:
workdir = Path(os.environ.get('RUN_DIR', '/runs'))
dispatcher = Dispatcher(workdir)
Expand Down
2 changes: 1 addition & 1 deletion alfalfa_worker/dispatcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ def process_message(self, message):
"""
try:
message_body = json.loads(message)
self.logger.info(f"Processing message of {message_body}")
self.logger.debug(f"Processing message of {message_body}")
job = message_body.get('job')
if job:
params = message_body.get('params', {})
Expand Down
94 changes: 0 additions & 94 deletions alfalfa_worker/jobs/modelica/create_run.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,4 @@
import json
import os
from pathlib import Path
from uuid import uuid4

from pyfmi import load_fmu

from alfalfa_worker.lib.enums import RunStatus, SimType
from alfalfa_worker.lib.job import Job
Expand All @@ -19,7 +14,6 @@ def __init__(self, model_id, run_id=None):
# Define FMU specific attributes
self.upload_fmu: Path = self.dir / model_name
self.fmu_path = self.dir / 'model.fmu'
self.fmu_json = self.dir / 'tags.json'
self.model_name = model_name

# Needs to be set after files are uploaded / parsed.
Expand All @@ -34,99 +28,11 @@ def exec(self):
"""
self.logger.info("add_fmu for {}".format(self.run.ref_id))

# Create the FMU tags (no longer external now that python2 is deprecated)
self.create_tags()
# insert tags into db
self.insert_fmu_tags()
self.upload_fmu.rename(self.fmu_path)

def validate(self) -> None:
assert (self.dir / 'model.fmu').exists(), "model file not created"
assert (self.dir / 'tags.json').exists(), "tags file not created"

def cleanup(self) -> None:
super().cleanup()
self.set_run_status(RunStatus.READY)

def get_site_ref(self, haystack_json):
"""
Find the site given the haystack JSON file. Remove 'r:' from string.
:param haystack_json: json serialized Haystack document
:return: site_ref: id of site
"""
site_ref = ''
with open(haystack_json) as json_file:
data = json.load(json_file)
for entity in data:
if 'site' in entity:
if entity['site'] == 'm:':
site_ref = entity['id'].replace('r:', '')
break
return site_ref

def insert_fmu_tags(self):
with open(self.fmu_json, 'r') as f:
data = f.read()
points_json = json.loads(data)

self.run_manager.add_site_to_mongo(points_json, self.run)

def create_tags(self):
# 1.0 setup the inputs
fmu = load_fmu(self.upload_fmu)

# 2.0 get input/output variables from the FMU
# causality = 1 is parameter, 2 is input, 3 is output
input_names = fmu.get_model_variables(causality=2).keys()
output_names = fmu.get_model_variables(causality=3).keys()

# 3.0 add site tagging
tags = []

fmu_upload_name = os.path.basename(self.model_name) # without directories
fmu_upload_name = os.path.splitext(fmu_upload_name)[0] # without extension

# TODO: Figure out how to find geo_city
sitetag = {
"dis": "s:%s" % fmu_upload_name,
"id": "r:%s" % self.run.ref_id,
"site": "m:",
"datetime": "s:",
"simStatus": "s:Stopped",
"simType": "s:fmu",
"siteRef": "r:%s" % self.run.ref_id
}
tags.append(sitetag)

# 4.0 add input tagging
for var_input in input_names:
if not var_input.endswith("_activate"):
tag_input = {
"id": "r:%s" % uuid4(),
"dis": "s:%s" % var_input,
"siteRef": "r:%s" % self.run.ref_id,
"point": "m:",
"writable": "m:",
"writeStatus": "s:disabled",
"kind": "s:Number",
}
tags.append(tag_input)
tag_input = {}

# 5.0 add output tagging
for var_output in output_names:
tag_output = {
"id": "r:%s" % uuid4(),
"dis": "s:%s" % var_output,
"siteRef": "r:%s" % self.run.ref_id,
"point": "m:",
"cur": "m:",
"curVal": "n:",
"curStatus": "s:disabled",
"kind": "s:Number",
}
tags.append(tag_output)

# 6.0 write tags to the json file
with open(self.fmu_json, 'w') as outfile:
json.dump(tags, outfile)
Loading

0 comments on commit 0f98b0b

Please sign in to comment.