From 93c2232ce19e00187334ee89a9bfcee79b77fce4 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 21 Mar 2024 11:47:15 -0700 Subject: [PATCH 01/89] Read secret_list from actual file if present, else use sample file. Added a try-catch block similar to how other config files are used in case where actual non-sample file is present else use the sample file. Can remove the file from internal repo since not needed anymore. --- emission/net/auth/secret.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/emission/net/auth/secret.py b/emission/net/auth/secret.py index 1593fc2fb..582a15d5a 100644 --- a/emission/net/auth/secret.py +++ b/emission/net/auth/secret.py @@ -9,6 +9,15 @@ def __init__(self): key_file.close() self.client_secret_list = key_data["client_secret_list"] + try: + key_file = open('conf/net/auth/secret_list.json') + except: + print("secret_list.json not configured, falling back to sample, default configuration") + key_file = open('conf/net/auth/secret_list.json.sample') + key_data = json.load(key_file) + key_file.close() + self.client_secret_list = key_data["client_secret_list"] + def verifyUserToken(self, token): # attempt to validate token on the client-side logging.debug("Using the SecretAuthMethod to verify id token %s of length %d against secret list %s..." % From a80d7e4a996161b135705bdbc0ab14973dda62e6 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Mon, 25 Mar 2024 19:59:28 -0700 Subject: [PATCH 02/89] Matched conf/log files with internal repo Changed level to DEBUG and formatter set to detailed. Can keep both external and internal versions same by making external also have the same logging level as internal. Internal version given priority as detailed logging is better for detecting errors. Also, debug() statements [~1500] are much more than warning() statements [~50]. Will lose out on all these when external repo is used / run if only WARNING level set as default which is higher than DEBUG level. 
--- conf/log/intake.conf.sample | 3 ++- conf/log/webserver.conf.sample | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/conf/log/intake.conf.sample b/conf/log/intake.conf.sample index d51c5ac2e..413305ad8 100644 --- a/conf/log/intake.conf.sample +++ b/conf/log/intake.conf.sample @@ -12,7 +12,8 @@ }, "console": { "class": "logging.StreamHandler", - "level": "WARNING" + "level": "DEBUG", + "formatter": "detailed" }, "file": { "backupCount": 8, diff --git a/conf/log/webserver.conf.sample b/conf/log/webserver.conf.sample index de9512ea8..54cb42fa9 100644 --- a/conf/log/webserver.conf.sample +++ b/conf/log/webserver.conf.sample @@ -12,7 +12,8 @@ }, "console": { "class": "logging.StreamHandler", - "level": "WARNING" + "level": "DEBUG", + "formatter": "detailed" }, "file": { "backupCount": 3, From 20e18562925f5e8dd15fe794adeb7ac9cbe0909a Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 28 Mar 2024 21:45:28 -0700 Subject: [PATCH 03/89] Reading push.json values from environment variables Push.json.sample just has 4 key-value pairs which can be changed to ENV variables internally. Hence, need to write code to read from Env variables. Changing emission/net/ext_service/push/notify_interface.py to read from env variables. Using helper class config.py, based on existing template. First reading from .json file if exists, else read from env variables. If env variables are all None, then say that values not configured. 
--- emission/net/ext_service/push/config.py | 37 +++++++++++++++++++ .../net/ext_service/push/notify_interface.py | 6 +-- 2 files changed, 40 insertions(+), 3 deletions(-) create mode 100644 emission/net/ext_service/push/config.py diff --git a/emission/net/ext_service/push/config.py b/emission/net/ext_service/push/config.py new file mode 100644 index 000000000..61a9946c0 --- /dev/null +++ b/emission/net/ext_service/push/config.py @@ -0,0 +1,37 @@ +import json +import logging +import os + +def get_config_data_from_env(): + config_data_env = { + "provider": os.getenv("PUSH_PROVIDER"), + "server_auth_token": os.getenv("PUSH_SERVER_AUTH_TOKEN"), + "app_package_name": os.getenv("PUSH_APP_PACKAGE_NAME"), + "ios_token_format": os.getenv("PUSH_IOS_TOKEN_FORMAT") + } + return config_data_env + +def get_config_data(): + try: + config_file = open('conf/net/ext_service/push.json') + ret_val = json.load(config_file) + config_file.close() + except: + logging.debug("net.ext_service.push.json not configured, checking environment variables...") + ret_val = get_config_data_from_env() + # Check if all PUSH environment variables are not set + if (not any(ret_val.values())): + raise TypeError + return ret_val + +try: + config_data = get_config_data() +except: + logging.debug("All push environment variables are set to None") + +def get_config(): + return config_data + +def reload_config(): + global config_data + config_data = get_config_data() diff --git a/emission/net/ext_service/push/notify_interface.py b/emission/net/ext_service/push/notify_interface.py index 6b94857f6..6d8332c1b 100644 --- a/emission/net/ext_service/push/notify_interface.py +++ b/emission/net/ext_service/push/notify_interface.py @@ -11,15 +11,15 @@ import logging import importlib +import emission.net.ext_service.push.config as pc + # Note that the URL is hardcoded because the API endpoints are not standardized. # If we change a push provider, we will need to modify to match their endpoints. 
# Hardcoding will remind us of this :) # We can revisit this if push providers eventually decide to standardize... try: - push_config_file = open('conf/net/ext_service/push.json') - push_config = json.load(push_config_file) - push_config_file.close() + push_config = pc.get_config_data() except: logging.warning("push service not configured, push notifications not supported") From 4a1005e80b35e2e026a13c2da70e9d2e92ec0572 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Mon, 1 Apr 2024 14:33:49 -0700 Subject: [PATCH 04/89] Choose analysis/debug.conf file based on ENV var Added new environment variable PROD_STAGE. If this is set to TRUE, then the debug.conf.internal.json will be selected as the conf file. Else, the existing try-catch block is executed which either checks for a debug.conf.json or uses the sample file if the former does not exist. The try part is still valid, since some Test files copy over the sample file as debug.conf.json, then reload the config in eac.reload_config() which would need reading from debug.conf.json as well. 
Hence, now three files exist: - debug.conf.json.sample - debug.conf.json - debug.conf.json.internal --- .gitignore | 1 + conf/analysis/debug.conf.internal.json | 14 ++++++++++++++ emission/analysis/config.py | 17 ++++++++++++----- 3 files changed, 27 insertions(+), 5 deletions(-) create mode 100644 conf/analysis/debug.conf.internal.json diff --git a/.gitignore b/.gitignore index 1b467ec07..fb58e8e38 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,7 @@ CFC_DataCollector/moves_collect.log webapp/www/lib conf/**/*.json !conf/**/*.schema.json +!conf/analysis/debug.conf.internal.json *.ipynb_checkpoints* diff --git a/conf/analysis/debug.conf.internal.json b/conf/analysis/debug.conf.internal.json new file mode 100644 index 000000000..4097d8617 --- /dev/null +++ b/conf/analysis/debug.conf.internal.json @@ -0,0 +1,14 @@ +{ + "intake.segmentation.section_segmentation.sectionValidityAssertions": true, + "intake.cleaning.clean_and_resample.speedDistanceAssertions": false, + "intake.cleaning.clean_and_resample.sectionValidityAssertions": false, + "intake.cleaning.filter_accuracy.enable": false, + "classification.inference.mode.useAdvancedFeatureIndices": true, + "classification.inference.mode.useBusTrainFeatureIndices": true, + "classification.validityAssertions": true, + "output.conversion.validityAssertions": true, + "section.startStopRadius": 150, + "section.endStopRadius": 150, + "analysis.result.section.key": "analysis/inferred_section", + "userinput.keylist": ["manual/mode_confirm", "manual/purpose_confirm", "manual/replaced_mode", "manual/trip_user_input"] +} diff --git a/emission/analysis/config.py b/emission/analysis/config.py index 3f41d22f1..f579349c9 100644 --- a/emission/analysis/config.py +++ b/emission/analysis/config.py @@ -1,12 +1,19 @@ import json +import os def get_config_data(): - try: - config_file = open('conf/analysis/debug.conf.json') - except: - print("analysis.debug.conf.json not configured, falling back to sample, default configuration") - 
config_file = open('conf/analysis/debug.conf.json.sample') + if os.getenv("PROD_STAGE") == "TRUE": + print("In production environment, opening internal debug.conf") + config_file = open('conf/analysis/debug.conf.internal.json') + else: + try: + print("Trying to open debug.conf.json") + config_file = open('conf/analysis/debug.conf.json') + except: + print("analysis.debug.conf.json not configured, falling back to sample, default configuration") + config_file = open('conf/analysis/debug.conf.json.sample') ret_val = json.load(config_file) + print(ret_val) config_file.close() return ret_val From d4dad4ac8bd6f526db6f0bd424b9aac794ca0ba3 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Thu, 4 Apr 2024 16:08:58 -0600 Subject: [PATCH 05/89] Testing method to share tag between repos This is the first part of the method to share the server image tag between other repositories. It would be shared as tag.txt, which gets overwritten every time the image build runs. 
--- .github/workflows/image_build_push.yml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 3b991786a..c5f2dc74a 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -6,7 +6,7 @@ name: docker image # events but only for the master branch on: push: - branches: [ master, gis-based-mode-detection ] + branches: [ master, gis-based-mode-detection, consolidate-differences ] # Env variable @@ -46,3 +46,14 @@ jobs: - name: push docker image run: | docker push $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} + + - name: Write tag file + run: | + echo 'foo bar' > tag.txt + + - uses: actions/upload-artifact@v4 + with: + name: Create tag artifact + path: tag.txt + if-no-files-found: error + overwrite: true From 02c7fc748d2d963c289e408901ac9049f80b0abd Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Wed, 10 Apr 2024 03:27:32 -0700 Subject: [PATCH 06/89] Changed webserver.conf.json to Environment variables + Removed sed / jq from docker_start_script.sh MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: Currently replaced config file values with environment variables. Expecting developers to manually set env vars instead of conf files. So, for instance, running containers using `docker-compose` or `docker run`, need to set these values similarly like push environment variables will need to be set. Key-value pairs in the webserver config file: REMOVED: log_base_dir, python_path, log_file ENV: static_path, 404_redirect, host, port, timeout, auth, aggregate_call_auth ——————————— Changes 1. webserver.conf.sample - Kept file as it is and changed how values being read in cfc_webapp.py, TestWebserver.py and docker_start_script.sh 2. 
TestWebserver.py - In setup(): Storing current original ENV variables, Replacing them with test values - In teardown(): Restoring original ENV variables 3. cfc_webapp.py - Removed log_base_dir, python_path, log_file as these were only used in the cfc_webapp.py to read in the value from the config file and not used elsewhere. - Added environment variables for the other config key-value pairs to avoid dependence on config file and as they were being used in the cfc_webapp.py file. 4. docker_start_script.sh - Removed sed / jq usage for editing webserver.conf.sample.json file and copying over as webserver.conf.json; simply setting the environment variable for WEB_SERVER_HOST now. ------------------- Special notes: 1. config.json <-> webserver.conf.sample.json - Some files still use config.json and these changes were made 7-8 years ago (even 10 years ago). This config.json file has the same contents as the current webserver.conf.sample. - So, webserver.conf.sample.json was actually config.json at some point! https://github.com/e-mission/e-mission-server/commit/a028dec753e33c8fd3683f309319cfd9b7f1187e 2. Sed localhost replacement isn’t functionally correct - The default sample value for server.host is "0.0.0.0". - But the sed command replaces “localhost” with the ENV variable value. - Since the localhost keyword itself isn’t there in the sample file, the replacement will not work either. 
---------------- --- .docker/docker_start_script.sh | 5 +-- emission/net/api/cfc_webapp.py | 19 ++++----- emission/tests/netTests/TestWebserver.py | 51 ++++++++++-------------- 3 files changed, 29 insertions(+), 46 deletions(-) diff --git a/.docker/docker_start_script.sh b/.docker/docker_start_script.sh index f41528c0f..250fa5774 100644 --- a/.docker/docker_start_script.sh +++ b/.docker/docker_start_script.sh @@ -17,9 +17,8 @@ cat conf/storage/db.conf echo ${WEB_SERVER_HOST} if [ -z ${WEB_SERVER_HOST} ] ; then local_host=`hostname -i` - sed "s_localhost_${local_host}_" conf/net/api/webserver.conf.sample > conf/net/api/webserver.conf -else - sed "s_localhost_${WEB_SERVER_HOST}_" conf/net/api/webserver.conf.sample > conf/net/api/webserver.conf + export WEB_SERVER_HOST=$local_host + echo "Setting webserver host environment variable to localhost" fi cat conf/net/api/webserver.conf diff --git a/emission/net/api/cfc_webapp.py b/emission/net/api/cfc_webapp.py index e585d6a25..6596921ea 100644 --- a/emission/net/api/cfc_webapp.py +++ b/emission/net/api/cfc_webapp.py @@ -58,20 +58,15 @@ logging.debug("webserver not configured, falling back to sample, default configuration") config_file = open('conf/net/api/webserver.conf.sample') -OPENPATH_URL="https://www.nrel.gov/transportation/openpath.html" STUDY_CONFIG = os.getenv('STUDY_CONFIG', "stage-program") - -config_data = json.load(config_file) config_file.close() -static_path = config_data["paths"]["static_path"] -python_path = config_data["paths"]["python_path"] -server_host = config_data["server"]["host"] -server_port = config_data["server"]["port"] -socket_timeout = config_data["server"]["timeout"] -log_base_dir = config_data["paths"]["log_base_dir"] -auth_method = config_data["server"]["auth"] -aggregate_call_auth = config_data["server"]["aggregate_call_auth"] -not_found_redirect = config_data["paths"].get("404_redirect", OPENPATH_URL) +static_path = os.getenv('WEB_SERVER_STATIC_PATH', "webapp/www/") +server_host = 
os.getenv('WEB_SERVER_HOST', "0.0.0.0") +server_port = os.getenv('WEB_SERVER_PORT', "8080") +socket_timeout = os.getenv('WEB_SERVER_TIMEOUT', "3600") +auth_method = os.getenv('WEB_SERVER_AUTH', "skip") +aggregate_call_auth = os.getenv('WEB_SERVER_AGGREGATE_CALL_AUTH', "no_auth") +not_found_redirect = os.getenv('WEB_SERVER_OPENPATH_URL', "https://www.nrel.gov/transportation/openpath.html") BaseRequest.MEMFILE_MAX = 1024 * 1024 * 1024 # Allow the request size to be 1G # to accomodate large section sizes diff --git a/emission/tests/netTests/TestWebserver.py b/emission/tests/netTests/TestWebserver.py index 4316365df..a102dd334 100644 --- a/emission/tests/netTests/TestWebserver.py +++ b/emission/tests/netTests/TestWebserver.py @@ -23,39 +23,28 @@ class TestWebserver(unittest.TestCase): def setUp(self): - import shutil - - self.webserver_conf_path = "conf/net/api/webserver.conf" - shutil.copyfile( - "%s.sample" % self.webserver_conf_path, self.webserver_conf_path - ) - with open(self.webserver_conf_path, "w") as fd: - fd.write( - json.dumps( - { - "paths": { - "static_path": "webapp/www", - "python_path": "main", - "log_base_dir": ".", - "log_file": "debug.log", - "404_redirect": "http://somewhere.else", - }, - "server": { - "host": "0.0.0.0", - "port": "8080", - "timeout": "3600", - "auth": "skip", - "aggregate_call_auth": "no_auth", - }, - } - ) - ) - logging.debug("Finished setting up %s" % self.webserver_conf_path) - with open(self.webserver_conf_path) as fd: - logging.debug("Current values are %s" % json.load(fd)) + self.originalWebserverEnvVars = {} + self.testModifiedEnvVars = { + 'WEB_SERVER_OPENPATH_URL' : "http://somewhere.else" + } + + for env_var_name, env_var_value in self.testModifiedEnvVars.items(): + if os.getenv(env_var_name) is not None: + # Storing original webserver environment variables before modification + self.originalWebserverEnvVars[env_var_name] = os.getenv(env_var_name) + # Setting webserver environment variables with test values + 
os.environ[env_var_name] = env_var_value + + logging.debug("Finished setting up test webserver environment variables") + logging.debug("Current original values are = %s" % self.originalWebserverEnvVars) + logging.debug("Current modified values are = %s" % self.testModifiedEnvVars) def tearDown(self): - os.remove(self.webserver_conf_path) + # Restoring original webserver environment variables + for env_var_name, env_var_value in self.originalWebserverEnvVars.items(): + os.environ[env_var_name] = env_var_value + logging.debug("Finished restoring original webserver environment variables") + logging.debug("Restored original values are = %s" % self.originalWebserverEnvVars) def test404Redirect(self): from emission.net.api.bottle import response From 4aeb4a6cb8f6d9ff21bc1aec4272b1f2a8e3d385 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Wed, 10 Apr 2024 19:27:42 -0700 Subject: [PATCH 07/89] Corrected logic to set test webserver ENV variables MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Initially, was setting them in the if block if ENV variables already existed. It wasn’t being set as if condition was being evaluated as False. But if condition should be mainly to store existing values. In any case, test values must be set, hence moving it outside if block. 
--- emission/tests/netTests/TestWebserver.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/emission/tests/netTests/TestWebserver.py b/emission/tests/netTests/TestWebserver.py index a102dd334..17668cde9 100644 --- a/emission/tests/netTests/TestWebserver.py +++ b/emission/tests/netTests/TestWebserver.py @@ -32,8 +32,8 @@ def setUp(self): if os.getenv(env_var_name) is not None: # Storing original webserver environment variables before modification self.originalWebserverEnvVars[env_var_name] = os.getenv(env_var_name) - # Setting webserver environment variables with test values - os.environ[env_var_name] = env_var_value + # Setting webserver environment variables with test values + os.environ[env_var_name] = env_var_value logging.debug("Finished setting up test webserver environment variables") logging.debug("Current original values are = %s" % self.originalWebserverEnvVars) From 2531672364d432b14c9664ac78367ae8e9f81d2e Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 11 Apr 2024 19:59:14 -0700 Subject: [PATCH 08/89] Changed Webserver.conf + Db.conf to Environment variable + Removed sed / jq use Details of files changed 1. Start scripts .docker/docker_start_script.sh emission/integrationTests/start_integration_tests.sh setup/tests/start_script.sh - Changed seq / jq usage to directly set Environment variable to desired value; no need of saving sample file as actual conf json file. 2. Config.py files emission/core/config.py emission/net/api/config.py emission/net/ext_service/push/config.py - Based this file on emission/analysis/config.py - Added these to read from conf files if present or environment variables instead of sample files. - Default values set are taken from sample files. - check_unset_env_vars() can be used to check whether ALL environment variables are unset. 3. 
DB, Webapp, Push application usage files emission/core/get_database.py emission/net/api/cfc_webapp.py emission/net/ext_service/push/notify_interface.py - Changed logic to read using the config.py files that read the non-sample actual config files if present or from the Environment variables instead of sample files. 4. Test Files emission/integrationTests/storageTests/TestMongodbAuth.py emission/tests/netTests/TestWebserver.py emission/tests/netTests/TestPush.py - Test files that exercise the functionality of the logic in the files in (3). - Earlier, config files were being replaced with test values and copied over for testing purposes. - Now, switching to using environment variables - call sent to config files in (2) indirectly via application usage files in (3) - Following flow is followed in reading from and restoring original environment variables values setup() - Sets ENV vars by storing original vars if set, then uses test ENV vars as new values TestFunc() - importing modules named in (3) causes values to be read in, which now reads in newer test values since they set the ENV vars in setup() - only those ENV vars in test values are set; unchanged ones left untouched or default values read using os.getenv(var name, default) teardown() - Unset test ENV vars by using del os.environ[var_name] - Restore original values from original dict --- .docker/docker_start_script.sh | 5 +- emission/core/config.py | 39 +++++++++++++++ emission/core/get_database.py | 12 ++--- .../start_integration_tests.sh | 5 +- .../storageTests/TestMongodbAuth.py | 29 ++++++++--- emission/net/api/cfc_webapp.py | 23 ++++----- emission/net/api/config.py | 48 +++++++++++++++++++ emission/net/ext_service/push/config.py | 4 +- .../net/ext_service/push/notify_interface.py | 2 +- emission/tests/netTests/TestPush.py | 48 +++++++++++++------ emission/tests/netTests/TestWebserver.py | 15 +++--- setup/tests/start_script.sh | 5 +- 12 files changed, 173 insertions(+), 62 deletions(-) create mode 100644 
emission/core/config.py create mode 100644 emission/net/api/config.py diff --git a/.docker/docker_start_script.sh b/.docker/docker_start_script.sh index 250fa5774..f4bc17032 100644 --- a/.docker/docker_start_script.sh +++ b/.docker/docker_start_script.sh @@ -7,9 +7,8 @@ echo ${DB_HOST} if [ -z ${DB_HOST} ] ; then local_host=`hostname -i` - jq --arg db_host "$local_host" '.timeseries.url = $db_host' conf/storage/db.conf.sample > conf/storage/db.conf -else - jq --arg db_host "$DB_HOST" '.timeseries.url = $db_host' conf/storage/db.conf.sample > conf/storage/db.conf + export DB_HOST=$local_host + echo "Setting db host environment variable to localhost" fi cat conf/storage/db.conf diff --git a/emission/core/config.py b/emission/core/config.py new file mode 100644 index 000000000..e1c0d13c4 --- /dev/null +++ b/emission/core/config.py @@ -0,0 +1,39 @@ +import json +import logging +import os + +def get_config_data_from_env(): + config_data_env = { + "url": os.getenv('DB_HOST', "localhost"), + "result_limit": os.getenv('DB_TS_RESULT_LIMIT', 250000) + } + return config_data_env + +def check_unset_env_vars(): + config_data_env = { + "url": os.getenv('DB_HOST'), + "result_limit": os.getenv('DB_TS_RESULT_LIMIT') + } + return not any(config_data_env.values()) + +def get_config_data(): + try: + config_file = open('conf/storage/db.conf') + ret_val = json.load(config_file) + config_file.close() + except: + # Check if all DB environment variables are not set + # if check_unset_env_vars(): + # print("All DB environment variables are set to None") + logging.debug("storage not configured, falling back to sample, default configuration") + ret_val = get_config_data_from_env() + return ret_val + +config_data = get_config_data() + +def get_config(): + return config_data + +def reload_config(): + global config_data + config_data = get_config_data() diff --git a/emission/core/get_database.py b/emission/core/get_database.py index 0939b41d9..005abe3ad 100644 --- a/emission/core/get_database.py 
+++ b/emission/core/get_database.py @@ -10,16 +10,10 @@ import os import json -try: - config_file = open('conf/storage/db.conf') -except: - print("storage not configured, falling back to sample, default configuration") - config_file = open('conf/storage/db.conf.sample') +import emission.core.config as ecc -config_data = json.load(config_file) -url = config_data["timeseries"]["url"] -result_limit = config_data["timeseries"]["result_limit"] -config_file.close() +url = ecc.get_config()["url"] +result_limit = ecc.get_config()["result_limit"] try: parsed=pymongo.uri_parser.parse_uri(url) diff --git a/emission/integrationTests/start_integration_tests.sh b/emission/integrationTests/start_integration_tests.sh index da2e30e5b..7096f9833 100644 --- a/emission/integrationTests/start_integration_tests.sh +++ b/emission/integrationTests/start_integration_tests.sh @@ -6,9 +6,8 @@ cd /src/e-mission-server echo ${DB_HOST} if [ -z ${DB_HOST} ] ; then local_host=`hostname -i` - sed "s_localhost_${local_host}_" conf/storage/db.conf.sample > conf/storage/db.conf -else - sed "s_localhost_${DB_HOST}_" conf/storage/db.conf.sample > conf/storage/db.conf + export DB_HOST=$local_host + echo "Setting db host environment variable to localhost" fi cat conf/storage/db.conf diff --git a/emission/integrationTests/storageTests/TestMongodbAuth.py b/emission/integrationTests/storageTests/TestMongodbAuth.py index 13f89d9a3..0e0190d7b 100644 --- a/emission/integrationTests/storageTests/TestMongodbAuth.py +++ b/emission/integrationTests/storageTests/TestMongodbAuth.py @@ -47,10 +47,19 @@ def setUp(self): self.uuid = uuid.uuid4() self.testUserId = self.uuid self.db_conf_file = "conf/storage/db.conf" + self.originalDBEnvVars = {} self.createAdmin() def tearDown(self): self.admin_auth.command({"dropAllUsersFromDatabase": 1}) + logging.debug("Deleting test db environment variables") + for env_var_name, env_var_value in self.testModifiedEnvVars.items(): + del os.environ[env_var_name] + # Restoring original 
db environment variables + for env_var_name, env_var_value in self.originalDBEnvVars.items(): + os.environ[env_var_name] = env_var_value + logging.debug("Finished restoring original db environment variables") + logging.debug("Restored original values are = %s" % self.originalDBEnvVars) try: os.remove(self.db_conf_file) except FileNotFoundError as e: @@ -67,14 +76,20 @@ def createAdmin(self): self.admin_auth = pymongo.MongoClient(self.getURL(self.test_username, self.test_password)).admin def configureDB(self, url): - config = { - "timeseries": { - "url": url, - "result_limit": 250000 - } + self.testModifiedEnvVars = { + 'DB_HOST' : url } - with open(self.db_conf_file, "w") as fp: - json.dump(config, fp, indent=4) + + for env_var_name, env_var_value in self.testModifiedEnvVars.items(): + if os.getenv(env_var_name) is not None: + # Storing original db environment variables before modification + self.originalDBEnvVars[env_var_name] = os.getenv(env_var_name) + # Setting db environment variables with test values + os.environ[env_var_name] = env_var_value + + logging.debug("Finished setting up test db environment variables") + logging.debug("Current original values are = %s" % self.originalDBEnvVars) + logging.debug("Current modified values are = %s" % self.testModifiedEnvVars) def getURL(self, username, password, dbname="admin"): return "mongodb://%s:%s@localhost/%s?authSource=admin&authMechanism=SCRAM-SHA-1" % (username, password, dbname) diff --git a/emission/net/api/cfc_webapp.py b/emission/net/api/cfc_webapp.py index 6596921ea..4cde31550 100644 --- a/emission/net/api/cfc_webapp.py +++ b/emission/net/api/cfc_webapp.py @@ -51,22 +51,17 @@ import emission.storage.timeseries.cache_series as esdc import emission.core.timer as ect import emission.core.get_database as edb - -try: - config_file = open('conf/net/api/webserver.conf') -except: - logging.debug("webserver not configured, falling back to sample, default configuration") - config_file = 
open('conf/net/api/webserver.conf.sample') +import emission.net.api.config as enac STUDY_CONFIG = os.getenv('STUDY_CONFIG', "stage-program") -config_file.close() -static_path = os.getenv('WEB_SERVER_STATIC_PATH', "webapp/www/") -server_host = os.getenv('WEB_SERVER_HOST', "0.0.0.0") -server_port = os.getenv('WEB_SERVER_PORT', "8080") -socket_timeout = os.getenv('WEB_SERVER_TIMEOUT', "3600") -auth_method = os.getenv('WEB_SERVER_AUTH', "skip") -aggregate_call_auth = os.getenv('WEB_SERVER_AGGREGATE_CALL_AUTH', "no_auth") -not_found_redirect = os.getenv('WEB_SERVER_OPENPATH_URL', "https://www.nrel.gov/transportation/openpath.html") +enac.reload_config() +static_path = enac.get_config()["static_path"] +server_host = enac.get_config()["server_host"] +server_port = enac.get_config()["server_port"] +socket_timeout = enac.get_config()["socket_timeout"] +auth_method = enac.get_config()["auth_method"] +aggregate_call_auth = enac.get_config()["aggregate_call_auth"] +not_found_redirect = enac.get_config()["not_found_redirect"] BaseRequest.MEMFILE_MAX = 1024 * 1024 * 1024 # Allow the request size to be 1G # to accomodate large section sizes diff --git a/emission/net/api/config.py b/emission/net/api/config.py new file mode 100644 index 000000000..030bcf322 --- /dev/null +++ b/emission/net/api/config.py @@ -0,0 +1,48 @@ +import json +import logging +import os + +def get_config_data_from_env(): + config_data_env = { + "static_path": os.getenv('WEB_SERVER_STATIC_PATH', "webapp/www/"), + "server_host": os.getenv('WEB_SERVER_HOST', "0.0.0.0"), + "server_port": os.getenv('WEB_SERVER_PORT', "8080"), + "socket_timeout": os.getenv('WEB_SERVER_TIMEOUT', "3600"), + "auth_method": os.getenv('WEB_SERVER_AUTH', "skip"), + "aggregate_call_auth": os.getenv('WEB_SERVER_AGGREGATE_CALL_AUTH', "no_auth"), + "not_found_redirect": os.getenv('WEB_SERVER_REDIRECT_URL', "https://www.nrel.gov/transportation/openpath.html") + } + return config_data_env + +def check_unset_env_vars(): + config_data_env = { + 
"static_path": os.getenv('WEB_SERVER_STATIC_PATH'), + "server_host": os.getenv('WEB_SERVER_HOST'), + "server_port": os.getenv('WEB_SERVER_PORT'), + "socket_timeout": os.getenv('WEB_SERVER_TIMEOUT'), + "auth_method": os.getenv('WEB_SERVER_AUTH'), + "aggregate_call_auth": os.getenv('WEB_SERVER_AGGREGATE_CALL_AUTH'), + "not_found_redirect": os.getenv('WEB_SERVER_REDIRECT_URL') + } + return not any(config_data_env.values()) + +def get_config_data(): + try: + config_file = open('conf/storage/db.conf') + ret_val = json.load(config_file) + config_file.close() + except: + # Check if all Webserver environment variables are not set + # if check_unset_env_vars(): + logging.debug("webserver not configured, falling back to sample, default configuration") + ret_val = get_config_data_from_env() + return ret_val + +config_data = get_config_data() + +def get_config(): + return config_data + +def reload_config(): + global config_data + config_data = get_config_data() diff --git a/emission/net/ext_service/push/config.py b/emission/net/ext_service/push/config.py index 61a9946c0..1dabb75f0 100644 --- a/emission/net/ext_service/push/config.py +++ b/emission/net/ext_service/push/config.py @@ -17,7 +17,7 @@ def get_config_data(): ret_val = json.load(config_file) config_file.close() except: - logging.debug("net.ext_service.push.json not configured, checking environment variables...") + logging.warning("net.ext_service.push.json not configured, checking environment variables...") ret_val = get_config_data_from_env() # Check if all PUSH environment variables are not set if (not any(ret_val.values())): @@ -27,7 +27,7 @@ def get_config_data(): try: config_data = get_config_data() except: - logging.debug("All push environment variables are set to None") + logging.warning("All push environment variables are set to None") def get_config(): return config_data diff --git a/emission/net/ext_service/push/notify_interface.py b/emission/net/ext_service/push/notify_interface.py index 
6d8332c1b..d38a213b1 100644 --- a/emission/net/ext_service/push/notify_interface.py +++ b/emission/net/ext_service/push/notify_interface.py @@ -19,7 +19,7 @@ # We can revisit this if push providers eventually decide to standardize... try: - push_config = pc.get_config_data() + push_config = pc.get_config() except: logging.warning("push service not configured, push notifications not supported") diff --git a/emission/tests/netTests/TestPush.py b/emission/tests/netTests/TestPush.py index af529594d..8fb1e1456 100644 --- a/emission/tests/netTests/TestPush.py +++ b/emission/tests/netTests/TestPush.py @@ -40,22 +40,33 @@ def generate_fake_result(successful_tokens, failed_tokens): class TestPush(unittest.TestCase): def setUp(self): - import shutil - self.push_conf_path = "conf/net/ext_service/push.json" - shutil.copyfile("%s.sample" % self.push_conf_path, - self.push_conf_path) - with open(self.push_conf_path, "w") as fd: - fd.write(json.dumps({ - "provider": "firebase", - "server_auth_token": "firebase_api_key", - "ios_token_format": "apns" - })) - logging.debug("Finished setting up %s" % self.push_conf_path) - with open(self.push_conf_path) as fd: - logging.debug("Current values are %s" % json.load(fd)) + self.originalPushEnvVars = {} + self.testModifiedEnvVars = { + 'PUSH_PROVIDER' : "firebase", + 'PUSH_SERVER_AUTH_TOKEN' : "firebase_api_key", + 'PUSH_IOS_TOKEN_FORMAT' : "apns" + } + + for env_var_name, env_var_value in self.testModifiedEnvVars.items(): + if os.getenv(env_var_name) is not None: + # Storing original push environment variables before modification + self.originalPushEnvVars[env_var_name] = os.getenv(env_var_name) + # Setting push environment variables with test values + os.environ[env_var_name] = env_var_value + + logging.debug("Finished setting up test push environment variables") + logging.debug("Current original values are = %s" % self.originalPushEnvVars) + logging.debug("Current modified values are = %s" % self.testModifiedEnvVars) def tearDown(self): 
- os.remove(self.push_conf_path) + logging.debug("Deleting test push environment variables") + for env_var_name, env_var_value in self.testModifiedEnvVars.items(): + del os.environ[env_var_name] + # Restoring original push environment variables + for env_var_name, env_var_value in self.originalPushEnvVars.items(): + os.environ[env_var_name] = env_var_value + logging.debug("Finished restoring original push environment variables") + logging.debug("Restored original values are = %s" % self.originalPushEnvVars) def testGetInterface(self): import emission.net.ext_service.push.notify_interface as pni @@ -177,6 +188,15 @@ def testFcmNoMapping(self): # and there will be no entries in the token mapping database self.assertEqual(edb.get_push_token_mapping_db().count_documents({}), 0) + + def testNoEnvVarSetUp(self): + self.tearDown() + import emission.net.ext_service.push.notify_interface as pni + # import emission.net.ext_service.push.config as pc + # print("Fetching push config from ENV variables by deleting existing non-sample JSON file") + # self.tearDown() + # self.assertRaises(TypeError, pc.get_config_data()) + self.setUp() if __name__ == '__main__': import emission.tests.common as etc diff --git a/emission/tests/netTests/TestWebserver.py b/emission/tests/netTests/TestWebserver.py index 17668cde9..588944623 100644 --- a/emission/tests/netTests/TestWebserver.py +++ b/emission/tests/netTests/TestWebserver.py @@ -25,7 +25,7 @@ class TestWebserver(unittest.TestCase): def setUp(self): self.originalWebserverEnvVars = {} self.testModifiedEnvVars = { - 'WEB_SERVER_OPENPATH_URL' : "http://somewhere.else" + 'WEB_SERVER_REDIRECT_URL' : "http://somewhere.else" } for env_var_name, env_var_value in self.testModifiedEnvVars.items(): @@ -35,16 +35,19 @@ def setUp(self): # Setting webserver environment variables with test values os.environ[env_var_name] = env_var_value - logging.debug("Finished setting up test webserver environment variables") - logging.debug("Current original values 
are = %s" % self.originalWebserverEnvVars) - logging.debug("Current modified values are = %s" % self.testModifiedEnvVars) + print("Finished setting up test webserver environment variables") + print("Current original values are = %s" % self.originalWebserverEnvVars) + print("Current modified values are = %s" % self.testModifiedEnvVars) def tearDown(self): + print("Deleting test webserver environment variables") + for env_var_name, env_var_value in self.testModifiedEnvVars.items(): + del os.environ[env_var_name] # Restoring original webserver environment variables for env_var_name, env_var_value in self.originalWebserverEnvVars.items(): os.environ[env_var_name] = env_var_value - logging.debug("Finished restoring original webserver environment variables") - logging.debug("Restored original values are = %s" % self.originalWebserverEnvVars) + print("Finished restoring original webserver environment variables") + print("Restored original values are = %s" % self.originalWebserverEnvVars) def test404Redirect(self): from emission.net.api.bottle import response diff --git a/setup/tests/start_script.sh b/setup/tests/start_script.sh index b76478da9..0fe21ed1a 100644 --- a/setup/tests/start_script.sh +++ b/setup/tests/start_script.sh @@ -6,9 +6,8 @@ cd /src/e-mission-server echo ${DB_HOST} if [ -z ${DB_HOST} ] ; then local_host=`hostname -i` - sed "s_localhost_${local_host}_" conf/storage/db.conf.sample > conf/storage/db.conf -else - sed "s_localhost_${DB_HOST}_" conf/storage/db.conf.sample > conf/storage/db.conf + export DB_HOST=$local_host + echo "Setting db host environment variable to localhost" fi cat conf/storage/db.conf From ea57afa92aff6df6a086a298f41258eaa4797228 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Fri, 12 Apr 2024 00:03:59 -0700 Subject: [PATCH 09/89] Reverting Natalie testing artifact changes + Adding image-push-merge --- .github/workflows/image_build_push.yml | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git 
a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index c5f2dc74a..9fd15dfa0 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -6,7 +6,7 @@ name: docker image # events but only for the master branch on: push: - branches: [ master, gis-based-mode-detection, consolidate-differences ] + branches: [ image-push-merge ] # Env variable @@ -46,14 +46,3 @@ jobs: - name: push docker image run: | docker push $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} - - - name: Write tag file - run: | - echo 'foo bar' > tag.txt - - - uses: actions/upload-artifact@v4 - with: - name: Create tag artifact - path: tag.txt - if-no-files-found: error - overwrite: true From e8344e9a3433838aec72daf63ea1a277ce8dc8e2 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Fri, 12 Apr 2024 00:59:32 -0700 Subject: [PATCH 10/89] Fixes for failing TestTokenQueries print assertions MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit test_run_script_empty() and test_run_script_show() were failing - Was occurring because I had used logging.debug() instead of print() - Hence std output stream was unable to get the print(“storage not configured”) statement Some more fixes: - db.conf incorrectly read instead of webserver.conf in config.py in emission/net/api - TestPush had an incomplete test, that was used just for testing purposes to invoke calls on importing the pni module. 
- TestWebserver.py changed print() statements to logging.debug() --- emission/core/config.py | 2 +- emission/net/api/config.py | 2 +- emission/tests/netTests/TestPush.py | 8 -------- emission/tests/netTests/TestWebserver.py | 12 ++++++------ 4 files changed, 8 insertions(+), 16 deletions(-) diff --git a/emission/core/config.py b/emission/core/config.py index e1c0d13c4..6612732c2 100644 --- a/emission/core/config.py +++ b/emission/core/config.py @@ -25,7 +25,7 @@ def get_config_data(): # Check if all DB environment variables are not set # if check_unset_env_vars(): # print("All DB environment variables are set to None") - logging.debug("storage not configured, falling back to sample, default configuration") + print("storage not configured, falling back to sample, default configuration") ret_val = get_config_data_from_env() return ret_val diff --git a/emission/net/api/config.py b/emission/net/api/config.py index 030bcf322..31e0e34c0 100644 --- a/emission/net/api/config.py +++ b/emission/net/api/config.py @@ -28,7 +28,7 @@ def check_unset_env_vars(): def get_config_data(): try: - config_file = open('conf/storage/db.conf') + config_file = open('conf/net/api/webserver.conf') ret_val = json.load(config_file) config_file.close() except: diff --git a/emission/tests/netTests/TestPush.py b/emission/tests/netTests/TestPush.py index 8fb1e1456..865ffa0b7 100644 --- a/emission/tests/netTests/TestPush.py +++ b/emission/tests/netTests/TestPush.py @@ -189,14 +189,6 @@ def testFcmNoMapping(self): # and there will be no entries in the token mapping database self.assertEqual(edb.get_push_token_mapping_db().count_documents({}), 0) - def testNoEnvVarSetUp(self): - self.tearDown() - import emission.net.ext_service.push.notify_interface as pni - # import emission.net.ext_service.push.config as pc - # print("Fetching push config from ENV variables by deleting existing non-sample JSON file") - # self.tearDown() - # self.assertRaises(TypeError, pc.get_config_data()) - self.setUp() if 
__name__ == '__main__': import emission.tests.common as etc diff --git a/emission/tests/netTests/TestWebserver.py b/emission/tests/netTests/TestWebserver.py index 588944623..2c3a634d9 100644 --- a/emission/tests/netTests/TestWebserver.py +++ b/emission/tests/netTests/TestWebserver.py @@ -35,19 +35,19 @@ def setUp(self): # Setting webserver environment variables with test values os.environ[env_var_name] = env_var_value - print("Finished setting up test webserver environment variables") - print("Current original values are = %s" % self.originalWebserverEnvVars) - print("Current modified values are = %s" % self.testModifiedEnvVars) + logging.debug("Finished setting up test webserver environment variables") + logging.debug("Current original values are = %s" % self.originalWebserverEnvVars) + logging.debug("Current modified values are = %s" % self.testModifiedEnvVars) def tearDown(self): - print("Deleting test webserver environment variables") + logging.debug("Deleting test webserver environment variables") for env_var_name, env_var_value in self.testModifiedEnvVars.items(): del os.environ[env_var_name] # Restoring original webserver environment variables for env_var_name, env_var_value in self.originalWebserverEnvVars.items(): os.environ[env_var_name] = env_var_value - print("Finished restoring original webserver environment variables") - print("Restored original values are = %s" % self.originalWebserverEnvVars) + logging.debug("Finished restoring original webserver environment variables") + logging.debug("Restored original values are = %s" % self.originalWebserverEnvVars) def test404Redirect(self): from emission.net.api.bottle import response From 7f0d5f0bf31f2b2bc76f0e1d295f2c5494a3f5a6 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Fri, 12 Apr 2024 01:35:58 -0700 Subject: [PATCH 11/89] Fixes for failing TestTokenQueries print assertions MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Assertion Error this 
time in another CI GitHub actions Workflow name : test-with-docker Succeeded earlier when the other test (ubuntu-only-test-with-manual-install) failed: https://github.com/e-mission/e-mission-server/actions/runs/8658279606/job/23741854476 - This happened since then “storage not configured” wasn’t being printed, which is what the test wants as docker compose is used to set up mongo db container. - With docker, localhost becomes db as the DB_HOST ENV var or “url” key-value. SOLVED - Added if condition to print “storage not configured” only when url value is still equal to sample value “localhost” --- emission/core/config.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/emission/core/config.py b/emission/core/config.py index 6612732c2..8dbbabe9d 100644 --- a/emission/core/config.py +++ b/emission/core/config.py @@ -25,8 +25,9 @@ def get_config_data(): # Check if all DB environment variables are not set # if check_unset_env_vars(): # print("All DB environment variables are set to None") - print("storage not configured, falling back to sample, default configuration") ret_val = get_config_data_from_env() + if ret_val["url"] == "localhost": + print("storage not configured, falling back to sample, default configuration") return ret_val config_data = get_config_data() From 05551c816930be81db9736a84530d41bf84dfc4a Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Mon, 15 Apr 2024 17:14:34 -0700 Subject: [PATCH 12/89] Try-except block brought to the top Will first check if debug.conf exists, like other conf files. If it does not, the except block then checks whether the PROD_STAGE environment variable is set and whether debug.conf.internal is present. Else, fall back to sample conf. 
--- emission/analysis/config.py | 17 ++++++++--------- seed_model.json | 1 + 2 files changed, 9 insertions(+), 9 deletions(-) create mode 100644 seed_model.json diff --git a/emission/analysis/config.py b/emission/analysis/config.py index f579349c9..a7a84b6db 100644 --- a/emission/analysis/config.py +++ b/emission/analysis/config.py @@ -2,18 +2,17 @@ import os def get_config_data(): - if os.getenv("PROD_STAGE") == "TRUE": - print("In production environment, opening internal debug.conf") - config_file = open('conf/analysis/debug.conf.internal.json') - else: - try: - print("Trying to open debug.conf.json") - config_file = open('conf/analysis/debug.conf.json') - except: + try: + print("Trying to open debug.conf.json") + config_file = open('conf/analysis/debug.conf.json') + except: + if os.getenv("PROD_STAGE") == "TRUE": + print("In production environment, opening internal debug.conf") + config_file = open('conf/analysis/debug.conf.internal.json') + else: print("analysis.debug.conf.json not configured, falling back to sample, default configuration") config_file = open('conf/analysis/debug.conf.json.sample') ret_val = json.load(config_file) - print(ret_val) config_file.close() return ret_val diff --git a/seed_model.json b/seed_model.json new file mode 100644 index 000000000..b8a4aca8a --- /dev/null +++ b/seed_model.json @@ -0,0 +1 @@ +{"py/object": "sklearn.ensemble._forest.RandomForestClassifier", "py/state": {"base_estimator": {"py/object": "sklearn.tree._classes.DecisionTreeClassifier", "py/state": {"class_weight": null, "classes_": null, "criterion": "gini", "max_depth": null, "max_features": null, "max_features_": null, "max_leaf_nodes": null, "min_impurity_split": 1e-07, "min_samples_leaf": 1, "min_samples_split": 2, "min_weight_fraction_leaf": 0.0, "n_classes_": null, "n_features_": null, "n_outputs_": null, "presort": false, "random_state": null, "splitter": "best", "tree_": null, "_sklearn_version": "0.23.2"}}, "base_estimator_": {"py/id": 1}, "bootstrap": true, 
"class_weight": null, "classes_": {"py/object": "numpy.ndarray", "dtype": "float64", "values": [1.0, 5.0]}, "criterion": "gini", "estimator_params": {"py/tuple": ["criterion", "max_depth", "min_samples_split", "min_samples_leaf", "min_weight_fraction_leaf", "max_features", "max_leaf_nodes", "min_impurity_split", "random_state"]}, "estimators_": [{"py/object": "sklearn.tree._classes.DecisionTreeClassifier", "py/state": {"class_weight": null, "classes_": {"py/object": "numpy.ndarray", "dtype": "float64", "values": [0.0, 1.0]}, "criterion": "gini", "max_depth": null, "max_features": "auto", "max_features_": 3, "max_leaf_nodes": null, "min_impurity_split": 1e-07, "min_samples_leaf": 1, "min_samples_split": 2, "min_weight_fraction_leaf": 0.0, "n_classes_": {"py/object": "numpy.int64", "dtype": "int64", "value": 2}, "n_features_": 13, "n_outputs_": 1, "presort": false, "random_state": 1842313873, "splitter": "best", "tree_": {"py/reduce": [{"py/type": "sklearn.tree._tree.Tree"}, {"py/tuple": [13, {"py/object": "numpy.ndarray", "dtype": "int64", "values": [2]}, 1]}, {"max_depth": 1, "node_count": 3, "nodes": {"py/object": "numpy.ndarray", "dtype": "[('left_child', ' Date: Mon, 15 Apr 2024 20:30:21 -0700 Subject: [PATCH 13/89] Removed extraneous seed_model.json Accidentally added this after running all tests. 
--- seed_model.json | 1 - 1 file changed, 1 deletion(-) delete mode 100644 seed_model.json diff --git a/seed_model.json b/seed_model.json deleted file mode 100644 index b8a4aca8a..000000000 --- a/seed_model.json +++ /dev/null @@ -1 +0,0 @@ -{"py/object": "sklearn.ensemble._forest.RandomForestClassifier", "py/state": {"base_estimator": {"py/object": "sklearn.tree._classes.DecisionTreeClassifier", "py/state": {"class_weight": null, "classes_": null, "criterion": "gini", "max_depth": null, "max_features": null, "max_features_": null, "max_leaf_nodes": null, "min_impurity_split": 1e-07, "min_samples_leaf": 1, "min_samples_split": 2, "min_weight_fraction_leaf": 0.0, "n_classes_": null, "n_features_": null, "n_outputs_": null, "presort": false, "random_state": null, "splitter": "best", "tree_": null, "_sklearn_version": "0.23.2"}}, "base_estimator_": {"py/id": 1}, "bootstrap": true, "class_weight": null, "classes_": {"py/object": "numpy.ndarray", "dtype": "float64", "values": [1.0, 5.0]}, "criterion": "gini", "estimator_params": {"py/tuple": ["criterion", "max_depth", "min_samples_split", "min_samples_leaf", "min_weight_fraction_leaf", "max_features", "max_leaf_nodes", "min_impurity_split", "random_state"]}, "estimators_": [{"py/object": "sklearn.tree._classes.DecisionTreeClassifier", "py/state": {"class_weight": null, "classes_": {"py/object": "numpy.ndarray", "dtype": "float64", "values": [0.0, 1.0]}, "criterion": "gini", "max_depth": null, "max_features": "auto", "max_features_": 3, "max_leaf_nodes": null, "min_impurity_split": 1e-07, "min_samples_leaf": 1, "min_samples_split": 2, "min_weight_fraction_leaf": 0.0, "n_classes_": {"py/object": "numpy.int64", "dtype": "int64", "value": 2}, "n_features_": 13, "n_outputs_": 1, "presort": false, "random_state": 1842313873, "splitter": "best", "tree_": {"py/reduce": [{"py/type": "sklearn.tree._tree.Tree"}, {"py/tuple": [13, {"py/object": "numpy.ndarray", "dtype": "int64", "values": [2]}, 1]}, {"max_depth": 1, "node_count": 3, 
"nodes": {"py/object": "numpy.ndarray", "dtype": "[('left_child', ' Date: Wed, 24 Apr 2024 15:17:05 -0700 Subject: [PATCH 14/89] TODO added to change to master branch in YML file --- .github/workflows/image_build_push.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 9fd15dfa0..3db336372 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -4,6 +4,7 @@ name: docker image # Controls when the action will run. Triggers the workflow on push or pull request # events but only for the master branch +# TODO: Change to master branch once changes are final. on: push: branches: [ image-push-merge ] From 2fdb46994e9fa5af64701c7d31794760f7c4c0e8 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Wed, 24 Apr 2024 15:44:33 -0700 Subject: [PATCH 15/89] Adding image-push-merge branch to automated CI/CD tests --- .github/workflows/test-with-docker.yml | 4 ++-- .github/workflows/test-with-manual-install.yml | 10 ++++++---- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/.github/workflows/test-with-docker.yml b/.github/workflows/test-with-docker.yml index e38499544..f5ae3e239 100644 --- a/.github/workflows/test-with-docker.yml +++ b/.github/workflows/test-with-docker.yml @@ -6,9 +6,9 @@ name: test-with-docker # events but only for the master branch on: push: - branches: [ master ] + branches: [ image-push-merge ] pull_request: - branches: [ master ] + branches: [ image-push-merge ] schedule: # * is a special character in YAML so you have to quote this string - cron: '5 4 * * 0' diff --git a/.github/workflows/test-with-manual-install.yml b/.github/workflows/test-with-manual-install.yml index 4a81eb000..62863e8e1 100644 --- a/.github/workflows/test-with-manual-install.yml +++ b/.github/workflows/test-with-manual-install.yml @@ -7,12 +7,14 @@ name: ubuntu-only-test-with-manual-install on: push: branches: - - master - - 
gis-based-mode-detection + # - master + # - gis-based-mode-detection + - image-push-merge pull_request: branches: - - master - - gis-based-mode-detection + # - master + # - gis-based-mode-detection + - image-push-merge schedule: # * is a special character in YAML so you have to quote this string - cron: '5 4 * * 0' From 91abdeaf2eaff57917434c417209a8c31f35b1f1 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 02:52:53 -0700 Subject: [PATCH 16/89] Upload artifact test - 1 Removed test workflow execution. Added upload artifact. --- .github/workflows/image_build_push.yml | 12 ++++++++++++ .github/workflows/test-with-docker.yml | 4 ++-- .github/workflows/test-with-manual-install.yml | 12 ++++++------ 3 files changed, 20 insertions(+), 8 deletions(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 3db336372..8690196a6 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -47,3 +47,15 @@ jobs: - name: push docker image run: | docker push $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} + + - name: Create a text file + run: | + echo "hello world" > tag_file.txt + echo "Created tag text file" + + - name: Upload Artifact + uses: actions/upload-artifact@v4 + with: + name: docker-image-tag + path: tag_file.txt + overwrite: true diff --git a/.github/workflows/test-with-docker.yml b/.github/workflows/test-with-docker.yml index f5ae3e239..108dd62fa 100644 --- a/.github/workflows/test-with-docker.yml +++ b/.github/workflows/test-with-docker.yml @@ -6,9 +6,9 @@ name: test-with-docker # events but only for the master branch on: push: - branches: [ image-push-merge ] + branches: [ master ] pull_request: - branches: [ image-push-merge ] + branches: [ master ] schedule: # * is a special character in YAML so you have to quote this string - cron: '5 4 * * 0' diff --git a/.github/workflows/test-with-manual-install.yml 
b/.github/workflows/test-with-manual-install.yml index 62863e8e1..7212b9ebf 100644 --- a/.github/workflows/test-with-manual-install.yml +++ b/.github/workflows/test-with-manual-install.yml @@ -7,14 +7,14 @@ name: ubuntu-only-test-with-manual-install on: push: branches: - # - master - # - gis-based-mode-detection - - image-push-merge + - master + - gis-based-mode-detection + # - image-push-merge pull_request: branches: - # - master - # - gis-based-mode-detection - - image-push-merge + - master + - gis-based-mode-detection + # - image-push-merge schedule: # * is a special character in YAML so you have to quote this string - cron: '5 4 * * 0' From bbdaaaede9c819be371776879b8699d5ceec3cb0 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 02:59:42 -0700 Subject: [PATCH 17/89] Upload artifact test - 2 Commented image build and push for testing purposes. Fixed indentation. Learnt that YAML do not permit using tabs as indentation; spaces allowed. --- .github/workflows/image_build_push.yml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 8690196a6..73d06d4d6 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -39,23 +39,23 @@ jobs: run: echo running in repo ${GITHUB_REPOSITORY#*/} branch ${GITHUB_REF##*/} on ${{ steps.date.outputs.date }} # Runs a set of commands using the runners shell - - name: build docker image - run: | - docker build -t $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} . - docker images + # - name: build docker image + # run: | + # docker build -t $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} . 
+ # docker images - - name: push docker image - run: | - docker push $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} + # - name: push docker image + # run: | + # docker push $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} - name: Create a text file run: | echo "hello world" > tag_file.txt - echo "Created tag text file" + echo "Created tag text file" - name: Upload Artifact uses: actions/upload-artifact@v4 - with: + with: name: docker-image-tag - path: tag_file.txt - overwrite: true + path: tag_file.txt + overwrite: true From 5d0ca0237d1dc5925452de40e432cbd16a5ffb57 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 10:50:46 -0700 Subject: [PATCH 18/89] Added temporary test file Temporary test file while working on automating tags modification. Checking to see if new artifacts generated even if overwrite is set to true. --- .docker/test.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .docker/test.txt diff --git a/.docker/test.txt b/.docker/test.txt new file mode 100644 index 000000000..9310e7312 --- /dev/null +++ b/.docker/test.txt @@ -0,0 +1,2 @@ +Temporary test file while working on automating tags modification. +Checking to see if new artifacts generated even if overwrite is set to true. \ No newline at end of file From 2e8a91191cdba694925e4451c1d219446fbba803 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Thu, 25 Apr 2024 14:21:18 -0600 Subject: [PATCH 19/89] Changes to actions + echo Trying to build an image off this branch instead of the other branch, so that testing is more relevant to the changes on consolidate-differences. 
--- .docker/docker_start_script.sh | 1 + .github/workflows/image_build_push.yml | 2 +- .github/workflows/osx-ubuntu-manual-install.yml | 7 ++++--- .github/workflows/test-default-action.yml | 7 ++++--- .github/workflows/test-with-docker.yml | 4 ++-- .github/workflows/test-with-manual-install.yml | 10 ++++++---- 6 files changed, 18 insertions(+), 13 deletions(-) diff --git a/.docker/docker_start_script.sh b/.docker/docker_start_script.sh index f4bc17032..daf82b32e 100644 --- a/.docker/docker_start_script.sh +++ b/.docker/docker_start_script.sh @@ -33,6 +33,7 @@ fi #TODO: start cron jobs # change python environment +echo "Starting up e-mission-environment..." source setup/activate.sh # launch the webapp diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 9fd15dfa0..9fbaa04f3 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -6,7 +6,7 @@ name: docker image # events but only for the master branch on: push: - branches: [ image-push-merge ] + branches: [ consolidate-differences ] # Env variable diff --git a/.github/workflows/osx-ubuntu-manual-install.yml b/.github/workflows/osx-ubuntu-manual-install.yml index e6ecf0ddc..00db6f1f9 100644 --- a/.github/workflows/osx-ubuntu-manual-install.yml +++ b/.github/workflows/osx-ubuntu-manual-install.yml @@ -5,11 +5,12 @@ name: osx-ubuntu-manual-install on: push: branches: - - master - - gis-based-mode-detection + # - master + # - gis-based-mode-detection + - consolidate-differences pull_request: branches: - - master + # - master - gis-based-mode-detection schedule: # * is a special character in YAML so you have to quote this string diff --git a/.github/workflows/test-default-action.yml b/.github/workflows/test-default-action.yml index 424e5bee1..15b38e593 100644 --- a/.github/workflows/test-default-action.yml +++ b/.github/workflows/test-default-action.yml @@ -7,11 +7,12 @@ name: CI on: push: branches: - - master - - 
gis-based-mode-detection + # - master + # - gis-based-mode-detection + - consolidate-differences pull_request: branches: - - master + # - master - gis-based-mode-detection # A workflow run is made up of one or more jobs that can run sequentially or in parallel diff --git a/.github/workflows/test-with-docker.yml b/.github/workflows/test-with-docker.yml index e38499544..a71fda621 100644 --- a/.github/workflows/test-with-docker.yml +++ b/.github/workflows/test-with-docker.yml @@ -6,9 +6,9 @@ name: test-with-docker # events but only for the master branch on: push: - branches: [ master ] + branches: [ consolidate-differences ] pull_request: - branches: [ master ] + branches: [ consolidate-differences ] schedule: # * is a special character in YAML so you have to quote this string - cron: '5 4 * * 0' diff --git a/.github/workflows/test-with-manual-install.yml b/.github/workflows/test-with-manual-install.yml index 4a81eb000..0056cc37a 100644 --- a/.github/workflows/test-with-manual-install.yml +++ b/.github/workflows/test-with-manual-install.yml @@ -7,12 +7,14 @@ name: ubuntu-only-test-with-manual-install on: push: branches: - - master - - gis-based-mode-detection + # - master + # - gis-based-mode-detection + - consolidate-differences pull_request: branches: - - master - - gis-based-mode-detection + # - master + # - gis-based-mode-detection + - consolidate-differences schedule: # * is a special character in YAML so you have to quote this string - cron: '5 4 * * 0' From f5aae4760062cc357a7f2e806f009028525d253b Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 16:19:53 -0700 Subject: [PATCH 20/89] Upload artifact test - 3 Storing actual required tag with date timestamp. 
--- .github/workflows/image_build_push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 73d06d4d6..3cd0f61a9 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -50,7 +50,7 @@ jobs: - name: Create a text file run: | - echo "hello world" > tag_file.txt + echo ${{ steps.date.outputs.date }} > tag_file.txt echo "Created tag text file" - name: Upload Artifact From 051ef66b6f19a435c2a24f65137b4515ad7e3ba3 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 18:36:53 -0700 Subject: [PATCH 21/89] Repository dispatch send - 1 Checked out from image-push-merge branch which has artifact method working. - Changed trigger branch on push to tags-dispatch - Dispatching repository event from server so that join page repo's workflow will be triggered. - Once this dispatch method works, might not even need artifacts since repository dispatch has a client_payload option to pass data. - This is helpful since only 500 artifacts are allowed in any repo: https://github.com/actions/upload-artifact#:~:text=has%20a%20limit-,of%20500%20artifacts.,-For%20assistance%20with --- .github/workflows/image_build_push.yml | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 3cd0f61a9..9009a94c8 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -7,7 +7,7 @@ name: docker image # TODO: Change to master branch once changes are final. 
on: push: - branches: [ image-push-merge ] + branches: [ tags-dispatch ] # Env variable @@ -59,3 +59,13 @@ jobs: name: docker-image-tag path: tag_file.txt overwrite: true + + - name: Trigger workflow in join-page, admin-dash, public-dash + run: | + curl -L \ + -X POST \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${{ secrets.GH_PAT_TAG }}" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/MukuFlash03/nrel-openpath-join-page/dispatches \ + -d '{"event_type":"tags-dispatch","client_payload":{"docker-image-tag":${{ steps.date.outputs.date }}}}' \ No newline at end of file From 6c448656d1c52c48c15f1a2f0d28fe4fe9ee2e49 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 21:16:22 -0700 Subject: [PATCH 22/89] Workflow dispatch send - 1 Trying with workflow dispatch. --- .github/workflows/image_build_push.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 9009a94c8..4bbd9aef6 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -62,10 +62,10 @@ jobs: - name: Trigger workflow in join-page, admin-dash, public-dash run: | - curl -L \ - -X POST \ - -H "Accept: application/vnd.github+json" \ - -H "Authorization: Bearer ${{ secrets.GH_PAT_TAG }}" \ - -H "X-GitHub-Api-Version: 2022-11-28" \ - https://api.github.com/repos/MukuFlash03/nrel-openpath-join-page/dispatches \ - -d '{"event_type":"tags-dispatch","client_payload":{"docker-image-tag":${{ steps.date.outputs.date }}}}' \ No newline at end of file + curl -L \ + -X POST \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer ${{ secrets.GH_FG_PAT_TAGS }}" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/MukuFlash03/nrel-openpath-join-page/actions/workflows/90148778/dispatches \ + -d '{"ref":"tags-dispatch","inputs":{"name":"Mona the 
Octocat","home":"San Francisco, CA"}}' \ No newline at end of file From 4569dfb00f656da06851b7c8befb0f092ef15c3a Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 21:18:49 -0700 Subject: [PATCH 23/89] Workflow dispatch send - 2 Removing failed inputs --- .github/workflows/image_build_push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 4bbd9aef6..c9cdab548 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -68,4 +68,4 @@ jobs: -H "Authorization: Bearer ${{ secrets.GH_FG_PAT_TAGS }}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/MukuFlash03/nrel-openpath-join-page/actions/workflows/90148778/dispatches \ - -d '{"ref":"tags-dispatch","inputs":{"name":"Mona the Octocat","home":"San Francisco, CA"}}' \ No newline at end of file + -d '{"ref":"tags-dispatch"}' \ No newline at end of file From b562c2c155a1d958326cf0de5bab3171eaccb6f6 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 21:21:30 -0700 Subject: [PATCH 24/89] Workflow dispatch send - 3 Testing triggering workflow dispatch again. --- .docker/test.txt | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.docker/test.txt b/.docker/test.txt index 9310e7312..9a745cd3d 100644 --- a/.docker/test.txt +++ b/.docker/test.txt @@ -1,2 +1,8 @@ Temporary test file while working on automating tags modification. -Checking to see if new artifacts generated even if overwrite is set to true. \ No newline at end of file +Checking to see if new artifacts generated even if overwrite is set to true. 
+ +----- + +Testing again to see if workflow dispatch works + +------ \ No newline at end of file From 25109b31bacf3f20bf9dd6a0a1e893cf99c4898b Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 21:39:29 -0700 Subject: [PATCH 25/89] Workflow dispatch send - 3 Changed branch name to new branch so it picks workflow events in join from this branch. --- .docker/test.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/.docker/test.txt b/.docker/test.txt index 9a745cd3d..63a1bf80c 100644 --- a/.docker/test.txt +++ b/.docker/test.txt @@ -5,4 +5,5 @@ Checking to see if new artifacts generated even if overwrite is set to true. Testing again to see if workflow dispatch works +Yes, working with updated branch as well. ------ \ No newline at end of file From 709f3cf480face7f886a18612ac7735bc4b37a1e Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 22:48:48 -0700 Subject: [PATCH 26/89] Workflow dispatch send - 4 Testing if working with updated token scopes to have only actions: write --- .docker/test.txt | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.docker/test.txt b/.docker/test.txt index 63a1bf80c..351c280d9 100644 --- a/.docker/test.txt +++ b/.docker/test.txt @@ -6,4 +6,8 @@ Checking to see if new artifacts generated even if overwrite is set to true. Testing again to see if workflow dispatch works Yes, working with updated branch as well. ------- \ No newline at end of file +------ + +Testing if working with updated token scopes to have only actions: write + +----- \ No newline at end of file From 4b4218529faabe2c9f7809331eaaf5f57e1975eb Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 23:20:53 -0700 Subject: [PATCH 27/89] Workflow dispatch send - 5 Testing sending docker image tags as input values in the POST request. 
--- .github/workflows/image_build_push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index c9cdab548..8cd707682 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -68,4 +68,4 @@ jobs: -H "Authorization: Bearer ${{ secrets.GH_FG_PAT_TAGS }}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/MukuFlash03/nrel-openpath-join-page/actions/workflows/90148778/dispatches \ - -d '{"ref":"tags-dispatch"}' \ No newline at end of file + -d '{"ref":"tags-dispatch", "inputs": {"docker_image_tag" : "test_tag"}}' \ No newline at end of file From 12d09ae06e9a9c5e44954a9718a33a038bde15aa Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 23:23:39 -0700 Subject: [PATCH 28/89] Workflow dispatch send - 6 Sending actual date timestamp which is used as a suffix to the docker image tag. --- .github/workflows/image_build_push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 8cd707682..30bb7add0 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -68,4 +68,4 @@ jobs: -H "Authorization: Bearer ${{ secrets.GH_FG_PAT_TAGS }}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/MukuFlash03/nrel-openpath-join-page/actions/workflows/90148778/dispatches \ - -d '{"ref":"tags-dispatch", "inputs": {"docker_image_tag" : "test_tag"}}' \ No newline at end of file + -d '{"ref":"tags-dispatch", "inputs": {"docker_image_tag" : "${{ steps.date.outputs.date }}"}}' \ No newline at end of file From 94c129bb3b2f1b827661c64093b7803918c18ccd Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 25 Apr 2024 23:52:48 -0700 Subject: [PATCH 29/89] Workflow dispatch send - 7 Removed code related to artifact method for sending 
docker image tags. Workflow dispatch uses POST request via which input data values can be sent to the repository in which the workflow is to be triggered. --- .github/workflows/image_build_push.yml | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 30bb7add0..1c6f19a84 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -48,18 +48,6 @@ jobs: # run: | # docker push $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} - - name: Create a text file - run: | - echo ${{ steps.date.outputs.date }} > tag_file.txt - echo "Created tag text file" - - - name: Upload Artifact - uses: actions/upload-artifact@v4 - with: - name: docker-image-tag - path: tag_file.txt - overwrite: true - - name: Trigger workflow in join-page, admin-dash, public-dash run: | curl -L \ From 0dd1245205e6901b0d0ce1d302d56e3db41ffbac Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Fri, 26 Apr 2024 00:43:23 -0700 Subject: [PATCH 30/89] Matrix build send - 1 Copied over YAML file from e-mission-server repo after successfully implementing transmission of docker image tags from e-mission-server to join with artifacts and workflow dispatch. Added another dispatch job to trigger workflow via matrix strategy. --- .github/workflows/image_build_push.yml | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 1c6f19a84..5ee206b30 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -7,7 +7,7 @@ name: docker image # TODO: Change to master branch once changes are final. 
on: push: - branches: [ tags-dispatch ] + branches: [ tags-matrix ] # Env variable @@ -48,6 +48,15 @@ jobs: # run: | # docker push $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} + dispatch: + runs-on: ubuntu-latest + strategy: + matrix: + repo: ['MukuFlash03/nrel-openpath-join-page', 'MukuFlash03/op-admin-dashboard', 'MukuFlash03/em-public-dashboard'] + + steps: + - uses: actions/checkout@v2 + - name: Trigger workflow in join-page, admin-dash, public-dash run: | curl -L \ @@ -55,5 +64,5 @@ jobs: -H "Accept: application/vnd.github+json" \ -H "Authorization: Bearer ${{ secrets.GH_FG_PAT_TAGS }}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ - https://api.github.com/repos/MukuFlash03/nrel-openpath-join-page/actions/workflows/90148778/dispatches \ + https://api.github.com/repos/${{ matrix.repo }}/actions/workflows/image_build_push.yml/dispatches \ -d '{"ref":"tags-dispatch", "inputs": {"docker_image_tag" : "${{ steps.date.outputs.date }}"}}' \ No newline at end of file From 5434914ac17ddef894920be56d5e5b42d02fc4b2 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Fri, 26 Apr 2024 00:50:33 -0700 Subject: [PATCH 31/89] Matrix build send - 2 Updated branch name which was set for the workflow dispatch without matrix build. 
--- .github/workflows/image_build_push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 5ee206b30..2cbb56fb8 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -65,4 +65,4 @@ jobs: -H "Authorization: Bearer ${{ secrets.GH_FG_PAT_TAGS }}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/${{ matrix.repo }}/actions/workflows/image_build_push.yml/dispatches \ - -d '{"ref":"tags-dispatch", "inputs": {"docker_image_tag" : "${{ steps.date.outputs.date }}"}}' \ No newline at end of file + -d '{"ref":"tags-matrix", "inputs": {"docker_image_tag" : "${{ steps.date.outputs.date }}"}}' \ No newline at end of file From 0fd3e9db732fd17ce50f7cb8811fa98333bffb60 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Fri, 26 Apr 2024 00:58:06 -0700 Subject: [PATCH 32/89] Matrix build send - 3 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Jobs completed successfully but nothing happens in the other three target repos. I observed in real-time that the matrix jobs - "dispatch”, start before or at the same time the main build job and get over really quickly. Hence, the error makes sense as the docker image tag is not generated yet: { "message": "Required input 'docker_image_tag' not provided", "documentation_url": "https://docs.github.com/rest/actions/workflows#create-a-workflow-dispatch-event" } Hence, the timeout-minutes parameter makes sense to use as was mentioned in the blog post: https://www.amaysim.technology/blog/using-github-actions-to-trigger-actions-across-repos 5 minutes seems too much, I’ll give it a minute for now. Alternatively, timeout not needed; can use "needs" similar to how fetch_run_id job was run first before trying to download artifact. 
--- .github/workflows/image_build_push.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 2cbb56fb8..e250f0967 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -49,6 +49,7 @@ jobs: # docker push $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} dispatch: + needs: build runs-on: ubuntu-latest strategy: matrix: From ab399ea70dd8e215a35564e980bafc2d056417a2 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Fri, 26 Apr 2024 01:02:45 -0700 Subject: [PATCH 33/89] Matrix build send - 3 Since the date is being generated in a different jobs, must access it using needs keyword. --- .github/workflows/image_build_push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index e250f0967..74d5d240d 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -66,4 +66,4 @@ jobs: -H "Authorization: Bearer ${{ secrets.GH_FG_PAT_TAGS }}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/${{ matrix.repo }}/actions/workflows/image_build_push.yml/dispatches \ - -d '{"ref":"tags-matrix", "inputs": {"docker_image_tag" : "${{ steps.date.outputs.date }}"}}' \ No newline at end of file + -d '{"ref":"tags-matrix", "inputs": {"docker_image_tag" : "${{ needs.build.outputs.date }}"}}' \ No newline at end of file From 91c0c1f86dbc4048782a953eb01c07fd8e2fecf5 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Fri, 26 Apr 2024 01:13:34 -0700 Subject: [PATCH 34/89] Matrix build send - 4 Properly added date by setting it as an output value of the 1st build job. Then storing it as an environment variable in the 2nd job by accessing it via the needs keyword. Finally using it in the curl POST request by referencing the environment variable. 
--- .github/workflows/image_build_push.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 74d5d240d..f3ce7c1c5 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -22,6 +22,9 @@ jobs: # The type of runner that the job will run on runs-on: ubuntu-latest + outputs: + date: ${{ steps.date.outputs.date }} + # Steps represent a sequence of tasks that will be executed as part of the job steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it @@ -51,6 +54,10 @@ jobs: dispatch: needs: build runs-on: ubuntu-latest + + env: + DOCKER_IMAGE_TAG: ${{needs.build.outputs.date}} + strategy: matrix: repo: ['MukuFlash03/nrel-openpath-join-page', 'MukuFlash03/op-admin-dashboard', 'MukuFlash03/em-public-dashboard'] @@ -66,4 +73,4 @@ jobs: -H "Authorization: Bearer ${{ secrets.GH_FG_PAT_TAGS }}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/${{ matrix.repo }}/actions/workflows/image_build_push.yml/dispatches \ - -d '{"ref":"tags-matrix", "inputs": {"docker_image_tag" : "${{ needs.build.outputs.date }}"}}' \ No newline at end of file + -d '{"ref":"tags-matrix", "inputs": {"docker_image_tag" : "${{ env.DOCKER_IMAGE_TAG }}"}}' \ No newline at end of file From acecf3e0974094b17dc32dcf4d0ad0d07d16d7ac Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Fri, 26 Apr 2024 01:18:00 -0700 Subject: [PATCH 35/89] Matrix build send - 5 Editing sample file as a sanity check to see if matrix build works. --- .docker/test.txt | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.docker/test.txt b/.docker/test.txt index 351c280d9..c640a8df8 100644 --- a/.docker/test.txt +++ b/.docker/test.txt @@ -10,4 +10,8 @@ Yes, working with updated branch as well. 
Testing if working with updated token scopes to have only actions: write +----- + +Testing to see if matrix build dispatch strategy works again. + ----- \ No newline at end of file From e778b3f3cf050eec33ef439a1b6763b6aaf533e0 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Mon, 29 Apr 2024 20:35:39 -0700 Subject: [PATCH 36/89] Fix for "url" KeyError observed in public-dash redesign testing The bug was triggered by a .gitignore-d conf/ directory in public-dash which had a db.conf file. This was being loaded when docker-compose.dev.yml was used to test the dev version of public-dash. This was occurring since there was a nested dictionary in the db.conf.sample and db.conf files while I had initially only stored the nested keys (url, result_limit). Since it was still reading from the file, it stored the nested dictionary format with timeseries as the parent key followed by (url, result_limit) as children. Hence, fixing it by adding the same nested dictionary structure in the emission/core/config.py and emission/core/get_database.py --- emission/core/config.py | 14 +++++++++----- emission/core/get_database.py | 4 ++-- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/emission/core/config.py b/emission/core/config.py index 8dbbabe9d..3f4c28b3e 100644 --- a/emission/core/config.py +++ b/emission/core/config.py @@ -4,15 +4,19 @@ def get_config_data_from_env(): config_data_env = { - "url": os.getenv('DB_HOST', "localhost"), - "result_limit": os.getenv('DB_TS_RESULT_LIMIT', 250000) + "timeseries": { + "url": os.getenv('DB_HOST', "localhost"), + "result_limit": os.getenv('DB_TS_RESULT_LIMIT', 250000) + } } return config_data_env def check_unset_env_vars(): config_data_env = { - "url": os.getenv('DB_HOST'), - "result_limit": os.getenv('DB_TS_RESULT_LIMIT') + "timeseries": { + "url": os.getenv('DB_HOST'), + "result_limit": os.getenv('DB_TS_RESULT_LIMIT') + } } return not any(config_data_env.values()) @@ -26,7 +30,7 @@ def get_config_data(): # if 
check_unset_env_vars(): # print("All DB environment variables are set to None") ret_val = get_config_data_from_env() - if ret_val["url"] == "localhost": + if ret_val["timeseries"]["url"] == "localhost": print("storage not configured, falling back to sample, default configuration") return ret_val diff --git a/emission/core/get_database.py b/emission/core/get_database.py index 005abe3ad..c8d370fcd 100644 --- a/emission/core/get_database.py +++ b/emission/core/get_database.py @@ -12,8 +12,8 @@ import emission.core.config as ecc -url = ecc.get_config()["url"] -result_limit = ecc.get_config()["result_limit"] +url = ecc.get_config()["timeseries"]["url"] +result_limit = ecc.get_config()["timeseries"]["result_limit"] try: parsed=pymongo.uri_parser.parse_uri(url) From a0190d431a1c4b11fa1abb6b26793f9ab027859a Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Tue, 30 Apr 2024 01:07:09 -0700 Subject: [PATCH 37/89] Fix for "url" KeyError observed in public-dash redesign testing The bug was triggered by a .gitignore-d conf/ directory in public-dash which had a db.conf file. This was being loaded when docker-compose.dev.yml was used to test the dev version of public-dash. This was occurring since there was a nested dictionary in the db.conf.sample and db.conf files while I had initially only stored the nested keys (url, result_limit). Since it was still reading from the file, it stored the nested dictionary format with timeseries as the parent key followed by (url, result_limit) as children. 
Hence, fixing it by adding the same nested dictionary structure in the emission/core/config.py and emission/core/get_database.py --- emission/core/config.py | 14 +++++++++----- emission/core/get_database.py | 4 ++-- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/emission/core/config.py b/emission/core/config.py index 8dbbabe9d..3f4c28b3e 100644 --- a/emission/core/config.py +++ b/emission/core/config.py @@ -4,15 +4,19 @@ def get_config_data_from_env(): config_data_env = { - "url": os.getenv('DB_HOST', "localhost"), - "result_limit": os.getenv('DB_TS_RESULT_LIMIT', 250000) + "timeseries": { + "url": os.getenv('DB_HOST', "localhost"), + "result_limit": os.getenv('DB_TS_RESULT_LIMIT', 250000) + } } return config_data_env def check_unset_env_vars(): config_data_env = { - "url": os.getenv('DB_HOST'), - "result_limit": os.getenv('DB_TS_RESULT_LIMIT') + "timeseries": { + "url": os.getenv('DB_HOST'), + "result_limit": os.getenv('DB_TS_RESULT_LIMIT') + } } return not any(config_data_env.values()) @@ -26,7 +30,7 @@ def get_config_data(): # if check_unset_env_vars(): # print("All DB environment variables are set to None") ret_val = get_config_data_from_env() - if ret_val["url"] == "localhost": + if ret_val["timeseries"]["url"] == "localhost": print("storage not configured, falling back to sample, default configuration") return ret_val diff --git a/emission/core/get_database.py b/emission/core/get_database.py index 005abe3ad..c8d370fcd 100644 --- a/emission/core/get_database.py +++ b/emission/core/get_database.py @@ -12,8 +12,8 @@ import emission.core.config as ecc -url = ecc.get_config()["url"] -result_limit = ecc.get_config()["result_limit"] +url = ecc.get_config()["timeseries"]["url"] +result_limit = ecc.get_config()["timeseries"]["result_limit"] try: parsed=pymongo.uri_parser.parse_uri(url) From 776f0b9a50552e58a2dbdf519c323460021a6120 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Wed, 1 May 2024 17:50:13 -0700 Subject: [PATCH 38/89] 
Artifact + Matrix - 1 Added build and push to have latest image available in docker hub that is generated with updated YAML file. Removed join repo from workflow dispatch. Added artifact so that this can be used in case of push trigger event in admin-dash and public-dash repos with the help of the python script fetch_runID.py in those repos, to get the latest run_ID for a specific branch with the artifact. Workflow dispatch would still use the input parameters which contains the latest timestamp tag. --- .github/workflows/image_build_push.yml | 35 +++++++++++++++++--------- 1 file changed, 23 insertions(+), 12 deletions(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index f3ce7c1c5..c89f55a18 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -7,8 +7,7 @@ name: docker image # TODO: Change to master branch once changes are final. on: push: - branches: [ tags-matrix ] - + branches: [ tags-combo-approach ] # Env variable env: @@ -42,28 +41,40 @@ jobs: run: echo running in repo ${GITHUB_REPOSITORY#*/} branch ${GITHUB_REF##*/} on ${{ steps.date.outputs.date }} # Runs a set of commands using the runners shell - # - name: build docker image - # run: | - # docker build -t $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} . - # docker images + - name: build docker image + run: | + docker build -t $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} . 
+ docker images - # - name: push docker image - # run: | - # docker push $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} + - name: push docker image + run: | + docker push $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }} + + - name: Create a text file + run: | + echo ${{ steps.date.outputs.date }} > tag_file.txt + echo "Created tag text file" + + - name: Upload Artifact + uses: actions/upload-artifact@v4 + with: + name: docker-image-tag + path: tag_file.txt + overwrite: true dispatch: needs: build runs-on: ubuntu-latest env: - DOCKER_IMAGE_TAG: ${{needs.build.outputs.date}} + DOCKER_IMAGE_TAG: ${{ needs.build.outputs.date }} strategy: matrix: - repo: ['MukuFlash03/nrel-openpath-join-page', 'MukuFlash03/op-admin-dashboard', 'MukuFlash03/em-public-dashboard'] + repo: ['MukuFlash03/op-admin-dashboard', 'MukuFlash03/em-public-dashboard'] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Trigger workflow in join-page, admin-dash, public-dash run: | From 17ac3cc6e5a88624755fe3d0af8074ea19d64911 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Wed, 1 May 2024 18:05:57 -0700 Subject: [PATCH 39/89] Artifact + Matrix - 2 Changed branch name to tags-combo-approach; resulted in a failed workflow in admin-dash, public-dash since branch was still set to tags-matrix. 
--- .github/workflows/image_build_push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index c89f55a18..08a0a3006 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -84,4 +84,4 @@ jobs: -H "Authorization: Bearer ${{ secrets.GH_FG_PAT_TAGS }}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/${{ matrix.repo }}/actions/workflows/image_build_push.yml/dispatches \ - -d '{"ref":"tags-matrix", "inputs": {"docker_image_tag" : "${{ env.DOCKER_IMAGE_TAG }}"}}' \ No newline at end of file + -d '{"ref":"tags-combo-approach", "inputs": {"docker_image_tag" : "${{ env.DOCKER_IMAGE_TAG }}"}}' \ No newline at end of file From 706f74c63683742e3b892a83eacf63e0b8154b12 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Wed, 1 May 2024 18:32:21 -0700 Subject: [PATCH 40/89] Artifact + Matrix - 3 Changing step name to exclude join-page; included earlier for testing purposes but now focussing on the dashboard repos which actually use the server as the base image and hence need the workflow to be triggered. --- .github/workflows/image_build_push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 08a0a3006..c47147a2c 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -76,7 +76,7 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Trigger workflow in join-page, admin-dash, public-dash + - name: Trigger workflow in admin-dash, public-dash run: | curl -L \ -X POST \ From d724fd1d293f7dd01a683ba5a1da4ed0601967ff Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 2 May 2024 00:33:58 -0700 Subject: [PATCH 41/89] Revert "Adding image-push-merge branch to automated CI/CD tests" This reverts commit 2fdb46994e9fa5af64701c7d31794760f7c4c0e8. 
--- .github/workflows/test-with-docker.yml | 2 +- .github/workflows/test-with-manual-install.yml | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/test-with-docker.yml b/.github/workflows/test-with-docker.yml index 108dd62fa..e38499544 100644 --- a/.github/workflows/test-with-docker.yml +++ b/.github/workflows/test-with-docker.yml @@ -6,7 +6,7 @@ name: test-with-docker # events but only for the master branch on: push: - branches: [ master ] + branches: [ master ] pull_request: branches: [ master ] schedule: diff --git a/.github/workflows/test-with-manual-install.yml b/.github/workflows/test-with-manual-install.yml index 7212b9ebf..4a81eb000 100644 --- a/.github/workflows/test-with-manual-install.yml +++ b/.github/workflows/test-with-manual-install.yml @@ -9,12 +9,10 @@ on: branches: - master - gis-based-mode-detection - # - image-push-merge pull_request: branches: - master - gis-based-mode-detection - # - image-push-merge schedule: # * is a special character in YAML so you have to quote this string - cron: '5 4 * * 0' From e6a2d7911de69dcb9f18b468b23b34eff697770a Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Thu, 2 May 2024 00:34:30 -0700 Subject: [PATCH 42/89] Revert "TODO added to change to master branch in YML file" This reverts commit 273c2caba529589a6ba1ae405e894e5cce27b933. --- .github/workflows/image_build_push.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index c47147a2c..e3a8e828e 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -4,7 +4,6 @@ name: docker image # Controls when the action will run. Triggers the workflow on push or pull request # events but only for the master branch -# TODO: Change to master branch once changes are final. 
on: push: branches: [ tags-combo-approach ] From f033f0618de488cbf92d03a948d237e1855b3d55 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Fri, 3 May 2024 03:14:16 -0700 Subject: [PATCH 43/89] Added TODOs in github actions workflow YAML file Reminder for things to change as per master branch of e-mission-server once changes are finalized. --- .github/workflows/image_build_push.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index e3a8e828e..3618e0804 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -6,6 +6,7 @@ name: docker image # events but only for the master branch on: push: + # TODO: Change branch to master branches: [ tags-combo-approach ] # Env variable @@ -70,12 +71,15 @@ jobs: strategy: matrix: + # TODO: Change user / organization to emission instead of MukuFlash03 repo: ['MukuFlash03/op-admin-dashboard', 'MukuFlash03/em-public-dashboard'] steps: - uses: actions/checkout@v4 - name: Trigger workflow in admin-dash, public-dash + # TODO: Create Fine-grained token with "Actions: write" permissions + # TODO: Change branch name under "ref" in the curl command run: | curl -L \ -X POST \ From 1207d791150866c84bd8c7fafc737326a4052374 Mon Sep 17 00:00:00 2001 From: "Mahadik, Mukul Chandrakant" Date: Fri, 3 May 2024 13:41:49 -0700 Subject: [PATCH 44/89] Artifact + Matrix - 4 Adding extra words to TODO just to trigger a workflow dispatch run and check if .env commit action works successfully for this trigger type too. 
--- .github/workflows/image_build_push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 3618e0804..b098c7cde 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -6,7 +6,7 @@ name: docker image # events but only for the master branch on: push: - # TODO: Change branch to master + # TODO: Change branch to master branch of e-mission-server branches: [ tags-combo-approach ] # Env variable From f182790d04c497bf6ccb077adebba32d576e3253 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Thu, 9 May 2024 09:59:55 -0600 Subject: [PATCH 45/89] Cleanup changes Getting the server changes ready for merge. I modified the yml jobs to run on master again, since this was switched off for testing. This might trigger a cascade of events, and I think we all the other PRs are ready so we can see if their jobs are triggered too. --- .docker/test.txt | 17 ----------------- .github/workflows/image_build_push.yml | 19 +++---------------- .../workflows/osx-ubuntu-manual-install.yml | 6 +++--- .github/workflows/test-default-action.yml | 6 +++--- .github/workflows/test-with-docker.yml | 4 ++-- .../workflows/test-with-manual-install.yml | 8 ++++---- emission/tests/netTests/TestPush.py | 1 - 7 files changed, 15 insertions(+), 46 deletions(-) delete mode 100644 .docker/test.txt diff --git a/.docker/test.txt b/.docker/test.txt deleted file mode 100644 index c640a8df8..000000000 --- a/.docker/test.txt +++ /dev/null @@ -1,17 +0,0 @@ -Temporary test file while working on automating tags modification. -Checking to see if new artifacts generated even if overwrite is set to true. - ------ - -Testing again to see if workflow dispatch works - -Yes, working with updated branch as well. 
------- - -Testing if working with updated token scopes to have only actions: write - ------ - -Testing to see if matrix build dispatch strategy works again. - ------ \ No newline at end of file diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index aff75e152..c39f0d1e2 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -1,32 +1,21 @@ -# This is a basic workflow to help you get started with Actions - name: docker image -# Controls when the action will run. Triggers the workflow on push or pull request -# events but only for the master branch on: push: - # TODO: Change branch to master branch of e-mission-server - branches: [ consolidate-differences ] + branches: [ master ] -# Env variable env: DOCKER_USER: ${{secrets.DOCKER_USER}} DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} -# A workflow run is made up of one or more jobs that can run sequentially or in parallel jobs: - # This workflow contains a single job called "build" build: - # The type of runner that the job will run on runs-on: ubuntu-latest outputs: date: ${{ steps.date.outputs.date }} - # Steps represent a sequence of tasks that will be executed as part of the job steps: - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - uses: actions/checkout@v2 - name: docker login run: | # log into docker hub account @@ -71,15 +60,13 @@ jobs: strategy: matrix: - # TODO: Change user / organization to emission instead of MukuFlash03 - repo: ['MukuFlash03/op-admin-dashboard', 'MukuFlash03/em-public-dashboard'] + repo: ['e-mission/op-admin-dashboard', 'e-mission/em-public-dashboard'] steps: - uses: actions/checkout@v4 - name: Trigger workflow in admin-dash, public-dash # TODO: Create Fine-grained token with "Actions: write" permissions - # TODO: Change branch name under "ref" in the curl command run: | curl -L \ -X POST \ @@ -87,4 +74,4 @@ jobs: -H "Authorization: Bearer ${{ secrets.GH_FG_PAT_TAGS 
}}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/${{ matrix.repo }}/actions/workflows/image_build_push.yml/dispatches \ - -d '{"ref":"tags-combo-approach", "inputs": {"docker_image_tag" : "${{ env.DOCKER_IMAGE_TAG }}"}}' \ No newline at end of file + -d '{"ref":"master", "inputs": {"docker_image_tag" : "${{ env.DOCKER_IMAGE_TAG }}"}}' \ No newline at end of file diff --git a/.github/workflows/osx-ubuntu-manual-install.yml b/.github/workflows/osx-ubuntu-manual-install.yml index 00db6f1f9..979464fd2 100644 --- a/.github/workflows/osx-ubuntu-manual-install.yml +++ b/.github/workflows/osx-ubuntu-manual-install.yml @@ -5,12 +5,12 @@ name: osx-ubuntu-manual-install on: push: branches: - # - master - # - gis-based-mode-detection + - master + - gis-based-mode-detection - consolidate-differences pull_request: branches: - # - master + - master - gis-based-mode-detection schedule: # * is a special character in YAML so you have to quote this string diff --git a/.github/workflows/test-default-action.yml b/.github/workflows/test-default-action.yml index 15b38e593..662330b8c 100644 --- a/.github/workflows/test-default-action.yml +++ b/.github/workflows/test-default-action.yml @@ -7,12 +7,12 @@ name: CI on: push: branches: - # - master - # - gis-based-mode-detection + - master + - gis-based-mode-detection - consolidate-differences pull_request: branches: - # - master + - master - gis-based-mode-detection # A workflow run is made up of one or more jobs that can run sequentially or in parallel diff --git a/.github/workflows/test-with-docker.yml b/.github/workflows/test-with-docker.yml index a71fda621..e38499544 100644 --- a/.github/workflows/test-with-docker.yml +++ b/.github/workflows/test-with-docker.yml @@ -6,9 +6,9 @@ name: test-with-docker # events but only for the master branch on: push: - branches: [ consolidate-differences ] + branches: [ master ] pull_request: - branches: [ consolidate-differences ] + branches: [ master ] schedule: # * is a special 
character in YAML so you have to quote this string - cron: '5 4 * * 0' diff --git a/.github/workflows/test-with-manual-install.yml b/.github/workflows/test-with-manual-install.yml index 0056cc37a..fe3d44331 100644 --- a/.github/workflows/test-with-manual-install.yml +++ b/.github/workflows/test-with-manual-install.yml @@ -7,13 +7,13 @@ name: ubuntu-only-test-with-manual-install on: push: branches: - # - master - # - gis-based-mode-detection + - master + - gis-based-mode-detection - consolidate-differences pull_request: branches: - # - master - # - gis-based-mode-detection + - master + - gis-based-mode-detection - consolidate-differences schedule: # * is a special character in YAML so you have to quote this string diff --git a/emission/tests/netTests/TestPush.py b/emission/tests/netTests/TestPush.py index 865ffa0b7..43684a33b 100644 --- a/emission/tests/netTests/TestPush.py +++ b/emission/tests/netTests/TestPush.py @@ -188,7 +188,6 @@ def testFcmNoMapping(self): # and there will be no entries in the token mapping database self.assertEqual(edb.get_push_token_mapping_db().count_documents({}), 0) - if __name__ == '__main__': import emission.tests.common as etc From f1869ab9b1f0d753a941c724dd9be7c504eeea1b Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Thu, 9 May 2024 10:19:48 -0600 Subject: [PATCH 46/89] Update image_build_push.yml re-add run on gis-based-mode-detection branch --- .github/workflows/image_build_push.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index c39f0d1e2..b9538fed6 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -2,7 +2,7 @@ name: docker image on: push: - branches: [ master ] + branches: [ master, gis-based-mode-detection ] env: DOCKER_USER: ${{secrets.DOCKER_USER}} @@ -74,4 +74,4 @@ jobs: -H "Authorization: Bearer ${{ 
secrets.GH_FG_PAT_TAGS }}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/${{ matrix.repo }}/actions/workflows/image_build_push.yml/dispatches \ - -d '{"ref":"master", "inputs": {"docker_image_tag" : "${{ env.DOCKER_IMAGE_TAG }}"}}' \ No newline at end of file + -d '{"ref":"master", "inputs": {"docker_image_tag" : "${{ env.DOCKER_IMAGE_TAG }}"}}' From 8f059551e2cc5cb6e27b5d8d32c82f7007f7ee79 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Thu, 9 May 2024 10:24:22 -0600 Subject: [PATCH 47/89] More cleanup + testing image build? Trying to trigger image_build_push.yml and removing more push triggers on other branches. --- .github/workflows/image_build_push.yml | 2 +- .github/workflows/osx-ubuntu-manual-install.yml | 1 - .github/workflows/test-default-action.yml | 1 - .github/workflows/test-with-manual-install.yml | 2 -- 4 files changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index b9538fed6..0181765c5 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -2,7 +2,7 @@ name: docker image on: push: - branches: [ master, gis-based-mode-detection ] + branches: [ master, gis-based-mode-detection, consolidate-differences ] env: DOCKER_USER: ${{secrets.DOCKER_USER}} diff --git a/.github/workflows/osx-ubuntu-manual-install.yml b/.github/workflows/osx-ubuntu-manual-install.yml index 979464fd2..e6ecf0ddc 100644 --- a/.github/workflows/osx-ubuntu-manual-install.yml +++ b/.github/workflows/osx-ubuntu-manual-install.yml @@ -7,7 +7,6 @@ on: branches: - master - gis-based-mode-detection - - consolidate-differences pull_request: branches: - master diff --git a/.github/workflows/test-default-action.yml b/.github/workflows/test-default-action.yml index 662330b8c..424e5bee1 100644 --- a/.github/workflows/test-default-action.yml +++ b/.github/workflows/test-default-action.yml @@ 
-9,7 +9,6 @@ on: branches: - master - gis-based-mode-detection - - consolidate-differences pull_request: branches: - master diff --git a/.github/workflows/test-with-manual-install.yml b/.github/workflows/test-with-manual-install.yml index fe3d44331..4a81eb000 100644 --- a/.github/workflows/test-with-manual-install.yml +++ b/.github/workflows/test-with-manual-install.yml @@ -9,12 +9,10 @@ on: branches: - master - gis-based-mode-detection - - consolidate-differences pull_request: branches: - master - gis-based-mode-detection - - consolidate-differences schedule: # * is a special character in YAML so you have to quote this string - cron: '5 4 * * 0' From 912bd343c5a091f4669e8c9a74541023285a9057 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Thu, 16 May 2024 17:46:19 -0600 Subject: [PATCH 48/89] Hardcoded webhost Changes to set the webhost to 0.0.0.0 at all times --- .docker/docker_start_script.sh | 9 --------- Dockerfile | 2 +- emission/net/api/config.py | 13 ------------- setup/tests/start_script.sh | 2 ++ 4 files changed, 3 insertions(+), 23 deletions(-) diff --git a/.docker/docker_start_script.sh b/.docker/docker_start_script.sh index daf82b32e..0b3f38d7e 100644 --- a/.docker/docker_start_script.sh +++ b/.docker/docker_start_script.sh @@ -12,15 +12,6 @@ if [ -z ${DB_HOST} ] ; then fi cat conf/storage/db.conf -#set Web Server host using environment variable -echo ${WEB_SERVER_HOST} -if [ -z ${WEB_SERVER_HOST} ] ; then - local_host=`hostname -i` - export WEB_SERVER_HOST=$local_host - echo "Setting webserver host environment variable to localhost" -fi -cat conf/net/api/webserver.conf - if [ -z ${LIVERELOAD_SRC} ] ; then echo "Live reload disabled, " else diff --git a/Dockerfile b/Dockerfile index 7fa923ea0..521e7194f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,7 +29,7 @@ RUN bash -c "./.docker/setup_config.sh" # #declare environment variables ENV DB_HOST='' -ENV WEB_SERVER_HOST='' +ENV 
WEB_SERVER_HOST=0.0.0.0 ENV LIVERELOAD_SRC='' ENV STUDY_CONFIG='' diff --git a/emission/net/api/config.py b/emission/net/api/config.py index 31e0e34c0..7c78e6391 100644 --- a/emission/net/api/config.py +++ b/emission/net/api/config.py @@ -14,25 +14,12 @@ def get_config_data_from_env(): } return config_data_env -def check_unset_env_vars(): - config_data_env = { - "static_path": os.getenv('WEB_SERVER_STATIC_PATH'), - "server_host": os.getenv('WEB_SERVER_HOST'), - "server_port": os.getenv('WEB_SERVER_PORT'), - "socket_timeout": os.getenv('WEB_SERVER_TIMEOUT'), - "auth_method": os.getenv('WEB_SERVER_AUTH'), - "aggregate_call_auth": os.getenv('WEB_SERVER_AGGREGATE_CALL_AUTH'), - "not_found_redirect": os.getenv('WEB_SERVER_REDIRECT_URL') - } - return not any(config_data_env.values()) - def get_config_data(): try: config_file = open('conf/net/api/webserver.conf') ret_val = json.load(config_file) config_file.close() except: - # Check if all Webserver environment variables are not set # if check_unset_env_vars(): logging.debug("webserver not configured, falling back to sample, default configuration") ret_val = get_config_data_from_env() diff --git a/setup/tests/start_script.sh b/setup/tests/start_script.sh index 0fe21ed1a..d9bb52eef 100644 --- a/setup/tests/start_script.sh +++ b/setup/tests/start_script.sh @@ -9,6 +9,8 @@ if [ -z ${DB_HOST} ] ; then export DB_HOST=$local_host echo "Setting db host environment variable to localhost" fi + +export WEB_SERVER_HOST=0.0.0.0 cat conf/storage/db.conf echo "Setting up conda..." 
From 738b6296cea4c4cf81fc0b060cfd80cdcf0373e9 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Fri, 17 May 2024 09:17:05 -0600 Subject: [PATCH 49/89] secret.py Removing the duplicate section --- emission/net/auth/secret.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/emission/net/auth/secret.py b/emission/net/auth/secret.py index 582a15d5a..f96a060d1 100644 --- a/emission/net/auth/secret.py +++ b/emission/net/auth/secret.py @@ -4,11 +4,6 @@ class SecretMethod(object): def __init__(self): - key_file = open('conf/net/auth/secret_list.json') - key_data = json.load(key_file) - key_file.close() - self.client_secret_list = key_data["client_secret_list"] - try: key_file = open('conf/net/auth/secret_list.json') except: From 7102508738c970f6c5f1267a8856c12e0440f484 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Fri, 17 May 2024 11:01:45 -0600 Subject: [PATCH 50/89] Restore intake.conf.sample Reverting the logging level to WARNING --- conf/log/intake.conf.sample | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/conf/log/intake.conf.sample b/conf/log/intake.conf.sample index 413305ad8..d51c5ac2e 100644 --- a/conf/log/intake.conf.sample +++ b/conf/log/intake.conf.sample @@ -12,8 +12,7 @@ }, "console": { "class": "logging.StreamHandler", - "level": "DEBUG", - "formatter": "detailed" + "level": "WARNING" }, "file": { "backupCount": 8, From 3d774390e380f6f624630c0a839db110069260c1 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Fri, 17 May 2024 11:02:59 -0600 Subject: [PATCH 51/89] Reverting webserver.conf.sample Restoring to WARNING logging level --- conf/log/webserver.conf.sample | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/conf/log/webserver.conf.sample b/conf/log/webserver.conf.sample index 54cb42fa9..de9512ea8 100644 --- a/conf/log/webserver.conf.sample +++ 
b/conf/log/webserver.conf.sample @@ -12,8 +12,7 @@ }, "console": { "class": "logging.StreamHandler", - "level": "DEBUG", - "formatter": "detailed" + "level": "WARNING" }, "file": { "backupCount": 3, From a0f2424308ad2b3ccad9791f66aae73b4f0e529c Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Fri, 17 May 2024 12:08:24 -0600 Subject: [PATCH 52/89] Removing check_unset_env_vars Removing the functionality of check_unset_env_vars. DB_HOST should be caught in start_script.sh. When I rebuilt the image without this functionality and ran the container, all tests passed. --- emission/core/config.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/emission/core/config.py b/emission/core/config.py index 3f4c28b3e..7e505ca87 100644 --- a/emission/core/config.py +++ b/emission/core/config.py @@ -11,24 +11,12 @@ def get_config_data_from_env(): } return config_data_env -def check_unset_env_vars(): - config_data_env = { - "timeseries": { - "url": os.getenv('DB_HOST'), - "result_limit": os.getenv('DB_TS_RESULT_LIMIT') - } - } - return not any(config_data_env.values()) - def get_config_data(): try: config_file = open('conf/storage/db.conf') ret_val = json.load(config_file) config_file.close() except: - # Check if all DB environment variables are not set - # if check_unset_env_vars(): - # print("All DB environment variables are set to None") ret_val = get_config_data_from_env() if ret_val["timeseries"]["url"] == "localhost": print("storage not configured, falling back to sample, default configuration") From 18a8872fd9ad29364a02ce6aa4bee2f97f157598 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Fri, 17 May 2024 12:29:19 -0600 Subject: [PATCH 53/89] Removing check_unset_env_vars functionality --- emission/net/api/config.py | 1 + 1 file changed, 1 insertion(+) diff --git a/emission/net/api/config.py b/emission/net/api/config.py index 7c78e6391..e8cbd6392 100644 
--- a/emission/net/api/config.py +++ b/emission/net/api/config.py @@ -14,6 +14,7 @@ def get_config_data_from_env(): } return config_data_env + def get_config_data(): try: config_file = open('conf/net/api/webserver.conf') From 53015c7554fe96cb14972d664959a54116b66eb8 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Tue, 21 May 2024 12:39:49 -0600 Subject: [PATCH 54/89] Update image_build_push.yml Removing consolidate differences branch --- .github/workflows/image_build_push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 0181765c5..b9538fed6 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -2,7 +2,7 @@ name: docker image on: push: - branches: [ master, gis-based-mode-detection, consolidate-differences ] + branches: [ master, gis-based-mode-detection ] env: DOCKER_USER: ${{secrets.DOCKER_USER}} From 41a410c10f09cde91c8a4d36ccb3d47a167a77ff Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Tue, 21 May 2024 12:43:59 -0600 Subject: [PATCH 55/89] Update docker_start_script.sh Removing DB_HOST if-else functionality, as it's not necessary for the container. 
--- .docker/docker_start_script.sh | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/.docker/docker_start_script.sh b/.docker/docker_start_script.sh index 0b3f38d7e..a7c8e06ea 100644 --- a/.docker/docker_start_script.sh +++ b/.docker/docker_start_script.sh @@ -1,15 +1,5 @@ #!/usr/bin/env bash -#Configure web server -# cd /usr/src/app/e-mission-server - -#set database URL using environment variable -echo ${DB_HOST} -if [ -z ${DB_HOST} ] ; then - local_host=`hostname -i` - export DB_HOST=$local_host - echo "Setting db host environment variable to localhost" -fi cat conf/storage/db.conf if [ -z ${LIVERELOAD_SRC} ] ; then From 7405ff1e1fa1e11312544d0175a1c5dca921f939 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Tue, 21 May 2024 12:46:08 -0600 Subject: [PATCH 56/89] Setting DB_HOST=db Setting DB_HOST=db as the default for running in a docker container --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 521e7194f..2840ab062 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,7 +28,7 @@ RUN chmod u+x ./.docker/setup_config.sh RUN bash -c "./.docker/setup_config.sh" # #declare environment variables -ENV DB_HOST='' +ENV DB_HOST=db ENV WEB_SERVER_HOST=0.0.0.0 ENV LIVERELOAD_SRC='' From cd622471c5a03f7dfe3f1f20b69c0866139e481a Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Wed, 22 May 2024 14:41:37 -0600 Subject: [PATCH 57/89] Update docker_start_script.sh Removing unnecessary echo --- .docker/docker_start_script.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/.docker/docker_start_script.sh b/.docker/docker_start_script.sh index a7c8e06ea..eda101541 100644 --- a/.docker/docker_start_script.sh +++ b/.docker/docker_start_script.sh @@ -14,7 +14,6 @@ fi #TODO: start cron jobs # change python environment -echo "Starting up e-mission-environment..." 
source setup/activate.sh # launch the webapp From ebc8188978ad91da33bca7a0b778c86be3101727 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Wed, 22 May 2024 14:59:10 -0600 Subject: [PATCH 58/89] Rename debug.conf.internal.json to debug.conf.prod.json --- conf/analysis/{debug.conf.internal.json => debug.conf.prod.json} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename conf/analysis/{debug.conf.internal.json => debug.conf.prod.json} (100%) diff --git a/conf/analysis/debug.conf.internal.json b/conf/analysis/debug.conf.prod.json similarity index 100% rename from conf/analysis/debug.conf.internal.json rename to conf/analysis/debug.conf.prod.json From 41ae79f0d76b10698aae6cb1a1ef874a8ca8a3ce Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Wed, 22 May 2024 15:00:25 -0600 Subject: [PATCH 59/89] Update config.py Updating to reflect the changed names of the debug.conf.json files --- emission/analysis/config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/emission/analysis/config.py b/emission/analysis/config.py index a7a84b6db..55deb77d3 100644 --- a/emission/analysis/config.py +++ b/emission/analysis/config.py @@ -8,10 +8,10 @@ def get_config_data(): except: if os.getenv("PROD_STAGE") == "TRUE": print("In production environment, opening internal debug.conf") - config_file = open('conf/analysis/debug.conf.internal.json') + config_file = open('conf/analysis/debug.conf.prod.json') else: print("analysis.debug.conf.json not configured, falling back to sample, default configuration") - config_file = open('conf/analysis/debug.conf.json.sample') + config_file = open('conf/analysis/debug.conf.dev.json') ret_val = json.load(config_file) config_file.close() return ret_val From 290b0fcf434ea697109d86a0703a5485a4b0de82 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Wed, 22 May 2024 
15:17:12 -0600 Subject: [PATCH 60/89] Push to rename My machine is acting up and not allowing me to rename this file. Going to do it manually in github, but need to make an inconsequential change first. --- conf/analysis/debug.conf.json.sample | 1 + 1 file changed, 1 insertion(+) diff --git a/conf/analysis/debug.conf.json.sample b/conf/analysis/debug.conf.json.sample index 23c184aa7..ea638b701 100644 --- a/conf/analysis/debug.conf.json.sample +++ b/conf/analysis/debug.conf.json.sample @@ -12,3 +12,4 @@ "analysis.result.section.key": "analysis/inferred_section", "userinput.keylist": ["manual/mode_confirm", "manual/purpose_confirm", "manual/trip_user_input", "manual/place_user_input"] } + From 29869cdd4037aa03ae64168826d96a7263cd7b6c Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Wed, 22 May 2024 15:19:16 -0600 Subject: [PATCH 61/89] Update and rename debug.conf.json.sample to debug.conf.dev.json Removing the inconsequential change from the last push and renaming manually. 
This was my issue: https://github.com/desktop/desktop/issues/13588 --- conf/analysis/{debug.conf.json.sample => debug.conf.dev.json} | 1 - 1 file changed, 1 deletion(-) rename conf/analysis/{debug.conf.json.sample => debug.conf.dev.json} (99%) diff --git a/conf/analysis/debug.conf.json.sample b/conf/analysis/debug.conf.dev.json similarity index 99% rename from conf/analysis/debug.conf.json.sample rename to conf/analysis/debug.conf.dev.json index ea638b701..23c184aa7 100644 --- a/conf/analysis/debug.conf.json.sample +++ b/conf/analysis/debug.conf.dev.json @@ -12,4 +12,3 @@ "analysis.result.section.key": "analysis/inferred_section", "userinput.keylist": ["manual/mode_confirm", "manual/purpose_confirm", "manual/trip_user_input", "manual/place_user_input"] } - From 38209ef54f7d395fb52902ed917076fdb2157bb0 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Wed, 22 May 2024 15:36:25 -0600 Subject: [PATCH 62/89] common.py fix? Thought I would quickly change the name of debug.conf.json.sample to debug.conf.dev.json; this was not inconsequential. Seeing if this fixes it. 
--- emission/tests/common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/emission/tests/common.py b/emission/tests/common.py index 122b2fc7f..492fab547 100644 --- a/emission/tests/common.py +++ b/emission/tests/common.py @@ -259,7 +259,7 @@ def set_analysis_config(key, value): import shutil analysis_conf_path = "conf/analysis/debug.conf.json" - shutil.copyfile("%s.sample" % analysis_conf_path, + shutil.copyfile("conf/analysis/debug.conf.dev.json", analysis_conf_path) with open(analysis_conf_path) as fd: curr_config = json.load(fd) From da485c257cdf955f9e44b325ebd967a71c42633b Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Thu, 23 May 2024 16:57:35 -0600 Subject: [PATCH 63/89] Removing redundant DB_HOST setting DB_HOST is hardcoded in the compose file that runs this script anyway. --- emission/integrationTests/start_integration_tests.sh | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/emission/integrationTests/start_integration_tests.sh b/emission/integrationTests/start_integration_tests.sh index 7096f9833..7b01b5911 100644 --- a/emission/integrationTests/start_integration_tests.sh +++ b/emission/integrationTests/start_integration_tests.sh @@ -2,13 +2,7 @@ # Using an automated install cd /src/e-mission-server -#set database URL using environment variable echo ${DB_HOST} -if [ -z ${DB_HOST} ] ; then - local_host=`hostname -i` - export DB_HOST=$local_host - echo "Setting db host environment variable to localhost" -fi cat conf/storage/db.conf echo "Setting up conda..." 
@@ -24,4 +18,4 @@ echo "Adding permissions for the runIntegrationTests.sh script" chmod +x runIntegrationTests.sh echo "Permissions added for the runIntegrationTests.sh script" -./runIntegrationTests.sh \ No newline at end of file +./runIntegrationTests.sh From e6b388bd3e1c0eff8ae76594c2a7539fc7464815 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Fri, 24 May 2024 12:17:43 -0600 Subject: [PATCH 64/89] reverting dockerfile + start script changes reverting to DB_HOST='' since setting it to db probably isnt the best idea. Also removing the DB_HOST fallback in start_script.sh since it'll be caught in config.py --- Dockerfile | 2 +- setup/tests/start_script.sh | 5 ----- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/Dockerfile b/Dockerfile index 2840ab062..521e7194f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,7 +28,7 @@ RUN chmod u+x ./.docker/setup_config.sh RUN bash -c "./.docker/setup_config.sh" # #declare environment variables -ENV DB_HOST=db +ENV DB_HOST='' ENV WEB_SERVER_HOST=0.0.0.0 ENV LIVERELOAD_SRC='' diff --git a/setup/tests/start_script.sh b/setup/tests/start_script.sh index d9bb52eef..c5d2d7e39 100644 --- a/setup/tests/start_script.sh +++ b/setup/tests/start_script.sh @@ -4,11 +4,6 @@ cd /src/e-mission-server #set database URL using environment variable echo ${DB_HOST} -if [ -z ${DB_HOST} ] ; then - local_host=`hostname -i` - export DB_HOST=$local_host - echo "Setting db host environment variable to localhost" -fi export WEB_SERVER_HOST=0.0.0.0 cat conf/storage/db.conf From bb03c42739ea62326be0298822f8d9c9cfd04331 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Sun, 26 May 2024 01:07:07 -0600 Subject: [PATCH 65/89] Testing workflows with compose Had to revert some changes but need to test the functionality of docker compose --- .github/workflows/image_build_push.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff 
--git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index b9538fed6..728f764fb 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -2,7 +2,7 @@ name: docker image on: push: - branches: [ master, gis-based-mode-detection ] + branches: [ master, gis-based-mode-detection, consolidate_differences ] env: DOCKER_USER: ${{secrets.DOCKER_USER}} @@ -60,7 +60,7 @@ jobs: strategy: matrix: - repo: ['e-mission/op-admin-dashboard', 'e-mission/em-public-dashboard'] + repo: ['MukuFlash03/op-admin-dashboard', 'MukuFlash03/em-public-dashboard'] steps: - uses: actions/checkout@v4 From cf50d17763a9c15d43d6fdf14bc42bf7ac6bc685 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Sun, 26 May 2024 01:09:57 -0600 Subject: [PATCH 66/89] Triggering workflows. --- .github/workflows/image_build_push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index 728f764fb..e093805fa 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -2,7 +2,7 @@ name: docker image on: push: - branches: [ master, gis-based-mode-detection, consolidate_differences ] + branches: [ master, gis-based-mode-detection, consolidate-differences ] env: DOCKER_USER: ${{secrets.DOCKER_USER}} From 6a21f5dc89265021769761413ae1f33a464a1bb0 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Sun, 26 May 2024 03:17:11 -0600 Subject: [PATCH 67/89] Reverting image_build_push.yml --- .github/workflows/image_build_push.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index e093805fa..b9538fed6 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -2,7 +2,7 @@ 
name: docker image on: push: - branches: [ master, gis-based-mode-detection, consolidate-differences ] + branches: [ master, gis-based-mode-detection ] env: DOCKER_USER: ${{secrets.DOCKER_USER}} @@ -60,7 +60,7 @@ jobs: strategy: matrix: - repo: ['MukuFlash03/op-admin-dashboard', 'MukuFlash03/em-public-dashboard'] + repo: ['e-mission/op-admin-dashboard', 'e-mission/em-public-dashboard'] steps: - uses: actions/checkout@v4 From b961417feb4144fadf1c6b83e7a19f5873e08334 Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Mon, 10 Jun 2024 10:06:31 -0600 Subject: [PATCH 68/89] Not showing changes to branches for some reason in image_build_push.yml I removed the consolidate differences branch a long time ago, but this change is not reflected in the code review space for some reason. Trying to force it to display the change. From a245e7d80ca154180b64124ada8009c647b5bf3a Mon Sep 17 00:00:00 2001 From: Natalie Schultz <90212258+nataliejschultz@users.noreply.github.com> Date: Mon, 17 Jun 2024 15:20:11 -0600 Subject: [PATCH 69/89] Adding comment to see if it resolves github display error GitHub isn't displaying the changes I've made to this file, namely removing the consolidate-differences branch. I'm not sure why it's not displaying this, since when I go to edit the file in github, it looks correct. So, I'm adding a comment to see if it updates the file and makes it display correctly. 
--- .github/workflows/image_build_push.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml index b9538fed6..46a22b630 100644 --- a/.github/workflows/image_build_push.yml +++ b/.github/workflows/image_build_push.yml @@ -4,6 +4,7 @@ on: push: branches: [ master, gis-based-mode-detection ] +#Dockerhub credentials are set as environment variables env: DOCKER_USER: ${{secrets.DOCKER_USER}} DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} From 20670558cee27244f7f9008bf414184e424ffd83 Mon Sep 17 00:00:00 2001 From: Shankari Date: Tue, 6 Aug 2024 19:02:02 -0700 Subject: [PATCH 70/89] =?UTF-8?q?=F0=9F=A9=B9=20Don't=20cat=20the=20db.con?= =?UTF-8?q?f=20file?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Since it won't exist https://github.com/e-mission/e-mission-server/pull/961#discussion_r1706259761 --- .docker/docker_start_script.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/.docker/docker_start_script.sh b/.docker/docker_start_script.sh index eda101541..7debd03ba 100644 --- a/.docker/docker_start_script.sh +++ b/.docker/docker_start_script.sh @@ -1,7 +1,5 @@ #!/usr/bin/env bash -cat conf/storage/db.conf - if [ -z ${LIVERELOAD_SRC} ] ; then echo "Live reload disabled, " else From 3000758c17e2cdc85f237927e400ba197f11046d Mon Sep 17 00:00:00 2001 From: Shankari Date: Tue, 6 Aug 2024 19:07:06 -0700 Subject: [PATCH 71/89] =?UTF-8?q?=F0=9F=A9=B9=20Use=20the=20correct=20file?= =?UTF-8?q?name=20in=20the=20gitignore?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit To be consistent with https://github.com/e-mission/e-mission-server/pull/961/commits/29869cdd4037aa03ae64168826d96a7263cd7b6c --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index fb58e8e38..0f5c8be38 100644 --- a/.gitignore +++ b/.gitignore @@ -16,7 +16,8 @@ 
CFC_DataCollector/moves_collect.log webapp/www/lib conf/**/*.json !conf/**/*.schema.json -!conf/analysis/debug.conf.internal.json +!conf/analysis/debug.conf.dev.json +!conf/analysis/debug.conf.prod.json *.ipynb_checkpoints* From 6a8a13ff11f0056a0368160133440efaf29e85cf Mon Sep 17 00:00:00 2001 From: Shankari Date: Tue, 6 Aug 2024 19:14:22 -0700 Subject: [PATCH 72/89] =?UTF-8?q?=F0=9F=A9=B9=20Set=20the=20default=20valu?= =?UTF-8?q?e=20for=20the=20`DB=5FHOST`=20as=20well?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Since we now don't set it in the start script, we should do so in the `Dockerfile`, consistent with the `WEB_SERVER_HOST` set in https://github.com/MukuFlash03/e-mission-server/commit/912bd343c5a091f4669e8c9a74541023285a9057 It is interesting that this was set earlier but then reverted in https://github.com/MukuFlash03/e-mission-server/commit/e6b388bd3e1c0eff8ae76594c2a7539fc7464815 As long as we can override it, I think we should be fine. 
--- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 521e7194f..e98dc2124 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,7 +28,7 @@ RUN chmod u+x ./.docker/setup_config.sh RUN bash -c "./.docker/setup_config.sh" # #declare environment variables -ENV DB_HOST='' +ENV DB_HOST='db' ENV WEB_SERVER_HOST=0.0.0.0 ENV LIVERELOAD_SRC='' From fc183cb26edff2ad3a44a8bda7b4a5fc6c1353ce Mon Sep 17 00:00:00 2001 From: Shankari Date: Tue, 6 Aug 2024 19:18:54 -0700 Subject: [PATCH 73/89] =?UTF-8?q?=F0=9F=A9=B9=20Unify=20the=20supported=20?= =?UTF-8?q?user=20inputs=20across=20the=20debug=20and=20prod=20configs?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Before this change ``` < "userinput.keylist": ["manual/mode_confirm", "manual/purpose_confirm", "manual/trip_user_input", "manual/place_user_input"] --- > "userinput.keylist": ["manual/mode_confirm", "manual/purpose_confirm", "manual/replaced_mode", "manual/trip_user_input"] ``` After this change, no diffs in the `userinput.keylist`. 
Only changes are to the assertions being enabled ``` $ diff conf/analysis/debug.conf.dev.json conf/analysis/debug.conf.prod.json 3,4c3,4 < "intake.cleaning.clean_and_resample.speedDistanceAssertions": true, < "intake.cleaning.clean_and_resample.sectionValidityAssertions": true, --- > "intake.cleaning.clean_and_resample.speedDistanceAssertions": false, > "intake.cleaning.clean_and_resample.sectionValidityAssertions": false, ``` --- conf/analysis/debug.conf.dev.json | 2 +- conf/analysis/debug.conf.prod.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/conf/analysis/debug.conf.dev.json b/conf/analysis/debug.conf.dev.json index 23c184aa7..b778a692b 100644 --- a/conf/analysis/debug.conf.dev.json +++ b/conf/analysis/debug.conf.dev.json @@ -10,5 +10,5 @@ "section.startStopRadius": 150, "section.endStopRadius": 150, "analysis.result.section.key": "analysis/inferred_section", - "userinput.keylist": ["manual/mode_confirm", "manual/purpose_confirm", "manual/trip_user_input", "manual/place_user_input"] + "userinput.keylist": ["manual/mode_confirm", "manual/purpose_confirm", "manual/replaced_mode", "manual/trip_user_input", "manual/place_user_input"] } diff --git a/conf/analysis/debug.conf.prod.json b/conf/analysis/debug.conf.prod.json index 4097d8617..a234b1162 100644 --- a/conf/analysis/debug.conf.prod.json +++ b/conf/analysis/debug.conf.prod.json @@ -10,5 +10,5 @@ "section.startStopRadius": 150, "section.endStopRadius": 150, "analysis.result.section.key": "analysis/inferred_section", - "userinput.keylist": ["manual/mode_confirm", "manual/purpose_confirm", "manual/replaced_mode", "manual/trip_user_input"] + "userinput.keylist": ["manual/mode_confirm", "manual/purpose_confirm", "manual/replaced_mode", "manual/trip_user_input", "manual/place_user_input"] } From 51e16de15ca221c3cab892f3240a456efa11e579 Mon Sep 17 00:00:00 2001 From: Shankari Date: Tue, 6 Aug 2024 19:29:41 -0700 Subject: [PATCH 74/89] =?UTF-8?q?=F0=9F=A9=B9=20Remove=20the=20cat=20of=20?= 
=?UTF-8?q?db.conf=20from=20the=20integration=20tests=20as=20well?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Similar to 20670558cee27244f7f9008bf414184e424ffd83 but in a different file --- emission/integrationTests/start_integration_tests.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/emission/integrationTests/start_integration_tests.sh b/emission/integrationTests/start_integration_tests.sh index 7b01b5911..af792a5e0 100644 --- a/emission/integrationTests/start_integration_tests.sh +++ b/emission/integrationTests/start_integration_tests.sh @@ -3,7 +3,6 @@ cd /src/e-mission-server echo ${DB_HOST} -cat conf/storage/db.conf echo "Setting up conda..." source setup/setup_conda.sh Linux-x86_64 From 7ab37b66ef89a63b7b1be26987f8265ca8781679 Mon Sep 17 00:00:00 2001 From: Shankari Date: Tue, 6 Aug 2024 19:38:22 -0700 Subject: [PATCH 75/89] =?UTF-8?q?=F0=9F=A9=B9=20Cleanup=20environment=20va?= =?UTF-8?q?riables=20in=20the=20basic=20start=20script?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Remove cat - Don't set WEB_HOST since it is set in the Dockerfile Consistent with: 20670558cee27244f7f9008bf414184e424ffd83 and 51e16de15ca221c3cab892f3240a456efa11e579 --- setup/tests/start_script.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/setup/tests/start_script.sh b/setup/tests/start_script.sh index c5d2d7e39..2f4516ef9 100644 --- a/setup/tests/start_script.sh +++ b/setup/tests/start_script.sh @@ -5,9 +5,6 @@ cd /src/e-mission-server #set database URL using environment variable echo ${DB_HOST} -export WEB_SERVER_HOST=0.0.0.0 -cat conf/storage/db.conf - echo "Setting up conda..." 
source setup/setup_conda.sh Linux-x86_64 From 727c00c390d24de2e0e5f6920eefca404c47875f Mon Sep 17 00:00:00 2001 From: Shankari Date: Fri, 9 Aug 2024 19:57:06 -0700 Subject: [PATCH 76/89] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20=20Move=20the=20conf?= =?UTF-8?q?ig=20to=20a=20different=20file=20name=20that=20makes=20more=20s?= =?UTF-8?q?ense?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit So that we can consolidate all the backwards compat code in one place. if there is a config file and the environment variable is set, we need to decide which one wins. We will pull out the code into a common class to ensure DRY. Once the backwards compat has been removed, this can be merged into the individual config files --- emission/core/{config.py => backwards_compat_config.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename emission/core/{config.py => backwards_compat_config.py} (100%) diff --git a/emission/core/config.py b/emission/core/backwards_compat_config.py similarity index 100% rename from emission/core/config.py rename to emission/core/backwards_compat_config.py From 7f1be920098f644d863536a5e810343f2634fc28 Mon Sep 17 00:00:00 2001 From: Shankari Date: Sat, 10 Aug 2024 09:58:23 -0700 Subject: [PATCH 77/89] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20=20Refactor=20the=20?= =?UTF-8?q?backwards=20config=20file=20to=20be=20reusable?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 1. This will avoid having multiple, almost identical, copies of the same file 2. It will ensure that we read the code primarily in "the new way" from a dict 3. 
it should make removing the backwards compat layer easier in the future since we are reading from a dict with a default anyway Testing done: ``` $ ./e-mission-py.bash emission/tests/storageTests/TestTimeSeries.py ---------------------------------------------------------------------- Ran 8 tests in 18.819s OK ``` --- emission/core/backwards_compat_config.py | 54 ++++++++++++++---------- emission/core/get_database.py | 10 +++-- 2 files changed, 38 insertions(+), 26 deletions(-) diff --git a/emission/core/backwards_compat_config.py b/emission/core/backwards_compat_config.py index 7e505ca87..2cb94e9a7 100644 --- a/emission/core/backwards_compat_config.py +++ b/emission/core/backwards_compat_config.py @@ -1,32 +1,40 @@ import json import logging import os +import numpy as np +import pandas as pd -def get_config_data_from_env(): - config_data_env = { - "timeseries": { - "url": os.getenv('DB_HOST', "localhost"), - "result_limit": os.getenv('DB_TS_RESULT_LIMIT', 250000) - } - } - return config_data_env +# if there is a config file and the environment variable is set, we need to +# decide which one wins. I would argue for the environment variable, to allow +# for a migration to the new model and for us to remove the obsolete code. +# Although arguably, the converse will also work, since we can set the +# variable while the file is present, and then remove the file in a second +# round of changes. Let's keep the order unchanged for now for simplicity, and +# modify as needed later. -def get_config_data(): +def get_config(config_file_name, var_path_mapping): + # Since a `config_data` field would be at the module level, and we want + # the module to be reusable, we are not going to cache the result. It is + # not clear that we need to cache the result anyway, given that we + # typically initialize the config variables at the beginning of the + # modules in which they are used. If we feel like this is an issue, we can + # switch to creating a class instead. 
+ ret_val = {} try: - config_file = open('conf/storage/db.conf') - ret_val = json.load(config_file) + config_file = open(config_file_name) + # we only have a single entry in the config json, not an array + # and there is no way for json_normalize to return a series + # so we will just take the first row of the dataframe + loaded_val = pd.json_normalize(json.load(config_file)).iloc[0] + for var, path in var_path_mapping.items(): + ret_val[var] = loaded_val[path] + # Ensure that the returned values are regular ints + # https://github.com/e-mission/e-mission-server/pull/961#issuecomment-2282206511 + if type(ret_val[var]) is np.int64: + ret_val[var] = int(ret_val[var]) config_file.close() except: - ret_val = get_config_data_from_env() - if ret_val["timeseries"]["url"] == "localhost": - print("storage not configured, falling back to sample, default configuration") + print("Config file not found, returning a copy of the environment variables instead...") + # https://github.com/e-mission/e-mission-server/pull/961#issuecomment-2282209006 + ret_val = dict(os.environ) return ret_val - -config_data = get_config_data() - -def get_config(): - return config_data - -def reload_config(): - global config_data - config_data = get_config_data() diff --git a/emission/core/get_database.py b/emission/core/get_database.py index c8d370fcd..4af873934 100644 --- a/emission/core/get_database.py +++ b/emission/core/get_database.py @@ -10,10 +10,14 @@ import os import json -import emission.core.config as ecc +import emission.core.backwards_compat_config as ecbc -url = ecc.get_config()["timeseries"]["url"] -result_limit = ecc.get_config()["timeseries"]["result_limit"] +config = ecbc.get_config('conf/storage/db.conf', + {"DB_HOST": "timeseries.url", "DB_RESULT_LIMIT": "timeseries.result_limit"}) + +print("Retrieved config %s" % config) +url = config.get("DB_HOST", "localhost") +result_limit = config.get("DB_RESULT_LIMIT", 250000) try: parsed=pymongo.uri_parser.parse_uri(url) From 
7177e7169092099c7dd24f69123309384d4c7176 Mon Sep 17 00:00:00 2001 From: Shankari Date: Sat, 10 Aug 2024 15:55:44 -0700 Subject: [PATCH 78/89] =?UTF-8?q?=F0=9F=94=8A=20log=20the=20full=20backtra?= =?UTF-8?q?ce=20if=20the=20config=20file=20is=20formatted=20incorrectly?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This allows us to troubleshoot the config files and fix them instead of only falling back to the defaults. --- emission/core/backwards_compat_config.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/emission/core/backwards_compat_config.py b/emission/core/backwards_compat_config.py index 2cb94e9a7..afd793504 100644 --- a/emission/core/backwards_compat_config.py +++ b/emission/core/backwards_compat_config.py @@ -33,7 +33,9 @@ def get_config(config_file_name, var_path_mapping): if type(ret_val[var]) is np.int64: ret_val[var] = int(ret_val[var]) config_file.close() - except: + except Exception as e: + if isinstance(e, KeyError) or isinstance(e, json.decoder.JSONDecodeError): + logging.exception(e) print("Config file not found, returning a copy of the environment variables instead...") # https://github.com/e-mission/e-mission-server/pull/961#issuecomment-2282209006 ret_val = dict(os.environ) From 168ef10b7f29084f3a90ec497493130227d3ed8f Mon Sep 17 00:00:00 2001 From: Shankari Date: Sat, 10 Aug 2024 15:58:22 -0700 Subject: [PATCH 79/89] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20=20Move=20the=20api?= =?UTF-8?q?=20configuration=20into=20the=20backwards=20compat=20as=20well?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Consistent with https://github.com/e-mission/e-mission-server/pull/961#issuecomment-2282206511 and 7f1be920098f644d863536a5e810343f2634fc28 (which was the original example for the database) --- emission/net/api/cfc_webapp.py | 29 +++++++++++++++------------ emission/net/api/config.py | 36 ---------------------------------- 2 files changed, 16 
insertions(+), 49 deletions(-) delete mode 100644 emission/net/api/config.py diff --git a/emission/net/api/cfc_webapp.py b/emission/net/api/cfc_webapp.py index 4cde31550..9e2eb68fa 100644 --- a/emission/net/api/cfc_webapp.py +++ b/emission/net/api/cfc_webapp.py @@ -51,17 +51,22 @@ import emission.storage.timeseries.cache_series as esdc import emission.core.timer as ect import emission.core.get_database as edb -import emission.net.api.config as enac +import emission.core.backwards_compat_config as ecbc STUDY_CONFIG = os.getenv('STUDY_CONFIG', "stage-program") -enac.reload_config() -static_path = enac.get_config()["static_path"] -server_host = enac.get_config()["server_host"] -server_port = enac.get_config()["server_port"] -socket_timeout = enac.get_config()["socket_timeout"] -auth_method = enac.get_config()["auth_method"] -aggregate_call_auth = enac.get_config()["aggregate_call_auth"] -not_found_redirect = enac.get_config()["not_found_redirect"] + +# Constants that we don't read from the configuration +WEBSERVER_STATIC_PATH="webapp/www" +WEBSERVER_HOST="0.0.0.0" + +config = ecbc.get_config('conf/net/api/webserver.conf', + {"WEBSERVER_PORT": "server.port", "WEBSERVER_TIMEOUT": "server.timeout", + "WEBSERVER_AUTH": "server.auth", "WEBSERVER_AGGREGATE_CALL_AUTH": "server.aggregate_call_auth"}) +server_port = config.get("WEBSERVER_PORT", 8080) +socket_timeout = config.get("WEBSERVER_TIMEOUT", 3600) +auth_method = config.get("WEBSERVER_AUTH", "skip") +aggregate_call_auth = config.get("WEBSERVER_AGGREGATE_CALL_AUTH", "no_auth") +not_found_redirect = config.get("WEBSERVER_NOT_FOUND_REDIRECT", "https://nrel.gov/openpath") BaseRequest.MEMFILE_MAX = 1024 * 1024 * 1024 # Allow the request size to be 1G # to accomodate large section sizes @@ -79,7 +84,7 @@ #Simple path that serves up a static landing page with javascript in it @route('/') def index(): - return static_file("index.html", static_path) + return static_file("index.html", WEBSERVER_STATIC_PATH) # Backward compat to 
handle older clients # Remove in 2023 after everybody has upgraded @@ -548,6 +553,4 @@ def resolve_auth(auth_method): else: # Non SSL option for testing on localhost print("Running with HTTPS turned OFF - use a reverse proxy on production") - run(host=server_host, port=server_port, server='cheroot', debug=True) - - # run(host="0.0.0.0", port=server_port, server='cherrypy', debug=True) + run(host=WEBSERVER_HOST, port=server_port, server='cheroot', debug=True) diff --git a/emission/net/api/config.py b/emission/net/api/config.py deleted file mode 100644 index e8cbd6392..000000000 --- a/emission/net/api/config.py +++ /dev/null @@ -1,36 +0,0 @@ -import json -import logging -import os - -def get_config_data_from_env(): - config_data_env = { - "static_path": os.getenv('WEB_SERVER_STATIC_PATH', "webapp/www/"), - "server_host": os.getenv('WEB_SERVER_HOST', "0.0.0.0"), - "server_port": os.getenv('WEB_SERVER_PORT', "8080"), - "socket_timeout": os.getenv('WEB_SERVER_TIMEOUT', "3600"), - "auth_method": os.getenv('WEB_SERVER_AUTH', "skip"), - "aggregate_call_auth": os.getenv('WEB_SERVER_AGGREGATE_CALL_AUTH', "no_auth"), - "not_found_redirect": os.getenv('WEB_SERVER_REDIRECT_URL', "https://www.nrel.gov/transportation/openpath.html") - } - return config_data_env - - -def get_config_data(): - try: - config_file = open('conf/net/api/webserver.conf') - ret_val = json.load(config_file) - config_file.close() - except: - # if check_unset_env_vars(): - logging.debug("webserver not configured, falling back to sample, default configuration") - ret_val = get_config_data_from_env() - return ret_val - -config_data = get_config_data() - -def get_config(): - return config_data - -def reload_config(): - global config_data - config_data = get_config_data() From 3dea305184796fdd662e0faf3cedb72f49b3d44c Mon Sep 17 00:00:00 2001 From: Shankari Date: Sat, 10 Aug 2024 15:58:22 -0700 Subject: [PATCH 80/89] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20=20Move=20the=20api?= 
=?UTF-8?q?=20configuration=20into=20the=20backwards=20compat=20as=20well?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Consistent with https://github.com/e-mission/e-mission-server/pull/961#issuecomment-2282206511 and 7f1be920098f644d863536a5e810343f2634fc28 (which was the original example for the database) Testing done: 1. Started the webapp with no overridden config ``` Config file not found, returning a copy of the environment variables instead... Finished configuring logging for Using auth method skip Replaced json_dumps in plugin with the one from bson Changing bt.json_loads from at 0x10b2b08b0> to at 0x10bd6a940> Running with HTTPS turned OFF - use a reverse proxy on production Bottle v0.13-dev server starting up (using CherootServer())... Listening on http://0.0.0.0:8080/ Hit Ctrl-C to quit. ``` 2. Started the webapp with an invalid config override ``` ERROR:root:Expecting ',' delimiter: line 12 column 5 (char 464) Traceback (most recent call last): File "/Users/kshankar/Desktop/data/e-mission/gis_branch_tests/emission/core/backwards_compat_config.py", line 28, in get_config loaded_val = pd.json_normalize(json.load(config_file)).iloc[0] ... json.decoder.JSONDecodeError: Expecting ',' delimiter: line 12 column 5 (char 464) Config file not found, returning a copy of the environment variables instead... ``` 3. Started the webapp with a valid config override ``` Finished configuring logging for Using auth method token_list Replaced json_dumps in plugin with the one from bson Changing bt.json_loads from at 0x10fd2a8b0> to at 0x1107e5940> Running with HTTPS turned OFF - use a reverse proxy on production Bottle v0.13-dev server starting up (using CherootServer())... Listening on http://0.0.0.0:8080/ Hit Ctrl-C to quit. 
``` --- emission/net/api/cfc_webapp.py | 29 +++++++++++++++------------ emission/net/api/config.py | 36 ---------------------------------- 2 files changed, 16 insertions(+), 49 deletions(-) delete mode 100644 emission/net/api/config.py diff --git a/emission/net/api/cfc_webapp.py b/emission/net/api/cfc_webapp.py index 4cde31550..9e2eb68fa 100644 --- a/emission/net/api/cfc_webapp.py +++ b/emission/net/api/cfc_webapp.py @@ -51,17 +51,22 @@ import emission.storage.timeseries.cache_series as esdc import emission.core.timer as ect import emission.core.get_database as edb -import emission.net.api.config as enac +import emission.core.backwards_compat_config as ecbc STUDY_CONFIG = os.getenv('STUDY_CONFIG', "stage-program") -enac.reload_config() -static_path = enac.get_config()["static_path"] -server_host = enac.get_config()["server_host"] -server_port = enac.get_config()["server_port"] -socket_timeout = enac.get_config()["socket_timeout"] -auth_method = enac.get_config()["auth_method"] -aggregate_call_auth = enac.get_config()["aggregate_call_auth"] -not_found_redirect = enac.get_config()["not_found_redirect"] + +# Constants that we don't read from the configuration +WEBSERVER_STATIC_PATH="webapp/www" +WEBSERVER_HOST="0.0.0.0" + +config = ecbc.get_config('conf/net/api/webserver.conf', + {"WEBSERVER_PORT": "server.port", "WEBSERVER_TIMEOUT": "server.timeout", + "WEBSERVER_AUTH": "server.auth", "WEBSERVER_AGGREGATE_CALL_AUTH": "server.aggregate_call_auth"}) +server_port = config.get("WEBSERVER_PORT", 8080) +socket_timeout = config.get("WEBSERVER_TIMEOUT", 3600) +auth_method = config.get("WEBSERVER_AUTH", "skip") +aggregate_call_auth = config.get("WEBSERVER_AGGREGATE_CALL_AUTH", "no_auth") +not_found_redirect = config.get("WEBSERVER_NOT_FOUND_REDIRECT", "https://nrel.gov/openpath") BaseRequest.MEMFILE_MAX = 1024 * 1024 * 1024 # Allow the request size to be 1G # to accomodate large section sizes @@ -79,7 +84,7 @@ #Simple path that serves up a static landing page with javascript 
in it @route('/') def index(): - return static_file("index.html", static_path) + return static_file("index.html", WEBSERVER_STATIC_PATH) # Backward compat to handle older clients # Remove in 2023 after everybody has upgraded @@ -548,6 +553,4 @@ def resolve_auth(auth_method): else: # Non SSL option for testing on localhost print("Running with HTTPS turned OFF - use a reverse proxy on production") - run(host=server_host, port=server_port, server='cheroot', debug=True) - - # run(host="0.0.0.0", port=server_port, server='cherrypy', debug=True) + run(host=WEBSERVER_HOST, port=server_port, server='cheroot', debug=True) diff --git a/emission/net/api/config.py b/emission/net/api/config.py deleted file mode 100644 index e8cbd6392..000000000 --- a/emission/net/api/config.py +++ /dev/null @@ -1,36 +0,0 @@ -import json -import logging -import os - -def get_config_data_from_env(): - config_data_env = { - "static_path": os.getenv('WEB_SERVER_STATIC_PATH', "webapp/www/"), - "server_host": os.getenv('WEB_SERVER_HOST', "0.0.0.0"), - "server_port": os.getenv('WEB_SERVER_PORT', "8080"), - "socket_timeout": os.getenv('WEB_SERVER_TIMEOUT', "3600"), - "auth_method": os.getenv('WEB_SERVER_AUTH', "skip"), - "aggregate_call_auth": os.getenv('WEB_SERVER_AGGREGATE_CALL_AUTH', "no_auth"), - "not_found_redirect": os.getenv('WEB_SERVER_REDIRECT_URL', "https://www.nrel.gov/transportation/openpath.html") - } - return config_data_env - - -def get_config_data(): - try: - config_file = open('conf/net/api/webserver.conf') - ret_val = json.load(config_file) - config_file.close() - except: - # if check_unset_env_vars(): - logging.debug("webserver not configured, falling back to sample, default configuration") - ret_val = get_config_data_from_env() - return ret_val - -config_data = get_config_data() - -def get_config(): - return config_data - -def reload_config(): - global config_data - config_data = get_config_data() From a0f0c6a66524c18733ae8239203701b3ce21cd98 Mon Sep 17 00:00:00 2001 From: Shankari 
Date: Sun, 11 Aug 2024 17:30:25 -0700 Subject: [PATCH 81/89] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20=20Move=20the=20push?= =?UTF-8?q?=20configuration=20into=20the=20backwards=20compat=20as=20well?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Consistent with https://github.com/e-mission/e-mission-server/pull/961#issuecomment-2282206511 and - 3dea305184796fdd662e0faf3cedb72f49b3d44c (example for the api) - 7f1be920098f644d863536a5e810343f2634fc28 (original example for the database) Testing done: ``` $ ./e-mission-py.bash emission/tests/netTests/TestPush.py ---------------------------------------------------------------------- Ran 5 tests in 0.276s OK ``` --- emission/net/ext_service/push/notify_interface.py | 10 +++++++--- .../ext_service/push/notify_interface_impl/firebase.py | 8 ++++---- emission/tests/netTests/TestPush.py | 4 ++-- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/emission/net/ext_service/push/notify_interface.py b/emission/net/ext_service/push/notify_interface.py index d38a213b1..b8080bc74 100644 --- a/emission/net/ext_service/push/notify_interface.py +++ b/emission/net/ext_service/push/notify_interface.py @@ -11,22 +11,26 @@ import logging import importlib -import emission.net.ext_service.push.config as pc +import emission.core.backwards_compat_config as ecbc # Note that the URL is hardcoded because the API endpoints are not standardized. # If we change a push provider, we will need to modify to match their endpoints. # Hardcoding will remind us of this :) # We can revisit this if push providers eventually decide to standardize... 
+push_config = ecbc.get_config('conf/net/ext_service/push.json', + {"PUSH_PROVIDER": "provider", "PUSH_SERVER_AUTH_TOKEN": "server_auth_token", + "PUSH_APP_PACKAGE_NAME": "app_package_name", "PUSH_IOS_TOKEN_FORMAT": "ios_token_format"}) + try: - push_config = pc.get_config() + logging.info(f"Push configured for app {push_config.get('PUSH_SERVER_AUTH_TOKEN')} using platform {os.getenv('PUSH_PROVIDER')} with token {os.getenv('PUSH_SERVER_AUTH_TOKEN')[:10]}... of length {len(os.getenv('PUSH_SERVER_AUTH_TOKEN'))}") except: logging.warning("push service not configured, push notifications not supported") class NotifyInterfaceFactory(object): @staticmethod def getDefaultNotifyInterface(): - return NotifyInterfaceFactory.getNotifyInterface(push_config["provider"]) + return NotifyInterfaceFactory.getNotifyInterface(push_config["PUSH_PROVIDER"]) @staticmethod def getNotifyInterface(pushProvider): diff --git a/emission/net/ext_service/push/notify_interface_impl/firebase.py b/emission/net/ext_service/push/notify_interface_impl/firebase.py index 34593f82f..6ce7eb8e3 100644 --- a/emission/net/ext_service/push/notify_interface_impl/firebase.py +++ b/emission/net/ext_service/push/notify_interface_impl/firebase.py @@ -21,13 +21,13 @@ def get_interface(push_config): class FirebasePush(pni.NotifyInterface): def __init__(self, push_config): - self.server_auth_token = push_config["server_auth_token"] - if "app_package_name" in push_config: - self.app_package_name = push_config["app_package_name"] + self.server_auth_token = push_config["PUSH_SERVER_AUTH_TOKEN"] + if "PUSH_APP_PACKAGE_NAME" in push_config: + self.app_package_name = push_config["PUSH_APP_PACKAGE_NAME"] else: logging.warning("No package name specified, defaulting to embase") self.app_package_name = "edu.berkeley.eecs.embase" - self.is_fcm_format = push_config["ios_token_format"] == "fcm" + self.is_fcm_format = push_config["PUSH_IOS_TOKEN_FORMAT"] == "fcm" def get_and_invalidate_entries(self): # Need to figure out how to do 
this on firebase diff --git a/emission/tests/netTests/TestPush.py b/emission/tests/netTests/TestPush.py index 43684a33b..2b6ef636c 100644 --- a/emission/tests/netTests/TestPush.py +++ b/emission/tests/netTests/TestPush.py @@ -121,7 +121,7 @@ def testFcmMapping(self): logging.debug("test token map = %s" % self.test_token_map) try: - fcm_instance = pnif.get_interface({"server_auth_token": "firebase_api_key", "ios_token_format": "apns"}) + fcm_instance = pnif.get_interface({"PUSH_SERVER_AUTH_TOKEN": "firebase_api_key", "PUSH_IOS_TOKEN_FORMAT": "apns"}) (mapped_token_map, unmapped_token_list) = fcm_instance.map_existing_fcm_tokens(self.test_token_map) # At this point, there is nothing in the database, so no iOS tokens will be mapped self.assertEqual(len(mapped_token_map["ios"]), 0) @@ -176,7 +176,7 @@ def testFcmNoMapping(self): "android": self.test_token_list_android} logging.debug("test token map = %s" % self.test_token_map) - fcm_instance = pnif.get_interface({"server_auth_token": "firebase_api_key", "ios_token_format": "fcm"}) + fcm_instance = pnif.get_interface({"PUSH_SERVER_AUTH_TOKEN": "firebase_api_key", "PUSH_IOS_TOKEN_FORMAT": "fcm"}) (mapped_token_map, unmapped_token_list) = fcm_instance.map_existing_fcm_tokens(self.test_token_map) # These are assumed to be FCM tokens directly, so no mapping required self.assertEqual(len(mapped_token_map["ios"]), 10) From 10624a64c485d17e258add8037b1c7bf4b913b56 Mon Sep 17 00:00:00 2001 From: Shankari Date: Sun, 11 Aug 2024 17:45:56 -0700 Subject: [PATCH 82/89] =?UTF-8?q?=F0=9F=94=8A=20Indicate=20that=20we=20are?= =?UTF-8?q?=20using=20the=20default=20production=20config?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit if no override is found --- emission/analysis/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/emission/analysis/config.py b/emission/analysis/config.py index 55deb77d3..d484e5354 100644 --- a/emission/analysis/config.py +++ 
b/emission/analysis/config.py @@ -7,7 +7,7 @@ def get_config_data(): config_file = open('conf/analysis/debug.conf.json') except: if os.getenv("PROD_STAGE") == "TRUE": - print("In production environment, opening internal debug.conf") + print("In production environment, config not overridden, using default production debug.conf") config_file = open('conf/analysis/debug.conf.prod.json') else: print("analysis.debug.conf.json not configured, falling back to sample, default configuration") From 11d2a897382b5df21b107413e22f1af08428ed19 Mon Sep 17 00:00:00 2001 From: Shankari Date: Sun, 11 Aug 2024 18:18:49 -0700 Subject: [PATCH 83/89] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20=20Pull=20out=20the?= =?UTF-8?q?=20code=20to=20reset=20the=20environment=20variable=20overrides?= =?UTF-8?q?=20to=20a=20common=20file?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is consistent with - a0f0c6a66524c18733ae8239203701b3ce21cd98 (push config) - 3dea305184796fdd662e0faf3cedb72f49b3d44c (api config) but ensures that we follow DRY --- .../storageTests/TestMongodbAuth.py | 11 +++-------- emission/tests/common.py | 8 ++++++++ emission/tests/netTests/TestWebserver.py | 14 +++++--------- 3 files changed, 16 insertions(+), 17 deletions(-) diff --git a/emission/integrationTests/storageTests/TestMongodbAuth.py b/emission/integrationTests/storageTests/TestMongodbAuth.py index 0e0190d7b..fbb842841 100644 --- a/emission/integrationTests/storageTests/TestMongodbAuth.py +++ b/emission/integrationTests/storageTests/TestMongodbAuth.py @@ -53,11 +53,7 @@ def setUp(self): def tearDown(self): self.admin_auth.command({"dropAllUsersFromDatabase": 1}) logging.debug("Deleting test db environment variables") - for env_var_name, env_var_value in self.testModifiedEnvVars.items(): - del os.environ[env_var_name] - # Restoring original db environment variables - for env_var_name, env_var_value in self.originalDBEnvVars.items(): - os.environ[env_var_name] = env_var_value + 
ecc.restoreOriginalEnvVars(self.originalDBEnvVars, self.testModifiedEnvVars) logging.debug("Finished restoring original db environment variables") logging.debug("Restored original values are = %s" % self.originalDBEnvVars) try: @@ -80,10 +76,9 @@ def configureDB(self, url): 'DB_HOST' : url } + self.originalDBEnvVars = dict(os.environ) + for env_var_name, env_var_value in self.testModifiedEnvVars.items(): - if os.getenv(env_var_name) is not None: - # Storing original db environment variables before modification - self.originalDBEnvVars[env_var_name] = os.getenv(env_var_name) # Setting db environment variables with test values os.environ[env_var_name] = env_var_value diff --git a/emission/tests/common.py b/emission/tests/common.py index 492fab547..baae6053c 100644 --- a/emission/tests/common.py +++ b/emission/tests/common.py @@ -10,6 +10,7 @@ import logging from datetime import datetime, timedelta import json +import os import emission.storage.json_wrappers as esj import uuid import pymongo @@ -171,6 +172,13 @@ def setupIncomingEntries(): return (entry_list, ios_entry_list) +def restoreOriginalEnvVars(originalEnvVars, modifiedEnvVars): + for env_var_name, env_var_value in modifiedEnvVars.items(): + del os.environ[env_var_name] + # Restoring original db environment variables + for env_var_name, env_var_value in originalEnvVars.items(): + os.environ[env_var_name] = env_var_value + def runIntakePipeline(uuid): # Move these imports here so that we don't inadvertently load the modules, # and any related config modules, before we want to diff --git a/emission/tests/netTests/TestWebserver.py b/emission/tests/netTests/TestWebserver.py index 2c3a634d9..fc91e5ab1 100644 --- a/emission/tests/netTests/TestWebserver.py +++ b/emission/tests/netTests/TestWebserver.py @@ -25,13 +25,12 @@ class TestWebserver(unittest.TestCase): def setUp(self): self.originalWebserverEnvVars = {} self.testModifiedEnvVars = {
'WEBSERVER_NOT_FOUND_REDIRECT' : "http://somewhere.else" } + self.originalWebserverEnvVars = dict(os.environ) + for env_var_name, env_var_value in self.testModifiedEnvVars.items(): - if os.getenv(env_var_name) is not None: - # Storing original webserver environment variables before modification - self.originalWebserverEnvVars[env_var_name] = os.getenv(env_var_name) # Setting webserver environment variables with test values os.environ[env_var_name] = env_var_value @@ -41,11 +40,8 @@ def setUp(self): def tearDown(self): logging.debug("Deleting test webserver environment variables") - for env_var_name, env_var_value in self.testModifiedEnvVars.items(): - del os.environ[env_var_name] - # Restoring original webserver environment variables - for env_var_name, env_var_value in self.originalWebserverEnvVars.items(): - os.environ[env_var_name] = env_var_value + etc.restoreOriginalEnvVars(self.originalWebserverEnvVars, + self.testModifiedEnvVars) logging.debug("Finished restoring original webserver environment variables") logging.debug("Restored original values are = %s" % self.originalWebserverEnvVars) From 4cc4c585db02c5785e68473aa2764b0c0bd632c4 Mon Sep 17 00:00:00 2001 From: Shankari Date: Mon, 12 Aug 2024 08:39:46 -0700 Subject: [PATCH 84/89] =?UTF-8?q?=E2=9C=85=20Fix=20the=20expected=20text?= =?UTF-8?q?=20while=20checking=20for=20tokens?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In 7f1be920098f644d863536a5e810343f2634fc28, we changed the DB configuration to be based on environment variables. This changed the output text when importing the module and launching the script. This change fixes the test token tests that compare the output text with a baseline to reflect the new expected text.
--- emission/tests/storageTests/TestTokenQueries.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/emission/tests/storageTests/TestTokenQueries.py b/emission/tests/storageTests/TestTokenQueries.py index 23f57ae23..7e570de5e 100644 --- a/emission/tests/storageTests/TestTokenQueries.py +++ b/emission/tests/storageTests/TestTokenQueries.py @@ -159,8 +159,8 @@ def test_run_script_show(self): # The first message is displayed when we run tests locally # The second is displayed when we run in the docker CI, since the `DB_HOST` is set to `db` self.assertIn(sp.stdout, - [b'storage not configured, falling back to sample, default configuration\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nx\ny\nz\n', - b'URL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nx\ny\nz\n' + [b'Retrieved config {\'DB_HOST\': \'localhost\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nx\ny\nz\n', + b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'CONDA_EXE\': \'/root/miniconda-23.5.2/bin/conda\', \'_CE_M\': \'\', \'HOSTNAME\': \'a846a70e9205\', \'EXP_CONDA_VER\': \'23.5.2\', \'PWD\': \'/src/e-mission-server\', \'CONDA_PREFIX\': \'/root/miniconda-23.5.2/envs/emissiontest\', \'WEB_SERVER_HOST\': \'0.0.0.0\', \'HOME\': \'/root\', \'CONDA_PROMPT_MODIFIER\': \'(emissiontest) \', \'_CE_CONDA\': \'\', \'DB_HOST\': \'db\', \'CONDA_SHLVL\': \'1\', \'SHLVL\': \'2\', \'CONDA_PYTHON_EXE\': \'/root/miniconda-23.5.2/bin/python\', \'CONDA_DEFAULT_ENV\': \'emissiontest\', \'PATH\': \'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', \'EXP_CONDA_VER_SUFFIX\': \'0\', \'OLDPWD\': \'/\', \'_\': \'/root/miniconda-23.5.2/envs/emissiontest/bin/python\', \'LC_CTYPE\': \'C.UTF-8\', 
\'KMP_DUPLICATE_LIB_OK\': \'True\', \'KMP_INIT_AT_FORK\': \'FALSE\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nx\ny\nz\n' ]) def test_run_script_empty(self): @@ -168,8 +168,8 @@ def test_run_script_empty(self): # The first message is displayed when we run tests locally # The second is displayed when we run in the docker CI, since the `DB_HOST` is set to `db` self.assertIn(sp.stdout, - [b'storage not configured, falling back to sample, default configuration\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nPlease provide the script with an argument. Use the "--help" option for more details\n', - b'URL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nPlease provide the script with an argument. Use the "--help" option for more details\n' + [b'Retrieved config {\'DB_HOST\': \'localhost\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nPlease provide the script with an argument. 
Use the "--help" option for more details\n', + b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'CONDA_EXE\': \'/root/miniconda-23.5.2/bin/conda\', \'_CE_M\': \'\', \'HOSTNAME\': \'a846a70e9205\', \'EXP_CONDA_VER\': \'23.5.2\', \'PWD\': \'/src/e-mission-server\', \'CONDA_PREFIX\': \'/root/miniconda-23.5.2/envs/emissiontest\', \'WEB_SERVER_HOST\': \'0.0.0.0\', \'HOME\': \'/root\', \'CONDA_PROMPT_MODIFIER\': \'(emissiontest) \', \'_CE_CONDA\': \'\', \'DB_HOST\': \'db\', \'CONDA_SHLVL\': \'1\', \'SHLVL\': \'2\', \'CONDA_PYTHON_EXE\': \'/root/miniconda-23.5.2/bin/python\', \'CONDA_DEFAULT_ENV\': \'emissiontest\', \'PATH\': \'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', \'EXP_CONDA_VER_SUFFIX\': \'0\', \'OLDPWD\': \'/\', \'_\': \'/root/miniconda-23.5.2/envs/emissiontest/bin/python\', \'LC_CTYPE\': \'C.UTF-8\', \'KMP_DUPLICATE_LIB_OK\': \'True\', \'KMP_INIT_AT_FORK\': \'FALSE\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nPlease provide the script with an argument. 
Use the "--help" option for more details\n' ]) #test that no two options can be used together From 357d4b8afded4e3f7982953b11eee7f347632af6 Mon Sep 17 00:00:00 2001 From: Shankari Date: Mon, 12 Aug 2024 08:45:34 -0700 Subject: [PATCH 85/89] =?UTF-8?q?=F0=9F=94=A5=20Remove=20the=20copied=20ov?= =?UTF-8?q?er=20config=20file?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Since we now use the standard backwards compat module (a0f0c6a66524c18733ae8239203701b3ce21cd98) --- emission/net/ext_service/push/config.py | 37 ------------------------- 1 file changed, 37 deletions(-) delete mode 100644 emission/net/ext_service/push/config.py diff --git a/emission/net/ext_service/push/config.py b/emission/net/ext_service/push/config.py deleted file mode 100644 index 1dabb75f0..000000000 --- a/emission/net/ext_service/push/config.py +++ /dev/null @@ -1,37 +0,0 @@ -import json -import logging -import os - -def get_config_data_from_env(): - config_data_env = { - "provider": os.getenv("PUSH_PROVIDER"), - "server_auth_token": os.getenv("PUSH_SERVER_AUTH_TOKEN"), - "app_package_name": os.getenv("PUSH_APP_PACKAGE_NAME"), - "ios_token_format": os.getenv("PUSH_IOS_TOKEN_FORMAT") - } - return config_data_env - -def get_config_data(): - try: - config_file = open('conf/net/ext_service/push.json') - ret_val = json.load(config_file) - config_file.close() - except: - logging.warning("net.ext_service.push.json not configured, checking environment variables...") - ret_val = get_config_data_from_env() - # Check if all PUSH environment variables are not set - if (not any(ret_val.values())): - raise TypeError - return ret_val - -try: - config_data = get_config_data() -except: - logging.warning("All push environment variables are set to None") - -def get_config(): - return config_data - -def reload_config(): - global config_data - config_data = get_config_data() From b6f59b09448ef51ff06bf9961a008ab6cf4cb40e Mon Sep 17 00:00:00 2001 From: Shankari Date: Mon, 
12 Aug 2024 08:50:46 -0700 Subject: [PATCH 86/89] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20=20Access=20the=20en?= =?UTF-8?q?vironment=20variables=20from=20the=20config=20using=20`.get`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit So that we can get a full list of the environment variable by grepping properly ``` $ grep -r 'config.get("[A-Z]' emission/ emission//net/ext_service/push/notify_interface_impl/firebase.py: self.server_auth_token = push_config.get("PUSH_SERVER_AUTH_TOKEN") emission//net/ext_service/push/notify_interface_impl/firebase.py: self.app_package_name = push_config.get("PUSH_APP_PACKAGE_NAME") emission//net/ext_service/push/notify_interface_impl/firebase.py: self.is_fcm_format = push_config.get("PUSH_IOS_TOKEN_FORMAT") == "fcm" emission//net/ext_service/push/notify_interface.py: return NotifyInterfaceFactory.getNotifyInterface(push_config.get("PUSH_PROVIDER")) emission//net/api/cfc_webapp.py:server_port = config.get("WEBSERVER_PORT", 8080) emission//net/api/cfc_webapp.py:socket_timeout = config.get("WEBSERVER_TIMEOUT", 3600) emission//net/api/cfc_webapp.py:auth_method = config.get("WEBSERVER_AUTH", "skip") emission//net/api/cfc_webapp.py:aggregate_call_auth = config.get("WEBSERVER_AGGREGATE_CALL_AUTH", "no_auth") emission//net/api/cfc_webapp.py:not_found_redirect = config.get("WEBSERVER_NOT_FOUND_REDIRECT", "https://nrel.gov/openpath") emission//core/get_database.py:url = config.get("DB_HOST", "localhost") emission//core/get_database.py:result_limit = config.get("DB_RESULT_LIMIT", 250000) ``` --- emission/net/ext_service/push/notify_interface.py | 2 +- .../net/ext_service/push/notify_interface_impl/firebase.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/emission/net/ext_service/push/notify_interface.py b/emission/net/ext_service/push/notify_interface.py index b8080bc74..8363011e1 100644 --- a/emission/net/ext_service/push/notify_interface.py +++ 
b/emission/net/ext_service/push/notify_interface.py @@ -30,7 +30,7 @@ class NotifyInterfaceFactory(object): @staticmethod def getDefaultNotifyInterface(): - return NotifyInterfaceFactory.getNotifyInterface(push_config["PUSH_PROVIDER"]) + return NotifyInterfaceFactory.getNotifyInterface(push_config.get("PUSH_PROVIDER")) @staticmethod def getNotifyInterface(pushProvider): diff --git a/emission/net/ext_service/push/notify_interface_impl/firebase.py b/emission/net/ext_service/push/notify_interface_impl/firebase.py index 6ce7eb8e3..a33824349 100644 --- a/emission/net/ext_service/push/notify_interface_impl/firebase.py +++ b/emission/net/ext_service/push/notify_interface_impl/firebase.py @@ -21,13 +21,13 @@ def get_interface(push_config): class FirebasePush(pni.NotifyInterface): def __init__(self, push_config): - self.server_auth_token = push_config["PUSH_SERVER_AUTH_TOKEN"] + self.server_auth_token = push_config.get("PUSH_SERVER_AUTH_TOKEN") if "PUSH_APP_PACKAGE_NAME" in push_config: - self.app_package_name = push_config["PUSH_APP_PACKAGE_NAME"] + self.app_package_name = push_config.get("PUSH_APP_PACKAGE_NAME") else: logging.warning("No package name specified, defaulting to embase") self.app_package_name = "edu.berkeley.eecs.embase" - self.is_fcm_format = push_config["PUSH_IOS_TOKEN_FORMAT"] == "fcm" + self.is_fcm_format = push_config.get("PUSH_IOS_TOKEN_FORMAT") == "fcm" def get_and_invalidate_entries(self): # Need to figure out how to do this on firebase From ec388355a2804bffbdd50aed61f8fabb005d6fa1 Mon Sep 17 00:00:00 2001 From: Shankari Date: Mon, 12 Aug 2024 10:36:32 -0700 Subject: [PATCH 87/89] =?UTF-8?q?=E2=9C=85=20Remove=20environment=20variab?= =?UTF-8?q?les=20that=20are=20likely=20to=20be=20different=20across=20runs?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The tests failed because we now print all environment variables, and the `hostname` changes between runs. 
So comparing the output with the known good output always fails. We strip out variables that are likely to change over time (hostname, conda version...) to make this test more robust. 1. We technically only need the hostname right now, but will add the conda version as well so that we don't get spurious failures when the versions change 2. this was not an issue earlier because we read the values from the config file. We now read environment variables, but that brings in variables that we did not set. So this is a new issue that we need to resolve by stripping them out for the baseline comparison. ``` $ ./e-mission-py.bash emission/tests/storageTests/TestTokenQueries.py ---------------------------------------------------------------------- Ran 21 tests in 23.591s OK ``` --- .../tests/storageTests/TestTokenQueries.py | 24 +++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/emission/tests/storageTests/TestTokenQueries.py b/emission/tests/storageTests/TestTokenQueries.py index 7e570de5e..c9be13ab0 100644 --- a/emission/tests/storageTests/TestTokenQueries.py +++ b/emission/tests/storageTests/TestTokenQueries.py @@ -158,18 +158,34 @@ def test_run_script_show(self): sp = subprocess.run(["python3", "bin/auth/insert_tokens.py", "--show"], capture_output=True) # The first message is displayed when we run tests locally # The second is displayed when we run in the docker CI, since the `DB_HOST` is set to `db` - self.assertIn(sp.stdout, + stripped_out_stdout = sp.stdout\ + .replace(b"\'CONDA_EXE\': \'/root/miniconda-23.5.2/bin/conda\', ",b"")\ + .replace(b"\'HOSTNAME\': \'78346225cfef\', ",b"")\ + .replace(b"\'EXP_CONDA_VER\': \'23.5.2\', ",b"")\ + .replace(b"\'CONDA_PREFIX\': \'/root/miniconda-23.5.2/envs/emissiontest\', ",b"")\ + .replace(b"\'CONDA_PYTHON_EXE\': \'/root/miniconda-23.5.2/bin/python\', ",b"")\ + .replace(b"\'PATH\': 
\'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', ",b"")\ + .replace(b"\'_\': \'/root/miniconda-23.5.2/envs/emissiontest/bin/python\'",b"") + self.assertIn(stripped_out_stdout, [b'Retrieved config {\'DB_HOST\': \'localhost\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nx\ny\nz\n', - b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'CONDA_EXE\': \'/root/miniconda-23.5.2/bin/conda\', \'_CE_M\': \'\', \'HOSTNAME\': \'a846a70e9205\', \'EXP_CONDA_VER\': \'23.5.2\', \'PWD\': \'/src/e-mission-server\', \'CONDA_PREFIX\': \'/root/miniconda-23.5.2/envs/emissiontest\', \'WEB_SERVER_HOST\': \'0.0.0.0\', \'HOME\': \'/root\', \'CONDA_PROMPT_MODIFIER\': \'(emissiontest) \', \'_CE_CONDA\': \'\', \'DB_HOST\': \'db\', \'CONDA_SHLVL\': \'1\', \'SHLVL\': \'2\', \'CONDA_PYTHON_EXE\': \'/root/miniconda-23.5.2/bin/python\', \'CONDA_DEFAULT_ENV\': \'emissiontest\', \'PATH\': \'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', \'EXP_CONDA_VER_SUFFIX\': \'0\', \'OLDPWD\': \'/\', \'_\': \'/root/miniconda-23.5.2/envs/emissiontest/bin/python\', \'LC_CTYPE\': \'C.UTF-8\', \'KMP_DUPLICATE_LIB_OK\': \'True\', \'KMP_INIT_AT_FORK\': \'FALSE\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nx\ny\nz\n' + b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'_CE_M\': \'\', \'PWD\': \'/src/e-mission-server\', \'WEB_SERVER_HOST\': \'0.0.0.0\', \'HOME\': \'/root\', \'CONDA_PROMPT_MODIFIER\': \'(emissiontest) \', \'_CE_CONDA\': \'\', \'DB_HOST\': \'db\', \'CONDA_SHLVL\': \'1\', \'SHLVL\': \'2\', \'CONDA_DEFAULT_ENV\': \'emissiontest\', \'EXP_CONDA_VER_SUFFIX\': \'0\', 
\'OLDPWD\': \'/\', \'LC_CTYPE\': \'C.UTF-8\', \'KMP_DUPLICATE_LIB_OK\': \'True\', \'KMP_INIT_AT_FORK\': \'FALSE\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nx\ny\nz\n' ]) def test_run_script_empty(self): sp = subprocess.run(["python3", "bin/auth/insert_tokens.py"], capture_output=True) # The first message is displayed when we run tests locally # The second is displayed when we run in the docker CI, since the `DB_HOST` is set to `db` - self.assertIn(sp.stdout, + stripped_out_stdout = sp.stdout\ + .replace(b"\'CONDA_EXE\': \'/root/miniconda-23.5.2/bin/conda\', ",b"")\ + .replace(b"\'HOSTNAME\': \'78346225cfef\', ",b"")\ + .replace(b"\'EXP_CONDA_VER\': \'23.5.2\', ",b"")\ + .replace(b"\'CONDA_PREFIX\': \'/root/miniconda-23.5.2/envs/emissiontest\', ",b"")\ + .replace(b"\'CONDA_PYTHON_EXE\': \'/root/miniconda-23.5.2/bin/python\', ",b"")\ + .replace(b"\'PATH\': \'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', ",b"")\ + .replace(b"\'_\': \'/root/miniconda-23.5.2/envs/emissiontest/bin/python\'",b"") + self.assertIn(stripped_out_stdout, [b'Retrieved config {\'DB_HOST\': \'localhost\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nPlease provide the script with an argument. 
Use the "--help" option for more details\n', - b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'CONDA_EXE\': \'/root/miniconda-23.5.2/bin/conda\', \'_CE_M\': \'\', \'HOSTNAME\': \'a846a70e9205\', \'EXP_CONDA_VER\': \'23.5.2\', \'PWD\': \'/src/e-mission-server\', \'CONDA_PREFIX\': \'/root/miniconda-23.5.2/envs/emissiontest\', \'WEB_SERVER_HOST\': \'0.0.0.0\', \'HOME\': \'/root\', \'CONDA_PROMPT_MODIFIER\': \'(emissiontest) \', \'_CE_CONDA\': \'\', \'DB_HOST\': \'db\', \'CONDA_SHLVL\': \'1\', \'SHLVL\': \'2\', \'CONDA_PYTHON_EXE\': \'/root/miniconda-23.5.2/bin/python\', \'CONDA_DEFAULT_ENV\': \'emissiontest\', \'PATH\': \'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', \'EXP_CONDA_VER_SUFFIX\': \'0\', \'OLDPWD\': \'/\', \'_\': \'/root/miniconda-23.5.2/envs/emissiontest/bin/python\', \'LC_CTYPE\': \'C.UTF-8\', \'KMP_DUPLICATE_LIB_OK\': \'True\', \'KMP_INIT_AT_FORK\': \'FALSE\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nPlease provide the script with an argument. Use the "--help" option for more details\n' + b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'_CE_M\': \'\', \'PWD\': \'/src/e-mission-server\', \'WEB_SERVER_HOST\': \'0.0.0.0\', \'HOME\': \'/root\', \'CONDA_PROMPT_MODIFIER\': \'(emissiontest) \', \'_CE_CONDA\': \'\', \'DB_HOST\': \'db\', \'CONDA_SHLVL\': \'1\', \'SHLVL\': \'2\', \'CONDA_DEFAULT_ENV\': \'emissiontest\', \'EXP_CONDA_VER_SUFFIX\': \'0\', \'OLDPWD\': \'/\', \'LC_CTYPE\': \'C.UTF-8\', \'KMP_DUPLICATE_LIB_OK\': \'True\', \'KMP_INIT_AT_FORK\': \'FALSE\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nPlease provide the script with an argument. 
Use the "--help" option for more details\n' ]) #test that no two options can be used together From 1a0d4516b9d463364cb1691e4a0a7500d2b1d229 Mon Sep 17 00:00:00 2001 From: Shankari Date: Mon, 12 Aug 2024 11:59:35 -0700 Subject: [PATCH 88/89] =?UTF-8?q?=E2=9C=85=20Delete=20all=20irrelevant=20c?= =?UTF-8?q?onfig=20variables?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Consistent with https://github.com/e-mission/e-mission-server/pull/961#issuecomment-2284668743 Testing done: ``` ---------------------------------------------------------------------- Ran 21 tests in 23.106s OK ``` --- .../tests/storageTests/TestTokenQueries.py | 36 +++++++++---------- 1 file changed, 16 insertions(+), 20 deletions(-) diff --git a/emission/tests/storageTests/TestTokenQueries.py b/emission/tests/storageTests/TestTokenQueries.py index c9be13ab0..364806a5c 100644 --- a/emission/tests/storageTests/TestTokenQueries.py +++ b/emission/tests/storageTests/TestTokenQueries.py @@ -2,11 +2,13 @@ import logging import uuid import json +import os #changed all script runs from os() to subprocess.run() for consistency #TODO clean up commented out os() lines # import os import subprocess +import importlib import emission.core.get_database as edb @@ -15,6 +17,16 @@ class TestTokenQueries(unittest.TestCase): + def setUp(self): + # Delete irrelevant environment variables so that they don't mess up + # the expected comparison with the ground truth + # https://github.com/e-mission/e-mission-server/pull/961#issuecomment-2284668743 + for var_name in os.environ.keys(): + if not var_name.startswith("DB") and \ + var_name not in ["PATH", "PYTHONPATH"]: + logging.debug("Deleting environment variable %s with value %s" % (var_name, os.environ.get(var_name))) + del os.environ[var_name] + importlib.reload(edb) def tearDown(self): #All tests insert tokens of length one. Delete them once the test is done. 
@@ -158,34 +170,18 @@ def test_run_script_show(self): sp = subprocess.run(["python3", "bin/auth/insert_tokens.py", "--show"], capture_output=True) # The first message is displayed when we run tests locally # The second is displayed when we run in the docker CI, since the `DB_HOST` is set to `db` - stripped_out_stdout = sp.stdout\ - .replace(b"\'CONDA_EXE\': \'/root/miniconda-23.5.2/bin/conda\', ",b"")\ - .replace(b"\'HOSTNAME\': \'78346225cfef\', ",b"")\ - .replace(b"\'EXP_CONDA_VER\': \'23.5.2\', ",b"")\ - .replace(b"\'CONDA_PREFIX\': \'/root/miniconda-23.5.2/envs/emissiontest\', ",b"")\ - .replace(b"\'CONDA_PYTHON_EXE\': \'/root/miniconda-23.5.2/bin/python\', ",b"")\ - .replace(b"\'PATH\': \'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', ",b"")\ - .replace(b"\'_\': \'/root/miniconda-23.5.2/envs/emissiontest/bin/python\'",b"") - self.assertIn(stripped_out_stdout, + self.assertIn(sp.stdout, [b'Retrieved config {\'DB_HOST\': \'localhost\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nx\ny\nz\n', - b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'_CE_M\': \'\', \'PWD\': \'/src/e-mission-server\', \'WEB_SERVER_HOST\': \'0.0.0.0\', \'HOME\': \'/root\', \'CONDA_PROMPT_MODIFIER\': \'(emissiontest) \', \'_CE_CONDA\': \'\', \'DB_HOST\': \'db\', \'CONDA_SHLVL\': \'1\', \'SHLVL\': \'2\', \'CONDA_DEFAULT_ENV\': \'emissiontest\', \'EXP_CONDA_VER_SUFFIX\': \'0\', \'OLDPWD\': \'/\', \'LC_CTYPE\': \'C.UTF-8\', \'KMP_DUPLICATE_LIB_OK\': \'True\', \'KMP_INIT_AT_FORK\': \'FALSE\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nx\ny\nz\n' + b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'DB_HOST\': \'db\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, 
defaulting to "Stage_database"\nConnecting to database URL db\nx\ny\nz\n' ]) def test_run_script_empty(self): sp = subprocess.run(["python3", "bin/auth/insert_tokens.py"], capture_output=True) # The first message is displayed when we run tests locally # The second is displayed when we run in the docker CI, since the `DB_HOST` is set to `db` - stripped_out_stdout = sp.stdout\ - .replace(b"\'CONDA_EXE\': \'/root/miniconda-23.5.2/bin/conda\', ",b"")\ - .replace(b"\'HOSTNAME\': \'78346225cfef\', ",b"")\ - .replace(b"\'EXP_CONDA_VER\': \'23.5.2\', ",b"")\ - .replace(b"\'CONDA_PREFIX\': \'/root/miniconda-23.5.2/envs/emissiontest\', ",b"")\ - .replace(b"\'CONDA_PYTHON_EXE\': \'/root/miniconda-23.5.2/bin/python\', ",b"")\ - .replace(b"\'PATH\': \'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', ",b"")\ - .replace(b"\'_\': \'/root/miniconda-23.5.2/envs/emissiontest/bin/python\'",b"") - self.assertIn(stripped_out_stdout, + self.assertIn(sp.stdout, [b'Retrieved config {\'DB_HOST\': \'localhost\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nPlease provide the script with an argument. Use the "--help" option for more details\n', - b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'_CE_M\': \'\', \'PWD\': \'/src/e-mission-server\', \'WEB_SERVER_HOST\': \'0.0.0.0\', \'HOME\': \'/root\', \'CONDA_PROMPT_MODIFIER\': \'(emissiontest) \', \'_CE_CONDA\': \'\', \'DB_HOST\': \'db\', \'CONDA_SHLVL\': \'1\', \'SHLVL\': \'2\', \'CONDA_DEFAULT_ENV\': \'emissiontest\', \'EXP_CONDA_VER_SUFFIX\': \'0\', \'OLDPWD\': \'/\', \'LC_CTYPE\': \'C.UTF-8\', \'KMP_DUPLICATE_LIB_OK\': \'True\', \'KMP_INIT_AT_FORK\': \'FALSE\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nPlease provide the script with an argument. 
Use the "--help" option for more details\n' + b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'DB_HOST\': \'db\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nPlease provide the script with an argument. Use the "--help" option for more details\n' ]) #test that no two options can be used together From 2fe0816bc714dd752592ac45c9a30e8bd23ef6f0 Mon Sep 17 00:00:00 2001 From: Shankari Date: Mon, 12 Aug 2024 12:31:42 -0700 Subject: [PATCH 89/89] =?UTF-8?q?=E2=9C=85=20Copy/paste=20the=20actual=20t?= =?UTF-8?q?ests=20from=20the=20failed=20CI=20run?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- emission/tests/storageTests/TestTokenQueries.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/emission/tests/storageTests/TestTokenQueries.py b/emission/tests/storageTests/TestTokenQueries.py index 364806a5c..0200ca694 100644 --- a/emission/tests/storageTests/TestTokenQueries.py +++ b/emission/tests/storageTests/TestTokenQueries.py @@ -169,19 +169,23 @@ def test_run_script_show(self): esdt.insert({'token':'z'}) sp = subprocess.run(["python3", "bin/auth/insert_tokens.py", "--show"], capture_output=True) # The first message is displayed when we run tests locally - # The second is displayed when we run in the docker CI, since the `DB_HOST` is set to `db` + # The second is displayed when we run in the CI/CD, but with the local install + # The third is displayed when we run in the docker CI since the `DB_HOST` is set to `db` self.assertIn(sp.stdout, [b'Retrieved config {\'DB_HOST\': \'localhost\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nx\ny\nz\n', - b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'DB_HOST\': \'db\', \'DB_RESULT_LIMIT\': 250000}\nURL not 
formatted, defaulting to "Stage_database"\nConnecting to database URL db\nx\ny\nz\n' + b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'PATH\': \'/home/runner/miniconda-23.5.2/envs/emissiontest/bin:/home/runner/miniconda-23.5.2/condabin:/snap/bin:/home/runner/.local/bin:/opt/pipx_bin:/home/runner/.cargo/bin:/home/runner/.config/composer/vendor/bin:/usr/local/.ghcup/bin:/home/runner/.dotnet/tools:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/home/runner/.dotnet/tools\', \'LC_CTYPE\': \'C.UTF-8\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nx\ny\nz\n', + b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'DB_HOST\': \'db\', \'PATH\': \'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', \'LC_CTYPE\': \'C.UTF-8\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nx\ny\nz\n' ]) def test_run_script_empty(self): sp = subprocess.run(["python3", "bin/auth/insert_tokens.py"], capture_output=True) # The first message is displayed when we run tests locally - # The second is displayed when we run in the docker CI, since the `DB_HOST` is set to `db` + # The second is displayed when we run in the CI/CD, but with the local install + # The third is displayed when we run in the docker CI since the `DB_HOST` is set to `db` self.assertIn(sp.stdout, [b'Retrieved config {\'DB_HOST\': \'localhost\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nPlease provide the script with an argument. 
Use the "--help" option for more details\n', - b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'DB_HOST\': \'db\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nPlease provide the script with an argument. Use the "--help" option for more details\n' + b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'PATH\': \'/home/runner/miniconda-23.5.2/envs/emissiontest/bin:/home/runner/miniconda-23.5.2/condabin:/snap/bin:/home/runner/.local/bin:/opt/pipx_bin:/home/runner/.cargo/bin:/home/runner/.config/composer/vendor/bin:/usr/local/.ghcup/bin:/home/runner/.dotnet/tools:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/home/runner/.dotnet/tools\', \'LC_CTYPE\': \'C.UTF-8\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nPlease provide the script with an argument. Use the "--help" option for more details\n', + b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'DB_HOST\': \'db\', \'PATH\': \'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', \'LC_CTYPE\': \'C.UTF-8\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nPlease provide the script with an argument. Use the "--help" option for more details\n' ]) #test that no two options can be used together