
Commit

Added in more CI/CD with tests and adjusted docker image to not rely on git and versioning based on git
kimleeng committed Jul 10, 2020
1 parent ffa020e commit f1ddf9d
Showing 14 changed files with 277 additions and 498 deletions.
10 changes: 1 addition & 9 deletions .github/workflows/test_standard_workflow.yml
@@ -7,7 +7,6 @@ jobs:
deploy:
runs-on: ubuntu-latest
steps:

- name: Checkout
uses: actions/checkout@v2
- name: Set up Python
@@ -24,13 +23,6 @@ jobs:
- name: Run_tests
run: >
docker run
--entrypoint
"python3 -m pytest
-v --doctest-modules
--junitxml=junit/test-results.xml
--cov=minilims
--cov-report=xml
--cov-report=html
/bifrost/tests/"
--entrypoint "cd bifrost; python3 -m pytest"
-e BIFROST_DB_KEY=${{ secrets.MONGODB_ATLAS_CONNECTION }}/bifrost_run_launcher_test
run_launcher
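
The Run_tests step now overrides the container entrypoint so pytest runs inside the freshly built image against a dedicated test database supplied through BIFROST_DB_KEY. A rough local equivalent, assuming the image is built with the dev stage and tagged run_launcher as in the workflow, and using a placeholder MongoDB URI, might look like:

```bash
# Build with the dev stage so pytest and the test suite are baked into the image
docker build --build-arg BUILD_ENV=dev -t run_launcher .

# Run the suite inside the container; the connection string below is a placeholder
docker run \
  --entrypoint bash \
  -e BIFROST_DB_KEY="mongodb://localhost:27017/bifrost_run_launcher_test" \
  run_launcher \
  -c "cd /bifrost && python3 -m pytest tests/"
```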
140 changes: 140 additions & 0 deletions .gitignore
@@ -0,0 +1,140 @@
.mongoDB_dev_keys.env

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/
19 changes: 10 additions & 9 deletions Dockerfile
@@ -1,23 +1,23 @@
# This is intended to run in Github Actions
# Arg can be set to dev for testing purposes
ARG BUILD_ENV="prod"
ARG NAME="bifrost-run_launcher"
ARG CODE_VERSION="unspecified"
ARG RESOURCE_VERSION="unspecified"

#For dev build include testing modules
FROM continuumio/miniconda3:4.7.10 as build_dev
ONBUILD RUN pip install pytest \
pytest-cov \
pytest-profiling \
coverage;
pytest-cov \
pytest-profiling \
coverage;
ONBUILD COPY tests /bifrost/tests
ONBUILD COPY examples /bifrost/examples

FROM continuumio/miniconda3:4.7.10 as build_prod
ONBUILD RUN echo ${BUILD_ENV}

FROM build_${BUILD_ENV}
ARG NAME="bifrost-run_launcher"
ARG CODE_VERSION
ARG RESOURCE_VERSION

LABEL \
name=${NAME} \
description="Docker environment for ${NAME}" \
@@ -39,8 +39,9 @@ COPY src /bifrost/src
RUN \
pip install bifrostlib==2.0.7; \
sed -i'' 's/<code_version>/'"${CODE_VERSION}"'/g' /bifrost/src/config.yaml; \
sed -i'' 's/<resource_version>/'"${RESOURCE_VERSION}"'/g' /bifrost/src/config.yaml;
#- Source code:end ---------------------------------------------------------------------------------
sed -i'' 's/<resource_version>/'"${RESOURCE_VERSION}"'/g' /bifrost/src/config.yaml; \
echo "done";
#- Source code:end ---------------------------------------------------------------------------------

#- Set up entry point:start ------------------------------------------------------------------------
ENV PATH /bifrost/src/:$PATH
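
Per the commit message, the image no longer derives its version from git: CODE_VERSION and RESOURCE_VERSION now arrive as build arguments and are substituted into src/config.yaml with sed. A hypothetical manual build, with illustrative version values and tag rather than the project's actual release process, could be:

```bash
docker build \
  --build-arg BUILD_ENV=prod \
  --build-arg CODE_VERSION=2.0.7 \
  --build-arg RESOURCE_VERSION=2020.07.10 \
  -t ssidk/bifrost-run_launcher:2.0.7__2020.07.10 \
  .
```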
Empty file added __init__.py
Empty file.
14 changes: 14 additions & 0 deletions docker-compose.dev.yaml
@@ -0,0 +1,14 @@
version: '3.8'
services:
bifrost_launcher:
image: bifrost_launcher
build:
context: .
args:
BUILD_ENV: dev
env_file:
- .mongodb_dev_keys.env
volumes:
- .:/bifrost/
entrypoint: "bash"
stdin_open: true # kept open to keep image running
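
The new compose file mounts the working tree at /bifrost and keeps a bash entrypoint alive, which supports an interactive development loop. A minimal sketch, assuming a local .mongodb_dev_keys.env file exists with a BIFROST_DB_KEY entry:

```bash
# Build the dev image and open a shell inside the container
docker-compose -f docker-compose.dev.yaml build
docker-compose -f docker-compose.dev.yaml run --rm bifrost_launcher

# Inside the container the mounted source lives at /bifrost, e.g.:
#   cd /bifrost && python3 -m pytest
```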
7 changes: 0 additions & 7 deletions examples/per_sample_script.sh
@@ -1,11 +1,4 @@
# Per sample script example
# This example will assuming paths are set up, run each sample against the 3
# different components (min_read_check v2.0.7, whats_my_species v2.0.7, assemblatron v2.0.7)
# using singularity, you can add other commands here including grid engine.
#
# NOTE_1: You can access $sample and $run variables with $run.sub_value.another_sub_value.etc
# this shouldn't have issues as mongoDB doesn't allow .'s in key names so we can
# parse on that value.
echo "Running $sample.name from $run.name";
BIFROST_RAW_DATA_MNT="/raw_data/mnt";
BIFROST_PIPELINE_TOOLS="/tools/singularity";
3 changes: 0 additions & 3 deletions examples/post_script.sh
@@ -1,6 +1,3 @@
# Post-script example
# NOTE: You can access $run variables with $run.sub_value.another_sub_value.etc
# this shouldn't have issues as mongoDB doesn't allow .'s in key names so we can
# parse on that value
echo "start post_script $run.name $run.type";
echo "end post_script";
3 changes: 0 additions & 3 deletions examples/pre_script.sh
@@ -1,6 +1,3 @@
# Pre-script example
# NOTE: You can access $run variables with $run.sub_value.another_sub_value.etc
# this shouldn't have issues as mongoDB doesn't allow .'s in key names so we can
# parse on that value
echo "start pre_script $run.name $run.type";
echo "end pre_script";
Empty file added src/__init__.py
Empty file.
10 changes: 5 additions & 5 deletions src/config.yaml
@@ -1,11 +1,11 @@
# Notes:
# Values in <value> are modified by CI/CD automation. Please see github/workflows for more info
#-Basic info----------------------------------------------------------------------------------------
name: run_launcher
full_name: run_launcher__<code_version>__<resource_version>
display_name: run_launcher
name: run_launcher__unspecified__unspecified
version:
code: <code_version>
resource: <resource_version>
code: unspecified
resource: unspecified
note: >
Currently run components aren't set up properly (probably want to call them collection components)
values here are not being used properly. I think the idea for run components is that you don't
@@ -30,7 +30,7 @@ details:
#-Install-------------------------------------------------------------------------------------------
install:
path: # Set by install program
dockerfile: docker://ssidk/bifrost-run_launcher:<code_version>__<resource_version>
dockerfile: docker://ssidk/bifrost-run_launcher:unspecified__unspecified
# None
#---------------------------------------------------------------------------------------------------

32 changes: 13 additions & 19 deletions src/launcher.py
@@ -9,11 +9,13 @@
import sys
import traceback
from bifrostlib import datahandling
from src import pipeline


COMPONENT: dict = datahandling.load_yaml(os.path.join(os.path.dirname(__file__), 'config.yaml'))


def parse_args():
def parser():
"""
Arg parsing via argparse
"""
@@ -35,19 +37,14 @@ def parse_args():
action='store_true',
help='Provides basic information on component')
parser.add_argument('-pre', '--pre_script',
required=True,
help='Pre script template run before sample script')
parser.add_argument('-per', '--per_sample_script',
required=True,
help='Per sample script template run on each sample')
parser.add_argument('-post', '--post_script',
required=True,
help='Post script template run after sample script')
parser.add_argument('-meta', '--run_metadata',
required=True,
help='Run metadata tsv')
parser.add_argument('-reads', '--reads_folder',
required=True,
help='Run metadata tsv')
parser.add_argument('-name', '--run_name',
default=None,
@@ -56,13 +53,15 @@
default=None,
help='Run type for metadata organization')
parser.add_argument('-metamap', '--run_metadata_column_remap',
default=None,
help='Remaps metadata tsv columns to bifrost values')
#TODO: Put code in to utilize ID
parser.add_argument('-id', '--run_id',
help='For re-running a run')
return parser

args: argparse.Namespace = parser.parse_args()

def run_program(args: argparse.Namespace):
if not datahandling.check_db_connection_exists():
message: str = (
f"ERROR: Connection to DB not establised.\n"
@@ -95,7 +94,7 @@ def show_info():


def install_component():
component: list[dict] = datahandling.get_components(component_names=[COMPONENT['full_name']])
component: list[dict] = datahandling.get_components(component_names=[COMPONENT['name']])
# if len(component) == 1:
# print(f"Component has already been installed")
if len(component) > 1:
@@ -105,9 +104,9 @@
#HACK: Removed install check so you can reinstall the component. Should do this in a nicer way
COMPONENT['install']['path'] = os.path.os.getcwd()
datahandling.post_component(COMPONENT)
component: list[dict] = datahandling.get_components(component_names=[COMPONENT['full_name']])
component: list[dict] = datahandling.get_components(component_names=[COMPONENT['name']])
if len(component) != 1:
print(f"Error with installation of {COMPONENT['full_name']}\n")
print(f"Error with installation of {COMPONENT['name']} {len(component)}\n")
exit()


@@ -121,7 +120,7 @@ def run_pipeline(args: object):
datahandling.post_component(COMPONENT)
component: list[dict] = datahandling.get_components(component_names=[COMPONENT['name']])
if len(component) != 1:
print(f"Error with installation of {COMPONENT['full_name']}\n")
print(f"Error with installation of {COMPONENT['name']}\n")
exit()

else:
@@ -135,16 +134,11 @@
optional_values = f"{optional_values} -metamap {str(args.run_metadata_column_remap)}"
if args.run_id is not None:
optional_values = f"{optional_values} -id {str(args.run_id)}"
process: subprocess.Popen = subprocess.Popen(
f"/bifrost/src/pipeline.py -pre {str(args.pre_script)} -per {str(args.per_sample_script)} -post {str(args.post_script)} -meta {str(args.run_metadata)} -reads {str(args.reads_folder)} {optional_values}",
stdout=sys.stdout,
stderr=sys.stderr,
shell=True
)
process.communicate()
pipeline.run_pipeline(args)
except:
print(traceback.format_exc())


if __name__ == '__main__':
parse_args()
args: argparse.Namespace = parser().parse_args()
run_program(args)
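
launcher.py now parses arguments once at the entry point and calls pipeline.run_pipeline(args) in-process instead of launching /bifrost/src/pipeline.py in a subprocess, and the script, metadata, and reads flags are no longer marked required. A hypothetical invocation using the bundled example scripts (the metadata and reads paths are placeholders, and BIFROST_DB_KEY is assumed to be set in the environment):

```bash
python3 /bifrost/src/launcher.py \
    -pre /bifrost/examples/pre_script.sh \
    -per /bifrost/examples/per_sample_script.sh \
    -post /bifrost/examples/post_script.sh \
    -meta /path/to/run_metadata.tsv \
    -reads /path/to/reads \
    -name demo_run
```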
