Commit

Merge remote-tracking branch 'origin/main' into base_plugin
dachengx committed Jan 31, 2024
2 parents a1f2e80 + 5cc78d3 commit 3cf0f95
Showing 11 changed files with 237 additions and 49 deletions.
34 changes: 34 additions & 0 deletions .github/scripts/create_readonly_utilix_config.sh
@@ -0,0 +1,34 @@
#!/bin/bash

if [ ! -z "$RUNDB_API_URL" ]
then
cat > $HOME/.xenon_config <<EOF
[basic]
logging_level=debug
[RunDB]
rundb_api_url = $RUNDB_API_URL
rundb_api_user = $RUNDB_API_USER_READONLY
rundb_api_password = $RUNDB_API_PASSWORD_READONLY
xent_url = $PYMONGO_URL
xent_user = $PYMONGO_USER
xent_password = $PYMONGO_PASSWORD
xent_database = $PYMONGO_DATABASE
pymongo_url = $PYMONGO_URL
pymongo_user = $PYMONGO_USER
pymongo_password = $PYMONGO_PASSWORD
pymongo_database = $PYMONGO_DATABASE
[scada]
scdata_url = $SCADA_URL
sclastvalue_url = $SCADA_VALUE_URL
sclogin_url = $SCADA_LOGIN_URL
straxen_username = $SCADA_USER
straxen_password = $SCADA_PWD
pmt_parameter_names = no_file_found
EOF
echo "YEAH boy, complete github actions voodoo now made you have access to our database!"
else
echo "You have no power here! Environment variables are not set, therefore no utilix file will be created"
fi
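The script simply writes a plain INI file to $HOME/.xenon_config from the secrets passed in as environment variables. As a rough illustration of the structure it produces, the file can be read back with Python's standard configparser (a minimal sketch only; utilix ships its own config reader, and the section and key names below are taken verbatim from the script):

import os
import configparser

# Sketch: parse the config written by create_readonly_utilix_config.sh
# and report which RunDB endpoint it points at.
config = configparser.ConfigParser()
config.read(os.path.expanduser("~/.xenon_config"))

rundb_url = config.get("RunDB", "rundb_api_url", fallback=None)
print(f"RunDB API endpoint: {rundb_url}")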
101 changes: 101 additions & 0 deletions .github/workflows/pytest.yml
@@ -0,0 +1,101 @@
# copied from https://github.com/XENONnT/straxen/blob/master/.github/workflows/pytest.yml
# Test fuse on each PR.
# We run two types of tests:
#   - Pytest -> these are the "normal" tests and should be run for all
#     python versions
#   - Coveralls -> this is to see if we are covering all our lines of
#     code with our tests. The results get uploaded to
#     coveralls.io/github/XENONnT/fuse

name: Test package

# Run this workflow on pushes and pull requests to the main branch
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]

jobs:
build:
runs-on: ${{ matrix.os }}
env:
HAVE_ACCESS_TO_SECRETS: ${{ secrets.RUNDB_API_URL }}
strategy:
fail-fast: false
matrix:
os: [ "ubuntu-latest" ]
python-version: [ "3.9", "3.10" ]
test: [ 'coveralls', 'pytest' ]

steps:
# Setup and installation
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

- name: Checkout repo
uses: actions/checkout@v4

- name: Install dependencies
# following https://github.com/NESTCollaboration/nestpy/blob/master/README.md
run: |
python -m pip install --upgrade pip
python -m pip install pytest coverage coveralls
git clone https://github.com/NESTCollaboration/nestpy.git
cd nestpy
git submodule update --init --recursive
pip install .
cd ..
rm -rf nestpy
- name: Start MongoDB
uses: supercharge/[email protected]
with:
mongodb-version: 4.4.1

- name: patch utilix file
# Secrets and required files
# Patch this file if we want to have access to the database
run: bash .github/scripts/create_readonly_utilix_config.sh
env:
# RunDB
RUNDB_API_URL: ${{ secrets.RUNDB_API_URL }}
RUNDB_API_USER_READONLY: ${{ secrets.RUNDB_API_USER_READONLY }}
RUNDB_API_PASSWORD_READONLY: ${{ secrets.RUNDB_API_PASSWORD_READONLY }}
PYMONGO_URL: ${{ secrets.PYMONGO_URL }}
PYMONGO_USER: ${{ secrets.PYMONGO_USER }}
PYMONGO_PASSWORD: ${{ secrets.PYMONGO_PASSWORD }}
PYMONGO_DATABASE: ${{ secrets.PYMONGO_DATABASE }}
# SCADA
SCADA_URL: ${{ secrets.SCADA_URL }}
SCADA_VALUE_URL: ${{ secrets.SCADA_VALUE_URL }}
SCADA_USER: ${{ secrets.SCADA_USER }}
SCADA_LOGIN_URL: ${{ secrets.SCADA_LOGIN_URL }}
SCADA_PWD: ${{ secrets.SCADA_PWD }}

- name: Install fuse
run: |
pip install .
- name: Test package
# This is running a normal test
env:
TEST_MONGO_URI: 'mongodb://localhost:27017/'
run: |
coverage run --source=fuse -m pytest --durations 0
coverage report
- name: Coveralls
# Make the coverage report and upload
env:
TEST_MONGO_URI: 'mongodb://localhost:27017/'
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
if: matrix.test == 'coveralls' && env.HAVE_ACCESS_TO_SECRETS != null
run: |
coverage run --source=fuse -m pytest -v
coveralls --service=github
- name: goodbye
run: echo "tests done, bye bye"
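Both test steps export TEST_MONGO_URI so that the tests can reach the MongoDB service container started earlier in the job. A sketch of how a test could pick that variable up (illustrative only; fuse's tests may wire this up differently, and pymongo is assumed to be installed in the test environment):

import os

import pymongo  # assumed to be available in the test environment

# Use the URI exported by the workflow, with the same local default.
uri = os.environ.get("TEST_MONGO_URI", "mongodb://localhost:27017/")
client = pymongo.MongoClient(uri, serverSelectionTimeoutMS=2000)
client.admin.command("ping")  # raises quickly if the MongoDB service is not reachable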
9 changes: 5 additions & 4 deletions .gitignore
@@ -1,9 +1,10 @@

fuse/__pycache__/*
__pycache__
fuse.egg-info/*
.eggs/*
build/*
.DS_Store
.vscode/*
docs/build/*
tests/resource_cache/*
resource_cache
.coverage
.hypothesis
.DS_Store
6 changes: 4 additions & 2 deletions fuse/plugins/detector_physics/s2_photon_propagation.py
@@ -343,8 +343,10 @@ def compute(self, individual_electrons, interactions_in_roi, start, end):

n_chunks = len(electron_chunks)
if n_chunks > 1:
log.info("Chunk size exceeding file size target.")
log.info("Downchunking to %d chunks" % n_chunks)
log.info(
"Chunk size exceeding file size target. "
f"Downchunking to {n_chunks} chunks"
)

last_start = start
if n_chunks>1:
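The two separate logging calls are collapsed into a single call here and in the two plugins below. The trick is Python's implicit concatenation of adjacent string literals, which also works when one of them is an f-string; a standalone sketch (the logger name is made up for the example):

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("downchunk_demo")  # hypothetical logger name

n_chunks = 3
if n_chunks > 1:
    # Adjacent string literals are joined at compile time, so the plain
    # literal and the f-string below form a single log message.
    log.info(
        "Chunk size exceeding file size target. "
        f"Downchunking to {n_chunks} chunks"
    )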
6 changes: 4 additions & 2 deletions fuse/plugins/micro_physics/input.py
@@ -311,8 +311,10 @@ def output_chunk(self):
self.chunk_bounds = np.append(chunk_start[0]-self.first_chunk_left, chunk_bounds)

else:
log.warning("Only one Chunk created! Only a few events simulated? If no, your chunking parameters might not be optimal.")
log.warning("Try to decrease the source_rate or decrease the n_interactions_per_chunk")
log.warning(
"Only one Chunk created! Only a few events simulated? If not, your chunking parameters might not be optimal. "
"Try to decrease the source_rate or decrease the n_interactions_per_chunk."
)
self.chunk_bounds = [chunk_start[0] - self.first_chunk_left, chunk_end[0]+self.last_chunk_length]

source_done = False
6 changes: 4 additions & 2 deletions fuse/plugins/pmt_and_daq/pmt_response_and_daq.py
@@ -224,8 +224,10 @@ def compute(self, propagated_photons, pulse_windows, start, end):

n_chunks = len(pulse_window_chunks)
if n_chunks > 1:
log.info("Chunk size exceeding file size target.")
log.info("Downchunking to %d chunks" % n_chunks)
log.info(
"Chunk size exceeding file size target. "
f"Downchunking to {n_chunks} chunks"
)

last_start = start

6 changes: 4 additions & 2 deletions pyproject.toml
@@ -18,15 +18,17 @@ classifiers = [
]
dependencies = [
"numpy",
"strax",
"pandas",
"scipy",
"scikit-learn",
"immutabledict",
"timeout_decorator",
"nestpy >= 2.0.0",
"nestpy >= 2.0.2",
"numba >= 0.57.0",
"awkward >= 2.2.1",
"uproot >= 5.0.7",
"strax >= 1.6.0",
"straxen >= 2.2.0",
]

[project.urls]
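The dependency list now carries explicit lower bounds for the strax ecosystem (strax >= 1.6.0, straxen >= 2.2.0) and bumps nestpy to >= 2.0.2. A quick, illustrative way to check an existing environment against those pins (not part of fuse; assumes the packaging module is available, which pip-based environments normally provide):

from importlib.metadata import version

from packaging.version import Version  # assumed available alongside pip/setuptools

# Lower bounds mirrored from the updated pyproject.toml dependency list.
pins = {
    "nestpy": "2.0.2",
    "numba": "0.57.0",
    "awkward": "2.2.1",
    "uproot": "5.0.7",
    "strax": "1.6.0",
    "straxen": "2.2.0",
}
for package, minimum in pins.items():
    installed = Version(version(package))
    assert installed >= Version(minimum), f"{package} {installed} < required {minimum}"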
1 change: 1 addition & 0 deletions tests/_utils.py
@@ -0,0 +1 @@
test_root_file_name = 'test_cryo_neutrons_tpc-nveto.root'
35 changes: 26 additions & 9 deletions tests/test_FullChain.py
@@ -1,30 +1,47 @@
import os
import shutil
import unittest
import fuse
import tempfile
import timeout_decorator
import fuse
import straxen
from _utils import test_root_file_name

TIMEOUT = 60


class TestFullChain(unittest.TestCase):

@classmethod
def setUpClass(self):
def setUpClass(cls):

self.temp_dir = tempfile.TemporaryDirectory()
cls.temp_dir = tempfile.TemporaryDirectory()

self.test_context = fuse.context.full_chain_context(output_folder = self.temp_dir.name)
cls.test_context = fuse.context.full_chain_context(output_folder = cls.temp_dir.name)

self.test_context.set_config({"path": "/project2/lgrandi/xenonnt/simulations/testing",
"file_name": "pmt_neutrons_100.root",
cls.test_context.set_config({"path": cls.temp_dir.name,
"file_name": test_root_file_name,
"entry_stop": 5,
})

self.run_number = "TestRun_00000"
cls.run_number = "TestRun_00000"

@classmethod
def tearDownClass(self):
def tearDownClass(cls):

cls.temp_dir.cleanup()

def setUp(self):
downloader = straxen.MongoDownloader(store_files_at=(self.temp_dir.name,))
downloader.download_single(test_root_file_name, human_readable_file_name=True)

assert os.path.exists(os.path.join(self.temp_dir.name, test_root_file_name))

def tearDown(self):

self.temp_dir.cleanup()
# self.temp_dir.cleanup()
shutil.rmtree(self.temp_dir.name)
os.makedirs(self.temp_dir.name)

@timeout_decorator.timeout(TIMEOUT, exception_message='S1PhotonHits timed out')
def test_S1PhotonHits(self):
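Two changes recur in this test module and in test_MicroPhysics.py below: the class-level fixtures now name their first argument cls (setUpClass and tearDownClass are classmethods and receive the class, not an instance), and the test ROOT file is downloaded into the temporary directory before each test instead of being read from a hard-coded cluster path. A minimal, self-contained illustration of the classmethod fixture pattern (demo class only, not part of fuse):

import tempfile
import unittest


class FixtureDemo(unittest.TestCase):
    """Toy example of the setUpClass/tearDownClass pattern used above."""

    @classmethod
    def setUpClass(cls):
        # Classmethods receive the class itself, so the first parameter is cls;
        # attributes set here are shared by every test in the class.
        cls.temp_dir = tempfile.TemporaryDirectory()

    @classmethod
    def tearDownClass(cls):
        cls.temp_dir.cleanup()

    def test_temp_dir_exists(self):
        self.assertTrue(self.temp_dir.name)


if __name__ == "__main__":
    unittest.main()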
35 changes: 26 additions & 9 deletions tests/test_MicroPhysics.py
@@ -1,30 +1,47 @@
import os
import shutil
import unittest
import fuse
import tempfile
import timeout_decorator
import fuse
import straxen
from _utils import test_root_file_name

TIMEOUT = 60


class TestMicroPhysics(unittest.TestCase):

@classmethod
def setUpClass(self):
def setUpClass(cls):

self.temp_dir = tempfile.TemporaryDirectory()
cls.temp_dir = tempfile.TemporaryDirectory()

self.test_context = fuse.context.microphysics_context(self.temp_dir.name)
cls.test_context = fuse.context.microphysics_context(cls.temp_dir.name)

self.test_context.set_config({"path": "/project2/lgrandi/xenonnt/simulations/testing",
"file_name": "pmt_neutrons_100.root",
cls.test_context.set_config({"path": cls.temp_dir.name,
"file_name": test_root_file_name,
"entry_stop": 25,
})

self.run_number = "TestRun_00000"
cls.run_number = "TestRun_00000"

@classmethod
def tearDownClass(self):
def tearDownClass(cls):

cls.temp_dir.cleanup()

def setUp(self):
downloader = straxen.MongoDownloader(store_files_at=(self.temp_dir.name,))
downloader.download_single(test_root_file_name, human_readable_file_name=True)

assert os.path.exists(os.path.join(self.temp_dir.name, test_root_file_name))

def tearDown(self):

self.temp_dir.cleanup()
# self.temp_dir.cleanup()
shutil.rmtree(self.temp_dir.name)
os.makedirs(self.temp_dir.name)

@timeout_decorator.timeout(TIMEOUT, exception_message='ChunkInput timed out')
def test_ChunkInput(self):