Merge pull request #9 from hz-b/dev/feature/numpy-types
Dev/feature/numpy types
Sulimankhail authored Sep 9, 2024
2 parents 78d5a02 + 51b5445 commit 10edf2f
Showing 5 changed files with 26 additions and 14 deletions.
26 changes: 16 additions & 10 deletions .github/workflows/python-package.yml
@@ -17,35 +17,41 @@ jobs:
fail-fast: False
matrix:
python-version: ["3.9", "3.10", "3.11"]
numpy-version: [ "numpy<2.0" , "numpy>=2.0" ]

steps:
# try to cache package install
- uses: ConorMacBride/install-package@v1
with:
apt: protobuf-compiler
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}

- name: Set up Python ${{ matrix.python-version }} ${{ matrix.numpy-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache-dependency-path: |
**/pyproject.toml
**/requirements*.txt
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install flake8 pytest wheel setuptools
python -m pip install flake8 pytest wheel setuptools build
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: build and install binary module
run: |
python setup.py build_proto_c
python -m pip wheel ./
python -m pip install ./
- name: Archive wheel
uses: actions/upload-artifact@v4
with:
name: bact_archiver_${{ matrix.python-version }}
path: bact_archiver*.whl
retention-days: 14
#- name: Archive wheel
# uses: actions/upload-artifact@v4
# with:
# name: bact_archiver_${{ matrix.python-version }}
# path: bact_archiver*.whl
# retention-days: 14

- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
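Note on the hunk above: the new numpy-version matrix axis is referenced only in the matrix definition and in the setup step's display name; the line that actually installs the selected constraint is not part of the visible diff. A minimal sketch of how such a matrix value is usually consumed, purely illustrative and not taken from this commit:

    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        # install the matrix-selected constraint, i.e. "numpy<2.0" or "numpy>=2.0"
        python -m pip install "${{ matrix.numpy-version }}"
        python -m pip install flake8 pytest wheel setuptools build
        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi

With something like that in place, every job in the matrix builds and tests the package against both the pre-2.0 and post-2.0 NumPy APIs.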
7 changes: 5 additions & 2 deletions bact_archiver/carchiver.py
@@ -100,7 +100,7 @@ def get_data_from_archiver(data):
res.append(chunk.value)
header = chunk.header
years.extend(chunk.header.year *
np.ones(len(chunk.value[0]), dtype=np.int))
np.ones(len(chunk.value[0]), dtype=int))
logger.debug(chunk.header)
# print('found data:',len(chunk.value[0]))
else:
@@ -158,13 +158,16 @@ def get_data(data, *, return_type='pandas', time_format='timestamp',
return None
elif len(res) == 1:
values, secs, nanos = res[0]
years = years[0] * np.ones(len(secs), dtype=np.int)
years = years[0] * np.ones(len(secs), dtype=int)
# print('One Chunk Only')
# print('chunk.header.year = ',chunk.header.year)
else:
# if multiple chunks, combine data and return
# print("{} chunks found".format(len(res)))
values = np.concatenate([r[0] for r in res])



secs = np.concatenate([r[1] for r in res])
nanos = np.concatenate([r[2] for r in res])

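Background on the dtype change above (context, not part of the diff): np.int was never a distinct type, only an alias for the builtin int; it was deprecated in NumPy 1.20 and removed in 1.24, so the old spelling raises AttributeError on current releases, including 2.x. Passing the builtin int yields the same default integer dtype on every NumPy version. A minimal illustration:

    import numpy as np

    # old spelling, removed since NumPy 1.24:
    #   years = 2024 * np.ones(5, dtype=np.int)   # AttributeError on current NumPy
    # portable replacement used in this commit:
    years = 2024 * np.ones(5, dtype=int)
    print(years.dtype)  # platform default integer dtype, e.g. int64

The same substitution is applied in both code paths of carchiver.py that build the years array.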
2 changes: 1 addition & 1 deletion proto/epics_event.pyx
@@ -196,7 +196,7 @@ cdef read_chunk_i4(char[:] seq, int N, np.ndarray[np.int32_t] secs, np.ndarray[n
@cython.boundscheck(False)
@cython.wraparound(False)
cdef read_chunk_f8(char[:] seq, int N, np.ndarray[np.int32_t] secs, np.ndarray[np.int32_t] nanos):
cdef np.ndarray[np.float_t] values = np.empty(N,dtype=np.float)
cdef np.ndarray[np.float_t] values = np.empty(N,dtype=float)

cdef ScalarDouble event

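The same reasoning applies to np.float in the Cython module: the alias pointed at the builtin float and was removed alongside np.int in NumPy 1.24, so only the runtime dtype argument changes; the np.float_t buffer annotation earlier on the same line comes from NumPy's Cython declarations and is untouched by this commit. A quick check, for illustration only:

    import numpy as np

    # dtype=float selects the same 64-bit floating dtype the old np.float alias did
    values = np.empty(4, dtype=float)
    assert values.dtype == np.float64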
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
name = "bact-archiver"
# url =
# fullname = "BACT epics archiver appliance access"
version = "0.2.2"
version = "0.2.3"
description = "EPICS archiver appliance access using google protobuf"
readme="README.rst"
authors = [
3 changes: 3 additions & 0 deletions tests/test_carchiver.py
@@ -3,6 +3,7 @@
import logging

from bact_archiver.pyarchiver import get_data
from bact_archiver.carchiver import get_data_from_archiver as get_data
from common import test_data_dir

log = logging.getLogger('test')
@@ -21,6 +22,8 @@ def read(self, fname):
def run_decode(self, *, fname=None, **kw):
data = self.read(fname)

tmp = get_data(data)
return
header, values = get_data(data)
log.debug('Header %s', header)

