Skip to content

Commit

Permalink
Merge branch 'develop' into process-timeouts
Browse files Browse the repository at this point in the history
  • Loading branch information
calum-chamberlain authored Dec 11, 2024
2 parents 4ab48e5 + c5811b2 commit 5c3f75f
Show file tree
Hide file tree
Showing 32 changed files with 1,303 additions and 628 deletions.
1 change: 1 addition & 0 deletions .github/test_conda_env.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ channels:
- conda-forge
- defaults
dependencies:
- conda-forge::compilers
- numpy>=1.12
- matplotlib>=1.3.0
- scipy
Expand Down
36 changes: 0 additions & 36 deletions .github/test_conda_env_macOS.yml

This file was deleted.

49 changes: 25 additions & 24 deletions .github/workflows/runtest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,26 +13,27 @@ jobs:
# continue-on-error: true

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Get conda env file
shell: bash -l {0}
run: |
if [ "$RUNNER_OS" == "macOS" ]; then
cp .github/test_conda_env_macOS.yml .github/test_conda_env.yml
fi
# Not needed as using the generic "compilers"
# - name: Get conda env file
# shell: bash -l {0}
# run: |
# if [ "$RUNNER_OS" == "macOS" ]; then
# cp .github/test_conda_env_macOS.yml .github/test_conda_env.yml
# fi

- name: Setup conda
uses: conda-incubator/setup-miniconda@v2.1.1
uses: conda-incubator/setup-miniconda@v3
with:
miniforge-variant: Mambaforge
miniforge-version: latest
# miniforge-variant: Miniforge
# miniforge-version: latest
python-version: ${{ matrix.python-version }}
activate-environment: eqcorrscan-test
use-mamba: true
# use-mamba: true

- name: Update Env
run: mamba env update -n eqcorrscan-test -f .github/test_conda_env.yml
run: conda env update -n eqcorrscan-test -f .github/test_conda_env.yml

- name: install eqcorrscan
shell: bash -l {0}
Expand Down Expand Up @@ -90,19 +91,19 @@ jobs:
fail-fast: false

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Setup conda
uses: conda-incubator/setup-miniconda@v2.1.1
uses: conda-incubator/setup-miniconda@v3
with:
miniforge-variant: Mambaforge
miniforge-version: latest
# miniforge-variant: Mambaforge
# miniforge-version: latest
python-version: ${{ matrix.python-version }}
activate-environment: eqcorrscan-test
use-mamba: true
# use-mamba: true

- name: Update Env
run: mamba env update -n eqcorrscan-test -f .github/test_conda_env.yml
run: conda env update -n eqcorrscan-test -f .github/test_conda_env.yml

- name: install eqcorrscan
shell: bash -l {0}
Expand Down Expand Up @@ -148,19 +149,19 @@ jobs:
fail-fast: false

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Setup conda
uses: conda-incubator/setup-miniconda@v2.1.1
uses: conda-incubator/setup-miniconda@v3
with:
miniforge-variant: Mambaforge
miniforge-version: latest
# miniforge-variant: Mambaforge
# miniforge-version: latest
python-version: ${{ matrix.python-version }}
activate-environment: eqcorrscan-test
use-mamba: true
# use-mamba: true

- name: Update Env
run: mamba env update -n eqcorrscan-test -f .github/test_conda_env.yml
run: conda env update -n eqcorrscan-test -f .github/test_conda_env.yml

- name: install fmf
shell: bash -l {0}
Expand Down
7 changes: 7 additions & 0 deletions CHANGES.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,10 @@
## Current
- core.match_filter.tribe
- Add option to set minimum number of stations required to use a template in detect
(`min_stations` kwarg)
- core.match_filter.party
- Add client_lag_calc method to run lag-calc using data from a client.

## 0.5.0
* core.match_filter.tribe
- Significant re-write of detect logic to take advantage of parallel steps (see #544)
Expand Down
2 changes: 1 addition & 1 deletion eqcorrscan/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@

__all__ = ['core', 'utils', 'tutorials', 'tests']

__version__ = '0.5.0'
__version__ = '0.5.dev'

# Cope with changes to name-space to remove most of the camel-case
_import_map = {}
Expand Down
30 changes: 25 additions & 5 deletions eqcorrscan/core/lag_calc.py
Original file line number Diff line number Diff line change
Expand Up @@ -252,11 +252,11 @@ def xcorr_pick_family(family, stream, shift_len=0.2, min_cc=0.4,
:type horizontal_chans: list
:param horizontal_chans:
List of channel endings for horizontal-channels, on which S-picks will
be made.
be made if no phase hint is given in the template.
:type vertical_chans: list
:param vertical_chans:
List of channel endings for vertical-channels, on which P-picks will
be made.
be made if no phase hint is given in the template.
:type cores: int
:param cores:
Number of cores to use in parallel processing, defaults to one.
Expand Down Expand Up @@ -298,6 +298,24 @@ def xcorr_pick_family(family, stream, shift_len=0.2, min_cc=0.4,
# Correlation function needs a list of streams, we need to maintain order.
ccc, chans = _concatenate_and_correlate(
streams=detect_streams, template=family.template.st, cores=cores)
# Make dict of pick phase hints from the template
phase_hints = dict()
if family.template.event is not None:
# Get a set of picked trace ids, then iterate through that - take
# earliest pick - matches earliest trace used by
# detection.extract_stream
picked_sids = {p.waveform_id.get_seed_string()
for p in family.template.event.picks}
for sid in picked_sids:
_sid_picks = [p for p in family.template.event.picks
if p.waveform_id.get_seed_string() == sid]
_sid_picks.sort(key=lambda p: p.time)
if len(_sid_picks) > 1:
Logger.warning(f"Multiple phase hints found for {sid} - "
f"using earliest ({_sid_picks[0].phase_hint} "
f"at {_sid_picks[0].time})")
phase_hints.update({sid: _sid_picks[0].phase_hint})

for i, detection_id in enumerate(detection_ids):
detection = [d for d in family.detections if d.id == detection_id][0]
correlations = ccc[i]
Expand Down Expand Up @@ -340,11 +358,13 @@ def xcorr_pick_family(family, stream, shift_len=0.2, min_cc=0.4,
'using'.format(cc_max))
continue
cccsum += cc_max
phase = None
if stachan.channel[1][-1] in vertical_chans:
phase = phase_hints.get(tr.id, None)
if phase is None and stachan.channel[1][-1] in vertical_chans:
phase = 'P'
elif stachan.channel[1][-1] in horizontal_chans:
Logger.debug(f"Unknown phase hint for {tr.id} - assigning P")
elif phase is None and stachan.channel[1][-1] in horizontal_chans:
phase = 'S'
Logger.debug(f"Unknown phase hint for {tr.id} - assigning S")
_waveform_id = WaveformStreamID(seed_string=tr.id)
event.picks.append(Pick(
waveform_id=_waveform_id, time=picktime,
Expand Down
32 changes: 24 additions & 8 deletions eqcorrscan/core/match_filter/detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -301,7 +301,7 @@ def _calculate_event(self, template=None, template_st=None,
template_pick,
key=lambda p: p.time)[_index].phase_hint
except IndexError:
Logger.error(f"No pick for trace: {tr.id}")
Logger.debug(f"No pick for trace: {tr.id}")
ev.picks.append(new_pick)
if estimate_origin and template is not None\
and template.event is not None:
Expand Down Expand Up @@ -397,7 +397,8 @@ def extract_stream(self, stream, length, prepick, all_vert=False,
pick = [p for p in pick
if p.waveform_id.channel_code == channel]
if len(pick) == 0:
Logger.info("No pick for {0}.{1}".format(station, channel))
Logger.debug(
"No pick for {0}.{1}".format(station, channel))
continue
elif len(pick) > 1:
Logger.info(
Expand All @@ -406,15 +407,30 @@ def extract_stream(self, stream, length, prepick, all_vert=False,
pick.sort(key=lambda p: p.time)
pick = pick[0]
cut_start = pick.time - prepick
cut_end = cut_start + length
_st = _st.slice(starttime=cut_start, endtime=cut_end).copy()
# Minimum length check
# Find nearest sample to avoid a too-short length - see #573
for tr in _st:
if abs((tr.stats.endtime - tr.stats.starttime) -
sample_offset = (cut_start -
tr.stats.starttime) * tr.stats.sampling_rate
Logger.debug(
f"Sample offset for slice on {tr.id}: {sample_offset}")
sample_offset //= 1
# If the sample offset is not a whole number, always take the
# sample before that requested
_tr_cut_start = tr.stats.starttime + (
sample_offset * tr.stats.delta)
_tr_cut_end = _tr_cut_start + length
Logger.debug(
f"Trimming {tr.id} between {_tr_cut_start} "
f"and {_tr_cut_end}.")
_tr = tr.slice(_tr_cut_start, _tr_cut_end).copy()
Logger.debug(
f"Length: {(_tr.stats.endtime - _tr.stats.starttime)}")
Logger.debug(f"Requested length: {length}")
if abs((_tr.stats.endtime - _tr.stats.starttime) -
length) < tr.stats.delta:
cut_stream += tr
cut_stream += _tr
else:
Logger.info(
Logger.debug(
"Insufficient data length for {0}".format(tr.id))
return cut_stream

Expand Down
18 changes: 15 additions & 3 deletions eqcorrscan/core/match_filter/family.py
Original file line number Diff line number Diff line change
Expand Up @@ -565,8 +565,9 @@ def lag_calc(self, stream, pre_processed, shift_len=0.2, min_cc=0.4,
`cores`).
:type ignore_length: bool
:param ignore_length:
If using daylong=True, then dayproc will try check that the data
are there for at least 80% of the day, if you don't want this check
Processing functions will check that the data are there for at
least 80% of the required length and raise an error if not.
If you don't want this check
(which will raise an error if too much data are missing) then set
ignore_length=True. This is not recommended!
:type ignore_bad_data: bool
Expand Down Expand Up @@ -605,6 +606,18 @@ def lag_calc(self, stream, pre_processed, shift_len=0.2, min_cc=0.4,
"""
from eqcorrscan.core.lag_calc import xcorr_pick_family

# We should make sure we have events calculated for all detections
# we should clean out anything that was there before
# (and warn the user)
_overwritten_warning = False
for d in self.detections:
if len(d.event.picks):
_overwritten_warning = True
d._calculate_event(template=self.template)
if _overwritten_warning:
Logger.warning("Old events in family have been overwritten to "
"ensure lag-calc runs as expected")

processed_stream = self._process_streams(
stream=stream, pre_processed=pre_processed,
process_cores=process_cores, parallel=parallel,
Expand Down Expand Up @@ -770,7 +783,6 @@ def _process_streams(self, stream, pre_processed, process_cores=1,
parallel=parallel,
cores=process_cores,
stream=template_stream.merge().copy(),
daylong=False,
ignore_length=ignore_length,
overlap=0.0, ignore_bad_data=ignore_bad_data)
processed_stream = Stream()
Expand Down
Loading

0 comments on commit 5c3f75f

Please sign in to comment.