Merge branch 'hotstuff_integration' into GH-1510-tests-if
heifner authored Jan 29, 2024
2 parents bdaf0e0 + 7d9bb58 commit 029b265
Showing 40 changed files with 301 additions and 3,791 deletions.
27 changes: 16 additions & 11 deletions .github/workflows/build.yaml
@@ -93,7 +93,7 @@ jobs:
with:
submodules: recursive
- name: Download builddir
-uses: actions/download-artifact@v3
+uses: actions/download-artifact@v4
with:
name: ${{matrix.platform}}-build
- name: Build packages
@@ -112,17 +112,19 @@ jobs:
run: |
python3 -c "from TestHarness import Cluster"
- name: Upload dev package
-uses: actions/upload-artifact@v3
+uses: actions/upload-artifact@v4
if: matrix.platform != 'reproducible'
with:
name: leap-dev-${{matrix.platform}}-amd64
path: build/leap-dev*.deb
+compression-level: 0
- name: Upload leap package
-uses: actions/upload-artifact@v3
+uses: actions/upload-artifact@v4
if: matrix.platform == 'reproducible'
with:
name: leap-deb-amd64
path: build/leap_*.deb
+compression-level: 0

tests:
name: Tests (${{matrix.cfg.name}})
@@ -142,7 +144,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Download builddir
-uses: actions/download-artifact@v3
+uses: actions/download-artifact@v4
with:
name: ${{matrix.cfg.builddir}}-build
- name: Run Parallel Tests
@@ -153,12 +155,13 @@ jobs:
cd build
ctest --output-on-failure -j $(nproc) -LE "(nonparallelizable_tests|long_running_tests)" --timeout 420
- name: Upload core files from failed tests
-uses: actions/upload-artifact@v3
+uses: actions/upload-artifact@v4
if: failure()
with:
name: ${{matrix.cfg.name}}-tests-logs
if-no-files-found: ignore
path: /cores
+compression-level: 0
- name: Check CPU Features
run: awk 'BEGIN {err = 1} /bmi2/ && /adx/ {err = 0} END {exit err}' /proc/cpuinfo

@@ -177,7 +180,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Download builddir
-uses: actions/download-artifact@v3
+uses: actions/download-artifact@v4
with:
name: ${{matrix.cfg.builddir}}-build
- name: Run tests in parallel containers
@@ -192,13 +195,14 @@ jobs:
run: docker run --mount type=bind,source=/var/lib/systemd/coredump,target=/cores alpine sh -c 'tar -C /cores/ -c .' | tar x
if: failure()
- name: Upload logs from failed tests
-uses: actions/upload-artifact@v3
+uses: actions/upload-artifact@v4
if: failure()
with:
name: ${{matrix.cfg.name}}-np-logs
path: |
*-logs.tar.gz
core*.zst
+compression-level: 0

lr-tests:
name: LR Tests (${{matrix.cfg.name}})
@@ -215,7 +219,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Download builddir
-uses: actions/download-artifact@v3
+uses: actions/download-artifact@v4
with:
name: ${{matrix.cfg.builddir}}-build
- name: Run tests in parallel containers
@@ -230,13 +234,14 @@ jobs:
run: docker run --mount type=bind,source=/var/lib/systemd/coredump,target=/cores alpine sh -c 'tar -C /cores/ -c .' | tar x
if: failure()
- name: Upload logs from failed tests
-uses: actions/upload-artifact@v3
+uses: actions/upload-artifact@v4
if: failure()
with:
name: ${{matrix.cfg.name}}-lr-logs
path: |
*-logs.tar.gz
core*.zst
+compression-level: 0

libtester-tests:
name: libtester tests
@@ -265,7 +270,7 @@ jobs:
submodules: recursive
- if: ${{ matrix.test != 'deb-install' }}
name: Download leap builddir
-uses: actions/download-artifact@v3
+uses: actions/download-artifact@v4
with:
name: ${{matrix.platform}}-build
- if: ${{ matrix.test != 'deb-install' }}
@@ -287,7 +292,7 @@ jobs:
rm -r *
- if: ${{ matrix.test == 'deb-install' }}
name: Download leap-dev
-uses: actions/download-artifact@v3
+uses: actions/download-artifact@v4
with:
name: leap-dev-${{matrix.platform}}-amd64
- if: ${{ matrix.test == 'deb-install' }}
5 changes: 3 additions & 2 deletions .github/workflows/build_base.yaml
@@ -42,7 +42,8 @@ jobs:
cmake --build build
tar -pc --exclude "*.o" build | zstd --long -T0 -9 > build.tar.zst
- name: Upload builddir
-uses: AntelopeIO/upload-artifact-large-chunks-action@v1
+uses: actions/upload-artifact@v4
with:
name: ${{matrix.platform}}-build
-path: build.tar.zst
+path: build.tar.zst
+compression-level: 0
2 changes: 0 additions & 2 deletions libraries/chain/CMakeLists.txt
@@ -80,9 +80,7 @@ set(CHAIN_WEBASSEMBLY_SOURCES
)

set(CHAIN_HOTSTUFF_SOURCES
-hotstuff/chain_pacemaker.cpp
hotstuff/instant_finality_extension.cpp
-hotstuff/qc_chain.cpp
hotstuff/hotstuff.cpp
)

21 changes: 18 additions & 3 deletions libraries/chain/authorization_manager.cpp
@@ -479,7 +479,12 @@ namespace eosio { namespace chain {

auto effective_provided_delay = (provided_delay >= delay_max_limit) ? fc::microseconds::maximum() : provided_delay;

-auto checker = make_auth_checker( [&](const permission_level& p){ return get_permission(p).auth; },
+auto checker = make_auth_checker( [&](const permission_level& p) -> const shared_authority* {
+   if(const permission_object* po = find_permission(p))
+      return &po->auth;
+   else
+      return nullptr;
+},
_control.get_global_properties().configuration.max_authority_depth,
provided_keys,
provided_permissions,
@@ -580,7 +585,12 @@ namespace eosio { namespace chain {

auto delay_max_limit = fc::seconds( _control.get_global_properties().configuration.max_transaction_delay );

-auto checker = make_auth_checker( [&](const permission_level& p){ return get_permission(p).auth; },
+auto checker = make_auth_checker( [&](const permission_level& p) -> const shared_authority* {
+   if(const permission_object* po = find_permission(p))
+      return &po->auth;
+   else
+      return nullptr;
+},
_control.get_global_properties().configuration.max_authority_depth,
provided_keys,
provided_permissions,
@@ -611,7 +621,12 @@ namespace eosio { namespace chain {
fc::microseconds provided_delay
)const
{
-auto checker = make_auth_checker( [&](const permission_level& p){ return get_permission(p).auth; },
+auto checker = make_auth_checker( [&](const permission_level& p) -> const shared_authority* {
+   if(const permission_object* po = find_permission(p))
+      return &po->auth;
+   else
+      return nullptr;
+},
_control.get_global_properties().configuration.max_authority_depth,
candidate_keys,
{},
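
Note: the three hunks above make the same change. The permission-lookup lambda handed to make_auth_checker used to call get_permission(p).auth, which throws when the permission does not exist; it now returns a const shared_authority* that is nullptr for a missing permission. The sketch below is not the real eosio::chain authority checker; it uses hypothetical stand-in types purely to illustrate why a nullable lookup lets an unknown permission count as "unsatisfied" instead of aborting the whole authorization check with an exception.

// Self-contained illustration with stand-in types (authority instead of shared_authority,
// std::string instead of permission_level); not the actual eosio::chain interfaces.
#include <functional>
#include <iostream>
#include <map>
#include <string>

struct authority { int threshold = 1; };   // stand-in for shared_authority

using permission_lookup = std::function<const authority*(const std::string&)>;

// Stand-in for the checker produced by make_auth_checker: it only needs to ask
// whether the referenced permission exists and then inspect it.
bool satisfied(const permission_lookup& lookup, const std::string& perm) {
   const authority* auth = lookup(perm);
   if (auth == nullptr)
      return false;              // unknown permission: unsatisfied, no exception thrown
   return auth->threshold <= 1;  // placeholder for the real key/weight evaluation
}

int main() {
   std::map<std::string, authority> permissions{ {"owner", {}}, {"active", {}} };

   // Mirrors the new lambda: a find_permission() analogue returning nullptr when absent.
   permission_lookup lookup = [&](const std::string& name) -> const authority* {
      auto it = permissions.find(name);
      return it != permissions.end() ? &it->second : nullptr;
   };

   std::cout << satisfied(lookup, "active")  << '\n';   // prints 1
   std::cout << satisfied(lookup, "missing") << '\n';   // prints 0 instead of throwing
}
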
11 changes: 10 additions & 1 deletion libraries/chain/controller.cpp
@@ -815,6 +815,7 @@ struct controller_impl {
deep_mind_handler* deep_mind_logger = nullptr;
bool okay_to_print_integrity_hash_on_stop = false;
bls_key_map_t node_finalizer_keys;
+std::atomic<bool> writing_snapshot = false;

thread_local static platform_timer timer; // a copy for main thread and each read-only thread
#if defined(EOSIO_EOS_VM_RUNTIME_ENABLED) || defined(EOSIO_EOS_VM_JIT_RUNTIME_ENABLED)
@@ -4326,7 +4327,15 @@ fc::sha256 controller::calculate_integrity_hash() { try {

void controller::write_snapshot( const snapshot_writer_ptr& snapshot ) {
EOS_ASSERT( !my->pending, block_validate_exception, "cannot take a consistent snapshot with a pending block" );
-return my->add_to_snapshot(snapshot);
+my->writing_snapshot.store(true, std::memory_order_release);
+fc::scoped_exit<std::function<void()>> e = [&] {
+   my->writing_snapshot.store(false, std::memory_order_release);
+};
+my->add_to_snapshot(snapshot);
+}
+
+bool controller::is_writing_snapshot() const {
+   return my->writing_snapshot.load(std::memory_order_acquire);
}

int64_t controller::set_proposed_producers( vector<producer_authority> producers ) {
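
Note: write_snapshot now sets an atomic writing_snapshot flag for the duration of add_to_snapshot and clears it through fc::scoped_exit, so the flag is reset even if snapshot writing throws, and the new is_writing_snapshot() accessor lets other threads poll it. Below is a minimal stand-alone sketch of the same RAII-plus-atomic pattern; the flag_guard struct is a hypothetical substitute for fc::scoped_exit and the free functions stand in for the controller methods.

// Minimal sketch: an atomic flag set for the duration of a long operation and cleared
// by an RAII guard even on exception, so other threads can poll it cheaply.
#include <atomic>
#include <iostream>

std::atomic<bool> writing_snapshot{false};

struct flag_guard {                      // hypothetical substitute for fc::scoped_exit
   std::atomic<bool>& flag;
   ~flag_guard() { flag.store(false, std::memory_order_release); }  // runs on return or throw
};

void write_snapshot() {
   writing_snapshot.store(true, std::memory_order_release);
   flag_guard guard{writing_snapshot};
   // ... the real code calls my->add_to_snapshot(snapshot) here, which may throw ...
}

bool is_writing_snapshot() {
   return writing_snapshot.load(std::memory_order_acquire);
}

int main() {
   std::cout << is_writing_snapshot() << '\n';   // 0
   write_snapshot();
   std::cout << is_writing_snapshot() << '\n';   // 0 again: the guard cleared the flag
}
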