From 3bafc27de94535e597fd1c532fc537d5b7cdad48 Mon Sep 17 00:00:00 2001
From: Sara
Date: Wed, 9 Jan 2019 17:37:23 +0300
Subject: [PATCH 01/41] changed develop to master in documentation

Signed-off-by: Sara
---
 docs/source/getting_started/index.rst | 6 +++---
 docs/source/guides/build.rst          | 4 ++--
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/docs/source/getting_started/index.rst b/docs/source/getting_started/index.rst
index b3c77add1a..252bc047cf 100644
--- a/docs/source/getting_started/index.rst
+++ b/docs/source/getting_started/index.rst
@@ -84,7 +84,7 @@ In order to get those files, you need to clone the

 .. code-block:: shell

-  git clone -b develop https://github.com/hyperledger/iroha --depth=1
+  git clone -b master https://github.com/hyperledger/iroha --depth=1

 .. hint:: ``--depth=1`` option allows us to download only latest commit and
    save some time and bandwidth. If you want to get a full commit history, you
@@ -103,7 +103,7 @@ command
     -v blockstore:/tmp/block_store \
     --network=iroha-network \
     --entrypoint=/bin/bash \
-    hyperledger/iroha:develop
+    hyperledger/iroha:latest

 Let's look in detail what this command does:

@@ -119,7 +119,7 @@ Let's look in detail what this command does:
 - ``--entrypoint=/bin/bash \``
   Because ``hyperledger/iroha`` has the custom script which runs after starting
   the container, we want to override it so we can start Iroha Daemon manually.
-- ``hyperledger/iroha:develop`` is the image which has the ``develop``
+- ``hyperledger/iroha:latest`` is the image which has the ``master``
   branch.

 Launching Iroha Daemon
diff --git a/docs/source/guides/build.rst b/docs/source/guides/build.rst
index 2745305a6a..87cfcb8323 100644
--- a/docs/source/guides/build.rst
+++ b/docs/source/guides/build.rst
@@ -36,7 +36,7 @@ to the directory of your choice.

 .. code-block:: shell

-  git clone -b develop https://github.com/hyperledger/iroha --depth=1
+  git clone -b master https://github.com/hyperledger/iroha --depth=1

 .. hint:: ``--depth=1`` option allows us to download only latest commit and
    save some time and bandwidth. If you want to get a full commit history, you
@@ -132,7 +132,7 @@ directory of your choice.

 .. code-block:: shell

-  git clone -b develop https://github.com/hyperledger/iroha
+  git clone -b master https://github.com/hyperledger/iroha
   cd iroha

 ..
hint:: If you have installed the prerequisites with Docker, you don't need From 5dd4b12dec934d2a86649a4ceca6a699bbe6b8f4 Mon Sep 17 00:00:00 2001 From: Mikhail Boldyrev Date: Mon, 14 Jan 2019 09:28:14 +0300 Subject: [PATCH 02/41] Refactor: libacceptance_fixture detached from ITF lib (#2018) * removed itf dependency from acceptance test lib * created a separate common_test_constants lib Signed-off-by: Mikhail Boldyrev --- .../utils/query_error_response_visitor.hpp | 1 + test/framework/CMakeLists.txt | 4 ++- test/integration/acceptance/CMakeLists.txt | 29 ++++++++++++++++++- .../acceptance/acceptance_fixture.cpp | 1 - test/integration/pipeline/CMakeLists.txt | 3 ++ 5 files changed, 35 insertions(+), 3 deletions(-) diff --git a/shared_model/utils/query_error_response_visitor.hpp b/shared_model/utils/query_error_response_visitor.hpp index cf7895d0f1..898217819e 100644 --- a/shared_model/utils/query_error_response_visitor.hpp +++ b/shared_model/utils/query_error_response_visitor.hpp @@ -7,6 +7,7 @@ #define IROHA_QUERY_ERROR_RESPONSE_VISITOR_HPP #include +#include "common/visitor.hpp" #include "interfaces/query_responses/error_query_response.hpp" namespace shared_model { diff --git a/test/framework/CMakeLists.txt b/test/framework/CMakeLists.txt index 5de576c4ba..a87d1688b3 100644 --- a/test/framework/CMakeLists.txt +++ b/test/framework/CMakeLists.txt @@ -14,12 +14,12 @@ add_library(integration_framework integration_framework/fake_peer/fake_peer.cpp integration_framework/fake_peer/yac_network_notifier.cpp integration_framework/port_guard.cpp - common_constants.cpp ) target_link_libraries(integration_framework application integration_framework_config_helper command_client + common_test_constants query_client ordering_gate_common yac_transport @@ -28,6 +28,8 @@ target_link_libraries(integration_framework mst_transport ) +add_library(common_test_constants common_constants.cpp) + target_include_directories(integration_framework PUBLIC ${PROJECT_SOURCE_DIR}/test) add_library(integration_framework_config_helper config_helper.cpp) diff --git a/test/integration/acceptance/CMakeLists.txt b/test/integration/acceptance/CMakeLists.txt index a1e5dea2bb..10b23f701c 100644 --- a/test/integration/acceptance/CMakeLists.txt +++ b/test/integration/acceptance/CMakeLists.txt @@ -7,7 +7,7 @@ add_library(acceptance_fixture acceptance_fixture.cpp) target_link_libraries(acceptance_fixture gtest::gtest gmock::gmock - integration_framework + common_test_constants shared_model_proto_builders ) @@ -16,31 +16,37 @@ add_library(grantable_permissions_fixture ) target_link_libraries(grantable_permissions_fixture acceptance_fixture + integration_framework ) addtest(add_asset_qty_test add_asset_qty_test.cpp) target_link_libraries(add_asset_qty_test acceptance_fixture + integration_framework ) addtest(create_account_test create_account_test.cpp) target_link_libraries(create_account_test acceptance_fixture + integration_framework ) addtest(create_domain_test create_domain_test.cpp) target_link_libraries(create_domain_test acceptance_fixture + integration_framework ) addtest(create_role_test create_role_test.cpp) target_link_libraries(create_role_test acceptance_fixture + integration_framework ) addtest(get_transactions_test get_transactions_test.cpp) target_link_libraries(get_transactions_test acceptance_fixture + integration_framework ) addtest(grant_permission_test grant_permission_test.cpp) @@ -56,31 +62,37 @@ target_link_libraries(revoke_permission_test addtest(invalid_fields_test invalid_fields_test.cpp) 
target_link_libraries(invalid_fields_test acceptance_fixture + integration_framework ) addtest(query_test query_test.cpp) target_link_libraries(query_test acceptance_fixture + integration_framework ) addtest(subtract_asset_qty_test subtract_asset_qty_test.cpp) target_link_libraries(subtract_asset_qty_test acceptance_fixture + integration_framework ) addtest(transfer_asset_test transfer_asset_test.cpp) target_link_libraries(transfer_asset_test acceptance_fixture + integration_framework ) addtest(tx_acceptance_test tx_acceptance_test.cpp) target_link_libraries(tx_acceptance_test acceptance_fixture + integration_framework ) addtest(tx_heavy_data tx_heavy_data.cpp) target_link_libraries(tx_heavy_data acceptance_fixture + integration_framework ) addtest(get_account_assets_test @@ -90,26 +102,31 @@ query_permission_test_base.cpp" ) target_link_libraries(get_account_assets_test acceptance_fixture + integration_framework ) addtest(set_account_detail_test set_account_detail_test.cpp) target_link_libraries(set_account_detail_test acceptance_fixture + integration_framework ) addtest(get_roles_test get_roles_test.cpp) target_link_libraries(get_roles_test acceptance_fixture + integration_framework ) addtest(queries_acceptance_test queries_acceptance_test.cpp) target_link_libraries(queries_acceptance_test acceptance_fixture + integration_framework ) addtest(create_asset_test create_asset_test.cpp) target_link_libraries(create_asset_test acceptance_fixture + integration_framework ) addtest(get_account_asset_txs_test @@ -119,6 +136,7 @@ query_permission_test_base.cpp" ) target_link_libraries(get_account_asset_txs_test acceptance_fixture + integration_framework ) addtest(query_permissions_common_test @@ -132,6 +150,7 @@ query_permission_test_signatories.cpp" ) target_link_libraries(query_permissions_common_test acceptance_fixture + integration_framework ) addtest(get_account_test @@ -139,6 +158,7 @@ addtest(get_account_test ) target_link_libraries(get_account_test acceptance_fixture + integration_framework ) addtest(add_signatory_test @@ -146,6 +166,7 @@ addtest(add_signatory_test ) target_link_libraries(add_signatory_test acceptance_fixture + integration_framework ) addtest(remove_signatory_test @@ -153,6 +174,7 @@ addtest(remove_signatory_test ) target_link_libraries(remove_signatory_test acceptance_fixture + integration_framework ) addtest(get_asset_info_test @@ -160,6 +182,7 @@ addtest(get_asset_info_test ) target_link_libraries(get_asset_info_test acceptance_fixture + integration_framework ) addtest(get_role_permissions_test @@ -167,6 +190,7 @@ addtest(get_role_permissions_test ) target_link_libraries(get_role_permissions_test acceptance_fixture + integration_framework ) addtest(replay_test @@ -174,6 +198,7 @@ addtest(replay_test ) target_link_libraries(replay_test acceptance_fixture + integration_framework ) addtest(basic_mst_state_propagation_test @@ -181,6 +206,7 @@ addtest(basic_mst_state_propagation_test ) target_link_libraries(basic_mst_state_propagation_test acceptance_fixture + integration_framework ) addtest(set_account_quorum_test @@ -188,4 +214,5 @@ addtest(set_account_quorum_test ) target_link_libraries(set_account_quorum_test acceptance_fixture + integration_framework ) diff --git a/test/integration/acceptance/acceptance_fixture.cpp b/test/integration/acceptance/acceptance_fixture.cpp index 5b32cd72fe..10e745fc27 100644 --- a/test/integration/acceptance/acceptance_fixture.cpp +++ b/test/integration/acceptance/acceptance_fixture.cpp @@ -8,7 +8,6 @@ #include #include "datetime/time.hpp" 
-#include "framework/integration_framework/integration_test_framework.hpp" #include "utils/query_error_response_visitor.hpp" using namespace common_constants; diff --git a/test/integration/pipeline/CMakeLists.txt b/test/integration/pipeline/CMakeLists.txt index d4f33fee57..a467cab9fb 100644 --- a/test/integration/pipeline/CMakeLists.txt +++ b/test/integration/pipeline/CMakeLists.txt @@ -6,14 +6,17 @@ addtest(pipeline_test pipeline_test.cpp) target_link_libraries(pipeline_test acceptance_fixture + integration_framework ) addtest(batch_pipeline_test batch_pipeline_test.cpp) target_link_libraries(batch_pipeline_test acceptance_fixture + integration_framework ) addtest(multisig_tx_pipeline_test multisig_tx_pipeline_test.cpp) target_link_libraries(multisig_tx_pipeline_test acceptance_fixture + integration_framework ) From 20cf2806040aa6e2a169897184e899069f523b09 Mon Sep 17 00:00:00 2001 From: Akvinikym Date: Mon, 14 Jan 2019 13:00:06 +0300 Subject: [PATCH 03/41] Move some WsvQuer-ies to test namespace (#2016) Moved the queries Signed-off-by: Akvinikym --- .../ametsuchi/impl/mutable_storage_impl.cpp | 1 + irohad/ametsuchi/impl/postgres_wsv_query.cpp | 246 +-------- irohad/ametsuchi/impl/postgres_wsv_query.hpp | 45 -- irohad/ametsuchi/storage.hpp | 2 +- irohad/ametsuchi/wsv_query.hpp | 109 ---- test/framework/CMakeLists.txt | 17 +- test/framework/sql_query.cpp | 296 +++++++++++ test/framework/sql_query.hpp | 164 ++++++ test/module/irohad/ametsuchi/CMakeLists.txt | 8 +- .../irohad/ametsuchi/ametsuchi_fixture.hpp | 6 + .../irohad/ametsuchi/ametsuchi_mocks.hpp | 37 -- .../irohad/ametsuchi/ametsuchi_test.cpp | 34 +- .../irohad/ametsuchi/kv_storage_test.cpp | 113 ---- .../ametsuchi/postgres_executor_test.cpp | 124 ++--- .../ametsuchi/wsv_query_command_test.cpp | 489 ------------------ 15 files changed, 567 insertions(+), 1124 deletions(-) create mode 100644 test/framework/sql_query.cpp create mode 100644 test/framework/sql_query.hpp delete mode 100644 test/module/irohad/ametsuchi/kv_storage_test.cpp diff --git a/irohad/ametsuchi/impl/mutable_storage_impl.cpp b/irohad/ametsuchi/impl/mutable_storage_impl.cpp index 921c356400..3c7d2ac1a3 100644 --- a/irohad/ametsuchi/impl/mutable_storage_impl.cpp +++ b/irohad/ametsuchi/impl/mutable_storage_impl.cpp @@ -13,6 +13,7 @@ #include "ametsuchi/impl/postgres_wsv_query.hpp" #include "interfaces/commands/command.hpp" #include "interfaces/common_objects/common_objects_factory.hpp" +#include "interfaces/iroha_internal/block.hpp" namespace iroha { namespace ametsuchi { diff --git a/irohad/ametsuchi/impl/postgres_wsv_query.cpp b/irohad/ametsuchi/impl/postgres_wsv_query.cpp index 9d2cf0f4e0..1e56168888 100644 --- a/irohad/ametsuchi/impl/postgres_wsv_query.cpp +++ b/irohad/ametsuchi/impl/postgres_wsv_query.cpp @@ -7,30 +7,21 @@ #include #include "ametsuchi/impl/soci_utils.hpp" -#include "backend/protobuf/permissions.hpp" #include "common/result.hpp" #include "cryptography/public_key.hpp" namespace iroha { namespace ametsuchi { - using shared_model::interface::types::AccountDetailKeyType; using shared_model::interface::types::AccountIdType; using shared_model::interface::types::AddressType; - using shared_model::interface::types::AssetIdType; - using shared_model::interface::types::DetailType; - using shared_model::interface::types::DomainIdType; - using shared_model::interface::types::JsonType; - using shared_model::interface::types::PrecisionType; using shared_model::interface::types::PubkeyType; - using shared_model::interface::types::QuorumType; - using 
shared_model::interface::types::RoleIdType; PostgresWsvQuery::PostgresWsvQuery( soci::session &sql, std::shared_ptr factory, logger::Logger log) - : sql_(sql), factory_(factory), log_(std::move(log)) {} + : sql_(sql), factory_(std::move(factory)), log_(std::move(log)) {} PostgresWsvQuery::PostgresWsvQuery( std::unique_ptr sql, @@ -38,7 +29,7 @@ namespace iroha { logger::Logger log) : psql_(std::move(sql)), sql_(*psql_), - factory_(factory), + factory_(std::move(factory)), log_(std::move(log)) {} template @@ -66,163 +57,6 @@ namespace iroha { } } - bool PostgresWsvQuery::hasAccountGrantablePermission( - const AccountIdType &permitee_account_id, - const AccountIdType &account_id, - shared_model::interface::permissions::Grantable permission) { - const auto perm_str = - shared_model::interface::GrantablePermissionSet({permission}) - .toBitstring(); - using T = boost::tuple; - auto result = execute([&] { - return (sql_.prepare - << "SELECT count(*) FROM " - "account_has_grantable_permissions WHERE " - "permittee_account_id = :permittee_account_id AND " - "account_id = " - ":account_id " - " AND permission & :permission = :permission ", - soci::use(permitee_account_id, "permittee_account_id"), - soci::use(account_id, "account_id"), - soci::use(perm_str, "permission")); - }); - - return flatMapValue>( - result, - [](auto &count) { return boost::make_optional(count == 1); }) - .value_or(false); - } - - boost::optional> PostgresWsvQuery::getAccountRoles( - const AccountIdType &account_id) { - using T = boost::tuple; - auto result = execute([&] { - return (sql_.prepare << "SELECT role_id FROM account_has_roles WHERE " - "account_id = :account_id", - soci::use(account_id)); - }); - - return mapValues>( - result, [&](auto &role_id) { return role_id; }); - } - - boost::optional - PostgresWsvQuery::getRolePermissions(const RoleIdType &role_name) { - using T = boost::tuple; - auto result = execute([&] { - return (sql_.prepare - << "SELECT permission FROM role_has_permissions WHERE " - "role_id = :role_name", - soci::use(role_name)); - }); - - return result | [&](auto &&st) - -> boost::optional< - shared_model::interface::RolePermissionSet> { - auto range = boost::make_iterator_range(st); - - if (range.empty()) { - return shared_model::interface::RolePermissionSet{}; - } - - return apply(range.front(), [](auto &permission) { - return shared_model::interface::RolePermissionSet(permission); - }); - }; - } - - boost::optional> PostgresWsvQuery::getRoles() { - using T = boost::tuple; - auto result = execute( - [&] { return (sql_.prepare << "SELECT role_id FROM role"); }); - - return mapValues>( - result, [&](auto &role_id) { return role_id; }); - } - - boost::optional> - PostgresWsvQuery::getAccount(const AccountIdType &account_id) { - using T = boost::tuple; - auto result = execute([&] { - return (sql_.prepare << "SELECT domain_id, quorum, data " - "FROM account WHERE account_id = " - ":account_id", - soci::use(account_id, "account_id")); - }); - - return flatMapValue< - boost::optional>>( - result, [&](auto &domain_id, auto quorum, auto &data) { - return this->fromResult( - factory_->createAccount(account_id, domain_id, quorum, data)); - }); - } - - boost::optional PostgresWsvQuery::getAccountDetail( - const std::string &account_id, - const AccountDetailKeyType &key, - const AccountIdType &writer) { - using T = boost::tuple; - boost::optional> result; - - if (key.empty() and writer.empty()) { - // retrieve all values for a specified account - std::string empty_json = "{}"; - result = execute([&] { - return 
(sql_.prepare - << "SELECT data#>>:empty_json FROM account WHERE " - "account_id = " - ":account_id;", - soci::use(empty_json), - soci::use(account_id)); - }); - } else if (not key.empty() and not writer.empty()) { - // retrieve values for the account, under the key and added by the - // writer - std::string filled_json = "{\"" + writer + "\"" + ", \"" + key + "\"}"; - result = execute([&] { - return (sql_.prepare - << "SELECT json_build_object(:writer::text, " - "json_build_object(:key::text, (SELECT data #>> " - ":filled_json " - "FROM account WHERE account_id = :account_id)));", - soci::use(writer), - soci::use(key), - soci::use(filled_json), - soci::use(account_id)); - }); - } else if (not writer.empty()) { - // retrieve values added by the writer under all keys - result = execute([&] { - return ( - sql_.prepare - << "SELECT json_build_object(:writer::text, (SELECT data -> " - ":writer FROM account WHERE account_id = :account_id));", - soci::use(writer, "writer"), - soci::use(account_id, "account_id")); - }); - } else { - // retrieve values from all writers under the key - result = execute([&] { - return ( - sql_.prepare - << "SELECT json_object_agg(key, value) AS json FROM (SELECT " - "json_build_object(kv.key, json_build_object(:key::text, " - "kv.value -> :key)) FROM jsonb_each((SELECT data FROM " - "account " - "WHERE account_id = :account_id)) kv WHERE kv.value ? " - ":key) " - "AS " - "jsons, json_each(json_build_object);", - soci::use(key, "key"), - soci::use(account_id, "account_id")); - }); - } - - return flatMapValue>( - result, [&](auto &json) { return boost::make_optional(json); }); - } - boost::optional> PostgresWsvQuery::getSignatories( const AccountIdType &account_id) { using T = boost::tuple; @@ -239,82 +73,6 @@ namespace iroha { }); } - boost::optional> - PostgresWsvQuery::getAsset(const AssetIdType &asset_id) { - using T = boost::tuple; - auto result = execute([&] { - return ( - sql_.prepare - << "SELECT domain_id, precision FROM asset WHERE asset_id = " - ":asset_id", - soci::use(asset_id)); - }); - - return flatMapValue< - boost::optional>>( - result, [&](auto &domain_id, auto precision) { - return this->fromResult( - factory_->createAsset(asset_id, domain_id, precision)); - }); - } - - boost::optional< - std::vector>> - PostgresWsvQuery::getAccountAssets(const AccountIdType &account_id) { - using T = boost::tuple; - auto result = execute([&] { - return (sql_.prepare - << "SELECT asset_id, amount FROM account_has_asset " - "WHERE account_id = :account_id", - soci::use(account_id)); - }); - - return flatMapValues< - std::vector>>( - result, [&](auto &asset_id, auto &amount) { - return this->fromResult(factory_->createAccountAsset( - account_id, asset_id, shared_model::interface::Amount(amount))); - }); - } - - boost::optional> - PostgresWsvQuery::getAccountAsset(const AccountIdType &account_id, - const AssetIdType &asset_id) { - using T = boost::tuple; - auto result = execute([&] { - return ( - sql_.prepare - << "SELECT amount FROM account_has_asset WHERE account_id = " - ":account_id AND asset_id = :asset_id", - soci::use(account_id), - soci::use(asset_id)); - }); - - return flatMapValue>>( - result, [&](auto &amount) { - return this->fromResult(factory_->createAccountAsset( - account_id, asset_id, shared_model::interface::Amount(amount))); - }); - } - - boost::optional> - PostgresWsvQuery::getDomain(const DomainIdType &domain_id) { - using T = boost::tuple; - auto result = execute([&] { - return (sql_.prepare << "SELECT default_role FROM domain " - "WHERE domain_id 
= :id LIMIT 1", - soci::use(domain_id)); - }); - - return flatMapValue< - boost::optional>>( - result, [&](auto &default_role) { - return this->fromResult( - factory_->createDomain(domain_id, default_role)); - }); - } - boost::optional>> PostgresWsvQuery::getPeers() { using T = boost::tuple; diff --git a/irohad/ametsuchi/impl/postgres_wsv_query.hpp b/irohad/ametsuchi/impl/postgres_wsv_query.hpp index fc6b4bd724..2bbf064598 100644 --- a/irohad/ametsuchi/impl/postgres_wsv_query.hpp +++ b/irohad/ametsuchi/impl/postgres_wsv_query.hpp @@ -9,7 +9,6 @@ #include "ametsuchi/wsv_query.hpp" #include - #include "interfaces/common_objects/common_objects_factory.hpp" #include "logger/logger.hpp" @@ -29,58 +28,14 @@ namespace iroha { factory, logger::Logger log = logger::log("PostgresWsvQuery")); - boost::optional> - getAccountRoles(const shared_model::interface::types::AccountIdType - &account_id) override; - - boost::optional - getRolePermissions( - const shared_model::interface::types::RoleIdType &role_name) override; - - boost::optional> - getAccount(const shared_model::interface::types::AccountIdType - &account_id) override; - - boost::optional getAccountDetail( - const shared_model::interface::types::AccountIdType &account_id, - const shared_model::interface::types::AccountDetailKeyType &key = "", - const shared_model::interface::types::AccountIdType &writer = - "") override; - boost::optional> getSignatories(const shared_model::interface::types::AccountIdType &account_id) override; - boost::optional> getAsset( - const shared_model::interface::types::AssetIdType &asset_id) override; - - boost::optional< - std::vector>> - getAccountAssets(const shared_model::interface::types::AccountIdType - &account_id) override; - - boost::optional> - getAccountAsset( - const shared_model::interface::types::AccountIdType &account_id, - const shared_model::interface::types::AssetIdType &asset_id) override; - boost::optional< std::vector>> getPeers() override; - boost::optional> - getRoles() override; - - boost::optional> - getDomain(const shared_model::interface::types::DomainIdType &domain_id) - override; - - bool hasAccountGrantablePermission( - const shared_model::interface::types::AccountIdType - &permitee_account_id, - const shared_model::interface::types::AccountIdType &account_id, - shared_model::interface::permissions::Grantable permission) override; - private: /** * Transforms result to optional diff --git a/irohad/ametsuchi/storage.hpp b/irohad/ametsuchi/storage.hpp index eaf219bc4f..96522e29a9 100644 --- a/irohad/ametsuchi/storage.hpp +++ b/irohad/ametsuchi/storage.hpp @@ -6,9 +6,9 @@ #ifndef IROHA_AMETSUCHI_H #define IROHA_AMETSUCHI_H -#include #include +#include #include "ametsuchi/block_query_factory.hpp" #include "ametsuchi/mutable_factory.hpp" #include "ametsuchi/os_persistent_state_factory.hpp" diff --git a/irohad/ametsuchi/wsv_query.hpp b/irohad/ametsuchi/wsv_query.hpp index a498eb3d8c..1c8da086a8 100644 --- a/irohad/ametsuchi/wsv_query.hpp +++ b/irohad/ametsuchi/wsv_query.hpp @@ -6,21 +6,10 @@ #ifndef IROHA_WSV_QUERY_HPP #define IROHA_WSV_QUERY_HPP -#include #include #include -#include "interfaces/common_objects/account.hpp" -#include "interfaces/common_objects/account_asset.hpp" -#include "interfaces/common_objects/asset.hpp" -#include "interfaces/common_objects/domain.hpp" #include "interfaces/common_objects/peer.hpp" -#include "interfaces/iroha_internal/block.hpp" -#include "interfaces/permissions.hpp" -#include "interfaces/queries/query.hpp" -#include 
"interfaces/query_responses/query_response.hpp" -#include "interfaces/transaction.hpp" -#include "interfaces/transaction_responses/tx_response.hpp" namespace iroha { namespace ametsuchi { @@ -31,76 +20,6 @@ namespace iroha { public: virtual ~WsvQuery() = default; - /** - * Check if permitee has permission on account - * @param permitee_account_id - * @param account_id - * @param permission - * @return true if has permission, false otherwise - */ - virtual bool hasAccountGrantablePermission( - const shared_model::interface::types::AccountIdType - &permitee_account_id, - const shared_model::interface::types::AccountIdType &account_id, - shared_model::interface::permissions::Grantable permission) = 0; - - /** - * Get iroha domain - * @param domain_id - id in the system - * @return Domain if exist, nullopt otherwise - */ - virtual boost::optional> - getDomain( - const shared_model::interface::types::DomainIdType &domain_id) = 0; - - /** - * Get account's roles - * @param account_id - * @return - */ - virtual boost::optional< - std::vector> - getAccountRoles( - const shared_model::interface::types::AccountIdType &account_id) = 0; - /** - * Get all permissions of a role - * @param role_name - * @return - */ - virtual boost::optional - getRolePermissions( - const shared_model::interface::types::RoleIdType &role_name) = 0; - - /** - * @return All roles currently in the system - */ - virtual boost::optional< - std::vector> - getRoles() = 0; - - /** - * Get account by user account_id - * @param account_id - * @return - */ - virtual boost::optional> - getAccount( - const shared_model::interface::types::AccountIdType &account_id) = 0; - - /** - * Get accounts information from its key-value storage - * @param account_id - account to get details about - * @param key - only values under this key from Json are returned; default - * empty - * @param writer - only values, added by the writer's account, are - * returned; default empty - * @return optional of account details - */ - virtual boost::optional getAccountDetail( - const std::string &account_id, - const std::string &key = "", - const std::string &writer = "") = 0; - /** * Get signatories of account by user account_id * @param account_id @@ -111,34 +30,6 @@ namespace iroha { getSignatories( const shared_model::interface::types::AccountIdType &account_id) = 0; - /** - * Get asset by its name - * @param asset_id - * @return - */ - virtual boost::optional> - getAsset(const shared_model::interface::types::AssetIdType &asset_id) = 0; - - /** - * Get account assets - * @param account_id - * @return - */ - virtual boost::optional< - std::vector>> - getAccountAssets( - const shared_model::interface::types::AccountIdType &account_id) = 0; - /** - * - * @param account_id - * @param asset_id - * @return - */ - virtual boost::optional< - std::shared_ptr> - getAccountAsset( - const shared_model::interface::types::AccountIdType &account_id, - const shared_model::interface::types::AssetIdType &asset_id) = 0; /** * * @return diff --git a/test/framework/CMakeLists.txt b/test/framework/CMakeLists.txt index a87d1688b3..7eb29b4266 100644 --- a/test/framework/CMakeLists.txt +++ b/test/framework/CMakeLists.txt @@ -32,5 +32,20 @@ add_library(common_test_constants common_constants.cpp) target_include_directories(integration_framework PUBLIC ${PROJECT_SOURCE_DIR}/test) -add_library(integration_framework_config_helper config_helper.cpp) +add_library(integration_framework_config_helper + config_helper.cpp + ) target_include_directories(integration_framework_config_helper 
PUBLIC ${PROJECT_SOURCE_DIR}/test) + +add_library(framework_sql_query + sql_query.cpp + ) +target_link_libraries(framework_sql_query + SOCI::core + SOCI::postgresql + logger + shared_model_proto_backend + ) +target_compile_definitions(framework_sql_query + PRIVATE SOCI_USE_BOOST HAVE_BOOST + ) diff --git a/test/framework/sql_query.cpp b/test/framework/sql_query.cpp new file mode 100644 index 0000000000..8e5f1a66f9 --- /dev/null +++ b/test/framework/sql_query.cpp @@ -0,0 +1,296 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#include "framework/sql_query.hpp" + +#include +#include "ametsuchi/impl/soci_utils.hpp" +#include "backend/protobuf/permissions.hpp" +#include "common/bind.hpp" +#include "common/result.hpp" + +namespace framework { + namespace ametsuchi { + + using iroha::ametsuchi::flatMapValue; + using iroha::ametsuchi::flatMapValues; + using iroha::ametsuchi::mapValues; + using shared_model::interface::types::AccountDetailKeyType; + using shared_model::interface::types::AccountIdType; + using shared_model::interface::types::AssetIdType; + using shared_model::interface::types::DetailType; + using shared_model::interface::types::DomainIdType; + using shared_model::interface::types::JsonType; + using shared_model::interface::types::PrecisionType; + using shared_model::interface::types::QuorumType; + using shared_model::interface::types::RoleIdType; + + template + auto SqlQuery::execute(F &&f) -> boost::optional> { + try { + return soci::rowset{std::forward(f)()}; + } catch (const std::exception &e) { + log_->error("Failed to execute query: {}", e.what()); + return boost::none; + } + } + + template + boost::optional> SqlQuery::fromResult( + shared_model::interface::CommonObjectsFactory::FactoryResult< + std::unique_ptr> &&result) { + return result.match( + [](iroha::expected::Value> &v) { + return boost::make_optional(std::shared_ptr(std::move(v.value))); + }, + [&](iroha::expected::Error &e) + -> boost::optional> { + log_->error(e.error); + return boost::none; + }); + } + + SqlQuery::SqlQuery( + soci::session &sql, + std::shared_ptr factory, + logger::Logger log) + : sql_{sql}, factory_{std::move(factory)}, log_{std::move(log)} {} + + bool SqlQuery::hasAccountGrantablePermission( + const AccountIdType &permitee_account_id, + const AccountIdType &account_id, + shared_model::interface::permissions::Grantable permission) { + const auto perm_str = + shared_model::interface::GrantablePermissionSet({permission}) + .toBitstring(); + using T = boost::tuple; + auto result = execute([&] { + return (sql_.prepare + << "SELECT count(*) FROM " + "account_has_grantable_permissions WHERE " + "permittee_account_id = :permittee_account_id AND " + "account_id = " + ":account_id " + " AND permission & :permission = :permission ", + soci::use(permitee_account_id, "permittee_account_id"), + soci::use(account_id, "account_id"), + soci::use(perm_str, "permission")); + }); + + return flatMapValue>( + result, + [](auto &count) { return boost::make_optional(count == 1); }) + .value_or(false); + } + + boost::optional> SqlQuery::getAccountRoles( + const AccountIdType &account_id) { + using T = boost::tuple; + auto result = execute([&] { + return (sql_.prepare << "SELECT role_id FROM account_has_roles WHERE " + "account_id = :account_id", + soci::use(account_id)); + }); + + return mapValues>( + result, [&](auto &role_id) { return role_id; }); + } + + boost::optional + SqlQuery::getRolePermissions(const RoleIdType &role_name) { + using iroha::operator|; 
+ using T = boost::tuple; + auto result = execute([&] { + return (sql_.prepare + << "SELECT permission FROM role_has_permissions WHERE " + "role_id = :role_name", + soci::use(role_name)); + }); + + return result | [&](auto &&st) + -> boost::optional< + shared_model::interface::RolePermissionSet> { + auto range = boost::make_iterator_range(st); + + if (range.empty()) { + return shared_model::interface::RolePermissionSet{}; + } + + return iroha::ametsuchi::apply(range.front(), [](auto &permission) { + return shared_model::interface::RolePermissionSet(permission); + }); + }; + } + + boost::optional> SqlQuery::getRoles() { + using T = boost::tuple; + auto result = execute( + [&] { return (sql_.prepare << "SELECT role_id FROM role"); }); + + return mapValues>( + result, [&](auto &role_id) { return role_id; }); + } + + boost::optional> + SqlQuery::getAccount(const AccountIdType &account_id) { + using T = boost::tuple; + auto result = execute([&] { + return (sql_.prepare << "SELECT domain_id, quorum, data " + "FROM account WHERE account_id = " + ":account_id", + soci::use(account_id, "account_id")); + }); + + return flatMapValue< + boost::optional>>( + result, [&](auto &domain_id, auto quorum, auto &data) { + return this->fromResult( + factory_->createAccount(account_id, domain_id, quorum, data)); + }); + } + + boost::optional SqlQuery::getAccountDetail( + const std::string &account_id, + const AccountDetailKeyType &key, + const AccountIdType &writer) { + using T = boost::tuple; + boost::optional> result; + + if (key.empty() and writer.empty()) { + // retrieve all values for a specified account + std::string empty_json = "{}"; + result = execute([&] { + return (sql_.prepare + << "SELECT data#>>:empty_json FROM account WHERE " + "account_id = " + ":account_id;", + soci::use(empty_json), + soci::use(account_id)); + }); + } else if (not key.empty() and not writer.empty()) { + // retrieve values for the account, under the key and added by the + // writer + std::string filled_json = "{\"" + writer + "\"" + ", \"" + key + "\"}"; + result = execute([&] { + return (sql_.prepare + << "SELECT json_build_object(:writer::text, " + "json_build_object(:key::text, (SELECT data #>> " + ":filled_json " + "FROM account WHERE account_id = :account_id)));", + soci::use(writer), + soci::use(key), + soci::use(filled_json), + soci::use(account_id)); + }); + } else if (not writer.empty()) { + // retrieve values added by the writer under all keys + result = execute([&] { + return ( + sql_.prepare + << "SELECT json_build_object(:writer::text, (SELECT data -> " + ":writer FROM account WHERE account_id = :account_id));", + soci::use(writer, "writer"), + soci::use(account_id, "account_id")); + }); + } else { + // retrieve values from all writers under the key + result = execute([&] { + return ( + sql_.prepare + << "SELECT json_object_agg(key, value) AS json FROM (SELECT " + "json_build_object(kv.key, json_build_object(:key::text, " + "kv.value -> :key)) FROM jsonb_each((SELECT data FROM " + "account " + "WHERE account_id = :account_id)) kv WHERE kv.value ? 
" + ":key) " + "AS " + "jsons, json_each(json_build_object);", + soci::use(key, "key"), + soci::use(account_id, "account_id")); + }); + } + + return flatMapValue>( + result, [&](auto &json) { return boost::make_optional(json); }); + } + + boost::optional> + SqlQuery::getAsset(const AssetIdType &asset_id) { + using T = boost::tuple; + auto result = execute([&] { + return ( + sql_.prepare + << "SELECT domain_id, precision FROM asset WHERE asset_id = " + ":asset_id", + soci::use(asset_id)); + }); + + return flatMapValue< + boost::optional>>( + result, [&](auto &domain_id, auto precision) { + return this->fromResult( + factory_->createAsset(asset_id, domain_id, precision)); + }); + } + + boost::optional< + std::vector>> + SqlQuery::getAccountAssets(const AccountIdType &account_id) { + using T = boost::tuple; + auto result = execute([&] { + return (sql_.prepare + << "SELECT asset_id, amount FROM account_has_asset " + "WHERE account_id = :account_id", + soci::use(account_id)); + }); + + return flatMapValues< + std::vector>>( + result, [&](auto &asset_id, auto &amount) { + return this->fromResult(factory_->createAccountAsset( + account_id, asset_id, shared_model::interface::Amount(amount))); + }); + } + + boost::optional> + SqlQuery::getAccountAsset(const AccountIdType &account_id, + const AssetIdType &asset_id) { + using T = boost::tuple; + auto result = execute([&] { + return ( + sql_.prepare + << "SELECT amount FROM account_has_asset WHERE account_id = " + ":account_id AND asset_id = :asset_id", + soci::use(account_id), + soci::use(asset_id)); + }); + + return flatMapValue>>( + result, [&](auto &amount) { + return this->fromResult(factory_->createAccountAsset( + account_id, asset_id, shared_model::interface::Amount(amount))); + }); + } + + boost::optional> + SqlQuery::getDomain(const DomainIdType &domain_id) { + using T = boost::tuple; + auto result = execute([&] { + return (sql_.prepare << "SELECT default_role FROM domain " + "WHERE domain_id = :id LIMIT 1", + soci::use(domain_id)); + }); + + return flatMapValue< + boost::optional>>( + result, [&](auto &default_role) { + return this->fromResult( + factory_->createDomain(domain_id, default_role)); + }); + } + + } // namespace ametsuchi +} // namespace framework diff --git a/test/framework/sql_query.hpp b/test/framework/sql_query.hpp new file mode 100644 index 0000000000..ec6c660afd --- /dev/null +++ b/test/framework/sql_query.hpp @@ -0,0 +1,164 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_SQL_QUERY_HPP +#define IROHA_SQL_QUERY_HPP + +#include + +#include +#include +#include "interfaces/common_objects/account.hpp" +#include "interfaces/common_objects/account_asset.hpp" +#include "interfaces/common_objects/asset.hpp" +#include "interfaces/common_objects/common_objects_factory.hpp" +#include "interfaces/common_objects/domain.hpp" +#include "interfaces/common_objects/types.hpp" +#include "interfaces/iroha_internal/block.hpp" +#include "interfaces/permissions.hpp" +#include "interfaces/queries/query.hpp" +#include "interfaces/query_responses/query_response.hpp" +#include "interfaces/transaction.hpp" +#include "interfaces/transaction_responses/tx_response.hpp" +#include "logger/logger.hpp" + +namespace framework { + namespace ametsuchi { + + /** + * Implements some of the SQL queries, which are needed only in tests + */ + class SqlQuery { + public: + SqlQuery(soci::session &sql, + std::shared_ptr + factory, + logger::Logger log = logger::log("SqlQuery")); + + /** + * Check if permitee has permission on account + * @param permitee_account_id + * @param account_id + * @param permission + * @return true if has permission, false otherwise + */ + bool hasAccountGrantablePermission( + const shared_model::interface::types::AccountIdType + &permitee_account_id, + const shared_model::interface::types::AccountIdType &account_id, + shared_model::interface::permissions::Grantable permission); + + /** + * Get iroha domain + * @param domain_id - id in the system + * @return Domain if exist, nullopt otherwise + */ + boost::optional> + getDomain(const shared_model::interface::types::DomainIdType &domain_id); + + /** + * Get account's roles + * @param account_id + * @return + */ + boost::optional> + getAccountRoles( + const shared_model::interface::types::AccountIdType &account_id); + /** + * Get all permissions of a role + * @param role_name + * @return + */ + boost::optional + getRolePermissions( + const shared_model::interface::types::RoleIdType &role_name); + + /** + * @return All roles currently in the system + */ + boost::optional> + getRoles(); + + /** + * Get account by user account_id + * @param account_id + * @return + */ + boost::optional> + getAccount( + const shared_model::interface::types::AccountIdType &account_id); + + /** + * Get asset by its name + * @param asset_id + * @return + */ + boost::optional> getAsset( + const shared_model::interface::types::AssetIdType &asset_id); + + /** + * Get account assets + * @param account_id + * @return + */ + boost::optional< + std::vector>> + getAccountAssets( + const shared_model::interface::types::AccountIdType &account_id); + /** + * + * @param account_id + * @param asset_id + * @return + */ + boost::optional> + getAccountAsset( + const shared_model::interface::types::AccountIdType &account_id, + const shared_model::interface::types::AssetIdType &asset_id); + + /** + * Get accounts information from its key-value storage + * @param account_id - account to get details about + * @param key - only values under this key from Json are returned; default + * empty + * @param writer - only values, added by the writer's account, are + * returned; default empty + * @return optional of account details + */ + boost::optional getAccountDetail( + const shared_model::interface::types::AccountIdType &account_id, + const shared_model::interface::types::AccountDetailKeyType &key = "", + const shared_model::interface::types::AccountIdType &writer = ""); + + private: + soci::session &sql_; + 
std::shared_ptr factory_; + logger::Logger log_; + + /** + * Executes given lambda of type F, catches exceptions if any, logs the + * message, and returns an optional rowset + */ + template + auto execute(F &&f) -> boost::optional>; + + /** + * Transforms result to optional + * value -> optional + * error -> nullopt + * @tparam T type of object inside + * @param result BuilderResult + * @return optional + */ + template + boost::optional> fromResult( + shared_model::interface::CommonObjectsFactory::FactoryResult< + std::unique_ptr> &&result); + }; + + } // namespace ametsuchi +} // namespace framework + +#endif // IROHA_SQL_QUERY_HPP diff --git a/test/module/irohad/ametsuchi/CMakeLists.txt b/test/module/irohad/ametsuchi/CMakeLists.txt index 484203143a..4e37cb0321 100644 --- a/test/module/irohad/ametsuchi/CMakeLists.txt +++ b/test/module/irohad/ametsuchi/CMakeLists.txt @@ -28,12 +28,6 @@ target_link_libraries(block_query_test shared_model_stateless_validation ) -addtest(kv_storage_test kv_storage_test.cpp) -target_link_libraries(kv_storage_test - ametsuchi - ametsuchi_fixture - ) - addtest(storage_init_test storage_init_test.cpp) target_link_libraries(storage_init_test ametsuchi @@ -52,6 +46,7 @@ target_link_libraries(postgres_executor_test shared_model_proto_backend ametsuchi commands_mocks_factory + framework_sql_query ) addtest(postgres_query_executor_test postgres_query_executor_test.cpp) @@ -72,6 +67,7 @@ add_library(ametsuchi_fixture INTERFACE) target_link_libraries(ametsuchi_fixture INTERFACE integration_framework_config_helper shared_model_proto_backend + framework_sql_query SOCI::core SOCI::postgresql ) diff --git a/test/module/irohad/ametsuchi/ametsuchi_fixture.hpp b/test/module/irohad/ametsuchi/ametsuchi_fixture.hpp index 9d56e00d2b..1f9ec8e6a0 100644 --- a/test/module/irohad/ametsuchi/ametsuchi_fixture.hpp +++ b/test/module/irohad/ametsuchi/ametsuchi_fixture.hpp @@ -18,6 +18,7 @@ #include "backend/protobuf/proto_permission_to_string.hpp" #include "common/files.hpp" #include "framework/config_helper.hpp" +#include "framework/sql_query.hpp" #include "logger/logger.hpp" #include "validators/field_validator.hpp" @@ -47,6 +48,8 @@ namespace iroha { FAIL() << "StorageImpl: " << error.error; }); sql = std::make_shared(soci::postgresql, pgopt_); + sql_query = + std::make_unique(*sql, factory); } static void TearDownTestCase() { @@ -67,6 +70,7 @@ namespace iroha { factory; static std::shared_ptr storage; + static std::unique_ptr sql_query; static std::shared_ptr perm_converter_; @@ -188,6 +192,8 @@ CREATE TABLE IF NOT EXISTS index_by_id_height_asset ( std::shared_ptr AmetsuchiTest::sql = nullptr; std::shared_ptr AmetsuchiTest::storage = nullptr; + std::unique_ptr AmetsuchiTest::sql_query = + nullptr; } // namespace ametsuchi } // namespace iroha diff --git a/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp b/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp index 5602394386..2a9123d063 100644 --- a/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp +++ b/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp @@ -29,51 +29,14 @@ namespace iroha { namespace ametsuchi { class MockWsvQuery : public WsvQuery { public: - MOCK_METHOD1(getAccountRoles, - boost::optional>( - const std::string &account_id)); - MOCK_METHOD3(getAccountDetail, - boost::optional(const std::string &account_id, - const std::string &key, - const std::string &writer)); - MOCK_METHOD1(getRolePermissions, - boost::optional( - const std::string &role_name)); - MOCK_METHOD0(getRoles, boost::optional>()); - MOCK_METHOD1( - getAccount, - 
boost::optional>( - const std::string &account_id)); MOCK_METHOD1(getSignatories, boost::optional< std::vector>( const std::string &account_id)); - MOCK_METHOD1( - getAsset, - boost::optional>( - const std::string &asset_id)); - MOCK_METHOD1(getAccountAssets, - boost::optional>>( - const std::string &account_id)); - MOCK_METHOD2(getAccountAsset, - boost::optional< - std::shared_ptr>( - const std::string &account_id, - const std::string &asset_id)); MOCK_METHOD0( getPeers, boost::optional< std::vector>>()); - MOCK_METHOD1( - getDomain, - boost::optional>( - const std::string &domain_id)); - MOCK_METHOD3( - hasAccountGrantablePermission, - bool(const std::string &permitee_account_id, - const std::string &account_id, - shared_model::interface::permissions::Grantable permission)); }; class MockWsvCommand : public WsvCommand { diff --git a/test/module/irohad/ametsuchi/ametsuchi_test.cpp b/test/module/irohad/ametsuchi/ametsuchi_test.cpp index 5446ec9625..3378ee9ae6 100644 --- a/test/module/irohad/ametsuchi/ametsuchi_test.cpp +++ b/test/module/irohad/ametsuchi/ametsuchi_test.cpp @@ -133,7 +133,7 @@ TEST_F(AmetsuchiTest, SampleTest) { apply(storage, block1); - validateAccount(wsv, user1id, domain); + validateAccount(sql_query, user1id, domain); // Block 2 txs.clear(); @@ -153,9 +153,9 @@ TEST_F(AmetsuchiTest, SampleTest) { apply(storage, block2); validateAccountAsset( - wsv, user1id, assetid, shared_model::interface::Amount("50.0")); + sql_query, user1id, assetid, shared_model::interface::Amount("50.0")); validateAccountAsset( - wsv, user2id, assetid, shared_model::interface::Amount("100.0")); + sql_query, user2id, assetid, shared_model::interface::Amount("100.0")); // Block store tests auto hashes = {block1.hash(), block2.hash()}; @@ -216,7 +216,7 @@ TEST_F(AmetsuchiTest, AddSignatoryTest) { apply(storage, block1); { - auto account_opt = wsv->getAccount(user1id); + auto account_opt = sql_query->getAccount(user1id); ASSERT_TRUE(account_opt); auto account = account_opt.value(); ASSERT_EQ(account->accountId(), user1id); @@ -244,7 +244,7 @@ TEST_F(AmetsuchiTest, AddSignatoryTest) { apply(storage, block2); { - auto account = wsv->getAccount(user1id); + auto account = sql_query->getAccount(user1id); ASSERT_TRUE(account); auto signatories = wsv->getSignatories(user1id); @@ -270,10 +270,10 @@ TEST_F(AmetsuchiTest, AddSignatoryTest) { apply(storage, block3); { - auto account1 = wsv->getAccount(user1id); + auto account1 = sql_query->getAccount(user1id); ASSERT_TRUE(account1); - auto account2 = wsv->getAccount(user2id); + auto account2 = sql_query->getAccount(user2id); ASSERT_TRUE(account2); auto signatories1 = wsv->getSignatories(user1id); @@ -304,7 +304,7 @@ TEST_F(AmetsuchiTest, AddSignatoryTest) { apply(storage, block4); { - auto account = wsv->getAccount(user1id); + auto account = sql_query->getAccount(user1id); ASSERT_TRUE(account); // user1 has only pubkey2. 
@@ -337,7 +337,7 @@ TEST_F(AmetsuchiTest, AddSignatoryTest) { apply(storage, block5); { - auto account_opt = wsv->getAccount(user2id); + auto account_opt = sql_query->getAccount(user2id); ASSERT_TRUE(account_opt); auto &account = account_opt.value(); ASSERT_EQ(account->quorum(), 2); @@ -568,14 +568,14 @@ TEST_F(AmetsuchiTest, TestRestoreWSV) { apply(storage, genesis_block); - auto res = storage->getWsvQuery()->getDomain("test"); + auto res = sql_query->getDomain("test"); EXPECT_TRUE(res); // spoil WSV *sql << "DELETE FROM domain"; // check there is no data in WSV - res = storage->getWsvQuery()->getDomain("test"); + res = sql_query->getDomain("test"); EXPECT_FALSE(res); // recover storage and check it is recovered @@ -586,7 +586,7 @@ TEST_F(AmetsuchiTest, TestRestoreWSV) { FAIL() << "Failed to recover WSV"; }); - res = storage->getWsvQuery()->getDomain("test"); + res = sql_query->getDomain("test"); EXPECT_TRUE(res); } @@ -662,7 +662,7 @@ class PreparedBlockTest : public AmetsuchiTest { * @then state of the ledger remains unchanged */ TEST_F(PreparedBlockTest, PrepareBlockNoStateChanged) { - validateAccountAsset(storage->getWsvQuery(), + validateAccountAsset(sql_query, "admin@test", "coin#test", shared_model::interface::Amount(base_balance)); @@ -673,7 +673,7 @@ TEST_F(PreparedBlockTest, PrepareBlockNoStateChanged) { // balance remains unchanged validateAccountAsset( - storage->getWsvQuery(), "admin@test", "coin#test", base_balance); + sql_query, "admin@test", "coin#test", base_balance); } /** @@ -697,7 +697,7 @@ TEST_F(PreparedBlockTest, CommitPreparedStateChanged) { shared_model::interface::Amount resultingAmount("10.00"); validateAccountAsset( - storage->getWsvQuery(), "admin@test", "coin#test", resultingAmount); + sql_query, "admin@test", "coin#test", resultingAmount); } /** @@ -720,7 +720,7 @@ TEST_F(PreparedBlockTest, PrepareBlockCommitDifferentBlock) { shared_model::interface::Amount resultingBalance{"15.00"}; validateAccountAsset( - storage->getWsvQuery(), "admin@test", "coin#test", resultingBalance); + sql_query, "admin@test", "coin#test", resultingBalance); } /** @@ -749,5 +749,5 @@ TEST_F(PreparedBlockTest, CommitPreparedFailsAfterCommit) { shared_model::interface::Amount resultingBalance{"15.00"}; validateAccountAsset( - storage->getWsvQuery(), "admin@test", "coin#test", resultingBalance); + sql_query, "admin@test", "coin#test", resultingBalance); } diff --git a/test/module/irohad/ametsuchi/kv_storage_test.cpp b/test/module/irohad/ametsuchi/kv_storage_test.cpp deleted file mode 100644 index db3dd29776..0000000000 --- a/test/module/irohad/ametsuchi/kv_storage_test.cpp +++ /dev/null @@ -1,113 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#include -#include - -#include "ametsuchi/block_query.hpp" -#include "ametsuchi/impl/postgres_wsv_query.hpp" -#include "ametsuchi/impl/storage_impl.hpp" -#include "ametsuchi/mutable_storage.hpp" -#include "interfaces/permissions.hpp" -#include "module/irohad/ametsuchi/ametsuchi_fixture.hpp" -#include "module/shared_model/builders/protobuf/test_block_builder.hpp" -#include "module/shared_model/builders/protobuf/test_transaction_builder.hpp" - -using namespace iroha::ametsuchi; - -/** - * Fixture for kv storage test. 
Creates two accounts: one has predefined json - * information in json field, another one has json information filled using set - * account detail method - */ -class KVTest : public AmetsuchiTest { - protected: - void SetUp() override { - AmetsuchiTest::SetUp(); - std::string empty_key(32, '0'); - // transaction for block 1 - std::vector txs; - txs.push_back( - TestTransactionBuilder() - .creatorAccountId("userone@ru") - .createRole( - "user", - {shared_model::interface::permissions::Role::kAddPeer, - shared_model::interface::permissions::Role::kCreateAsset, - shared_model::interface::permissions::Role::kGetMyAccount}) - .createDomain("ru", "user") - .createAccount(account_name1, - domain_id, - shared_model::crypto::PublicKey(empty_key)) - .createAccount(account_name2, - domain_id, - shared_model::crypto::PublicKey(empty_key)) - .setAccountDetail(account_name2 + "@" + domain_id, "age", "24") - .build()); - auto block1 = TestBlockBuilder() - .height(1) - .prevHash(shared_model::crypto::Hash(empty_key)) - .transactions(txs) - .build(); - - { - std::unique_ptr ms; - auto storageResult = storage->createMutableStorage(); - storageResult.match( - [&](iroha::expected::Value> - &_storage) { ms = std::move(_storage.value); }, - [](iroha::expected::Error &error) { - FAIL() << "MutableStorage: " << error.error; - }); - - ms->apply(block1); - storage->commit(std::move(ms)); - } - } - - void TearDown() override { - sql->close(); - AmetsuchiTest::TearDown(); - } - - std::string domain_id = "ru"; - std::string account_name1 = "userone"; - std::string account_name2 = "usertwo"; -}; - -/** - * @given no details is set in account1 - * @when detail of account1 is queried using GetAccountDetail - * @then nullopt is returned - */ -TEST_F(KVTest, GetNonexistingUserDetail) { - auto account_id1 = account_name1 + "@" + domain_id; - auto ss = std::istringstream( - storage->getWsvQuery()->getAccountDetail(account_id1).value()); - - boost::property_tree::ptree root; - boost::property_tree::read_json(ss, root); - ASSERT_TRUE(root.empty()); -} - -/** - * @given storage with account containing age inserted using SetAccountDetail - * @when get account detail is invoked - * @then correct age of user2 is returned - */ -TEST_F(KVTest, SetAccountDetail) { - auto account_id1 = account_name1 + "@" + domain_id; - auto account_id2 = account_name2 + "@" + domain_id; - auto ss = std::istringstream( - storage->getWsvQuery()->getAccountDetail(account_id2).value()); - - boost::property_tree::ptree root; - boost::property_tree::read_json(ss, root); - - auto record = root.get_child(account_id1); - ASSERT_EQ(record.size(), 1); - ASSERT_EQ(record.front().first, "age"); - ASSERT_EQ(record.front().second.data(), "24"); -} diff --git a/test/module/irohad/ametsuchi/postgres_executor_test.cpp b/test/module/irohad/ametsuchi/postgres_executor_test.cpp index 7e2cc7c053..41914faaec 100644 --- a/test/module/irohad/ametsuchi/postgres_executor_test.cpp +++ b/test/module/irohad/ametsuchi/postgres_executor_test.cpp @@ -10,8 +10,8 @@ #include "framework/result_fixture.hpp" #include "module/irohad/ametsuchi/ametsuchi_fixture.hpp" #include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" -#include "module/shared_model/mock_objects_factories/mock_command_factory.hpp" #include "module/shared_model/interface_mocks.hpp" +#include "module/shared_model/mock_objects_factories/mock_command_factory.hpp" namespace iroha { namespace ametsuchi { @@ -43,7 +43,7 @@ namespace iroha { auto factory = std::make_shared>(); - query = std::make_unique(*sql, factory); + wsv_query 
= std::make_unique(*sql, factory); PostgresCommandExecutor::prepareStatements(*sql); executor = std::make_unique(*sql, perm_converter); @@ -169,7 +169,7 @@ namespace iroha { std::unique_ptr command; - std::unique_ptr query; + std::unique_ptr wsv_query; std::unique_ptr executor; std::shared_ptr @@ -223,7 +223,7 @@ namespace iroha { execute(*mock_command_factory->constructAddAssetQuantity( asset_id, asset_amount_one_zero))); - auto account_asset = query->getAccountAsset(account_id, asset_id); + auto account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); @@ -231,7 +231,7 @@ namespace iroha { execute(*mock_command_factory->constructAddAssetQuantity( asset_id, asset_amount_one_zero))); - account_asset = query->getAccountAsset(account_id, asset_id); + account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ("2.0", account_asset.get()->balance().toStringRepr()); } @@ -250,7 +250,7 @@ namespace iroha { execute(*mock_command_factory->constructAddAssetQuantity( asset_id, asset_amount_one_zero))); - auto account_asset = query->getAccountAsset(account_id, asset_id); + auto account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); @@ -258,7 +258,7 @@ namespace iroha { execute(*mock_command_factory->constructAddAssetQuantity( asset_id, asset_amount_one_zero))); - account_asset = query->getAccountAsset(account_id, asset_id); + account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ("2.0", account_asset.get()->balance().toStringRepr()); } @@ -284,7 +284,7 @@ namespace iroha { asset2_id, asset_amount_one_zero), true)); - auto account_asset = query->getAccountAsset(account_id, asset2_id); + auto account_asset = sql_query->getAccountAsset(account_id, asset2_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); @@ -309,7 +309,7 @@ namespace iroha { asset_id, asset_amount_one_zero); CHECK_SUCCESSFUL_RESULT(execute(*add_asset, true)); - auto account_asset = query->getAccountAsset(account_id, asset_id); + auto account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); @@ -426,7 +426,7 @@ namespace iroha { CHECK_SUCCESSFUL_RESULT(execute( *mock_command_factory->constructAddSignatory(*pubkey, account_id))); - auto signatories = query->getSignatories(account_id); + auto signatories = wsv_query->getSignatories(account_id); ASSERT_TRUE(signatories); ASSERT_TRUE(std::find(signatories->begin(), signatories->end(), *pubkey) != signatories->end()); @@ -452,7 +452,7 @@ namespace iroha { "id2@domain")); CHECK_SUCCESSFUL_RESULT(execute( *mock_command_factory->constructAddSignatory(*pubkey, "id2@domain"))); - auto signatories = query->getSignatories("id2@domain"); + auto signatories = wsv_query->getSignatories("id2@domain"); ASSERT_TRUE(signatories); ASSERT_TRUE(std::find(signatories->begin(), signatories->end(), *pubkey) != signatories->end()); @@ -470,7 +470,7 @@ namespace iroha { std::vector query_args{account_id, pubkey->hex()}; CHECK_ERROR_CODE_AND_MESSAGE(cmd_result, 2, query_args); - auto signatories = query->getSignatories(account_id); + auto signatories = wsv_query->getSignatories(account_id); ASSERT_TRUE(signatories); ASSERT_TRUE(std::find(signatories->begin(), 
signatories->end(), *pubkey) == signatories->end()); @@ -519,7 +519,7 @@ namespace iroha { CHECK_SUCCESSFUL_RESULT( execute(*mock_command_factory->constructAppendRole(account_id, another_role))); - auto roles = query->getAccountRoles(account_id); + auto roles = sql_query->getAccountRoles(account_id); ASSERT_TRUE(roles); ASSERT_TRUE(std::find(roles->begin(), roles->end(), another_role) != roles->end()); @@ -537,7 +537,7 @@ namespace iroha { CHECK_SUCCESSFUL_RESULT( execute(*mock_command_factory->constructAppendRole(account_id, another_role))); - auto roles = query->getAccountRoles(account_id); + auto roles = sql_query->getAccountRoles(account_id); ASSERT_TRUE(roles); ASSERT_TRUE(std::find(roles->begin(), roles->end(), another_role) != roles->end()); @@ -559,7 +559,7 @@ namespace iroha { CHECK_SUCCESSFUL_RESULT(execute( *mock_command_factory->constructAppendRole(account_id, another_role), true)); - auto roles = query->getAccountRoles(account_id); + auto roles = sql_query->getAccountRoles(account_id); ASSERT_TRUE(roles); ASSERT_TRUE(std::find(roles->begin(), roles->end(), another_role) != roles->end()); @@ -581,7 +581,7 @@ namespace iroha { std::vector query_args{account_id, another_role}; CHECK_ERROR_CODE_AND_MESSAGE(cmd_result, 2, query_args); - auto roles = query->getAccountRoles(account_id); + auto roles = sql_query->getAccountRoles(account_id); ASSERT_TRUE(roles); ASSERT_TRUE(std::find(roles->begin(), roles->end(), another_role) == roles->end()); @@ -659,7 +659,7 @@ namespace iroha { CHECK_SUCCESSFUL_RESULT( execute(*mock_command_factory->constructCreateAccount( "id2", domain_id, *pubkey))); - auto acc = query->getAccount(account2_id); + auto acc = sql_query->getAccount(account2_id); ASSERT_TRUE(acc); ASSERT_EQ(account2_id, acc.get()->accountId()); } @@ -672,7 +672,7 @@ namespace iroha { TEST_F(CreateAccount, NoPerms) { auto cmd_result = execute(*mock_command_factory->constructCreateAccount( account2_id, domain_id, *pubkey)); - auto acc = query->getAccount(account2_id); + auto acc = sql_query->getAccount(account2_id); ASSERT_FALSE(acc); std::vector query_args{ @@ -737,7 +737,7 @@ namespace iroha { true)); CHECK_SUCCESSFUL_RESULT(execute( *mock_command_factory->constructCreateAsset("coin", domain_id, 1))); - auto ass = query->getAsset(asset_id); + auto ass = sql_query->getAsset(asset_id); ASSERT_TRUE(ass); ASSERT_EQ(asset_id, ass.get()->assetId()); } @@ -759,7 +759,7 @@ namespace iroha { true)); auto cmd_result = execute( *mock_command_factory->constructCreateAsset("coin", domain_id, 1)); - auto ass = query->getAsset(asset_id); + auto ass = sql_query->getAsset(asset_id); ASSERT_FALSE(ass); std::vector query_args{domain_id, "coin", "1"}; @@ -838,7 +838,7 @@ namespace iroha { addAllPerms(); CHECK_SUCCESSFUL_RESULT(execute( *mock_command_factory->constructCreateDomain(domain2_id, role))); - auto dom = query->getDomain(domain2_id); + auto dom = sql_query->getDomain(domain2_id); ASSERT_TRUE(dom); ASSERT_EQ(dom.get()->domainId(), domain2_id); } @@ -851,7 +851,7 @@ namespace iroha { TEST_F(CreateDomain, NoPerms) { auto cmd_result = execute( *mock_command_factory->constructCreateDomain(domain2_id, role)); - auto dom = query->getDomain(domain2_id); + auto dom = sql_query->getDomain(domain2_id); ASSERT_FALSE(dom); std::vector query_args{domain2_id, role}; @@ -909,7 +909,7 @@ namespace iroha { CHECK_SUCCESSFUL_RESULT( execute(*mock_command_factory->constructCreateRole( another_role, role_permissions))); - auto rl = query->getRolePermissions(role); + auto rl = sql_query->getRolePermissions(role); 
ASSERT_TRUE(rl); ASSERT_EQ(rl.get(), role_permissions); } @@ -924,7 +924,7 @@ namespace iroha { shared_model::interface::permissions::Role::kRemoveMySignatory); auto cmd_result = execute(*mock_command_factory->constructCreateRole( another_role, role_permissions2)); - auto rl = query->getRolePermissions(another_role); + auto rl = sql_query->getRolePermissions(another_role); ASSERT_TRUE(rl); ASSERT_TRUE(rl->none()); @@ -980,7 +980,7 @@ namespace iroha { CHECK_SUCCESSFUL_RESULT( execute(*mock_command_factory->constructDetachRole(account_id, another_role))); - auto roles = query->getAccountRoles(account_id); + auto roles = sql_query->getAccountRoles(account_id); ASSERT_TRUE(roles); ASSERT_TRUE(std::find(roles->begin(), roles->end(), another_role) == roles->end()); @@ -998,7 +998,7 @@ namespace iroha { std::vector query_args{account_id, another_role}; CHECK_ERROR_CODE_AND_MESSAGE(cmd_result, 2, query_args); - auto roles = query->getAccountRoles(account_id); + auto roles = sql_query->getAccountRoles(account_id); ASSERT_TRUE(roles); ASSERT_TRUE(std::find(roles->begin(), roles->end(), another_role) != roles->end()); @@ -1073,8 +1073,8 @@ namespace iroha { auto perm = shared_model::interface::permissions::Grantable::kSetMyQuorum; CHECK_SUCCESSFUL_RESULT(execute( *mock_command_factory->constructGrantPermission(account_id, perm))); - auto has_perm = - query->hasAccountGrantablePermission(account_id, account_id, perm); + auto has_perm = sql_query->hasAccountGrantablePermission( + account_id, account_id, perm); ASSERT_TRUE(has_perm); } @@ -1087,8 +1087,8 @@ namespace iroha { auto perm = shared_model::interface::permissions::Grantable::kSetMyQuorum; auto cmd_result = execute( *mock_command_factory->constructGrantPermission(account_id, perm)); - auto has_perm = - query->hasAccountGrantablePermission(account_id, account_id, perm); + auto has_perm = sql_query->hasAccountGrantablePermission( + account_id, account_id, perm); ASSERT_FALSE(has_perm); std::vector query_args{account_id, @@ -1143,7 +1143,7 @@ namespace iroha { CHECK_SUCCESSFUL_RESULT( execute(*mock_command_factory->constructRemoveSignatory(account_id, *pubkey))); - auto signatories = query->getSignatories(account_id); + auto signatories = wsv_query->getSignatories(account_id); ASSERT_TRUE(signatories); ASSERT_TRUE(std::find(signatories->begin(), signatories->end(), *pubkey) == signatories->end()); @@ -1171,13 +1171,13 @@ namespace iroha { CHECK_SUCCESSFUL_RESULT(execute( *mock_command_factory->constructAddSignatory(pk, "id2@domain"), true)); - auto signatories = query->getSignatories("id2@domain"); + auto signatories = wsv_query->getSignatories("id2@domain"); ASSERT_TRUE(signatories); ASSERT_TRUE(std::find(signatories->begin(), signatories->end(), pk) != signatories->end()); CHECK_SUCCESSFUL_RESULT(execute( *mock_command_factory->constructRemoveSignatory("id2@domain", pk))); - signatories = query->getSignatories("id2@domain"); + signatories = wsv_query->getSignatories("id2@domain"); ASSERT_TRUE(signatories); ASSERT_TRUE(std::find(signatories->begin(), signatories->end(), *pubkey) != signatories->end()); @@ -1200,7 +1200,7 @@ namespace iroha { std::vector query_args{account_id, pubkey->hex()}; CHECK_ERROR_CODE_AND_MESSAGE(cmd_result, 2, query_args); - auto signatories = query->getSignatories(account_id); + auto signatories = wsv_query->getSignatories(account_id); ASSERT_TRUE(signatories); ASSERT_TRUE(std::find(signatories->begin(), signatories->end(), *pubkey) != signatories->end()); @@ -1295,24 +1295,24 @@ namespace iroha { 
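A minimal sketch of the rename performed in the hunks above: the former single `query` member of the command executor test fixture is split into `sql_query`, used for account, asset, role, detail and grantable-permission look-ups, and `wsv_query`, used for signatory look-ups. The snippet below only restates calls already visible in this diff; it assumes the fixture members and test constants shown here and is illustrative, not part of the patch.

.. code-block:: cpp

    // Illustrative sketch, not part of the diff. Assumes the fixture members
    // introduced above (sql_query, wsv_query) and the usual test constants
    // (account_id, pubkey) from the surrounding command executor fixture.
    #include <algorithm>
    #include <gtest/gtest.h>

    void sketch_of_renamed_queries() {
      // role / account / asset reads now go through sql_query ...
      auto roles = sql_query->getAccountRoles(account_id);
      ASSERT_TRUE(roles);

      // ... while signatory reads go through the lower-level wsv_query
      auto signatories = wsv_query->getSignatories(account_id);
      ASSERT_TRUE(signatories);
      ASSERT_TRUE(std::find(signatories->begin(), signatories->end(), *pubkey)
                  != signatories->end());
    }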
TEST_F(RevokePermission, Valid) { auto perm = shared_model::interface::permissions::Grantable::kRemoveMySignatory; - ASSERT_TRUE(query->hasAccountGrantablePermission( + ASSERT_TRUE(sql_query->hasAccountGrantablePermission( account_id, account_id, grantable_permission)); CHECK_SUCCESSFUL_RESULT(execute( *mock_command_factory->constructGrantPermission(account_id, perm), true)); - ASSERT_TRUE(query->hasAccountGrantablePermission( + ASSERT_TRUE(sql_query->hasAccountGrantablePermission( account_id, account_id, grantable_permission)); - ASSERT_TRUE( - query->hasAccountGrantablePermission(account_id, account_id, perm)); + ASSERT_TRUE(sql_query->hasAccountGrantablePermission( + account_id, account_id, perm)); CHECK_SUCCESSFUL_RESULT( execute(*mock_command_factory->constructRevokePermission( account_id, grantable_permission))); - ASSERT_FALSE(query->hasAccountGrantablePermission( + ASSERT_FALSE(sql_query->hasAccountGrantablePermission( account_id, account_id, grantable_permission)); - ASSERT_TRUE( - query->hasAccountGrantablePermission(account_id, account_id, perm)); + ASSERT_TRUE(sql_query->hasAccountGrantablePermission( + account_id, account_id, perm)); } /** @@ -1359,7 +1359,7 @@ namespace iroha { CHECK_SUCCESSFUL_RESULT( execute(*mock_command_factory->constructSetAccountDetail( account_id, "key", "value"))); - auto kv = query->getAccountDetail(account_id); + auto kv = sql_query->getAccountDetail(account_id); ASSERT_TRUE(kv); ASSERT_EQ(kv.get(), "{\"id@domain\": {\"key\": \"value\"}}"); } @@ -1381,7 +1381,7 @@ namespace iroha { account2_id, "key", "value"), false, account_id)); - auto kv = query->getAccountDetail(account2_id); + auto kv = sql_query->getAccountDetail(account2_id); ASSERT_TRUE(kv); ASSERT_EQ(kv.get(), "{\"id@domain\": {\"key\": \"value\"}}"); } @@ -1398,7 +1398,7 @@ namespace iroha { account2_id, "key", "value"), false, account_id)); - auto kv = query->getAccountDetail(account2_id); + auto kv = sql_query->getAccountDetail(account2_id); ASSERT_TRUE(kv); ASSERT_EQ(kv.get(), "{\"id@domain\": {\"key\": \"value\"}}"); } @@ -1418,7 +1418,7 @@ namespace iroha { std::vector query_args{account2_id, "key", "value"}; CHECK_ERROR_CODE_AND_MESSAGE(cmd_result, 2, query_args); - auto kv = query->getAccountDetail(account2_id); + auto kv = sql_query->getAccountDetail(account2_id); ASSERT_TRUE(kv); ASSERT_EQ(kv.get(), "{}"); } @@ -1563,20 +1563,20 @@ namespace iroha { execute(*mock_command_factory->constructAddAssetQuantity( asset_id, asset_amount_one_zero), true)); - auto account_asset = query->getAccountAsset(account_id, asset_id); + auto account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); CHECK_SUCCESSFUL_RESULT( execute(*mock_command_factory->constructAddAssetQuantity( asset_id, asset_amount_one_zero), true)); - account_asset = query->getAccountAsset(account_id, asset_id); + account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ("2.0", account_asset.get()->balance().toStringRepr()); CHECK_SUCCESSFUL_RESULT( execute(*mock_command_factory->constructSubtractAssetQuantity( asset_id, asset_amount_one_zero))); - account_asset = query->getAccountAsset(account_id, asset_id); + account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); } @@ -1592,7 +1592,7 @@ namespace iroha { execute(*mock_command_factory->constructAddAssetQuantity( asset_id, 
asset_amount_one_zero), true)); - auto account_asset = query->getAccountAsset(account_id, asset_id); + auto account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); @@ -1604,7 +1604,7 @@ namespace iroha { account_id, asset_id, asset_amount_one_zero.toStringRepr(), "1"}; CHECK_ERROR_CODE_AND_MESSAGE(cmd_result, 2, query_args); - account_asset = query->getAccountAsset(account_id, asset_id); + account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); } @@ -1624,7 +1624,7 @@ namespace iroha { asset_id, asset_amount_one_zero), true)); - auto account_asset = query->getAccountAsset(account_id, asset_id); + auto account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); @@ -1633,7 +1633,7 @@ namespace iroha { asset_id, asset_amount_one_zero), true)); - account_asset = query->getAccountAsset(account_id, asset_id); + account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ("2.0", account_asset.get()->balance().toStringRepr()); @@ -1642,7 +1642,7 @@ namespace iroha { asset_id, asset_amount_one_zero), true)); - account_asset = query->getAccountAsset(account_id, asset_id); + account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); } @@ -1666,7 +1666,7 @@ namespace iroha { execute(*mock_command_factory->constructAddAssetQuantity( asset2_id, asset_amount_one_zero), true)); - auto account_asset = query->getAccountAsset(account_id, asset2_id); + auto account_asset = sql_query->getAccountAsset(account_id, asset2_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); @@ -1678,7 +1678,7 @@ namespace iroha { account_id, asset2_id, asset_amount_one_zero.toStringRepr(), "1"}; CHECK_ERROR_CODE_AND_MESSAGE(cmd_result, 2, query_args); - account_asset = query->getAccountAsset(account_id, asset2_id); + account_asset = sql_query->getAccountAsset(account_id, asset2_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); } @@ -1778,14 +1778,14 @@ namespace iroha { execute(*mock_command_factory->constructAddAssetQuantity( asset_id, asset_amount_one_zero), true)); - auto account_asset = query->getAccountAsset(account_id, asset_id); + auto account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); CHECK_SUCCESSFUL_RESULT( execute(*mock_command_factory->constructAddAssetQuantity( asset_id, asset_amount_one_zero), true)); - account_asset = query->getAccountAsset(account_id, asset_id); + account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ("2.0", account_asset.get()->balance().toStringRepr()); CHECK_SUCCESSFUL_RESULT( @@ -1795,10 +1795,10 @@ namespace iroha { asset_id, "desc", asset_amount_one_zero))); - account_asset = query->getAccountAsset(account_id, asset_id); + account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); - account_asset = query->getAccountAsset(account2_id, asset_id); + account_asset = sql_query->getAccountAsset(account2_id, 
asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); } @@ -1822,7 +1822,7 @@ namespace iroha { execute(*mock_command_factory->constructAddAssetQuantity( asset_id, shared_model::interface::Amount{"2.0"}), true)); - auto account_asset = query->getAccountAsset(account_id, asset_id); + auto account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ("2.0", account_asset.get()->balance().toStringRepr()); CHECK_SUCCESSFUL_RESULT(execute( @@ -1830,10 +1830,10 @@ namespace iroha { account_id, account2_id, asset_id, "desc", asset_amount_one_zero), false, account2_id)); - account_asset = query->getAccountAsset(account_id, asset_id); + account_asset = sql_query->getAccountAsset(account_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); - account_asset = query->getAccountAsset(account2_id, asset_id); + account_asset = sql_query->getAccountAsset(account2_id, asset_id); ASSERT_TRUE(account_asset); ASSERT_EQ(asset_amount_one_zero, account_asset.get()->balance()); } diff --git a/test/module/irohad/ametsuchi/wsv_query_command_test.cpp b/test/module/irohad/ametsuchi/wsv_query_command_test.cpp index 681e84b35c..6be942d964 100644 --- a/test/module/irohad/ametsuchi/wsv_query_command_test.cpp +++ b/test/module/irohad/ametsuchi/wsv_query_command_test.cpp @@ -7,9 +7,6 @@ #include "ametsuchi/impl/postgres_wsv_query.hpp" #include "framework/result_fixture.hpp" #include "module/irohad/ametsuchi/ametsuchi_fixture.hpp" -#include "module/shared_model/builders/protobuf/test_account_builder.hpp" -#include "module/shared_model/builders/protobuf/test_asset_builder.hpp" -#include "module/shared_model/builders/protobuf/test_domain_builder.hpp" #include "module/shared_model/builders/protobuf/test_peer_builder.hpp" namespace iroha { @@ -19,22 +16,6 @@ namespace iroha { class WsvQueryCommandTest : public AmetsuchiTest { public: - WsvQueryCommandTest() { - domain = clone( - TestDomainBuilder().domainId("domain").defaultRole(role).build()); - - account = clone(TestAccountBuilder() - .domainId(domain->domainId()) - .accountId("id@" + domain->domainId()) - .quorum(1) - .jsonData(R"({"id@domain": {"key": "value"}})") - .build()); - role_permissions.set( - shared_model::interface::permissions::Role::kAddMySignatory); - grantable_permission = - shared_model::interface::permissions::Grantable::kAddMySignatory; - } - void SetUp() override { AmetsuchiTest::SetUp(); sql = std::make_unique(soci::postgresql, pgopt_); @@ -50,12 +31,6 @@ namespace iroha { AmetsuchiTest::TearDown(); } - std::string role = "role"; - shared_model::interface::RolePermissionSet role_permissions; - shared_model::interface::permissions::Grantable grantable_permission; - std::unique_ptr account; - std::unique_ptr domain; - std::unique_ptr sql; std::unique_ptr command; @@ -64,430 +39,11 @@ namespace iroha { class RoleTest : public WsvQueryCommandTest {}; - /** - * @given WSV command and valid role name - * @when trying to insert new role - * @then role is successfully inserted - */ - TEST_F(RoleTest, InsertRoleWhenValidName) { - ASSERT_TRUE(val(command->insertRole(role))); - auto roles = query->getRoles(); - ASSERT_TRUE(roles); - ASSERT_EQ(1, roles->size()); - ASSERT_EQ(role, roles->front()); - } - - /** - * @given WSV command and invalid role name - * @when trying to insert new role - * @then role is failed - */ - TEST_F(RoleTest, InsertRoleWhenInvalidName) { - ASSERT_TRUE(err(command->insertRole(std::string(46, 
'a')))); - - auto roles = query->getRoles(); - ASSERT_TRUE(roles); - ASSERT_EQ(0, roles->size()); - } - TEST_F(RoleTest, InsertTwoRole) { ASSERT_TRUE(val(command->insertRole("role"))); ASSERT_TRUE(err(command->insertRole("role"))); } - class RolePermissionsTest : public WsvQueryCommandTest { - void SetUp() override { - WsvQueryCommandTest::SetUp(); - ASSERT_TRUE(val(command->insertRole(role))); - } - }; - - /** - * @given WSV command and role exists and valid permissions - * @when trying to insert role permissions - * @then RolePermissions are inserted - */ - TEST_F(RolePermissionsTest, InsertRolePermissionsWhenRoleExists) { - ASSERT_TRUE(val(command->insertRolePermissions(role, role_permissions))); - - auto permissions = query->getRolePermissions(role); - ASSERT_TRUE(permissions); - ASSERT_EQ(role_permissions, permissions.get()); - } - - /** - * @given WSV command and role doesn't exist and valid permissions - * @when trying to insert role permissions - * @then RolePermissions are not inserted - */ - TEST_F(RolePermissionsTest, InsertRolePermissionsWhenNoRole) { - auto new_role = role + " "; - ASSERT_TRUE( - err(command->insertRolePermissions(new_role, role_permissions))); - - auto permissions = query->getRolePermissions(new_role); - ASSERT_TRUE(permissions); - ASSERT_TRUE(permissions->none()); - } - - class AccountTest : public WsvQueryCommandTest { - void SetUp() override { - WsvQueryCommandTest::SetUp(); - ASSERT_TRUE(val(command->insertRole(role))); - ASSERT_TRUE(val(command->insertDomain(*domain))); - } - }; - - /** - * @given inserted role, domain - * @when insert account with filled json data - * @then get account and check json data is the same - */ - TEST_F(AccountTest, InsertAccountWithJSONData) { - ASSERT_TRUE(val(command->insertAccount(*account))); - auto acc = query->getAccount(account->accountId()); - ASSERT_TRUE(acc); - ASSERT_EQ(account->jsonData(), acc.value()->jsonData()); - } - - /** - * @given inserted role, domain, account - * @when insert to account new json data - * @then get account and check json data is the same - */ - TEST_F(AccountTest, InsertNewJSONDataAccount) { - ASSERT_TRUE(val(command->insertAccount(*account))); - ASSERT_TRUE(val(command->setAccountKV( - account->accountId(), account->accountId(), "id", "val"))); - auto acc = query->getAccount(account->accountId()); - ASSERT_TRUE(acc); - ASSERT_EQ(R"({"id@domain": {"id": "val", "key": "value"}})", - acc.value()->jsonData()); - } - - /** - * @given inserted role, domain, account - * @when insert to account new json data - * @then get account and check json data is the same - */ - TEST_F(AccountTest, InsertNewJSONDataToOtherAccount) { - ASSERT_TRUE(val(command->insertAccount(*account))); - ASSERT_TRUE(val( - command->setAccountKV(account->accountId(), "admin", "id", "val"))); - auto acc = query->getAccount(account->accountId()); - ASSERT_TRUE(acc); - ASSERT_EQ(R"({"admin": {"id": "val"}, "id@domain": {"key": "value"}})", - acc.value()->jsonData()); - } - - /** - * @given inserted role, domain, account - * @when insert to account new complex json data - * @then get account and check json data is the same - */ - TEST_F(AccountTest, InsertNewComplexJSONDataAccount) { - ASSERT_TRUE(val(command->insertAccount(*account))); - ASSERT_TRUE(val(command->setAccountKV( - account->accountId(), account->accountId(), "id", "[val1, val2]"))); - auto acc = query->getAccount(account->accountId()); - ASSERT_TRUE(acc); - ASSERT_EQ(R"({"id@domain": {"id": "[val1, val2]", "key": "value"}})", - acc.value()->jsonData()); - } - - 
/** - * @given inserted role, domain, account - * @when update json data in account - * @then get account and check json data is the same - */ - TEST_F(AccountTest, UpdateAccountJSONData) { - ASSERT_TRUE(val(command->insertAccount(*account))); - ASSERT_TRUE(val(command->setAccountKV( - account->accountId(), account->accountId(), "key", "val2"))); - auto acc = query->getAccount(account->accountId()); - ASSERT_TRUE(acc); - ASSERT_EQ(R"({"id@domain": {"key": "val2"}})", acc.value()->jsonData()); - } - - /** - * @given database without needed account - * @when performing query to retrieve non-existent account - * @then getAccount will return nullopt - */ - TEST_F(AccountTest, GetAccountInvalidWhenNotFound) { - EXPECT_FALSE(query->getAccount("invalid account id")); - } - - /** - * @given database without needed account - * @when performing query to retrieve non-existent account's details - * @then getAccountDetail will return nullopt - */ - TEST_F(AccountTest, GetAccountDetailInvalidWhenNotFound) { - EXPECT_FALSE(query->getAccountDetail("invalid account id", "", "")); - } - - /** - * @given details, inserted for one account - * @when performing query to retrieve all account's details - * @then getAccountDetail will return all details of this account - */ - TEST_F(AccountTest, GetAccountDetailWithAccount) { - ASSERT_TRUE(val(command->insertAccount(*account))); - ASSERT_TRUE(val(command->setAccountKV( - account->accountId(), account->accountId(), "some_key", "some_val"))); - - auto acc_details = query->getAccountDetail(account->accountId(), "", ""); - ASSERT_TRUE(acc_details); - ASSERT_EQ(R"({"id@domain": {"key": "value", "some_key": "some_val"}})", - *acc_details); - } - - /** - * @given details, inserted into one account by two writers, with one of the - * keys repeated - * @when performing query to retrieve details under this key - * @then getAccountDetail will return details from both writers under the - * specified key - */ - TEST_F(AccountTest, GetAccountDetailWithKey) { - ASSERT_TRUE(val(command->insertAccount(*account))); - ASSERT_TRUE(val(command->setAccountKV( - account->accountId(), account->accountId(), "some_key", "some_val"))); - ASSERT_TRUE(val(command->setAccountKV(account->accountId(), - account->accountId(), - "another_key", - "another_val"))); - ASSERT_TRUE(val(command->setAccountKV( - account->accountId(), "admin", "some_key", "even_third_val"))); - - auto acc_details = - query->getAccountDetail(account->accountId(), "some_key", ""); - ASSERT_TRUE(acc_details); - ASSERT_EQ( - "{ \"admin\" : {\"some_key\" : \"even_third_val\"}, " - "\"id@domain\" : {\"some_key\" : \"some_val\"} }", - *acc_details); - } - - /** - * @given details, inserted into one account by two writers - * @when performing query to retrieve details, added by one of the writers - * @then getAccountDetail will return only details, added by the specified - * writer - */ - TEST_F(AccountTest, GetAccountDetailWithWriter) { - ASSERT_TRUE(val(command->insertAccount(*account))); - ASSERT_TRUE(val(command->setAccountKV( - account->accountId(), account->accountId(), "some_key", "some_val"))); - ASSERT_TRUE(val(command->setAccountKV( - account->accountId(), "admin", "another_key", "another_val"))); - - auto acc_details = - query->getAccountDetail(account->accountId(), "", "admin"); - ASSERT_TRUE(acc_details); - ASSERT_EQ(R"({"admin" : {"another_key": "another_val"}})", *acc_details); - } - - /** - * @given details, inserted into one account by two writers, with one of the - * keys repeated - * @when performing query to 
retrieve details under this key and added by - * one of the writers - * @then getAccountDetail will return only details, which are under the - * specified key and added by the specified writer - */ - TEST_F(AccountTest, GetAccountDetailWithKeyAndWriter) { - ASSERT_TRUE(val(command->insertAccount(*account))); - ASSERT_TRUE(val(command->setAccountKV( - account->accountId(), account->accountId(), "some_key", "some_val"))); - ASSERT_TRUE(val(command->setAccountKV(account->accountId(), - account->accountId(), - "another_key", - "another_val"))); - ASSERT_TRUE(val(command->setAccountKV( - account->accountId(), "admin", "some_key", "even_third_val"))); - - auto acc_details = query->getAccountDetail( - account->accountId(), "some_key", account->accountId()); - ASSERT_TRUE(acc_details); - ASSERT_EQ(R"({"id@domain" : {"some_key" : "some_val"}})", *acc_details); - } - - class AccountRoleTest : public WsvQueryCommandTest { - void SetUp() override { - WsvQueryCommandTest::SetUp(); - ASSERT_TRUE(val(command->insertRole(role))); - ASSERT_TRUE(val(command->insertDomain(*domain))); - ASSERT_TRUE(val(command->insertAccount(*account))); - } - }; - - /** - * @given WSV command and account exists and valid account role - * @when trying to insert account - * @then account role is inserted - */ - TEST_F(AccountRoleTest, InsertAccountRoleWhenAccountRoleExist) { - ASSERT_TRUE(val(command->insertAccountRole(account->accountId(), role))); - - auto roles = query->getAccountRoles(account->accountId()); - ASSERT_TRUE(roles); - ASSERT_EQ(1, roles->size()); - ASSERT_EQ(role, roles->front()); - } - - /** - * @given WSV command and account does not exist and valid account role - * @when trying to insert account - * @then account role is not inserted - */ - TEST_F(AccountRoleTest, InsertAccountRoleWhenNoAccount) { - auto account_id = account->accountId() + " "; - ASSERT_TRUE(err(command->insertAccountRole(account_id, role))); - - auto roles = query->getAccountRoles(account_id); - ASSERT_TRUE(roles); - ASSERT_EQ(0, roles->size()); - } - - /** - * @given WSV command and account exists and invalid account role - * @when trying to insert account - * @then account role is not inserted - */ - TEST_F(AccountRoleTest, InsertAccountRoleWhenNoRole) { - auto new_role = role + " "; - ASSERT_TRUE( - err(command->insertAccountRole(account->accountId(), new_role))); - - auto roles = query->getAccountRoles(account->accountId()); - ASSERT_TRUE(roles); - ASSERT_EQ(0, roles->size()); - } - - /** - * @given inserted role, domain - * @when insert and delete account role - * @then role is detached - */ - TEST_F(AccountRoleTest, DeleteAccountRoleWhenExist) { - ASSERT_TRUE(val(command->insertAccountRole(account->accountId(), role))); - ASSERT_TRUE(val(command->deleteAccountRole(account->accountId(), role))); - auto roles = query->getAccountRoles(account->accountId()); - ASSERT_TRUE(roles); - ASSERT_EQ(0, roles->size()); - } - - /** - * @given inserted role, domain - * @when no account exist - * @then nothing is deleted - */ - TEST_F(AccountRoleTest, DeleteAccountRoleWhenNoAccount) { - ASSERT_TRUE(val(command->insertAccountRole(account->accountId(), role))); - ASSERT_TRUE(val(command->deleteAccountRole("no", role))); - auto roles = query->getAccountRoles(account->accountId()); - ASSERT_TRUE(roles); - ASSERT_EQ(1, roles->size()); - } - - /** - * @given inserted role, domain - * @when no role exist - * @then nothing is deleted - */ - TEST_F(AccountRoleTest, DeleteAccountRoleWhenNoRole) { - 
ASSERT_TRUE(val(command->insertAccountRole(account->accountId(), role))); - ASSERT_TRUE(val(command->deleteAccountRole(account->accountId(), "no"))); - auto roles = query->getAccountRoles(account->accountId()); - ASSERT_TRUE(roles); - ASSERT_EQ(1, roles->size()); - } - - class AccountGrantablePermissionTest : public WsvQueryCommandTest { - public: - void SetUp() override { - WsvQueryCommandTest::SetUp(); - - permittee_account = - clone(TestAccountBuilder() - .domainId(domain->domainId()) - .accountId("id2@" + domain->domainId()) - .quorum(1) - .jsonData(R"({"id@domain": {"key": "value"}})") - .build()); - - ASSERT_TRUE(val(command->insertRole(role))); - ASSERT_TRUE(val(command->insertDomain(*domain))); - ASSERT_TRUE(val(command->insertAccount(*account))); - ASSERT_TRUE(val(command->insertAccount(*permittee_account))); - } - - std::shared_ptr permittee_account; - }; - - /** - * @given WSV command and account exists and valid grantable permissions - * @when trying to insert grantable permissions - * @then grantable permissions are inserted - */ - TEST_F(AccountGrantablePermissionTest, - InsertAccountGrantablePermissionWhenAccountsExist) { - ASSERT_TRUE(val(command->insertAccountGrantablePermission( - permittee_account->accountId(), - account->accountId(), - grantable_permission))); - - ASSERT_TRUE( - query->hasAccountGrantablePermission(permittee_account->accountId(), - account->accountId(), - grantable_permission)); - } - - /** - * @given WSV command and invalid permittee and valid grantable permissions - * @when trying to insert grantable permissions - * @then grantable permissions are not inserted - */ - TEST_F(AccountGrantablePermissionTest, - InsertAccountGrantablePermissionWhenNoPermitteeAccount) { - auto permittee_account_id = permittee_account->accountId() + " "; - ASSERT_TRUE(err(command->insertAccountGrantablePermission( - permittee_account_id, account->accountId(), grantable_permission))); - - ASSERT_FALSE(query->hasAccountGrantablePermission( - permittee_account_id, account->accountId(), grantable_permission)); - } - - TEST_F(AccountGrantablePermissionTest, - InsertAccountGrantablePermissionWhenNoAccount) { - auto account_id = account->accountId() + " "; - ASSERT_TRUE(err(command->insertAccountGrantablePermission( - permittee_account->accountId(), account_id, grantable_permission))); - - ASSERT_FALSE(query->hasAccountGrantablePermission( - permittee_account->accountId(), account_id, grantable_permission)); - } - - /** - * @given WSV command to delete grantable permission with valid parameters - * @when trying to delete grantable permissions - * @then grantable permissions are deleted - */ - TEST_F(AccountGrantablePermissionTest, - DeleteAccountGrantablePermissionWhenAccountsPermissionExist) { - ASSERT_TRUE(val(command->deleteAccountGrantablePermission( - permittee_account->accountId(), - account->accountId(), - grantable_permission))); - - ASSERT_FALSE( - query->hasAccountGrantablePermission(permittee_account->accountId(), - account->accountId(), - grantable_permission)); - } - class DeletePeerTest : public WsvQueryCommandTest { public: void SetUp() override { @@ -509,50 +65,5 @@ namespace iroha { ASSERT_TRUE(val(command->deletePeer(*peer))); } - class GetAssetTest : public WsvQueryCommandTest {}; - - /** - * @given database without needed asset - * @when performing query to retrieve non-existent asset - * @then getAsset will return nullopt - */ - TEST_F(GetAssetTest, GetAssetInvalidWhenAssetDoesNotExist) { - EXPECT_FALSE(query->getAsset("invalid asset")); - } - - class 
GetDomainTest : public WsvQueryCommandTest {}; - - /** - * @given database without needed domain - * @when performing query to retrieve non-existent asset - * @then getAsset will return nullopt - */ - TEST_F(GetDomainTest, GetDomainInvalidWhenDomainDoesNotExist) { - EXPECT_FALSE(query->getDomain("invalid domain")); - } - - // Since mocking database is not currently possible, use SetUp to create - // invalid database - class DatabaseInvalidTest : public WsvQueryCommandTest { - // skip database setup - void SetUp() override { - AmetsuchiTest::SetUp(); - sql = std::make_unique(soci::postgresql, pgopt_); - - command = std::make_unique(*sql); - query = std::make_unique(*sql, factory); - } - }; - - std::unique_ptr sql; - - /** - * @given not set up database - * @when performing query to retrieve information from nonexisting tables - * @then query will return nullopt - */ - TEST_F(DatabaseInvalidTest, QueryInvalidWhenDatabaseInvalid) { - EXPECT_FALSE(query->getAccount("some account")); - } } // namespace ametsuchi } // namespace iroha From cffb81d9f26e02646038e690690a8fc02d76c373 Mon Sep 17 00:00:00 2001 From: Igor Egorov Date: Tue, 15 Jan 2019 11:54:54 +0300 Subject: [PATCH 04/41] Add CHECK_TXS macro to acceptance fixture. Reduce code duplication (#2003) * Add CHECK_TXS macro to acceptance fixture. Reduce code duplication The macro checks amount of transactions inside whatever kind of response - block or proposal or something that has .transactions() * Enable Ordering Gate replay test (#2004) Signed-off-by: Igor Egorov --- .../acceptance/acceptance_fixture.hpp | 6 +- .../acceptance/add_signatory_test.cpp | 39 +++---- .../acceptance/create_account_test.cpp | 11 +- .../acceptance/get_account_test.cpp | 22 ++-- .../acceptance/remove_signatory_test.cpp | 71 ++++++----- test/integration/acceptance/replay_test.cpp | 36 ++++-- .../acceptance/set_account_quorum_test.cpp | 13 +-- .../acceptance/transfer_asset_test.cpp | 110 +++++++++--------- 8 files changed, 156 insertions(+), 152 deletions(-) diff --git a/test/integration/acceptance/acceptance_fixture.hpp b/test/integration/acceptance/acceptance_fixture.hpp index 0417f9068c..8fbfce9401 100644 --- a/test/integration/acceptance/acceptance_fixture.hpp +++ b/test/integration/acceptance/acceptance_fixture.hpp @@ -6,10 +6,11 @@ #ifndef IROHA_ACCEPTANCE_FIXTURE_HPP #define IROHA_ACCEPTANCE_FIXTURE_HPP -#include #include #include #include + +#include #include "cryptography/keypair.hpp" #include "framework/common_constants.hpp" #include "interfaces/permissions.hpp" @@ -45,6 +46,9 @@ namespace { #define CHECK_COMMITTED BASE_CHECK_RESPONSE(CommittedTxResponse) #define CHECK_MST_PENDING BASE_CHECK_RESPONSE(MstPendingResponse) + +#define CHECK_TXS_QUANTITY(i) \ + [](const auto &resp) { ASSERT_EQ(resp->transactions().size(), i); } } // namespace /** diff --git a/test/integration/acceptance/add_signatory_test.cpp b/test/integration/acceptance/add_signatory_test.cpp index 0b3d82d27e..bc68875c9b 100644 --- a/test/integration/acceptance/add_signatory_test.cpp +++ b/test/integration/acceptance/add_signatory_test.cpp @@ -36,9 +36,6 @@ class AddSignatory : public AcceptanceFixture { crypto::DefaultCryptoAlgorithmType::generateKeypair(); }; -#define CHECK_BLOCK(i) \ - [](auto &block) { ASSERT_EQ(block->transactions().size(), i); } - /** * C224 Add existing public key of other user * @given some user with CanAddSignatory permission and a second user @@ -49,11 +46,11 @@ class AddSignatory : public AcceptanceFixture { TEST_F(AddSignatory, Basic) { IntegrationTestFramework(1) 
.setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), CHECK_BLOCK(1)) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendTxAwait( complete(baseTx().addSignatory(kUserId, kUser2Keypair.publicKey())), - CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) .sendQuery( complete(baseQry().creatorAccountId(kAdminId).getSignatories(kUserId), kAdminKeypair), @@ -83,11 +80,11 @@ TEST_F(AddSignatory, NoPermission) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) .sendTxAwait(makeFirstUser({interface::permissions::Role::kReceive}), - CHECK_BLOCK(1)) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendTx( complete(baseTx().addSignatory(kUserId, kUser2Keypair.publicKey()))) - .checkVerifiedProposal(CHECK_BLOCK(0)); + .checkVerifiedProposal(CHECK_TXS_QUANTITY(0)); } /** @@ -105,16 +102,16 @@ TEST_F(AddSignatory, GrantedPermission) { .setInitialState(kAdminKeypair) .sendTxAwait( makeFirstUser({interface::permissions::Role::kAddMySignatory}), - CHECK_BLOCK(1)) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendTxAwait( complete(baseTx().grantPermission( kUser2Id, interface::permissions::Grantable::kAddMySignatory)), - CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) .sendTxAwait(complete(baseTx().creatorAccountId(kUser2Id).addSignatory( kUserId, kUser2Keypair.publicKey()), kUser2Keypair), - CHECK_BLOCK(1)); + CHECK_TXS_QUANTITY(1)); } /** @@ -130,12 +127,12 @@ TEST_F(AddSignatory, NonGrantedPermission) { .setInitialState(kAdminKeypair) .sendTxAwait( makeFirstUser({interface::permissions::Role::kAddMySignatory}), - CHECK_BLOCK(1)) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendTx(complete(baseTx().creatorAccountId(kUser2Id).addSignatory( kUserId, kUser2Keypair.publicKey()), kUser2Keypair)) - .checkVerifiedProposal(CHECK_BLOCK(0)); + .checkVerifiedProposal(CHECK_TXS_QUANTITY(0)); } /** @@ -147,11 +144,11 @@ TEST_F(AddSignatory, NonGrantedPermission) { TEST_F(AddSignatory, NonExistentUser) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) .sendTx(complete(baseTx().addSignatory("inexistent@" + kDomain, kUserKeypair.publicKey()), kUser2Keypair)) - .checkVerifiedProposal(CHECK_BLOCK(0)); + .checkVerifiedProposal(CHECK_TXS_QUANTITY(0)); } /** @@ -163,7 +160,7 @@ TEST_F(AddSignatory, NonExistentUser) { TEST_F(AddSignatory, InvalidKey) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) .sendTx(complete(baseTx().addSignatory(kUserId, shared_model::crypto::PublicKey( std::string(1337, 'a'))), @@ -180,10 +177,10 @@ TEST_F(AddSignatory, InvalidKey) { TEST_F(AddSignatory, NonExistedKey) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) .sendTx(complete( baseTx().addSignatory( kUserId, shared_model::crypto::PublicKey(std::string(32, 'a'))), kUser2Keypair)) - .checkVerifiedProposal(CHECK_BLOCK(0)); + .checkVerifiedProposal(CHECK_TXS_QUANTITY(0)); } diff --git a/test/integration/acceptance/create_account_test.cpp 
b/test/integration/acceptance/create_account_test.cpp index 8dd2955400..9cc826c76c 100644 --- a/test/integration/acceptance/create_account_test.cpp +++ b/test/integration/acceptance/create_account_test.cpp @@ -11,11 +11,6 @@ using namespace integration_framework; using namespace shared_model; using namespace common_constants; -// TODO igor-egorov, 2018-12-27, IR-148, move all check macroses to -// acceptance_fixture.hpp -#define check(i) \ - [](const auto &resp) { ASSERT_EQ(resp->transactions().size(), i); } - class CreateAccount : public AcceptanceFixture { public: auto makeUserWithPerms(const interface::RolePermissionSet &perms = { @@ -181,11 +176,11 @@ TEST_F(CreateAccount, PrivelegeElevation) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(second_domain_tx, check(1)) - .sendTxAwait(makeUserWithPerms(), check(1)) + .sendTxAwait(second_domain_tx, CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeUserWithPerms(), CHECK_TXS_QUANTITY(1)) .sendTx(create_elevated_user) .skipProposal() - .checkVerifiedProposal(check(0)) + .checkVerifiedProposal(CHECK_TXS_QUANTITY(0)) .checkBlock([&rejected_hash](const auto &block) { const auto rejected_hashes = block->rejected_transactions_hashes(); ASSERT_THAT(rejected_hashes, ::testing::Contains(rejected_hash)); diff --git a/test/integration/acceptance/get_account_test.cpp b/test/integration/acceptance/get_account_test.cpp index 580aa66a03..3573d00f26 100644 --- a/test/integration/acceptance/get_account_test.cpp +++ b/test/integration/acceptance/get_account_test.cpp @@ -11,15 +11,11 @@ #include "integration/acceptance/acceptance_fixture.hpp" #include "interfaces/query_responses/account_response.hpp" #include "utils/query_error_response_visitor.hpp" -#include "interfaces/query_responses/account_response.hpp" using namespace integration_framework; using namespace shared_model; using namespace common_constants; -#define CHECK_BLOCK(i) \ - [](auto &block) { ASSERT_EQ(block->transactions().size(), i); } - class GetAccount : public AcceptanceFixture { public: GetAccount() : itf(1) {} @@ -45,7 +41,7 @@ class GetAccount : public AcceptanceFixture { const interface::RolePermissionSet &perms = { interface::permissions::Role::kGetMyAccount}) { itf.setInitialState(kAdminKeypair) - .sendTxAwait(makeUserWithPerms(perms), CHECK_BLOCK(1)); + .sendTxAwait(makeUserWithPerms(perms), CHECK_TXS_QUANTITY(1)); return itf; } @@ -224,7 +220,7 @@ TEST_F(GetAccount, WithGetAllPermission) { TEST_F(GetAccount, NoPermissionOtherAccount) { const std::string kUser2Id = kUser2 + "@" + kDomain; prepareState({}) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendQuery(makeQuery(kUser2Id), checkQueryErrorResponse< shared_model::interface::StatefulFailedErrorResponse>()); @@ -238,7 +234,7 @@ TEST_F(GetAccount, NoPermissionOtherAccount) { TEST_F(GetAccount, WithGetMyPermissionOtherAccount) { const std::string kUser2Id = kUser2 + "@" + kDomain; prepareState({interface::permissions::Role::kGetMyAccount}) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendQuery(makeQuery(kUser2Id), checkQueryErrorResponse< shared_model::interface::StatefulFailedErrorResponse>()); @@ -253,7 +249,7 @@ TEST_F(GetAccount, WithGetMyPermissionOtherAccount) { TEST_F(GetAccount, WithGetDomainPermissionOtherAccount) { const std::string kUser2Id = kUser2 + "@" + kDomain; prepareState({interface::permissions::Role::kGetDomainAccounts}) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + 
.sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendQuery(makeQuery(kUser2Id), checkValidAccount(kDomain, kUser2Id, kRole2)); } @@ -266,7 +262,7 @@ TEST_F(GetAccount, WithGetDomainPermissionOtherAccount) { TEST_F(GetAccount, WithGetAllPermissionOtherAccount) { const std::string kUser2Id = kUser2 + "@" + kDomain; prepareState({interface::permissions::Role::kGetAllAccounts}) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendQuery(makeQuery(kUser2Id), checkValidAccount(kDomain, kUser2Id, kRole2)); } @@ -279,7 +275,7 @@ TEST_F(GetAccount, WithGetAllPermissionOtherAccount) { TEST_F(GetAccount, NoPermissionOtherAccountInterdomain) { const std::string kUser2Id = kUser2 + "@" + kNewDomain; prepareState({}) - .sendTxAwait(makeSecondInterdomainUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeSecondInterdomainUser(), CHECK_TXS_QUANTITY(1)) .sendQuery(makeQuery(kUser2Id), checkQueryErrorResponse< shared_model::interface::StatefulFailedErrorResponse>()); @@ -293,7 +289,7 @@ TEST_F(GetAccount, NoPermissionOtherAccountInterdomain) { TEST_F(GetAccount, WithGetMyPermissionOtherAccountInterdomain) { const std::string kUser2Id = kUser2 + "@" + kNewDomain; prepareState({interface::permissions::Role::kGetMyAccount}) - .sendTxAwait(makeSecondInterdomainUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeSecondInterdomainUser(), CHECK_TXS_QUANTITY(1)) .sendQuery(makeQuery(kUser2Id), checkQueryErrorResponse< shared_model::interface::StatefulFailedErrorResponse>()); @@ -307,7 +303,7 @@ TEST_F(GetAccount, WithGetMyPermissionOtherAccountInterdomain) { TEST_F(GetAccount, WithGetDomainPermissionOtherAccountInterdomain) { const std::string kUser2Id = kUser2 + "@" + kNewDomain; prepareState({interface::permissions::Role::kGetDomainAccounts}) - .sendTxAwait(makeSecondInterdomainUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeSecondInterdomainUser(), CHECK_TXS_QUANTITY(1)) .sendQuery(makeQuery(kUser2Id), checkQueryErrorResponse< shared_model::interface::StatefulFailedErrorResponse>()); @@ -323,7 +319,7 @@ TEST_F(GetAccount, WithGetDomainPermissionOtherAccountInterdomain) { TEST_F(GetAccount, WithGetAllPermissionOtherAccountInterdomain) { const std::string kUser2Id = kUser2 + "@" + kNewDomain; prepareState({interface::permissions::Role::kGetAllAccounts}) - .sendTxAwait(makeSecondInterdomainUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeSecondInterdomainUser(), CHECK_TXS_QUANTITY(1)) .sendQuery(makeQuery(kUser2Id), checkValidAccount(kNewDomain, kUser2Id, kRole2)); } diff --git a/test/integration/acceptance/remove_signatory_test.cpp b/test/integration/acceptance/remove_signatory_test.cpp index 5f07e4de0e..7beb973e11 100644 --- a/test/integration/acceptance/remove_signatory_test.cpp +++ b/test/integration/acceptance/remove_signatory_test.cpp @@ -35,9 +35,6 @@ class RemoveSignatory : public AcceptanceFixture { crypto::DefaultCryptoAlgorithmType::generateKeypair(); }; -#define CHECK_BLOCK(i) \ - [](auto &block) { ASSERT_EQ(block->transactions().size(), i); } - /** * C264 Remove signatory from own account * C267 Remove signatory more than once @@ -50,18 +47,18 @@ class RemoveSignatory : public AcceptanceFixture { TEST_F(RemoveSignatory, Basic) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), CHECK_BLOCK(1)) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendTxAwait( complete(baseTx().addSignatory(kUserId, 
kUser2Keypair.publicKey())), - CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) .sendTxAwait(complete(baseTx().removeSignatory( kUserId, kUser2Keypair.publicKey())), - CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) .sendTx(complete( baseTx().removeSignatory(kUserId, kUser2Keypair.publicKey()))) - .checkVerifiedProposal(CHECK_BLOCK(0)) - .checkBlock(CHECK_BLOCK(0)); + .checkVerifiedProposal(CHECK_TXS_QUANTITY(0)) + .checkBlock(CHECK_TXS_QUANTITY(0)); } /** @@ -74,16 +71,16 @@ TEST_F(RemoveSignatory, Basic) { TEST_F(RemoveSignatory, NoPermission) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser({}), CHECK_BLOCK(1)) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeFirstUser({}), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendTxAwait( complete(baseTx().addSignatory(kUserId, kUser2Keypair.publicKey()), kUserKeypair), - CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) .sendTx(complete( baseTx().removeSignatory(kUserId, kUser2Keypair.publicKey()))) - .checkVerifiedProposal(CHECK_BLOCK(0)) - .checkBlock(CHECK_BLOCK(0)); + .checkVerifiedProposal(CHECK_TXS_QUANTITY(0)) + .checkBlock(CHECK_TXS_QUANTITY(0)); } /** @@ -99,20 +96,20 @@ TEST_F(RemoveSignatory, GrantedPermission) { .setInitialState(kAdminKeypair) .sendTxAwait( makeFirstUser({interface::permissions::Role::kRemoveMySignatory}), - CHECK_BLOCK(1)) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendTxAwait( complete(baseTx().addSignatory(kUserId, kUser2Keypair.publicKey()), kUserKeypair), - CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) .sendTxAwait( complete(baseTx().grantPermission( kUser2Id, interface::permissions::Grantable::kRemoveMySignatory)), - CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) .sendTxAwait(complete(baseTx().creatorAccountId(kUser2Id).removeSignatory( kUserId, kUser2Keypair.publicKey()), kUser2Keypair), - CHECK_BLOCK(1)); + CHECK_TXS_QUANTITY(1)); } /** @@ -127,17 +124,17 @@ TEST_F(RemoveSignatory, NonGrantedPermission) { .setInitialState(kAdminKeypair) .sendTxAwait( makeFirstUser({interface::permissions::Role::kRemoveMySignatory}), - CHECK_BLOCK(1)) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendTxAwait( complete(baseTx().addSignatory(kUserId, kUser2Keypair.publicKey()), kUserKeypair), - CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) .sendTx(complete(baseTx().creatorAccountId(kUser2Id).removeSignatory( kUserId, kUser2Keypair.publicKey()), kUser2Keypair)) - .checkVerifiedProposal(CHECK_BLOCK(0)) - .checkBlock(CHECK_BLOCK(0)); + .checkVerifiedProposal(CHECK_TXS_QUANTITY(0)) + .checkBlock(CHECK_TXS_QUANTITY(0)); } /** @@ -148,12 +145,12 @@ TEST_F(RemoveSignatory, NonGrantedPermission) { TEST_F(RemoveSignatory, NonExistentUser) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) .sendTx(complete(baseTx().removeSignatory("inexistent@" + kDomain, kUserKeypair.publicKey()), kUser2Keypair)) - .checkVerifiedProposal(CHECK_BLOCK(0)) - .checkBlock(CHECK_BLOCK(0)); + .checkVerifiedProposal(CHECK_TXS_QUANTITY(0)) + .checkBlock(CHECK_TXS_QUANTITY(0)); } /** @@ -165,7 +162,7 @@ TEST_F(RemoveSignatory, NonExistentUser) { TEST_F(RemoveSignatory, InvalidKey) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeFirstUser(), 
CHECK_TXS_QUANTITY(1)) .sendTx(complete(baseTx().removeSignatory( kUserId, shared_model::crypto::PublicKey(std::string(1337, 'a')))), @@ -181,11 +178,11 @@ TEST_F(RemoveSignatory, InvalidKey) { TEST_F(RemoveSignatory, NonExistedKey) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) .sendTx(complete(baseTx().removeSignatory( kUserId, shared_model::crypto::PublicKey(std::string(32, 'a'))))) - .checkVerifiedProposal(CHECK_BLOCK(0)) - .checkBlock(CHECK_BLOCK(0)); + .checkVerifiedProposal(CHECK_TXS_QUANTITY(0)) + .checkBlock(CHECK_TXS_QUANTITY(0)); } /** @@ -201,18 +198,18 @@ TEST_F(RemoveSignatory, NonExistedKey) { TEST_F(RemoveSignatory, DISABLED_SignatoriesLesserThanQuorum) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), CHECK_BLOCK(1)) - .sendTxAwait(makeSecondUser(), CHECK_BLOCK(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) .sendTxAwait( complete(baseTx().addSignatory(kUserId, kUser2Keypair.publicKey())), - CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) .sendTxAwait( complete( baseTx().creatorAccountId(kAdminId).setAccountQuorum(kUserId, 2), kAdminKeypair), - CHECK_BLOCK(1)) + CHECK_TXS_QUANTITY(1)) .sendTx(complete( baseTx().removeSignatory(kUserId, kUser2Keypair.publicKey()))) - .checkVerifiedProposal(CHECK_BLOCK(0)) - .checkBlock(CHECK_BLOCK(0)); + .checkVerifiedProposal(CHECK_TXS_QUANTITY(0)) + .checkBlock(CHECK_TXS_QUANTITY(0)); } diff --git a/test/integration/acceptance/replay_test.cpp b/test/integration/acceptance/replay_test.cpp index d76d161ded..c662b49c94 100644 --- a/test/integration/acceptance/replay_test.cpp +++ b/test/integration/acceptance/replay_test.cpp @@ -6,41 +6,59 @@ #include #include "framework/integration_framework/integration_test_framework.hpp" #include "integration/acceptance/acceptance_fixture.hpp" +#include "interfaces/permissions.hpp" using namespace integration_framework; using namespace shared_model; using namespace common_constants; - -#define check(i) [](auto &block) { ASSERT_EQ(block->transactions().size(), i); } +using namespace shared_model::interface::permissions; class ReplayFixture : public AcceptanceFixture { public: - ReplayFixture() : itf(1) {} + ReplayFixture() : itf(1), kReceiverRole("receiver") {} void SetUp() override { auto create_user_tx = complete(baseTx(kAdminId) .createAccount(kUser, kDomain, kUserKeypair.publicKey()) + .createRole(kReceiverRole, {Role::kReceive}) + .appendRole(kUserId, kReceiverRole) .addAssetQuantity(kAssetId, "10000.0"), kAdminKeypair); - itf.setInitialState(kAdminKeypair).sendTxAwait(create_user_tx, check(1)); + itf.setInitialState(kAdminKeypair) + .sendTxAwait(create_user_tx, CHECK_TXS_QUANTITY(1)); } IntegrationTestFramework itf; + const interface::types::RoleIdType kReceiverRole; }; -// TODO igor-egorov, 07 Nov 2018, enable the test, IR-1773 & IR-1838 /** - * Basic case of transaction replay attack + * Basic case of transaction replay attack. 
+ * OG/OS should not pass replayed transaction * @given an initialized ITF and a transaction * @when the transaction is sent to ITF twice * @then the second submission should be rejected */ -TEST_F(ReplayFixture, DISABLED_BasicTxReplay) { +TEST_F(ReplayFixture, OrderingGateReplay) { auto transfer_tx = complete( baseTx(kAdminId).transferAsset(kAdminId, kUserId, kAssetId, "", "1.0"), kAdminKeypair); - itf.sendTxAwait(transfer_tx, check(1)); // should be committed - itf.sendTxAwait(transfer_tx, check(0)); // should not + itf.sendTxAwait(transfer_tx, CHECK_TXS_QUANTITY(1)); // should be committed + itf.sendTx(transfer_tx); // should not + EXPECT_THROW(itf.skipProposal(), + std::runtime_error); // missed proposal should be thrown here + // TODO 2019-01-09 igor-egorov IR-152 + // redo without exception handling. Need to make ITF able to handle + // "none" answer from ordering service when there is no proposal +} + +/** + * @given ITF with hacked OS that provides the same proposal twice + * @when YAC accepts the proposal twice + * @then a transaction from proposal would not be committed twice + */ +TEST_F(ReplayFixture, DISABLED_ConsensusReplay) { + // TODO 2019-01-09 igor-egorov IR-153 } diff --git a/test/integration/acceptance/set_account_quorum_test.cpp b/test/integration/acceptance/set_account_quorum_test.cpp index 5664898ecc..16b4e942b5 100644 --- a/test/integration/acceptance/set_account_quorum_test.cpp +++ b/test/integration/acceptance/set_account_quorum_test.cpp @@ -11,8 +11,6 @@ using namespace integration_framework; using namespace shared_model; using namespace common_constants; -#define check(i) [](auto &block) { ASSERT_EQ(block->transactions().size(), i); } - class QuorumFixture : public AcceptanceFixture { public: QuorumFixture() : itf(1) {} @@ -21,7 +19,8 @@ class QuorumFixture : public AcceptanceFixture { auto add_public_key_tx = complete( baseTx(kAdminId).addSignatory(kAdminId, kUserKeypair.publicKey()), kAdminKeypair); - itf.setInitialState(kAdminKeypair).sendTxAwait(add_public_key_tx, check(1)); + itf.setInitialState(kAdminKeypair) + .sendTxAwait(add_public_key_tx, CHECK_TXS_QUANTITY(1)); } IntegrationTestFramework itf; @@ -36,7 +35,7 @@ TEST_F(QuorumFixture, CanRaiseQuorum) { const auto new_quorum = 2; auto raise_quorum_tx = complete( baseTx(kAdminId).setAccountQuorum(kAdminId, new_quorum), kAdminKeypair); - itf.sendTxAwait(raise_quorum_tx, check(1)); + itf.sendTxAwait(raise_quorum_tx, CHECK_TXS_QUANTITY(1)); } /** @@ -49,7 +48,7 @@ TEST_F(QuorumFixture, CannotRaiseQuorumMoreThanSignatures) { const auto new_quorum = 3; auto raise_quorum_tx = complete( baseTx(kAdminId).setAccountQuorum(kAdminId, new_quorum), kAdminKeypair); - itf.sendTxAwait(raise_quorum_tx, check(0)) + itf.sendTxAwait(raise_quorum_tx, CHECK_TXS_QUANTITY(0)) .getTxStatus(raise_quorum_tx.hash(), CHECK_STATEFUL_INVALID); } @@ -70,8 +69,8 @@ TEST_F(QuorumFixture, CanLowerQuorum) { .signAndAddSignature(kAdminKeypair) .signAndAddSignature(kUserKeypair) .finish(); - itf.sendTxAwait(raise_quorum_tx, check(1)); - itf.sendTxAwait(lower_quorum_tx, check(1)); + itf.sendTxAwait(raise_quorum_tx, CHECK_TXS_QUANTITY(1)); + itf.sendTxAwait(lower_quorum_tx, CHECK_TXS_QUANTITY(1)); } /** diff --git a/test/integration/acceptance/transfer_asset_test.cpp b/test/integration/acceptance/transfer_asset_test.cpp index 1fc241359b..31b10ca227 100644 --- a/test/integration/acceptance/transfer_asset_test.cpp +++ b/test/integration/acceptance/transfer_asset_test.cpp @@ -5,12 +5,12 @@ #include #include -#include "acceptance_fixture.hpp" 
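For orientation before the remaining conversions in this patch: the per-file `check(i)` and `CHECK_BLOCK(i)` lambdas are replaced by the shared `CHECK_TXS_QUANTITY(i)` helper added to acceptance_fixture.hpp. The sketch below restates that helper and one converted call chain; it assumes the IntegrationTestFramework methods and fixture helpers (makeFirstUser, makeTransfer, kAdminKeypair) already used in these tests and is illustrative only.

.. code-block:: cpp

    // Illustrative sketch, not part of the patch. The macro is the one added
    // to acceptance_fixture.hpp; it works for any response object exposing
    // .transactions(), i.e. verified proposals as well as blocks.
    #include <gtest/gtest.h>

    #define CHECK_TXS_QUANTITY(i) \
      [](const auto &resp) { ASSERT_EQ(resp->transactions().size(), i); }

    void sketch_of_converted_checks() {
      IntegrationTestFramework(1)
          .setInitialState(kAdminKeypair)
          // the setup transaction is expected to be committed
          .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1))
          // transfer without any added assets: expected to fail stateful validation
          .sendTx(makeTransfer("100.0"))
          .skipProposal()
          .checkVerifiedProposal(CHECK_TXS_QUANTITY(0))
          .checkBlock(CHECK_TXS_QUANTITY(0));
    }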
#include "backend/protobuf/transaction.hpp" #include "builders/protobuf/queries.hpp" #include "builders/protobuf/transaction.hpp" #include "cryptography/crypto_provider/crypto_defaults.hpp" #include "framework/integration_framework/integration_test_framework.hpp" +#include "integration/acceptance/acceptance_fixture.hpp" #include "interfaces/query_responses/account_asset_response.hpp" #include "utils/query_error_response_visitor.hpp" @@ -71,8 +71,6 @@ class TransferAsset : public AcceptanceFixture { crypto::DefaultCryptoAlgorithmType::generateKeypair(); }; -#define check(i) [](auto &block) { ASSERT_EQ(block->transactions().size(), i); } - /** * @given pair of users with all required permissions * @when execute tx with TransferAsset command @@ -81,10 +79,10 @@ class TransferAsset : public AcceptanceFixture { TEST_F(TransferAsset, Basic) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) - .sendTxAwait(makeSecondUser(), check(1)) - .sendTxAwait(addAssets(), check(1)) - .sendTxAwait(makeTransfer(), check(1)); + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(addAssets(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeTransfer(), CHECK_TXS_QUANTITY(1)); } /** @@ -96,14 +94,14 @@ TEST_F(TransferAsset, Basic) { TEST_F(TransferAsset, WithoutCanTransfer) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser({}), check(1)) - .sendTxAwait(makeSecondUser(), check(1)) - .sendTxAwait(addAssets(), check(1)) + .sendTxAwait(makeFirstUser({}), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(addAssets(), CHECK_TXS_QUANTITY(1)) .sendTx(makeTransfer()) .skipProposal() .checkVerifiedProposal( [](auto &proposal) { ASSERT_EQ(proposal->transactions().size(), 0); }) - .checkBlock(check(0)); + .checkBlock(CHECK_TXS_QUANTITY(0)); } /** @@ -115,16 +113,16 @@ TEST_F(TransferAsset, WithoutCanTransfer) { TEST_F(TransferAsset, WithoutCanReceive) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) // TODO(@l4l) 23/06/18: remove permission with IR-1367 .sendTxAwait(makeSecondUser({interface::permissions::Role::kAddPeer}), - check(1)) - .sendTxAwait(addAssets(), check(1)) + CHECK_TXS_QUANTITY(1)) + .sendTxAwait(addAssets(), CHECK_TXS_QUANTITY(1)) .sendTx(makeTransfer()) .skipProposal() .checkVerifiedProposal( [](auto &proposal) { ASSERT_EQ(proposal->transactions().size(), 0); }) - .checkBlock(check(0)); + .checkBlock(CHECK_TXS_QUANTITY(0)); } /** @@ -136,14 +134,14 @@ TEST_F(TransferAsset, NonexistentDest) { std::string nonexistent = "inexist@test"; IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) - .sendTxAwait(addAssets(), check(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(addAssets(), CHECK_TXS_QUANTITY(1)) .sendTx(complete(baseTx().transferAsset( kUserId, nonexistent, kAssetId, kDesc, kAmount))) .skipProposal() .checkVerifiedProposal( [](auto &proposal) { ASSERT_EQ(proposal->transactions().size(), 0); }) - .checkBlock(check(0)); + .checkBlock(CHECK_TXS_QUANTITY(0)); } /** @@ -155,15 +153,15 @@ TEST_F(TransferAsset, NonexistentAsset) { std::string nonexistent = "inexist#test"; IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) - .sendTxAwait(makeSecondUser(), check(1)) - 
.sendTxAwait(addAssets(), check(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(addAssets(), CHECK_TXS_QUANTITY(1)) .sendTx(complete(baseTx().transferAsset( kUserId, kUser2Id, nonexistent, kDesc, kAmount))) .skipProposal() .checkVerifiedProposal( [](auto &proposal) { ASSERT_EQ(proposal->transactions().size(), 0); }) - .checkBlock(check(0)); + .checkBlock(CHECK_TXS_QUANTITY(0)); } /** @@ -175,9 +173,9 @@ TEST_F(TransferAsset, NonexistentAsset) { TEST_F(TransferAsset, NegativeAmount) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) - .sendTxAwait(makeSecondUser(), check(1)) - .sendTxAwait(addAssets(), check(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(addAssets(), CHECK_TXS_QUANTITY(1)) .sendTx(makeTransfer("-1.0"), CHECK_STATELESS_INVALID); } @@ -190,9 +188,9 @@ TEST_F(TransferAsset, NegativeAmount) { TEST_F(TransferAsset, ZeroAmount) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) - .sendTxAwait(makeSecondUser(), check(1)) - .sendTxAwait(addAssets(), check(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(addAssets(), CHECK_TXS_QUANTITY(1)) .sendTx(makeTransfer("0.0"), CHECK_STATELESS_INVALID); } @@ -204,12 +202,12 @@ TEST_F(TransferAsset, ZeroAmount) { TEST_F(TransferAsset, EmptyDesc) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) - .sendTxAwait(makeSecondUser(), check(1)) - .sendTxAwait(addAssets(), check(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(addAssets(), CHECK_TXS_QUANTITY(1)) .sendTxAwait(complete(baseTx().transferAsset( kUserId, kUser2Id, kAssetId, "", kAmount)), - check(1)); + CHECK_TXS_QUANTITY(1)); } /** @@ -221,9 +219,9 @@ TEST_F(TransferAsset, EmptyDesc) { TEST_F(TransferAsset, LongDesc) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) - .sendTxAwait(makeSecondUser(), check(1)) - .sendTxAwait(addAssets(), check(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(addAssets(), CHECK_TXS_QUANTITY(1)) .sendTx( complete(baseTx().transferAsset( kUserId, kUser2Id, kAssetId, std::string(100000, 'a'), kAmount)), @@ -238,14 +236,14 @@ TEST_F(TransferAsset, LongDesc) { TEST_F(TransferAsset, MoreThanHas) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) - .sendTxAwait(makeSecondUser(), check(1)) - .sendTxAwait(addAssets("50.0"), check(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(addAssets("50.0"), CHECK_TXS_QUANTITY(1)) .sendTx(makeTransfer("100.0")) .skipProposal() .checkVerifiedProposal( [](auto &proposal) { ASSERT_EQ(proposal->transactions().size(), 0); }) - .checkBlock(check(0)); + .checkBlock(CHECK_TXS_QUANTITY(0)); } /** @@ -262,19 +260,19 @@ TEST_F(TransferAsset, Uint256DestOverflow) { "19966.0"; // 2**255 - 2 IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) - .sendTxAwait(makeSecondUser(), check(1)) - .sendTxAwait(addAssets(uint256_halfmax), check(1)) + 
.sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(addAssets(uint256_halfmax), CHECK_TXS_QUANTITY(1)) // Send first half of the maximum - .sendTxAwait(makeTransfer(uint256_halfmax), check(1)) + .sendTxAwait(makeTransfer(uint256_halfmax), CHECK_TXS_QUANTITY(1)) // Restore self balance - .sendTxAwait(addAssets(uint256_halfmax), check(1)) + .sendTxAwait(addAssets(uint256_halfmax), CHECK_TXS_QUANTITY(1)) // Send second half of the maximum .sendTx(makeTransfer(uint256_halfmax)) .skipProposal() .checkVerifiedProposal( [](auto &proposal) { ASSERT_EQ(proposal->transactions().size(), 0); }) - .checkBlock(check(0)); + .checkBlock(CHECK_TXS_QUANTITY(0)); } /** @@ -287,8 +285,8 @@ TEST_F(TransferAsset, Uint256DestOverflow) { TEST_F(TransferAsset, SourceIsDest) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) - .sendTxAwait(addAssets(), check(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(addAssets(), CHECK_TXS_QUANTITY(1)) .sendTx(complete(baseTx().transferAsset( kUserId, kUserId, kAssetId, kDesc, kAmount)), CHECK_STATELESS_INVALID); @@ -321,10 +319,10 @@ TEST_F(TransferAsset, InterDomain) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) - .sendTxAwait(make_second_user, check(1)) - .sendTxAwait(add_assets, check(1)) - .sendTxAwait(make_transfer, check(1)); + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(make_second_user, CHECK_TXS_QUANTITY(1)) + .sendTxAwait(add_assets, CHECK_TXS_QUANTITY(1)) + .sendTxAwait(make_transfer, CHECK_TXS_QUANTITY(1)); } /** @@ -375,11 +373,11 @@ TEST_F(TransferAsset, BigPrecision) { IntegrationTestFramework(1) .setInitialState(kAdminKeypair) - .sendTxAwait(makeFirstUser(), check(1)) - .sendTxAwait(makeSecondUser(), check(1)) - .sendTxAwait(create_asset, check(1)) - .sendTxAwait(add_assets, check(1)) - .sendTxAwait(make_transfer, check(1)) + .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(makeSecondUser(), CHECK_TXS_QUANTITY(1)) + .sendTxAwait(create_asset, CHECK_TXS_QUANTITY(1)) + .sendTxAwait(add_assets, CHECK_TXS_QUANTITY(1)) + .sendTxAwait(make_transfer, CHECK_TXS_QUANTITY(1)) .sendQuery(make_query(kUserId), check_balance(kUserId, kLeft)) .sendQuery(make_query(kUser2Id), check_balance(kUser2Id, kForTransfer)); } From 40954d31d4f3e3ba22dad0c562aa2f0427802c64 Mon Sep 17 00:00:00 2001 From: Akvinikym Date: Tue, 15 Jan 2019 12:38:41 +0300 Subject: [PATCH 05/41] Stateful Validation Errors to Documentation (#2020) * Introduced codes to the docs Signed-off-by: Akvinikym --- docs/source/api/commands.rst | 176 +++++++++++++++++++++++++++++++++++ docs/source/api/queries.rst | 115 +++++++++++++++++++++++ 2 files changed, 291 insertions(+) diff --git a/docs/source/api/commands.rst b/docs/source/api/commands.rst index ef0fb82366..5b776a74b0 100644 --- a/docs/source/api/commands.rst +++ b/docs/source/api/commands.rst @@ -45,6 +45,17 @@ Validation 2. Added quantity precision should be equal to asset precision 3. Creator of a transaction should have a role which has permissions for issuing assets +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. 
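
For illustration, a minimal acceptance-test-style sketch of a committed Add asset quantity command. It reuses only the ``IntegrationTestFramework`` and fixture helpers that already appear in the acceptance tests earlier in this series (``makeFirstUser()``, ``addAssets()``, ``kAdminKeypair``, ``CHECK_TXS_QUANTITY``); it is a sketch, not part of the test suite.

.. code-block:: cpp

    // Sketch only: relies on the fixture helpers used in the acceptance
    // tests above (makeFirstUser, addAssets, kAdminKeypair).
    TEST_F(TransferAsset, AddAssetQuantityCommitted) {
      IntegrationTestFramework(1)
          .setInitialState(kAdminKeypair)
          // a user created with the fixture's default permission set
          .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1))
          // AddAssetQuantity with a matching precision passes stateful
          // validation and ends up in a block with exactly one transaction
          .sendTxAwait(addAssets("100.0"), CHECK_TXS_QUANTITY(1));
    }
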
csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not add asset quantity", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to add asset quantity", "Grant the necessary permission" + "3", "No such asset", "Cannot find asset with such name or such precision", "Make sure asset id and precision are correct" + "4", "Summation overflow", "Resulting amount of asset is greater than the system can support", "Make sure that resulting amount is less than 2^256" + Add peer -------- @@ -84,6 +95,15 @@ Validation 1. Creator of the transaction has a role which has CanAddPeer permission 2. Such network address has not been already added +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not add peer", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to add peer", "Grant the necessary permission" + Add signatory ------------- @@ -122,6 +142,17 @@ Two cases: Case 2. CanAddSignatory was granted to transaction creator +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not add signatory", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to add signatory", "Grant the necessary permission" + "3", "No such account", "Cannot find account to add signatory to", "Make sure account id is correct" + "4", "Signatory already exists", "Account already has such signatory attached", "Choose another signatory" + Append role ----------- @@ -157,6 +188,17 @@ Validation 2. Transaction creator should have permissions to append role (CanAppendRole) 3. Account, which appends role, has set of permissions in his roles that is a superset of appended role (in other words no-one can append role that is more powerful than what transaction creator is) +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not append role", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to append role", "Grant the necessary permission" + "3", "No such account", "Cannot find account to append role to", "Make sure account id is correct" + "4", "No such role", "Cannot find role with such name", "Make sure role id is correct" + Create account -------------- @@ -194,6 +236,17 @@ Validation 2. Domain, passed as domain_id, has already been created in the system 3. Such public key has not been added before as first public key of account or added to a multi-signature account +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. 
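
As a rough illustration of how a stateful failure surfaces, the sketch below sends a Create account command that references a domain which does not exist; the transaction passes stateless validation but never reaches a block, exactly like the failing cases in the acceptance tests earlier in this series. The ``createAccount(name, domain, key)`` builder call is an assumption here — verify the exact signature against the shared_model transaction builders.

.. code-block:: cpp

    // Sketch only; createAccount(account_name, domain_id, public_key) is an
    // assumed builder signature, the rest mirrors the acceptance tests above.
    TEST_F(TransferAsset, CreateAccountInMissingDomain) {
      auto key = crypto::DefaultCryptoAlgorithmType::generateKeypair();
      IntegrationTestFramework(1)
          .setInitialState(kAdminKeypair)
          .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1))
          .sendTx(complete(
              baseTx().createAccount("newuser", "nosuchdomain", key.publicKey())))
          .skipProposal()
          // dropped during stateful validation, so the verified proposal and
          // the block stay empty
          .checkVerifiedProposal([](auto &proposal) {
            ASSERT_EQ(proposal->transactions().size(), 0);
          })
          .checkBlock(CHECK_TXS_QUANTITY(0));
    }
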
csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not create account", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator either does not have permission to create account or tries to create account in a more privileged domain, than the one creator is in", "Grant the necessary permission or choose another domain" + "3", "No such domain", "Cannot find domain with such name", "Make sure domain id is correct" + "4", "Account already exists", "Account with such name already exists in that domain", "Choose another name" + Create asset ------------ @@ -235,6 +288,17 @@ Validation 1. Transaction creator has permission to create assets 2. Asset name is unique in domain +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not create asset", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to create asset", "Grant the necessary permission" + "3", "No such domain", "Cannot find domain with such name", "Make sure domain id is correct" + "4", "Asset already exists", "Asset with such name already exists", "Choose another name" + Create domain ------------- @@ -270,6 +334,17 @@ Validation 2. Account, who sends this command in transaction, has role with permission to create domain 3. Role, which will be assigned to created user by default, exists in the system +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not create domain", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to create domain", "Grant the necessary permission" + "3", "Domain already exists", "Domain with such name already exists", "Choose another domain name" + "4", "No default role found", "Role, which is provided as a default one for the domain, is not found", "Make sure the role you provided exists or create it" + Create role ----------- @@ -305,6 +380,16 @@ Validation 1. Set of passed permissions is fully included into set of existing permissions 2. Set of the permissions is not empty +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not create role", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to create role", "Grant the necessary permission" + "3", "Role already exists", "Role with such name already exists", "Choose another role name" + Detach role ----------- @@ -340,6 +425,18 @@ Validation 1. The role exists in the system 2. The account has such role +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. 
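
The "No default role found" case for Create domain can be sketched with the same rejection pattern; only the command differs. ``createDomain(domain_id, default_role)`` is an assumed builder signature here, and the fragment below is meant to continue a chain like the one in the sketch above.

.. code-block:: cpp

    // Fragment of a chain like the sketch above; createDomain(domain_id,
    // default_role) is assumed. A syntactically valid domain name whose
    // default role does not exist is rejected at stateful validation.
    .sendTx(complete(baseTx().createDomain("newdomain", "nosuchrole")))
    .skipProposal()
    .checkVerifiedProposal(
        [](auto &proposal) { ASSERT_EQ(proposal->transactions().size(), 0); })
    .checkBlock(CHECK_TXS_QUANTITY(0));
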
csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not detach role", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to detach role", "Grant the necessary permission" + "3", "No such account", "Cannot find account to detach role from", "Make sure account id is correct" + "4", "No such role in account's roles", "Account with such id does not have role with such name", "Make sure account-role pair is correct" + "5", "No such role", "Role with such name does not exist", "Make sure role id is correct" + Grant permission ---------------- @@ -375,6 +472,16 @@ Validation 1. Account exists 2. Transaction creator is allowed to grant this permission +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not grant permission", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to grant permission", "Grant the necessary permission" + "3", "No such account", "Cannot find account to grant permission to", "Make sure account id is correct" + Remove signatory ---------------- @@ -415,6 +522,18 @@ Two cases: Case 2. CanRemoveSignatory was granted to transaction creator +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not remove signatory", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to remove signatory from his account", "Grant the necessary permission" + "3", "No such account", "Cannot find account to remove signatory from", "Make sure account id is correct" + "4", "No such signatory", "Cannot find signatory with such public key", "Make sure public key is correct" + "5", "Quorum does not allow to remove signatory", "After removing the signatory account will be left with less signatories, than its quorum allows", "Reduce the quorum" + Revoke permission ----------------- @@ -448,6 +567,16 @@ Validation Transaction creator should have previously granted this permission to a target account +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not revoke permission", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to revoke permission", "Grant the necessary permission" + "3", "No such account", "Cannot find account to revoke permission from", "Make sure account id is correct" + Set account detail ------------------ @@ -489,6 +618,16 @@ Two cases: Case 2. CanSetAccountInfo was granted to transaction creator +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. 
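
A short sketch of the permission dimension of Set account detail: writing a detail to the creator's own account is always allowed, while targeting another account is governed by the rules and error codes above. ``setAccountDetail(account_id, key, value)`` is an assumed builder signature; the fragment continues a chain like the ones earlier in this series.

.. code-block:: cpp

    // Fragment; setAccountDetail(account_id, key, value) is an assumed
    // builder call. Writing a key/value pair to one's own account commits.
    .sendTxAwait(
        complete(baseTx().setAccountDetail(kUserId, "nickname", "alice")),
        CHECK_TXS_QUANTITY(1));
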
csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not set account detail", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to set account detail for another account", "Grant the necessary permission" + "3", "No such account", "Cannot find account to set account detail to", "Make sure account id is correct" + Set account quorum ------------------ @@ -529,6 +668,18 @@ Two cases: Case 2. CanRemoveSignatory was granted to transaction creator +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not set quorum", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to set quorum for his account", "Grant the necessary permission" + "3", "No such account", "Cannot find account to set quorum to", "Make sure account id is correct" + "4", "No signatories on account", "Cannot find any signatories attached to the account", "Add some signatories before setting quorum" + "5", "New quorum is incorrect", "New quorum size is less than account's signatories amount", "Choose another value or add more signatories" + Subtract asset quantity ----------------------- @@ -568,6 +719,17 @@ Validation 2. Added quantity precision should be equal to asset precision 3. Creator of the transaction should have a role which has permissions for subtraction of assets +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not subtract asset quantity", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to subtract asset quantity", "Grant the necessary permission" + "3", "No such asset found", "Cannot find asset with such name or precision in account's assets", "Make sure asset name and precision are correct" + "4", "Not enough balance", "Account's balance is too low to perform the operation", "Add asset to account or choose lower value to subtract" + Transfer asset -------------- @@ -610,5 +772,19 @@ Validation 3. Source account has enough amount of asset to transfer and is not zero 4. Source account can transfer money, and destination account can receive money (their roles have these permissions) +Possible Stateful Validation Errors +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. 
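
The "Not enough balance" rule for Subtract asset quantity can be sketched by adding less than is later subtracted: the add commits, the subtract never reaches a block (depending on the role's permissions the drop may instead be caused by a missing ``can_subtract_asset_qty`` — either way it is rejected statefully). ``subtractAssetQuantity(asset_id, amount)`` is an assumed builder signature; the helpers are the ones used in the acceptance tests earlier in the series.

.. code-block:: cpp

    // Sketch only; subtractAssetQuantity(asset_id, amount) is assumed.
    TEST_F(TransferAsset, SubtractMoreThanBalance) {
      IntegrationTestFramework(1)
          .setInitialState(kAdminKeypair)
          .sendTxAwait(makeFirstUser(), CHECK_TXS_QUANTITY(1))
          .sendTxAwait(addAssets("50.0"), CHECK_TXS_QUANTITY(1))
          // more than the 50.0 added above: rejected at stateful validation
          .sendTx(complete(baseTx().subtractAssetQuantity(kAssetId, "100.0")))
          .skipProposal()
          .checkVerifiedProposal([](auto &proposal) {
            ASSERT_EQ(proposal->transactions().size(), 0);
          })
          .checkBlock(CHECK_TXS_QUANTITY(0));
    }
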
csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not transfer asset", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Command's creator does not have permission to transfer asset from his account", "Grant the necessary permission" + "3", "No such source account", "Cannot find account with such id to transfer money from", "Make sure source account id is correct" + "4", "No such destination account", "Cannot find account with such id to transfer money to", "Make sure destination account id is correct" + "5", "No such asset found", "Cannot find such asset", "Make sure asset name and precision are correct" + "6", "Not enough balance", "Source account's balance is too low to perform the operation", "Add asset to account or choose lower value to subtract" + "7", "Too much asset to transfer", "Resulting value of asset amount overflows destination account's amount", "Make sure final value is less than 2^256" + .. [#f1] https://www.ietf.org/rfc/rfc1035.txt .. [#f2] https://www.ietf.org/rfc/rfc1123.txt diff --git a/docs/source/api/queries.rst b/docs/source/api/queries.rst index 6cdd0178be..20f38614b8 100644 --- a/docs/source/api/queries.rst +++ b/docs/source/api/queries.rst @@ -71,6 +71,16 @@ Response Structure "Quorum", "number of signatories needed to sign the transaction to make it valid", "0 < quorum ≤ 128", "5" "JSON data", "key-value account information", "JSON", "{ genesis: {name: alex} }" +Possible Stateful Validation Errors +----------------------------------- + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not get account", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Query's creator does not have any of the permissions to get account", "Grant the necessary permission: individual, global or domain one" + "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" + Get Signatories ^^^^^^^^^^^^^^^ @@ -115,6 +125,16 @@ Response Structure "Keys", "an array of public keys", "`ed25519 `_", "292a8714694095edce6be799398ed5d6244cd7be37eb813106b217d850d261f2" +Possible Stateful Validation Errors +----------------------------------- + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not get signatories", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Query's creator does not have any of the permissions to get signatories", "Grant the necessary permission: individual, global or domain one" + "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" + Get Transactions ^^^^^^^^^^^^^^^^ @@ -160,6 +180,17 @@ Response Structure "Transactions", "an array of transactions", "Committed transactions", "{tx1, tx2…}" +Possible Stateful Validation Errors +----------------------------------- + +.. 
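
On the query side, a stateful failure arrives as an error query response, and the visitor from ``utils/query_error_response_visitor.hpp`` (already included by the acceptance tests earlier in this series) can be used to detect it. The fragment below is a sketch only: ``baseQry()``, ``getAccount(account_id)`` and the exact checker type name are assumptions to verify against the fixture and the shared_model builders.

.. code-block:: cpp

    // Fragment, sketch only; baseQry()/getAccount() and the checker name are
    // assumptions. If the creator's role lacks all of the account-reading
    // permissions, asking for another user's account yields a stateful error.
    .sendQuery(complete(baseQry().getAccount(kUser2Id)),
               [](auto &response) {
                 ASSERT_TRUE(boost::apply_visitor(
                     interface::QueryErrorResponseChecker<
                         interface::StatefulFailedErrorResponse>(),
                     response.get()));
               });
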
csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not get transactions", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Query's creator does not have any of the permissions to get transactions", "Grant the necessary permission: individual, global or domain one" + "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" + "4", "Invalid hash", "At least one of the supplied hashes either does not exist in user's transaction list or creator of the query does not have permissions to see it", "Check the supplied hashes and try again" + Get Pending Transactions ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -197,6 +228,15 @@ The response contains a list of `pending transactions <../core_concepts/glossary "Transactions", "an array of pending transactions", "Pending transactions", "{tx1, tx2…}" +Possible Stateful Validation Errors +----------------------------------- + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not get pending transactions", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Query's creator does not have any of the permissions to get pending transactions", "Grant the necessary permission: individual, global or domain one" + "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" Get Account Transactions ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -249,6 +289,18 @@ Response Schema } } +Possible Stateful Validation Errors +----------------------------------- + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not get account transactions", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Query's creator does not have any of the permissions to get account transactions", "Grant the necessary permission: individual, global or domain one" + "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" + "4", "Invalid pagination hash", "Supplied hash does not appear in any of the user's transactions", "Make sure hash is correct and try again" + "5", "Invalid account id", "User with such account id does not exist", "Make sure account id is correct" + Response Structure ------------------ @@ -324,6 +376,19 @@ Response Structure "All transactions size", "total number of transactions for given account and asset", "", "100" "Next transaction hash", "hash pointing to the next transaction after the last transaction in the page. Empty if a page contains the last transaction for given account and asset", "bddd58404d1315e0eb27902c5d7c8eb0602c16238f005773df406bc191308929" +Possible Stateful Validation Errors +----------------------------------- + +.. 
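
For contrast, a successful variant of the transaction-history queries: requesting the creator's own transaction list is covered by the ``can_get_my_*`` permissions. ``getAccountTransactions(account_id)`` is an assumed builder call (the real signature may require pagination parameters), and the response unpacking below is only a sketch.

.. code-block:: cpp

    // Fragment, sketch only; getAccountTransactions(...) and the response
    // cast are assumptions to verify against the query builders/interfaces.
    .sendQuery(complete(baseQry().getAccountTransactions(kUserId)),
               [](auto &response) {
                 const auto &txs =
                     boost::get<const interface::TransactionsResponse &>(
                         response.get()).transactions();
                 // expects whatever the creator committed earlier in the chain
                 ASSERT_FALSE(txs.empty());
               });
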
csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not get account asset transactions", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Query's creator does not have any of the permissions to get account asset transactions", "Grant the necessary permission: individual, global or domain one" + "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" + "4", "Invalid pagination hash", "Supplied hash does not appear in any of the user's transactions", "Make sure hash is correct and try again" + "5", "Invalid account id", "User with such account id does not exist", "Make sure account id is correct" + "6", "Invalid asset id", "Asset with such asset id does not exist", "Make sure asset id is correct" + Get Account Assets ^^^^^^^^^^^^^^^^^^ @@ -375,6 +440,16 @@ Response Structure "Account ID", "account which has this balance", "@", "makoto@soramitsu" "Balance", "balance of the asset", "No less than 0", "200.20" +Possible Stateful Validation Errors +----------------------------------- + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not get account assets", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Query's creator does not have any of the permissions to get account assets", "Grant the necessary permission: individual, global or domain one" + "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" + Get Account Detail ^^^^^^^^^^^^^^^^^^ @@ -447,6 +522,16 @@ Response Structure "Detail", "key-value pairs with account details", "JSON", "see below" +Possible Stateful Validation Errors +----------------------------------- + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not get account detail", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Query's creator does not have any of the permissions to get account detail", "Grant the necessary permission: individual, global or domain one" + "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" + Usage Examples -------------- @@ -568,6 +653,16 @@ Response Schema Please note that due to a known issue you would not get any exception if you pass invalid precision value. Valid range is: 0 <= precision <= 255 +Possible Stateful Validation Errors +----------------------------------- + +.. 
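
For Get account assets, the response can be unpacked through the ``AccountAssetResponse`` interface that the acceptance tests earlier in this series already include; ``getAccountAssets(account_id)`` is an assumed builder call, so treat this as a sketch rather than a reference implementation.

.. code-block:: cpp

    // Fragment, sketch only; getAccountAssets(account_id) is an assumed
    // builder call. Assumes an addAssets(...) step ran earlier in the chain.
    .sendQuery(complete(baseQry().getAccountAssets(kUserId)),
               [](auto &response) {
                 const auto &assets =
                     boost::get<const interface::AccountAssetResponse &>(
                         response.get()).accountAssets();
                 ASSERT_EQ(assets.size(), 1);
                 ASSERT_EQ(assets[0].assetId(), kAssetId);
               });
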
csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not get asset info", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Query's creator does not have any of the permissions to get asset info", "Grant the necessary permission: individual, global or domain one" + "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" + Response Structure ^^^^^^^^^^^^^^^^^^ @@ -613,6 +708,16 @@ Response Structure "Roles", "array of created roles in the network", "set of roles in the system", "{MoneyCreator, User, Admin, …}" +Possible Stateful Validation Errors +----------------------------------- + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not get roles", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Query's creator does not have any of the permissions to get roles", "Grant the necessary permission: individual, global or domain one" + "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" + Get Role Permissions ^^^^^^^^^^^^^^^^^^^^ @@ -657,5 +762,15 @@ Response Structure "Permissions", "array of permissions related to the role", "string of permissions related to the role", "{can_add_asset_qty, …}" +Possible Stateful Validation Errors +----------------------------------- + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not get role permissions", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Query's creator does not have any of the permissions to get role permissions", "Grant the necessary permission: individual, global or domain one" + "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" + .. [#f1] https://www.ietf.org/rfc/rfc1035.txt .. 
[#f2] https://www.ietf.org/rfc/rfc1123.txt

From 64aa13c20528f42bd66d0e64d05e86bede48cae0 Mon Sep 17 00:00:00 2001
From: Nikolay Yushkevich
Date: Tue, 15 Jan 2019 17:23:59 +0300
Subject: [PATCH 06/41] Add new transaction statuses and JSONB field info (#1978)

* Add new transaction statuses and JSONB field info

Signed-off-by: neewy

* Update docs/source/core_concepts/glossary.rst

Signed-off-by: Akvinikym

* IR-70: Update information about block structure, reference block structure

Signed-off-by: Nikolay Yushkevich

* IR-8 Fix comma and repeated word

Signed-off-by: Nikolay Yushkevich
---
 docs/image_assets/transaction_status.sketch | Bin 35360 -> 34892 bytes
 docs/image_assets/tx_status.png             | Bin 22368 -> 64672 bytes
 docs/source/core_concepts/er_model.rst      |   2 +-
 docs/source/core_concepts/glossary.rst      |  14 +++++++-------
 docs/source/locale.yaml                     |   1 -
 5 files changed, 8 insertions(+), 9 deletions(-)

diff --git a/docs/image_assets/transaction_status.sketch b/docs/image_assets/transaction_status.sketch
index fe21d45dd5c6dc0df2782a1089984f5714edc21b..03ed231df28afa2594d6e654955edb034168665a 100644
GIT binary patch
delta 32844
[base85-encoded binary delta for transaction_status.sketch omitted]

delta 33243
[base85-encoded binary delta for transaction_status.sketch omitted]
zstkW$e}bk5nBYRd0CO65$b496B70k)BF31NF&0Y`TxdR&9@PWbuJ|Af3Ne75_-%aK+40sQ`1AAl%(Oe1uj*1~n`5#Oa({GRj z#64Hcez-N~ONw^hn_rg4cx*K#BZ4rU;Gtz;k|p z={~-Z7W`5FR&mDOi)W4JFIEoK$VXTm#3~_Gw2%>DEn-Z`GRvCsydSl6yZNIsmy4T$W9GIT4M0(I^9Ou(uRb^F_DJ%ipkq zqPL*?lS2H?^RV;9bdUW!{8u`5yi%Mur&~Ev$?L`SXQt5S?dN6~`p}&dV7LWb#+}*- zjU?Y)&$%pMC1$7>zDfDp7A%YaQ%A^QVg77Z`?EF>?IqD;h+GL?@mk2`XQ4q^q}@ zQRm#c$w8@sNm!eV50@|=ZHq&*r&amFtqVHPVZYK?A)->}al|p%LHj=PiA=kgv*5rf zm8OnAzt??3Dp1iiA&Un+%e%irwENk5L1nA&Z_^R7blT4`G>ppfEYE(e&%T;bZGPQU z7m(!|C~K1BLd_?Gzc#@ZO!e|%s?royG3^>1f|^xq04zFWioKR^^{1>z&vsD0Gu`W4 zJ2zzVn^AiZR^lhe?bBx`-AK+rWl;F;N7-o)O>_l2zw<5$r^~)>@swvqQ%mW!*Gm6S z_@&+>y8#(m{U&GEfy-9q8)BEB(QZctTU?~5Vx*rwRY^5t;>2%bT5v>WrGMQ( zXMYT~=3LAR;jY8pxN3}olgQ%Q(Ue-25BC&6@AhK06Ge14Vn2U5ZecHE2)Jk{(x{sL zkq?kL#2o8rHS24*zoRYPE}{FY7ayi7V5NV=P4xYciX{Gkd(>PFHm38Swwrbgk_qW= zDAT>Rk;MliDvK+{bEjJz>dKcqxiU(t7<39@AI!UPp366~!8V==UB7rjz@iTs=Hf+8 z6}`$>@Y?7B)X$bgM;mLq}0|-b;RLjkCj!4rq%9AP2jbz z2xo5}!ZAqsiTD1xWEwzoCQ{Wl$hPTeu&Q4e;31ttLm)ZWV zD_v+a%}{d+^Kqs55c$dU7Q)A_w!olew#w5^g5pBixGurh6BS0{uRQ{z_)=>i8y!3>N2vvO zEB3qdzubA^H_viO(@yE+@yD{ngDnQW)R5wDX1rS)QZvJSl5 zXn`R`%J&4x*TpzqWFRhuB3r)4qQ3_v7SHEGCNBr&Qo;9ysFtR-l~q^G$h_%GekkjQJDMX z68#5L8`V8?x{KO`K@~u)8&VDj`tM&M2y-j7@AKGp6~;ymGzg>MJsT|ZGsewUFZ}${ z;qDrP$@G9T+TIw-65>9FzY>@4UT$n*S* zz5x!?6N_x85n&A*ms#)3yd3YG`<%g?V~3M0XkX}T-ax{j1;hM6d05?%m6XFqIQdW|g#&Lh{GyI*%>6lu1kFkb9;ZaT2(z$JHY^*CAMgz8*1nYI6cEV>m`AY1pKX2ZRoo|qL!mLrOwO{K=PP6tsgA+A*747Pm z|3aIb|I*_1n&~I0{1@I=CowPhS8je&Q}(`7g668!6OYT>D5IV?(s8_AqQrSvqt9t_ zD~MG-6t})L7Y@76qbH?G!D&J@`*uI;^hfVQ9yvUqh3OUTioJL zH{W@!)rGh)Aqhq>q{4M0vMBk-aq7H?`X-bDd+9T1@f#DdmzB$A(Qb_w;nhuRxMifM z)9)>k zwxVa9!$O=+q}Ka?Y(IDIGHAF=60zR16kN*)Ps(1j|5he-wc=;Y2vNe2`0FISJr~oi z#vQs`!@y<8w|h|*{3^*!nN%;4OWEq`k7uJBdnr1n#4^9H`l4c9l#Ypnb&5o5 zfHu9%JZ)-j!>dEf$2vS`N8$|&U;r0?BDFe zDJ^*~$0J{AR~Aiww|ZXcbV9^`7KnnE4F)}VNr`9j_S=AcaX;?};_`t$9X1`Qq%UfE z{Lhy@cpI!*QLMg6ahA2{-wkx|jB}MFbQBtuGPk*?sF!Gb3WY;XUs$&NIiQB0N zMcBDG?GXcX>DERDlm)ieBL=^@YmslEOV>|P@rPNOZ?mPi>(ziFY$dYR7hz0h?$?E3 zV2RlPqARf?lR46ldeIleFYXSP%8#q-5)k*HsG5{%c zyR?}8orMtEx|tu;&({l8cwdi5Q@p1eQ3`duR+*vOR3{k8(;^eD)(}y!z1d;q3Ux|x zJ6`3Mri#;vKK$P7JfOp3n?`#CJzPKyg>V!ej<4BtzEb%&9`C;LpbQIfnf)gCE`b9V z!z$$@|CUBE=Q3wJ`*3TtTlwW@)%yr^Yyul)x9(0P4!K*X)V^;-LBC)7$n*sUR+8y@ zj_2Deb7Gn&y4M4|b!>1#m9yK1-?Kwm*Otfo<9>{hQB-f(sjLG|+Gc z)OIeC>=AdSzH80Y9ZBn}6}FdZmN?yZi@d0S`{{M#ydG|fNod;T4da<~)YvKvzqkK$ zgFJn^tDhX2Y;QBt`O7bmYgu|FTWr?jx24OKHh_{&f6*m`Qh2N)5>Gd4=RX)IiwDoe zg~Cts%v2i7t&OuWm@J8D-SE|E{~xs?PNR4|)nU!T8XH2r*K}gGL{?0nckDL?2gbO7 zNGPxuWe4l~&!=c2#8v{qTyN|M@mg$n8FK6EsuM0ghaX*Kv%;fhva|Ft^3WCNdNh%4j9KJ{mVt0Qsc$DJ|WwulcrZ6a8R8wqI zq<<=3fs!Lp;KqlLI9%;XzMVt_9I6dkL!3g(?9#J@oJY&-WESx!J;s8BTc6rOK8%X- zpTUSk`|FZV{utFX3X;KpO-9vH>~xL$&(hGaLXC8KH_4A$d#QrYqW`ir7~5#RTFD8k zS|d1$NXqIfx2LDs+t*=nMq7nTHCuh2&u)D$|lVd3@Lh`yiNIs47~RJ>V! 
zm4u9|BZ(hXtBkr`joGiU z*xT)j$rsIU*_JntqnUy&^)vnbNArI-FSR%IYl@IK#okR&=q;$Qj6-W#;^Y2D25zp< zb~(Lv)Ds<4A)BR$@ko!`Msg2%qVc9F!;m-;yH%Z)E=5<$l?lz@l;1D5#|v{NkD3+0 zLf=5k;B8zDrtmh@c_zAe6Ksm+yq{vuUkD3i`H{N{PNd`r%7=UH3})r}jjo4Jtyo8> zM$oMy*fxEI;HMPMK?QR>qfchjr~{qE0anXOrSd?J3)}M5;_dO2fOb>2kb*;o6&x zw(EeK5!`4`Fc(<1n5Bv3^yPD)-I7{G`Y?gWnV23_puu)(tqAp-5b#-q#({ATI6oE zP;f+oY?Hfa1R+5zqCLO-E1BUuEonHcXq@l`4XUOQjV1SDvSV87Z)PFpw4EI?<943; zx7gJXQth{Lk!^l=IBhgaLER_Ki*;DmhoVI7=o<)V#CFpN^al`sI0ju}z)PL#kiNuM z=H3h=y+?VSh*M?EGqM2rke;vbclr!ebZ^vr*-D;e-|Os*0?_9xrF*eP=|u1q)BYj# z@ITR8@^vN#N`2$2x1zP8U2Cna{}5XBl!WWUx5Qr09<_J?f4-Hbyc^pk)hu-vu{8-G zeWfGVN|OU=Q29PlGWQwvtIY>mp{TGBF8$93D+!YE^0z!q-}}dKY~n@qs)g26ukTRKmogYnHBa&n^-Rw6D&IBu=G$lY9e{$ss?!Ao;#ov8z9nmDLZh3}% zTT^Az;74J$%}VIkisk7s%r;7fmKrwt{XL>XCZc)9hhRnR0tc=2fXcwAsI;ne`uAG` zyE315lcX92gT@Td31!<(VY8{?v@rY`UqWnwDB`h?Bp)cLn z5-Z1`^BN;rQ=E~bOkAT=R5aTPGGS&03>Ea6cA0N_zS2Km56ylhKN{w!(opF^zh2+yU1e2}>;yn~I{0Fwt zHE0z#*vLP+s&@eczceh!X<+J#j+?3 zVT*Xp;WJaUJ>&-CeSJ88G@neb(qq;V4v7_)Q^ z;}njXH|!L=PH@AC`Nt>COB46Ooag(2U@oY&Ppw6c79Z296Kf3KJ%n5{z(|K=sV*8f z_>7)6r`H`<;#<#i`~T1;qJ1`4W+Is>&( zGIkIn7ipRwqp{G7P^L-UrCX5zX=rquZ8F2~2ky(L|N8+Q-6lpH(cUW@sGW##9?ZGE zPoTJZeebsir$O!iO0@3MNDY2cdYkdWE|+QRKlkDzfZ6l6DUfk9wD4doQmsq7GcRA> zhLUIX&)^r9J%R|UB2{YG#B%X6f7rZ3%zyN^|3h3|P{&k#deWx_LTj-L5c1ajf6Mxx z0b0|p2jdtIC8;Q0M!~Ax6Vur${J&>{k{nJ>nc>irWo&2l7wXo`efxS9i47&o>SWYO z&g1(^XuMxjsT8=ieTEs-Yg-zbM_jp%t37KmhBgDGFri1zjEBllC}2j>KD@+Fv;wmy zC$IWFy+VGm2l`=3th9kw+13+#3VC_Ci zp_?tA3I+H$Mau|j%~Hf<^0uwpzrmG(E>RzSH2)f)Is({T6J~zPfD~U23g&!=49MF^ zSo#=kHK1Vz?J%(*5pj{nFu{viT^U!rG(GRR)!hyfKb&FG=|dTj00 zUm7*7X!RHj`)yz0o{Yv;$e$`9<)e8t2~X6JY5?4pm(1nm5l*}T_x60F0IFZCtN@3q zuMD60AZfkb5;ZvPd15TYas!1XfZ}}^$P_8mo7xlrhu9Vjv(08E7e>H?-xkbbDl3bL zLlQ>|1X;%m_raqZ(Wwplke-d5tsh5WsR-PlXh{Emi_R zKDAIAug&b&YJdy5UvK5}J(FX2Ho&4=7cvh)!| znCp|Q!sGn=`N2dGcXV|fvwzKG#eME43Qq~75^(x{*sv&CThSXT*Ry=unO2C=!%g(R z#uSI#+k0X@YXR~ePo@X;r+BF-^@Ak_xnRf6lW;tyZ31%XC{nh3U+C^k^9z?1x8fJh zSdB7WF`!}A=v!i6_Yd*{C_bm>C6Z3137ais22R~qT$ZEBu8_k+VnAzC>~V-!A{1ErYa@_80+Zg)A2=(C$pl?v2561@NSlFy4b%EvetdhZh>Ekz0e4W5njE^U zCYbfl!7%*(lW0D@E6=|__3aS_3ImgdU~4CFPv?rVD%V%c30~tw8jyA8r5!PJes8_g5Mm&}^ndseEy60G$y_ALJIIUPTb7 z$?6Hy+e31rq7I^rfrn=|=@CsP`suj!2HRL3_t6#w=?KWzh)QuK&AlrSf@ z;_TI@;?v1&|>{pLiL7m5+qHT+hqLU}^d$@^PXzMz~0oP@Veb1ww ze$8es0H_Egh~zGRykzD3*(SEkiwIO7n|f1PPeK`Y?V?1t1rM=A8r{#-cZe6>2wf@8 zWB`7%`vTe;&2`wFdmrZB8J6(v51#P{(qLEmb--Ro(kn}Kxx@8@5qx*pP~sR0?PXbd z)VsJLEsi5;lMuNbNYcuYgAj-dHmGIgYPQ=-O}kFmVG2(dImjQk^j%0Ns;m+YXlO zPqyivj^*W0O9fkA9S*Fc#luDO!wArR&!)h}>R?tpifYgp7&ikInRtbzGiZ<@RYR)v zO92PVzj{uan<2=K9~ZJRWsrmy5t|mvUybuGBZI22{d*w-CogxmK`^gYb3DB=Ih2Ex_RoGBO6_xEFv1pJMS~M4hol?uC@%8U z-uP3J(|%`QiW54IZRW8k4E2o^Fq=jf%4+YI%X~9)#&Ifo;<>dk;Ak}G;&>lkzVh+Q z$S0#d%pD4Gub4Wep+8){*jwSeGwrM>Zs)t&8kyfU-aJ0+{~mx%n01ATM``*M5ysuh zImI3rGoSN7TSkufdVltWSF&5Et2vI-m8xI*iMH0)iX9>GflS^15zOUSd7ydBIV-Ry zzEFCae$ZK4Vf2L!JCz|;-_{6N$OK=z1Dw+%uqWqnY@fnvO}1ftMfkUF3JHq)K)L-W z_mg9c-_R_lVMywAQo(oNjL2%(d#5tkg*Hyz(L=n5O*guZ_FfJ+L}vri0J{SL6I>e7 zVh=@$6SI0ji~tAa8&o)MprERSrGsm%z*@hZITL*4_37@SLF$cd&=DPT;<5>Yc0}cb zn@@hL!+5#)*75o|9-U-65Zb9;(tUaq+Tp=X>H5c#l&nd$zf{5uL4g0J`|IS@GH+(_ zuk+)XUXpEBN(H0$FG$p8lA#%;{HU!~o$d%zc~zjZ)Qbb!E;9XzC^7-pv1o;0J{a10 zKOa=gVIfGBTJNw@LZTTg#66&WBs7Mnwz3!MNI?q_a&~URzXXTVlhEhYk84B>$*^h` zSdWp-r~X@Wnt?$S>i8fKPx?4p81R{*ZFLZ=Lh65Cl7^v5?bRTnKtw9yomy)6?PBWp zsTi}^zw@<63CuTQcoY7=-Njw}FBC|ufoBsj0Ur6$mqG*MPy$q-`10Jb_t_FnK0y3M zhN=6S6b`r;72wrLG5pSA|HRAwW6EAsg11pKFU^J2CaDc))PaQP6&UoWM4qjA&mPBG zkA*ZDW)M`c)IO2+D$h-SIQz69yGhW1gDKlUSsj(0Cf2)YK25-7G)ugdtzh5$^Xo^~ 
zn3xny-wN;y`Srh@LWW}u2QeStDZ-V{EcY$Lwi$@zz74G(bA-h_-fjSuk7|z>baOre zY@jp>>{!y6GW?I*NmN>g0y!LeTAJW3ZS7;MJ%iP{y*wEbDb)II%p9lNfd`-ZI0q4t z0Ria1lr0`yYKQ+{xiRGjteCCc0?@mi?LV3%pdSndCZVi$4PI0*`2Supm{1LpL@stH z2U(m)84dS{Zk?Sk1_{RMUWSkdcgZi2?_;NcO7;z5sP+>rl7?|wKiwL`YStc2{n3O~Yd!rM*aO!+c5glJuYA>Wq|hpJ7Jj*~41DFuFn<4DJN;j7Q48P? z9Ivq=C{oFSs}iwaMT#U7h{{t)vHp!%t9Ek^?G+#Erz@rLO9Fg17dTlgM^;~endC-_ zAm$1?KM)ZGz-LzJyC0)~BOU39Ex)p+K|uFRA4($Sv3comI*~Uuaix&LJ~EueGV<$S zP=PX*OlSsIfN#W`?~2~=CAn%iu+kHX_M{2?NOdEwv7II(a>&I-wh+0tVAPf{gVJGO zFv(;k#MFhZH{UxRG+rdjyk9*8$WkV-onaC0fiW5bx=4oiT@i9b9{$|cQ=%Y4dE007 zN~gjwi)d(n9ni`7pH{k#7!2;N(Fy?Cd5Z<0asnXC5zxoo4f9w_FyaI|jRtG%Jz%s` zx}wM_Lcov?gHuC09hxkC$FXMs>1BhABw21%)4)p-jfC57wF-O?NOy=M$%Vdv*%1c^ z(zh@?rpnR-SGc}M=`br?cT^`f|0f^=Ne5cJ``L>h^i7+4 z+k(pid;;$Dd}+67uHn!rmEoUIBqw^L1%Qu#>Arz#4Pl-xyc-V%usZwje+>SN*Yh&l zgRxc#Jwa$BMI-*Pr>%$&+zX394^$|Z@FE^~InF&cvw}DCGrY5@#1X!7)-eJRGhDMP zd?Mo1^FT|+2pC?z2>Uk9Z1ODYk?cU6Lgk0#WJJsL{uEBTMbW&!6-)29DzBl$Azpq= zp;6#ci~vI+Evsm7^0}B2vlMCdcar7+4CzKRg&7KKAI3&;q}KoT5GaUy>>!|gZD_dh zXy;}f#U+o8_EDp6)69QPm_`{HaRC!{o@BKx*-~RgeNeZ8UZh=>N37~Pb{&95b2!N)?lu}_IBLf&M4nsn5pd$kKHv({5WBK{ut5DmA?!#)+ z69C`wsme568gpfdUk{6)xfUuIW8D@OYe3}Ar;q&)-d**2ol+a_Nl+%Ksna7pA>D*G zvXcm9v z%=JcX#%e;WN3u}t2{@K$?+;t0U?XI}xd0FA0l&PwTwvJhcRKklZoIzdE+C3hxFfl& z${~rxAn&00Asskf2fx})mM^r(>sIJ?^uEL0@H|F-1q!_2`|sZH6i*;sU@#wF zPbfNF3Z@uQQx@tKXb^DOH+xwwC%C!9X=eE@c(5@9>9OJ3?-iS9DTN7IX8G|YB)z0<-l zNKcS=(%6W#fKBjutrd+%x&8n>rC}{>8SrAtA*cPWf_Fa6{9asq$@)Ge%mx))Z-Wa0~XTc#Tb z_9@fKsRp;Iy)T}QUn*={_}6TtK9gwDU?!MhoDszm7C!sYt}c4MJ3YV^pQS`W69Zx< zJl`&eUlHiZ>&Rfo)v7TjqjYH3dmoQYnSTEsTtRp{&)G~dmP6<>!W=S?_&D7oWUBe2 zqe~$IrBVMAI?y>}Q3(@)XK^?d7EOw9Z&Pf=Yc-ynp>=;3j0-&N(d<{-sQeu1=t?1X zhA>_P)O;);_KVnWI8n-*NXtW{D8XSV6Re}~El{Vv^Ln-5ee>#-K@&B(Tio+VH+nNc zW#AwNa2h4#hZR%%($7E%Ng}*M2_C`rjq&1h_(PQ6FqkqE09I4X<}r6lR_aNx*TGoE;NQHM;Ju^_^2EK8qWl8AlB~ze`5)-mkM)CPRyE)&GD)re8ZzUc zYcj-6brvVc{a9CAMSlicVjiUW1oVb@vXUIb578y<(#A`9!oxmb#tcS5ygx(b;$?Um zU&V46WBFhs7mLlK_tD~_=(6=ia=9iS$Bx+8err347_e2!lce$meIg>5|E4f?1U=nZ z=lxiZOJeMqLVdP=U?Tk8W{&XP{gkJ<`OHKTphrc)Zw$%M zN>Q+3BX^2PQLC+&Ua`Ll8zpBVi+&n zvpSOAZdA;q;G>Nq*IW=2xJUwBUGogDo@ zF6ZC1P=*uW)ZLedNU|*|?Xf?5af_vB0W6%V)HJJ-A%y!_r^CJavRI4%s6q{8CbPjsqG* z^DoYOvsVUv9XrnxalY0XqSOLoeuEiD`XSaiCgWM>M|oA?Vl{fomk>@PXYzst=ChX= zWet^~xz2cR1690?fJ2}P|Hgp4MLjMJqjR22+NC(?;E=J|L4i^cXQ_EdUYciElcD=W zT$Ezun>jvQIGN4|XbV0N-(ZQH5DqkDFmy$(`6n|M#zG2ae8WdSy1|6RFs%5i%Bc&0 zaCG`Pe3qlM=l1NjNO6JA>xdKf9j>5v2G(1kI?V?M4UQ7rCNRlPlw85^9l5Y)Y|bH% z-e$o>Q;7I_-|X@HyEVl1*^WGK_BPO!F0Md7VzPMV3FR>a4`D)6B<{LsE{GH4Xro43 zv^BK|Bf#$`w9|SP5je$L`?PVYgVY&j`MrN6{{3+9%5GVQNunLd|Lq9ijHx>G4w$gN zB`B%THaxxPS(Kv#eK~SZfsC~Z%T4rdIb3|J>83~pBrcW!Nc~Vc2HO`$kMVJ?9!k(C zy^eU46oW*!Q(!Ms9VBRKd&4i~%bWQ7j}+ZVvM4~xzc*c#l%GED{(JKdbao0fv}*$8 z1*j1|o)t@iW(K*ybQ+9+LEgWUS)!&tIiDq+ zWd5lkKfrgqmMQYSgUW^)9Q~*t@amlvS3w6Ig$3wA=s=+zPj2=5`|m(msbBNUikT+b zUwf&dtaD$~0NwG@|GhT2Nivpdi;?VY(FF`-OR&A}ReRkWjOQy;e-t&~})i z6QkPmDku~2`r2IxYdCK|(nlX%Nn z#nJ$zlnxMUN-TJQl`4TBz*Wfl+XtOG^@sNB{2D!%1+O2ad=iH-)9{&;%rlrVN-EWj zBa5ruHF74g)PczP7vivE8%J5)kt>{b@LkoGW8=n)z5!`rvY%AoJWe-TRjW*TKPaWG zyjQEU!!m8!CssDKng?R%SOP=}-U1E%u*kt2wcL4g^q74b;T7}A8!@Mufm<9E-=rLwX z!F$#La(-EwDrBXekmlyc)+7es0+`23bdwnxWfU(+5? 
zUMLX=?xC~R=Fwe%dtDTPo3DD-%O!8Y70ghDeBX?ir9celUih}&aqH1c4kodmd|L@K zSqDn8*QQILd#l0u=Aw})L56hgy%W#_a8W!qM4*9XeRwL6Gh;-9?tKsBlub&aI^Yvr zy;)!7%{>-=wgbib%BJpGHYKM|5a)?YE941AgzsI6%I~Vzd)t(39z&Z5DYHF^v?@TB zD>(UCEZv6}QlJk`;pLjN)}&sc1oS+JN(%1wlMTN=N^tTcv*kNXxoDEe3i_rTz4X|e zhA#=MTB=1>rEEnw8t#}SHm{=C%$%CaL7Ta zZ8ViwE&J9x3i#ig*yX26w8y|+{qYbkjZ(lNL@RsEO9C&^UQe#k{cQ@pf(1HTWi0kq zo3Ch*26a1Z#XnX?FpvQKA%oq5|9OWNB;^gD9{*xHyvMxlUtTlHBCJbD!cpS0gQl8P zoNo{*^2`{<^>bJl7>T&NAa>FxKe?UXyo7W4P b#v}btJI2Rr|NHfeCyp}YIf7Bh<2U~gsBpt( diff --git a/docs/image_assets/tx_status.png b/docs/image_assets/tx_status.png index 908e4d4a784a067fa62cb88b6e22318a9f236b00..4669ddcd96d0479efc4f0f563efa177636f2e7bf 100644 GIT binary patch literal 64672 zcmZ^L1yq#Z);=8)LrBBWAfPay^iU$9gd!+CFocv6QUgdyN~0nTB2v;N0z(dsbPOQf z-8t0%#rym2z2CRif335YtaoO?IeYK3_p_hq7TIIfN6)da@Gh{h zutkUnfG74Vy>Y-l*v`*Y9%2>sGOuA_$zwf!q@aVq-b^Jj_pLC#_xYB&JSpC9WlIrJ1i_j#lpfnR@Z`h0IZ5YSMO2+K=a9gv2pe|DPq_M$s>l z%~L>h^LdNEyp4_iObTm1Iea({d!drei%rG$UrUt?=fP=~vh*_&DmH&2yeDdE4gIbo;whoodEo;5 z{8wCi+}w)Gwn?&PyR%K^g`_rFS&HQB#d0rR7Nwdk5@yDWS#-pBQ3%3YuXb4kl-reiqQh>CZ^y01&>U3=bYNfq{dBoG4e&fV@jns zr}j;~H%G;jjzwJPh0R)=Dl1*%-tM^7V*KV_accHj4~Z1(s(6X|ekD^bG_G~?IJn5V z#QjtC{d>hNg|_QRtueX|XWIP~J3M+T-s`@PZI8VeEO*s$xa9w9#Z2M7Evv;8$PmZK zV<`IT{o!j)0^Y`gm0ir_nIo%58|bGhV~WeWj%Wy}Owa?}#+c(<`v$8=wn0hDmwXXQ z;+#bSgT{=4g{ix6RB+tz@NhkeIcvm2DN^|&(6e}%Cmx#bh2Ic#y)oxaM|4bjtfUt0 z@3)3O+qSvm?*FH-F6`aOx878jsc_GAEBlFx&LBtwI^!xZyE@SSx<7}rkPsft$o|B` z{eiK-QboPypYA*PMZNbse+6s4l^Oa4g4&TZo|gxg$X~|VW34l5pL3W9A72tGw|ixg z7Nflg#xxtR`#u5wH3YnQmKsP1KE6s%teAloZrkD2EjT-YTV{m`}$|R<%;rAMyAB>fb&2WI8cfn5h`904kmi=pHy`r<$7Ng zL1Q>nR%&9BY6>>6-ss!M zPxzuPTZl?ei}dPNsPoqu(huMu4t$u4O-u*n;*zoSy`!%=uw_9(`Zht ztURI`UBzEF84}`{>FSEhkLCFmp_P45zTAAR=^Za<{7sC9r=~6i{ee%_5AVO%pg)$( z#b8EUE+=t}pCXQdmXFWXYx-R=6wd>JvXD5QY2Dp@+ld6!oz2UWnPQb+h-O#e=9Dzy zsoQofd=4p`)4MxhQm_TE|hSy>`HA5-sc`cnt z>P1=h@UbA=h$*xWEuu6AyA?a=312}w6v$HLm_cWo-04QCZ;Qd`zc1_PqF)E_E*9#! z7r930cN@5A4eO5E3Mq=O>Q6vYb4!e~M zSAX3vd{P`-RFig4W@H{RNfJ>)D|-^;-$`s@J=jWTKkkaix5Kjq=ucnSy z&_HC<|A~AXO+UNSEDzl0-c%hNI3`Vqx8*|^Guxe`T~ldlux>ONtL(*&-bUd7f;ZWv zag_iSzD_S|Qb!+L#j;E$)7%~0*0@_vL-3V)x5*M!0oQOwIqC~BNzL}#E0)yGO`PWI zzIgdk&odZ)YdboojUr+V>9RJ6+hMwVy0^gkQv_@NhQNK55GRjt#n_nG+) z+V-&%-7)XA$h6%nQ=g2G;zwstkq2;(b`O!GAN7_4cn-hJ{tuS7N~%>&Uh) z{{#?%(jxaQwJGyad)Lk!6WvdQgJb_UmiR7*?RlEz%Tz`HSb>trl>ZG4ax5E#<$PPr z{TIzQ$ZdRPX-YJ;a&ZhcMMEsP-No9Ux#O&EGJV##Q*OpO4kr{(f*%fg4RUyCrM6v-o`$9Ve0+ofxx3hR3+ zbx|%bZ;FX*k}HKEMQ1K_kkIO%?hP3poO@eCdTtkYEEL!bc3TZ>D$roZ6nM zwXB-aamzVfDko(}> zqH535uA9-TEJb_!?HLMkkm?RaK0+E^1NQRftfb362qR>TN)g7#Ka=TUMQCEk;wpU- z2_EzMrv5B`F+Ij|O0i$fbj|BMT5KZM^Q6sQ@7tg5BJk(`Qyf)=ZXlGdmzE&_q3#eb zE-e}1qf!g9(5RH4?yR&pFZ!!(F|V^u9u~;h9^h#y5o zwL~@s?0w1s1_}z>I7F|5*d6(e`jka)<;*G>8&BChBRL2%w)^%%2zEq!1m0#l+P;ThdgY! zRGcbNn<0^J+Bz6;8HF7NKTSU}8=B^OnzLVO(?0y4D}_Zz{O|@P`n#}UarO?BeJY46 zf|S&e6#s}Wn2#ul6VSD975q`vRB;kY$zz51bT5QmQ|$N`0$nuaFmE8%pL+cjFrFOu%io$FlD^k{isTUr81~K<0Vt4D+#(?L`P* z#n9Y~6C9kYprfg@#?O6EkuWmtfFtTXKBZG}**V$^Tp3xMs-Qdj+Vs_LpQStXJMkBZ zFD?E`6Vt(t?uKJ++DV_=Vg|2w{x!j=HcIJ`I$xK|hN9m9`B;oIk`%-UavIlXC0~Jx%$tU>qA-(})Yp;lN zuKV2nbQ=*AKb9vl-5MmqV|Bjl8Gi=%Ju!*jFpNQ(! 
zXMfrGS9i5{zv>H4R(`tR2Gv3%_fxu<)(|}ZhGqVGKnlwuQKa7l zcHvDRM;xx}eQ%<>osppPZRTcw`MFZU{l+o0plkpuslCT5Z|+E^28&KA;aiQT6ONCD z>Xj#E@4K)jZMCvBqJvYcDi4k8?xquJKDm5=NEU3o z9?Jh(Mu$tDy36wG_DtY?@i!A(7m3y;EJq0l!v|>5IR0Y zK$YyPyg;L$r=wQ771zGW9dTh(shZ$uIFzm#6549sG!jmnSSS-#q~3bdG~e7UKlYtF z^qmGrJ2Tg6dmo8;tpdAw$o0@~D8Q$aZzd|vni~#&P)>cbLM*zVD*Ekncbb5_=iqaB zgtnV6l;yYIBd^nHUmg8wj7AK zH1=}De#sZlj0PXdqZ;|5Lh=i-2c8xSC-~hnt1Pn_Q+-ctGeRhTrqF( zNQFMV&KKWMyRp$1yxG*Bogy6SY+#-GxI2i}pjaZ(W3OwaI%Sxted!c@qClPdSc6x& z&b<4s`nt{!2YMP#0_it1p^2^Z<5XKTnPKW_W_Y{LJxF1>i%V1+p#)$A-ygsD;vjF# zA<$Uxig-arODULkLaUq;P?fl98G^U<%dtlB?>0S{Ya&bY^E546oNE;njkNp(bm{T(F5vnBguFnUEqL_wUIJX{zASkBrR4nxz#h!lj2F$Z zaETji^&K0#;DtMX_DW^g_aEI^tp4ru(y7O8{?A?5N9(pJ8lZhEw)v(65Y{4N%t@IE4 zLXH?s|EB%%CT zW<$sk%h}mkUn4cf5lTZ1vO6WhKnr4bjz3b3%XhB8r+$5Z2<u}@eB3u?z z))y~z%-g3}c{3RFxQH!8v5V&7M|%3YKFPaLG$y@JS;$z$^MEvW+beuGi2w5Ch(-1t zqF7ZV54`fdey@mRG-{wNip3C*^C7Fg;;Z?V4<37h0cQX}9^-^~9~ zjXz>lYcxSa=A3^OQxG-uaZwU1pLzPg4lbzTm2c{4u{oMrfiF<0U?zq&WGv3LMjx~D8@R+fDl(` zV<_R_-@XZ)cpn6Kq=o8hh>6*mi2KbAp`=h83MON+Z?AcQHtqSrp&0q&$CS!00Y)IA zMy&pDE=vccwkLzoPyMzb+Un~11pcIj9GW5%vSz>Um(8@cw8Y>cIP?JBgRt@GCjz;< z@e7^FHwb-In{o6o`+R7&dd=&MOvNzPBn$=BDo?4*vAb5cNDmN#N_5}%89mn3*3KU- zo_PlPlCH2~nb7q%{O-Dt6BO_Y)zjII*re`(hRz9Z14kRZ^8*L)Q!BRhZoIm>}AN|cqv(?!yY(z$PlsE%x^2(uD?w3EvLZ5+!nP=bRxi#e!0| zq0*c6&VF|60%=1PjQx7qnej;odw(U5!M#OG>ILqT6zb0kgpWpRh8+W!4P0l z619i?8_rNy%bSbyftL+jKQc1XP9Ko-{2M}V;Q>=XrnIo>zEwkrYk5HX%ZacM(Jz*b z_rL6%s(p%}>*hb*-t7>5>!{QkcBJ8y=aS{MM?EaT#`+_18XGSC5LKwy62Nri!_?Mh zGfG9u?Z3$d^h0hxrR_9u7&J_;uP&<=R7@hgLFy@pD^et4KI7Y5BW}}HTV=`;Tor9s z+!($w4>SSk79C~=oA1}xC)`bQYw(mCB$o-roH37#71OtN*(xe$%RXVmRyRB|zL^~* zza<0Hp-y$}KEHS_vPqf+1sE}2jB34L{0139rCtF2cDCtI2by5U?t5fqbn8N)-Ml-lD_ZOtXygGZaT2+dbs9(HD0 zG9-ZHE*zsJ!F{vU^jzD)R~TGp#CN-k(U!#~7CwizSb3=%t|Qejx(29~ffN8~{HSkY zqQ<0o>_mU1x3!8%C?}W=42Gdd=3gzqhSqtyN!Unr%72$7cp8M9=&KXI+*xpDSd3?~ zdA_#!y&h3LgZs2SsAfA1Vzxe!VUrfb!PMtDA9TU8i=-rY&%5_b4BtbJx$h#84yYA( zu&WFW{iYDo^U~tPmkHO>E9D3ec#AI|FYgLE|7GEXm8QpRqd!#Nyo>qOq>z5uBdAsc z_U*k%Z3z9BWJ}!QDIv3}X`3rL?ewNTg{$lW7v$+#HcxUhqhK8@o}L$%)Ms@_*m174 zeceynyaWC(#2S*@iUnM}2rdc`$mDR&W(yv<@;h!7%{wbT=lNj&&2n(jP3zg#_)-t{ zDp4{84TN_u&{wRaFlgUk83;D?5ew*O-*h;6Azg~{>pi1O2MFOT+?;v)qyScjF^9fO z$kcIi+8Z>k{ZX&D`xu*&&t>6zJ*4JC#zC{{I?$vVO{J)RKa8E9v=`4-L-z#_>=<s-Qgi6cTw~mAiJFW58^EdYcy?#2NJpk)4t#Jq;*)sie57< z4GVj5P2SsWf29o5!}*%-G&MQC^I@uBd-+|;;?CzBZI5yo66lgTNO3fw@3uVUbAEUt0#4e?k7r`{DF||u;f@%C3(`0A53KvOSJ43?F3U|UpA)LM zVqEd8ce`tNtb!i14H5ssgyY_V*@}2gI zdx%mz<#;_?O3BEa485l(LTth7546fY0ZH8|LG|*Myr1-Jw}3sK>A9F20Df7d4sAoB z{Rv&VJoWx?M_X+MWMySO9Xwi};pllpN^rg&xM84vV*EKi-g)SoQn+ovf%A3~OA7AJ zW42l%fPpmL>Y4Z`-$vc2%1-K2&sSrn7ltssRh*A}CoHzq&zN&+z%>?LvhZjWx`cix zIIVz!za~wv<;9;>u%brZR$&a1%qx!N=_9m*NgV^BRY|b!@7`L07rCR!O`LJ`-uyZ?$`ziz@Dx^t#v%?N5 zoUt)$*&h>}&s|G-7Q|XkBDhczfL(_<jFN3utkV{y7&_|InU9J_Inn3$UoAwF$^dj!uL=3y4Wn_cU z>%|3Xe{3gQ7X;Igea`sf1sz$5zB}toDN5l}@o{mkKl1Xnf^e<}%!10;{yK{qC*eo! 
zeGQ$gq#Gm`Gl;h;oI2Q}xMUFHmyh3VruFx!BTmwn+S}9SN02j(5Pv@LU-AW%`}f~% zLz>}`bKasqThn!hZX0A05Ge#sgbz#X%%EQ=k$6n}QT#qfDBdHF z%F0T*Z}ruJ@jOGLf5#>NW+>$+i9_366cl`_R9Ry`cs2cw!FwJ7c%5&QO(z5hA2A#n znr`vtP;u7atm$1IFIDIXj`Of)(`Bo2!VO(!_r3bhU4i5$Y|%_i^KVxe_Ve7_3}!6W zOn^#dvDxUxI;wV2mYZv>Rd#4v4V-ID5zD(o4B!F3u`322GDyC)X!tdKI~V~F?Ijqx-cFs=r8f{CRm)a z9NslPnZ}Bq7#irioZ>$LMvN@lCOb`nA0{~Uf@cZJe}__XM&%b4=6{AD6b=>A%g338 z(2$n6rkDoxAtP~JodOStgzPC;jsC8pFLgg5VRvJur-xGOgccRK(1t+lwNZ5&R+K@5 zkD~YE$7@!r@$pM5D>EEIZ>g+YALKM-)V`RQg+Q1`b`9s$eLD${S_dfgcD5;fSun@XJlVpJpbk%Bm4}apqH=SDw`b z3O;kD$f=&1P-|K;cL;r~OpoEHPP3U*@f}U~b1JdZ_Vkw8KW5t;$c7Xjwqg_3n6AZ} z>gfc0-QH7m#wPW%GV@!`8Jk?)!h&B%dS6!JuO`ddsnX_xx)}VXHwUvXQa7S!zU1^1 z=*sWtLiEb}H%4xW_MB_ei#RB!lt7ktHl$UGO&WZpnpti6<8^Ng*I$O$e{I8?hA?Xf z%t0X6y_5lxw-!SZz`mX=DzrgbrHEX{7SsUQOWIFjO8 ziazLJd=&KJVK+U;^iXAom!4zFB*O+0`}T@I(sHkG~p_MFh!R@IdYE-rpDNZQ57<z1_XwYT&1{9H&$=P8*~!~;bL{nDSnAAEA2rwn%yJpMX7F~s0eJUM9Hps-_<`VXc!^{vMb`mVHaqe@xY3kE5x4@ZO|kFgE2Armfb;C-XgG z^%UvScz;?6umcavkNk+%vx{JQ+)DmY&;={DLVuT?>=7T88T!Qv$It3MIyJG#>h{9J%rv$-haE* z7_~#9^po%RRb2LnMzA~U7r0}ONQC!CM%cJ(|Fs(;qanJ_lFDK+tgNL&H)1!(QFerA zB@u|;RHB=M+&dhggtn}~-#E@E;!)jn{*PiU>jD2ev9qNyC*W{P&jiaD30{UGv4Uw# zfoh3GY{8CA9}T-5z#8#Vq)@zZ)_t`<<2pnFO1K=TzEW6x(nXS#mY#n8*4x|rKg3kt z`++StbI;e8ujwy)gEX-Tl^Wth~l20 zAgsntcf_5D*1TY?-|~%KqO3P7GO(y|BD76}nuh@fRN3z$*mp%sNMUVE84J}3XONLD z6r-~DTJ6toLKuk(!Rg1VOw$0{LCW|Dk`R+T)WU!|a#%v%-@XI)&eJN=zPtpF>iTg)3;M*p^n@rtc87iO zrn8ph1ldf4MvPs9+sdaV_nWW*VCw8fJu6r@K6M3%L%u_?)&Dd{)uwUB%5r?#r^$H# z_gXgN?KX1={@mDYMx<>*?FFY72KrjNr7VqA&$-ry#0+*QAh zgWgo%@2uE+CEN2Sz)@h(0(d9}TQ(er!;ur*wOUSy64#r*dJmm7l_@hRF)G`(^kY$@ zoDAKURuJDSA(z-(jlLoSkJ_@Yk41!3hIOJ0ZkO+uw0ZuhQcIaZ83=k49!>;?OErX4 z%6;agQgy(=2KlZT)7U-*aK;C`64{~E_wO(4Z%+Qf)hZQ$J*sRkaS*~@lpZd|NW|Vr z1adZRfKkX$uEKNtk&z9mJNuYU5&Tw&lXS-kRq{b;of#qdC#X;OmHDe5t2=1oWarI8V5mt6+L(1g^7;eXiU1R-pNy_N{&zY2STU*dmQ-t;BKm=W zF95QG4f_w?>=c{OtNBjIfa>C2F6*3SF3q+gOlPV zTDT`-``1F0GN|VE5bMsx53MTGSCDUAP?K(iy0aIF&2=ay6_+zxy>nRsIG{4#Dm?Wy z$jMVLX8;}agC_<6P{e`V1pt$HW8oVkC*m2}g{V|C* z=5#-`RBVA%o_ONDYzki%-mIU-Mn@izQog#uOzdrX)sXSsq9fC7DYXOsrdAVjc=V7_-?`Jh#n6{?hQ3$2sbB>dvMY*pDoPB~ zfo^+y{koVNhv5e_xpaVodbI6>nCr$-O=c3r-`TK%J2#emXB5L z^NX8dnQ!***#EJfAKvp|c^E9p!JxQ(A<++i@o6x2p3LaWTf(Psse+jRAuG~RjV+ri z>!kCG;9DaT$NP$sB5x#wDA{c7?1Ua&RYO8W?I&(KT$+zC34OkP1m1GnnZ9TD7@rxU znTB;KJs+tv`tg=mBguisX$R6DnXNF*@-l|WVXj>tTq3GY>?2s{U-#At>W)>aj!ALxO&{mpKa|yMHFiQS+n-Y@EIc znd|)7B*b z!>&6!6mHeo)!U#5ZCfI8`T+ut?d(~JYPx7*^?|s*!_~WDYCo($9;Ah_kU1xBM+(%j zL>NaBF5{X+O<9#X7doxBcJu{Qh!~IfBHT@_+LViBh+AHd;!v_o%;JCQla zyfB2pe6W9|kl`-<9YCLpMDMDQ{sdsCLx4~8BMS4|Kuq5PnDY&Q0!s7&ZH%6RQK*S2 zAOoHoI=`^BUC){^jM!ErFxnPr2Z+@MT#((p4DhIZReotP`C&((OK3q3 zMyB8zu4#qPLn$KlL3Vf5_U@leDb~Ut<0s++!;`^q;PS4Gz=ps4$HmOl9esV?c{Mf5 za4r{pjTNO|ea;}KrMdg=DD5ntr0dqAf!Pqc{QIU~Q0?oG1~waXcXRQ;@wH3HHK+8lkc2Jmzp&#~*q2?|8cT!H*1Q%(SJ>vinP~ zUU4xC^1=wv1WZ&|4{*?54j%Z;6@y6MNs)Rd5B-I(s%(rnyJlD11XL|3M(K>&?c!WL z^L<KT%28w zYG6=E*=wRjg~aU0xFldua&56FM|Wl}-&XexsDo$Ff`k3HzBuN~Qk_g?^X%KIzK`h^ z;VfUH$v_)e9-W|t``F<5+yiG*N8#Lt@FMgIoQ(Zbh-+(Hy~Qc+Q`x+YK7RhuW|I56 z!063okql(_CavlIv-j>?F*SIrbMZBA6np;3<~Ir5j=1p`uf6v?3ok3agQR5E7WSoh zR^h%*3hk}STNoP zZ(4pQ@E=5{M**a8{M!Vv@{BiQDl(=>et-%6w;wFT{Xc#jEM)7Sta<*2UqHa~l{nvM z+CL#%_6aNJ@YP=d=)b!)c^n&cEIW_LHqF*;)fc(>`JZeCZbAoG^Qu6Sa|-M%pn3ve z&A@rfH}U7VxTW=)>7j+a+tpLsGDk;8f9BZT z?o8`F>m5i?<_D7zhKGgKBd#w`@;T$?bY!2ymL+nIT=^XY!*58?e^yKwKmw|@zRb}4 z&oc@_98?Hs=b6b)HB(!?K&-KUY*x0-QB9F7bo-YAuaH4SM^&$(&05Tj{+XYjAH(o= z*;x_!oQB3uB@tDcY(9N64Lb2WvZH`mv}8nZcI@bO>}YYS_i8Sx0qSeNH1D3Z9&}Q+ zpz@WkNOQ`t?w~}Rc-I^N=Kodrfzp)V-%XOSA7-so|y 
zbTBD#zEORDE7Bt+-Wx1VMa+L-0fC`o*x_}xnB%QlGNJQIVG9QIcL?4uxWp<*=FgqD z2KU`rSwm9uM@A%Ouef6H@wSl%nz|lm^Gc_(so9@@Hg1i=lmKv0XTWQG0=s2CnV2m0 z<}Lf(^+=YsNG7z~C5B8*tBvCG9(Ke#A?u_o$^A_n+}YQY(ZZQ-{4+5x&k4?;_Py=D z+^UE97ba}HQ@fa!{-w9pB&rxbxYLypQu!%3<1VoUHZV>v`x>jSM5StlGq3fYt`l2Q za=R&CxA_x!y8*JUR9x>~1+OYwqGHO+W5tynf2|<)?)yN#lfen66B68G zItMGTI@U?PuvzN|he{7I)Ak%)U7kCruyVH569O6@UxEm07$@r?YZVi^Z9EV8UGci2 z?weVUw=#IHS94=SN7ksC^zZ!;z;R1g;Vy(aw5|gz|Fkk_RBPuCG2-42hbIJ~w;!lYVZ%$||MeNz*=d*jf03hVq&sg;;?X%L{0W5|A zL*Hh`{B|ar0c|w`z6y%^=_&_kt5@60st$P9@$$~eu%&5i2yo8#jN zNM8)6+vwSS?xj?8*}l`tL|)wT2_XM8AC0j_5=kmfe^F7>X56wz+`XcKg(fF8H=9%& z6^VcCs}06Ou4*13kU|SreZRIX5iHH8;xjR^&Jb2-quEMtZ`0r2q@vqILS3KG1^Vay zv7p1;5KYMG9^mEBU@WnoQ*@E>i%NQju8%HH-+Emq^r>{*u`XEiM|w=Zyj zln1TRL(cikj)5kSU3Ju%3KR$G=@Li0?!bLw z5l#CTH&I5lfdXua4Ypmr;L@~sBnE*V*CLTA<7CyVylo$uT&OusQqlQoVPdTGr_=sT zTguAt9;l}aMX-W&eLG2J&d81d;e>;bu431JC!iim`3548omXFG&yppL5O@gT z7PbnHyt+2$EY$YkOq~Lx#8Pop`GiYA3dSMk>Op~oK2I3CPc5~r{(vIf+H%N{&BN%dKKX-KApIg}42ylby@B4>PATMx02+kKATxREZ*|RJ_ z_(HG|yHofm4Eej_9i>U;f^io@%!iP@)Z{maeS(})ujrlPzS)ra*O{pgG?z3y$V1%s zWQXc@sDx^hXO$!dnYVAe-ebAL&TeZHWepidO;qvp{vqiPn(#RS}d zeMOS}zBIUPt)D?QFPXGWeLut_6Mwd zC!;1%_ojs+qTJF#o&&?wk94*kPI6*NhZ;7U8v&)P1+lrv)$=zDq)A<~G(f-_UgM&ras^8fKkB+R>Z7t2|)C;4F`-^9oFZ|tS zuEn0<8B5$8`|CX7uZV~pkLnD%K5Rabhs+cCnXZyLiq1nm=Di{C#<(cDNm6?_l3H3E z3H$1V3k|ElYpqD=S)g>Pne1b=ojuIiuQ!D9FOh4!I zmP@fw3pS9+x=7KAX_UvOBD7G@zDI6fOIX;_%jq{!Py;+XF|Y?oTe;kP#x^s(IB#GO zweZ6}B~Q>7olp-+d}++ryW+?T=&Lx>;R{^)5k}TYIftkfR}5w2j&I1}kkq9pSjN*u zm;SKpx2V6al1%nxpRyc(GPn|TvU8D*U;(`{&>HgIjf-*{&X0U{XeS$xp6wA#50CBD zT;c>W$I8w1DD1KT#(aQXIbw2Gv~s{hz)f;}d-$)6so}XN@&V66jB;93voXiz;&hKP zwfu=%H-?;57O6`$qOtL9ujqRui}X)b0&A5h=5Yr=;Dqq4@U0kg(Z93a{9t-vc3hlj zXzaD!ow9^^>Dm4aMM4wLwXC>LJQ}wU!WF5M_%~v+_gKof4&{cFdt-i^>K7C;Ixi}% z_+dIbCtej1jkv)(N+b5L(r%kE4L$b-O9QvEcdSBxKs0o8LNR<~@cm`?zXKa>*sQs1`uyYbfQ*vcD1l;@zoBQ zhi#PtjpIUWFW&mbMhNt1So_@h?c1NmZsV3Bt|=-c6cTU0FTZ_PWwBKsGk#l7dFe)X zR1Y8)bC&vG!-1!fUkJN_P=J_bazH0nn!SE8VM5z_Tp!${!=i+S z-*QAFM4nfq?eTbF(eb8SX}SRT5wJZy9@PPqA3OvDxWH+%&ooz?K;-ha_htUQ!CLoW z6?d?OAG{eWnR7#$8wI#Wwly{VMN3}S0vDrlws*Mtjv&vb0<41hYGB%Tm(zj9hy`)& zL9WSEhE!efA|N=8SMZV7i~@9}J697OioND-5qz>PtGhWdGtae3-985@@v%Zq`OXqR ziw#Em@e0p}9TFu4Cq>uihuS|tr{&_+d}fdE#}3Kj#hE1p3Hsz7LnD6Zev7Dn8@U3h z^tlZ7EV&>R>E2CqB}uFMsb=&mGYfiAp~C82K|kaY60FVj{1^Qt*OvIqJI-2_U}%N% zlWqyBP;qR}A1%q&?+(;nM3Y?`7Bh~0OMbwZ*lgH^XE;1q=fa5&fBXuN#oBzf%J09Oo1MPyp-J@ZE+8Gh! zju3orU~uwk+}Vodu)VDwcvl#!He{|%i{b{PDs;2k zkR;9HE@|c9XJ91a;+ZQPlDZ%pD@p!Y`7iO~v3)@E&e*aItp^jdyLDK7uzWZaDx)|0 zj`h&=RaDrF=;-^m3r%?rK-Jl|wVhq0YW0s@Wxlw7gzBxYjWtsKR;nwke(d}d_0VE%P_6#E!uqSW)FiU^b z9|}{ca$zq7#ij9o_|Ck7Eblbu6cSqDzewn$B9hrOZHY#ALa-)7Skr|75`h*_XFTFb zk;TWa7J2fMM$StcerLD3O}aC_%tpiS()xXJ-_{pOD^U`j^31(UANurb6l!k9v7QpH z2Jv3jx!#_X)o9t!8@i?0e44AtmhDq?;|I6U!8ebFfj3D;l19GdanW@PNtbx_SW=Px~h<%1$(XT8{&G?`q+V_djxMi$d+*y`Ho)NF|*Y zK;-p^bkBQakO#QPF0lF33f*5^inzXZcbtf_Y+()A`yo15y^GwJXclAJ0V%b;14xRT zNSDZWCtsnYF*R<1@y<=s&+fgBaphr^eYzqH*{uU~OVB>PM0{y(Bl3ImbLc#6UUh7z zC6$TqoGI^@9rezg=lnTpUmF+*^^_rb{AoE%p`-)aSwVh(?zMer4^~%1G5U&&FBZ~e z4js}@a{)1Q;&;~GWq(%MHP=2AtD@%U0m$Yj4W5hK20LRoo1ET8U!C5L_y))J>6>S4 zGyb3d3qXD{>S3I3#6kS(D_;(6{zuB>^mBsk(J*wd^chn}4*O&+C*rW+as3>BitPSE zv{wBZgdd63>6-z(kfe6X3oBk$(&wGm4%>Bjf3V=KOlc~Y^mX~LILMj0tbrJ`IkihQ zLI%nutaab1KRZI3R51#-FUzIBBR)&-CY-s>gqZ&MM;XP7OY|kgSLe+35$qm`Gy0z? 
zyzpchzUJaU)nWPFVUGU4i28OX8&wAZ&Ss{$wPkHzX6h=OWdK(un8$RGo_KbA4q*{X zXg?J!@j_FT`Rmen=8gmJR`nDo@r54l z3_%E{D-nD1eX)Ia=qq}grJL_|unMbX-VWLK&qGbkomt$U7bK+CZbDAcW6w+z(MBpB&DT~dgtW8^Q0 zcj0%g)HA!Zpkh$)2<2r9*M;e+w5*ByTx)cz8p7kUay(P%R+Hxok9kQm!Tw(+Wfq+Q zL?L^5S)e`Zy?Z}dpO>IFs1__5Z~vrV#{)#L(<%G=``5vHkVSd=(fr-I29%-7=zM>= zLW3t0sk0YuP^DcXhlN2}lZr*GrqTWiVKzw}Ne;*eV-gIXL%3`m^eYcRzFA#}W%N|| z&QVtsDO*pIb`s%is!)$boZ5%aO=}!RyD(RikYIK=>Hw5efFx% zF5+)*9Erd89)e{~&DNoZ4uR?uHI?-58zT)$!qj^>a-Akd_tK(i;n^)3FM-)( zFr)v9@!n#aRK6_8&P2qhIG~iP1vT;Eo#NRs&|BX?<#ha~kJ)F}g^C{{z%!=Bh%M_2 zxn>fxswm{BT@M$z1NGZHk|b2VdIuK`r6+;-f1S6Q*(;BfSCZK%rN61O#* zNBa{%O6d(Po{MpE(2L z}Y~imDTO0e`1Ka9X&x9?*DQE9khi8zRP{@cq96!xj>a4k4 zU&*)Wi3wc(oIugASL%fBZ~07>Zj|nWSxejNRroB;%qJ^t)AF zHpO6!tV&?XKWyy!G#i^`wi#KY2yZg*vRMLADcuU z*1hTh5Bnn?*)J+svWyRe!SQS0cJzd#@`Z+8&8l%VPR%!@4`A+;@d!K9IUgd``KCKR zn<+r(3BW-dH|V`=V=^NK{s+fI@CaqI*7~RpijCM062;akp1R%1$=$}dgIp2JJBE)* z-3fuA5VP0FCM1a?J>Y@2iVO0d)by0DqY&$aBb#s~1J^_P9SQ zzjx(b0k$%iBvu>SvK&Sno~0I26^K?1 zH)m0YUOipmd zKE?1)tGluhH-b#3UWccfFCVm$txs@M$AxOEbO{{9mVjM)3qUSA&W~8Rj7mq4i}>7z z!}M3Xs9(U!{&*9+?r*;;<0;aD#xD5>2sf-mP&Z?!hv>R316(X@=rR*^FY~A{t5>=Ht#gNK6}*51#O*M4`rJV8NxBJ-%xcbeCrZp zV~C2&3p@e>pmKNaeD?uPc`dh~qebtLeJ!eH>RZ zF%8FRBVI}|J?KQR$m)iKgS$E_tEk|@j%JuCDRGVoR?};)X#Jw!(yQs^p4NP{h+@mN z{SLDf;c~kCN>Py`?nxKvh1mt;%8sBc50pj5X4z})3O=Uv-ptk)Gqz*3xJgNetS^7s zUjsw#7h~YUqHzHFqaGF(;1mCK4Erw-I?^!g`S(xxakH91uq?LPGCvkkLzYsxw-+WR zBOU)tG$ra0{{<&7@F+dX`y6JV%r=6%K==J30lc#F?TOU)+5qrV%zvu;q3`(Q{ z+10zfOOijH5=8wl`Z(v#GK2VFwPUoYCG`H)a0kH{k+$tfN{&8Os@P(k(jf@gHUCcm zr62JzN2<<`&+-@b&QTQ}5RvhUL_KG0aKZc|c56Q_dPk>u9ojU!|HP9cZ}WHg?kRWt z(r(8^h>7B64P<+{b=NVpL|}aI+d&X;;OsU_TKv6({nrcE8EnHs4%c%nJ`Z;!c#5IW z>-kRjm%dBc@d5#A`PKEc*JbM67w9f?Wp4rS4jL*d^?3O_811%2?vv@XxQ*I7oU0O=p%HQF%%*Zh8%#SW`ZrCLO~y01 z9vLs{L#8TGbiJCEhe)0w3t-X8zO*0BY=aD`RF8hifE%lp~1H3VDqTJT(2tIPijW)@{8QD z3qL;%>?jn|Kb(AG5Wsq2u81x}0VWlJzR6+(9$VWf_d)r)Bv64WK|~KNz$>#4lsjH< zo^-6>ay9x9(<2|?Sq@LBe`{kfHbPGPt*?G#*dj&OKtgagL>gnc)al~`h2Yb0z&fbK zu%E&PV(Q(3yO_{7G+kOaWa%4yNqdhfSF}(O-+>MZ$8Fq$bKHq|?aQ(ooWh%d6b1)U zKU%Ub({)HT?&o7jKa&Q4Fj0_(+0#4#AeJ2Ci5FqE=VH9Z9#(ME0Gj5e&~PFIbeDVC zYa_B)m(RNEPE=;cH26tvb*O2MR|ABeZoap2vEkcR2j&zk|r4Z&&~u2?VJ zq&zu|KV8@AZ-`BvM#j3WJF_l!9GTl*mh0*WXc>55F1jfnPiaYWAI4__$RB0Dg5IMT z`r&!tlQ}|(5TF}2hrd*6_X2NWBl$mXk%i1Zh5D=>| zfsxqYv{`S^Y!{3`Sm%H-~A{4LQ>Lcds-P&}#xJi&%nO z)T3;LQs;*?M)oU0^!B(ru`{ZTOyzi$_+)NLlB?NhO(!M^s)@%I0tSdZ9a{zs@TfMd z#0HGfh6blgS6V=pUH-`Ezo&TvPaBLOcOU^^6S4(op#@wY+|nobo6*#bs`#66svLoh z&uFFIk^(&@DD~Zy77G>b<|^dM8IF@o z$`-*u=usVYGqeZ;1BBRO%men@<`Yt?+a=Dz*~r~H%zO?o9TnKBPPw_{U%F(m&7ie3 zp+}nsHIZY02!Wv$1ZqrsVM5uuQJfH%F8{V{O=)eU1VSpRH z>Zj2G*URWdm6aQal^tU@XVT9F#!$dqnKM5;A0yc4tHwYo#tP16Shw9wOS|#7oV~cg z%@-@=)9bXn7)<9$)<=Q0fh4Rn_&@fi6?u1{>byQGt?UR;nxC%0E(UUZz%uIVL+boa zz8WbfLjkJ=mHheD5dqYoOAnKZ7ZgJYeU`H(h!My5^l6BjnO~5NjSUto2$2itI*3>h zXqIGKzXrrMfE+LsO69!19+|S_v$-=?wp`5);=;{LUsrx3#@+8XVHOu|xP<<!TZO zK>vxg1@UKeDilE4Kw=aZWC5l=6*aZ@TG4LeLrx&r&x#H87R ze`QA8ZC0?d0@}RA<)@n28e4v2KUru)dcpEh)4VNRnWL=0PwG?Oic*MFB0Issdg(*G z!}MdNlmdPg>)XRgFwu;C?+tQrED1*CUAW<&XLOu05;*zh3CI*mtI!5BuCV^&U4$JGS7#F4aNISnODASp*lj zx!bVhMgog0K?+`zflZL;ZnXTi_$Pt#`DvY1F{k8T-;)rGZQ)>`zDWUSolsnWb=rwe z#QshV=JoQBnH1#Z-BzP=KB%^Puc>#+^#WKFZ;&<}U-X-BVQ8ZUIygy&Bk61%iwj%*{v=_PTSW%rAhn}jwm`JlvA>fGTHob z(gocN%!Ht}EwZo=;vxx<+5Mp6a#lZ3U>)((YShFywNim+=Jc>4(69`Q!4Q~LZPTU3 zZ7PiA$zma2+h1$Vw2j)Db*0YH(wk#_!E-#N0$2%J?l9}z-+yViBhFx{XpU&jE+TO7 z(Lt{X2VR3Nn*0cQ4UgY(MeA9sMMd+z%VP73cUj!1&d5yr*J1%+gAD-7n^i445>6i4 zRcQ6mQBz^^y73v=qp>IA%%}Qf-42$9=4>gvBDeN0y;E#UUwbSSv&0K_H!y~`EF4~{2)8NPj2&akdQD+mfQUkCT1s>*>qk&^}&BO0MG^g 
z)%Vqjq=)nh3&p*oW@6xP{lXqe6=&i9@9zrtf4%q zRMsMMg$lD`?}C6ckM|=I&G}cZom$h%k~hzUl1Iygk^%8?r;W34A>?+x*-k{K#~h5u z_Jhw&@ZZTDek0?&QP|Gjx^*G)&ig&tuk#QD1xzUTSI(a_Tb#5U>LPx>la_e$}# z3SEuK>-Dw{D+1c$va+Z|3RF9w`plo^8US?(O$V0ox7c7HNXf_$x#}pn^PAcl)tUpp zgMz$HnjC~lHpJjx;GQy7pZk9e8erk-H%g41s#?iEchIz4;LY95o)>4*hh?#Sf$qM0 zcM3iy<(@w;Q~db$=C@pXVbj6FepdC{{fmy;{m+V&pW{m6sy0upSt6J5TR7hTEP7u^ zLkc3&+>5{H$jO8~0VWco*vt_BsPX}Pn!|qK3gL%@Dxl|yb^rx4{y^~^%80D$m#uyy z22{JQa?630&S68OSCDXw(%@+3FDVYMyr;KQs+U?|_(9K=DX9EhE3XFee#^mjAy8RN-`S?Prytd!%mr+FNk`m= z?h$?+(Sz^Ev#(W+?om}gRN0Y$%D;XvlG^`BY;lqQlGx12T}+BBmWzNwWgA;$|K1N$ za{+*w8~q_ZPZAAzQ&Y2=^vFlp_i0g$vAv#3aXx({pIh$5$9%GeLpkTFP z)nwtStd#f5Kb%lA5n$zU*gmURp@JusKi#NnG(YF4m!6AxlC*TyzfVz{U!=PzR|@c# zz)3+jgnD1nia35=%STd7kqD_mq399A5{-`{kB074|H}kY;=CXWAe;4HzxXmb%ESTC z(zTsgnB(2@7tqFU>FZu*8+m1*fLbs}So}g9G4B`AclvsMzHpSCl-lDVloxsBj`#bi zjaBW|&8=hje1Rwahi0E7ejdFdWwBNtgk@k{RX7!<2^pfdg1-?n$e;k3I-YLK9KiJr z<>tL7e{Dg*e#=cIq6Hw;1;Nv#fjRCs&{=%O_e%yb(gN(f`mgzo(DhY3`9q7Bl)^jK*?XhAR-m9f&| z$hmc6wCbpu8>BsAnLx}Vqf|p;M^csh^B$u`T(2xq7lnSLEqiUw*F{%x?d&nh9-{${ zGWl*xk)!zctjZDI+a!EoS`|FQ4sS5Dz`hu8m-9b!H;Xq*Os& ze7&GuaRL4P77tY&*yyVH9ywwpE=_?o&u2el^pt{(!~)U(G}#uH&T1SHY}vx|5ca|d z$>sjIa1e^90mf)Y-i12Vv~BzyxW*RhTIH}eT}LoeRt4c9k%FE*^xY&smy9TwMn=A! zL)$)We)+X~#fxXTg{KalGP!z7(R9g89gCHv|9;&S`xWRdU-Y3=jf7Op0Yr;ZXH*d} zK$#KrR1TZ$GW9jsoUGNf#=XZ{5zr2=0v4fOM=^245xm2!4K>p1Y(A*uTrXQ8tFYf` zz+BT11G+~jHIS#z7ANMgo%JDzbkr7D&p^=H3SBZg0N1P(A4oR_0JPwZpE=rHS#C2; z#Kqje8wbz+5S|Z6MXp^YgOeW-?t9P<*k1cc{Qy zysXA?LeSK8CZC@PV%|s$X%|jLj_o3k?tOq1$m??;#rRchk3#RP@BQ!`ra*`Kob~f^ zc(B%e5oY&8NN)*;k3RXNHWuGYI%yx!pzarl0-9YFv(mrW0i4^B*V6JdK1kqUDTt#M zRPrzKEC!n1)Y!=I64|$<5_#mxs}4MC4qc`@L{}T8C@)mkaAIXF*e}Pz3@8IV%iR3- zREXYjkBmT}J`otCvB=GmY0jzpn$yB*I=9miCM=b|#=rS2qcYC3^+%9B57b%k(E!3A zRu;s-N*1*hs{e!#&RQ#q#hAvp>o>= z=}l93^dLakY>NV}%YVsv<}lw*8(!(7zAQJEViOHaV1hEFAjd;9@ir{Dt0cVSIk6%~ zKWV=4>)^=5bnOJ3Tq@(`%`yCU1CFZ?7KGP~hrLiUo0!oEy@08N26BpEyN6|fYrsmF z;>g8WP2(6_i9Z4;WXukBw~)5hQKoYZsfuyOg{#P+E@|G}Tyh^FPot3&9l`?MBeMwG z%Q%SWeP@s>9AI_nJyG-Q`oHCR-U!GvYBoh7-taXKC}( zXb**C|Acf>;K2v)n9us9+vuz{sP$6QuNs@8^&U>RR5%sh!wjO-wUiO(rL2e%ArkV=nD7eFEm)|fG_L2c6V0e%kdtf2{%wcK}qhJy%NNISngqFW{idI z6eIvS-2m4_hf<)>gZv}FpW1#rxpe)``}VMdnQsDB1o%+^#bE)8YfMj;6_EQ4$H()I zQhKe@OgCC@5E81KA)S`xVt?#qJwfD47u)A5vtfXRCr z!F!P8T#fTicgBS5l!BhS6^%~hKcZ?+AI#82R&mkH;9%lod{Q(l?dorMA4kbRUlY~Um+?lOEt*pUWZ08x#6#-Sq02A?p-7Gvq-`vdA_L^!f z>6v;*)CPZrEa98uASqJ$R(0O+T(&_~uHvWisdrnjOJz|Cc85{?i_c16m@18Vz($tK z`XK=J@C@xbPbXl%J4Au?&OdmtgKOQibuF1d!Npt$fNnKSoep~9s~-P^p>_1a3Lf0M zpx2I>5W{-r2O@Xxg}FwtR5rlSHB)_E@4H`B%~H?KD@rZ40JwvHbf@q;Lb5tsOG%d% zE(Z~u$hWKeOnj=9!57A+ChVV&Go9!Xu9CI#{`b?|TjzLHxS={8ER2zXm6ODZ1g=KE zr24GIgQQ`nfejt~Uc&5-&)b1dU+=`hUW&4)I)Jbyhz^$EqGyd5>E)i>U73=|%jsFZk?OGVFe>Y)T=aEYcGFg8&9SYg9%)=H zKzNX;)*2aU-^(8c=JR1-L=3pTM8qKS!jdJc% zCEd!CqVdPys{#9L1=tp_L_}pl&3nHrX7b9*whgZ?YKLPD$n-BSiPV zvfs0Q1v8@xAf8Mc5`lQ(|Nk*2^kWP18rc+g&QuvYTr(BTH|c44GcPguI;}eWy*rNe z0SbK!K;8de@1+K-9CQd%nS&1@D1;M7tOfW69)${zPg@dK9beT-EFcGi8Wex~U+xx6 zMx7jtnV4d8ic_hN@OI8tVeHk66&%dTw$MngORLM`iiZ|pLDU5q5C8v)qJ2EaO45m_0yLDu6VdM77hN&I&M(mxvkFO$IeC(Tr)2#xavSCl$UF-EJ%P+&GbKm815xBM0k-ATrZ z6ahH6|94IQ&$XX2$+w;I>YFnQwx-VE!)fXyKuYpmC}Df^#{W4Mo~oZF&{^G*G;cw9;^+ zZ9Q~a0PAP$^}EP(#@VIoEZ~fWzoW_`67V*OmV7F&pv%RB2UP^9UoU0z53%=&C3+~E z?$ICVm-db9I9&E*Pgm^b_h)P72je>Ck5_vgrmwaG9RMKpLw1rS zM)ID}G{rPK5H^CNh}*Flkr&2cQIEd|R&yM-Oubr2$Lp5Od^B~+{O#qjfknN+GVSSc zf&o{_Ig5kxf~a{?bva?bV>=eQpHui zM$?6;ilQP~Jim5Vo11)bimtmP5Vl5ilR={V+<2kx7?puGO0@%Pxm+hXmdT|=r}Jbp z(z3t1TLh}TI9j$S)Ep=j*0-`MZ@HbvK8oU4hxw*<?scTIq?qJ7L 
z?Mhqz=0?z>pRToXka@nnn9ey&@Ev%}bG^S@3vFZT%L*=z9Z;1t>~ww{r*g^{g-82-ODt6gk}CgB+@Y4MbyB+psa++D-$;L zy|EuA_Jnl$U9#jb-BOiS;0@9^eIb!rY4+}v$8&w&G|MSD-W28Wb*bJWrP3$)>!kPV zlg4L)!}kSdzp*aY`#+U!SK$+jloL7!nGVtB3l&*UC$$rdM7#q9Vri70D>rUj(oY$kZo?_KzGZiY7S&oXfjy-1wV;KhymjYXX z=t9GxpD;=ACCxGF#@$xwFP*9A3%}myj#xpcF4p8>*(NE1!vMk`t z&tD_G+N)fjbKYOtCQv?WL%H3-68xySe#gy>$$A#K`J;HIWz&B8!bws5 zCG+KjY|KY89;I-=Fvq%BA&+we^0Qyfe#|Z@ zL&TM^H6DQCf1xwnD7ZA)_3LDk?&5 z?oYXiA9YEhuoFfSvbq_ROElbm!uEdOespwrc$$`#owc-<<^@%|rgOVqs;1}CUg7P; zH6HDBf8dnKE-+?^c))kJ*lRXZ5LL$YelsJ_18V3x8cQD0Z-3m+AXuwk&^^8w?8@>2 z7>SM{3jg7L@yvH!g z?8IWc;(1FPz9;&n{vx0p{uff`zzLmMSmV*uNTsZ@5Pyej*JIO&!wG>`i~#JeT?&~) zI;2*v9GNI3{SZU<_xK{dNcI;dLk?>ff2d7@6|zCu0Em!SSK{al+>f9e4L664U-Yv#1-o4*-z7 zoYvc~x~x8JSYdZ1&9wBuIsWpE-W?;rO(YH*s+6kkYnHv;n68$0({*-%rZG3>qdIn` z;>Z9I$FM;FQ$lPd0KnWs6=;U(}OA=>dQZcHK5+DD;-Lmn426$B~M*gf+{4N3C1wHVkv_*VD9k(ez_*J`-b~ViLEYn#(vTGG*GA%y8$P69dgt8t(?kgjxkyQNE=i8Tl6KQPBgTzJoj!?+@MzNYp%;0x#VT`#2! z6FK$F3*0MkkPQi$gVTf+m$nJ%w|93-_onULA0hma80nG{Gk+7$?unur-ff;HH`IXoY`2~BvM+)6l`}jpb;b5^FNKRT$Frt=)T}ZW1E;=_ z8NICRgsLuikl;{;`(aZUuOiK55Z_0VD?E|3D#z=NYtz-=0mut;X)E0dyg$ZV1CGx~ z#dzkM0fa}!`@>-OdacFIF^|3bHve-f_3=!B7vAzi7PAT5O`EWDTmce-rFOy7fs!hvbfy9hGEkleUw~n;ACS%=7Jc;v+y6Qsk zbb7cBMsIpm>n`T?7wE+u%x8ImQ0YY*ViV}F55ElKqfkl{FPeLJy^SW!7RHdd{QH(& zbtNK8Da~RBn#i7QYS{N!4^+Px>RZ*hB`nq`(ffmS;Uz$&-tfNfMW4L<-3o<^9xAdq zQYvt!H(wi@rNAB9u35t1n7R~b)y_HZLzhomuo%xmN zXlSp2+%W@av=o!W2r708WAfdhHc|BG`%8)X>#c?6c@G32~VMk5e**j{#s9}4M z<T@Kni_P@D>2B-8HmdC^-#G%7ZChP)xu-{7ZOq71 z)r7>Uh>mrt^>g;`8A*FMMep>znI_{oy(ad~67k`3U^*^5QB(Ea&3I6g@l z&Dj%Afp4bvfCOvZHxLyz&Ueqd^b)lpf~jr$6-}=sBJrN$%u(Ci8@GA>xz9rOe1-r0KgcpDhe0s^<(Emj zNLCfAB6eOn3`70Jv?rleh4EHBYofG<{pqXZMaB~aj)oPYUG}>@*zGUjkU!GqDLT$} z0AkA*_%a6^Zh_S%7S@GeT8484adf1)LWrPt9X!+_G0*e$_T$Mt7JuQZ`wbtTgYw4p z9%a9{aliy{I0(kANFnG5-XBqj0370=*)2eB>frsvOSx*L6xOA8xTdkPKf&gPkPXz7Uw3Clt!(Zh#!=E)7@25+kFc-MZ=$>tIT#Bt(Tk{ z%Dnbrc;xE!fnlCd{C0!BH5e+`d@5vZ+3)E<9?h?wmicWw?RHAi*?zy| zGp%kmDx9aOnL>Z~>c?qMwvM{X>}j0fkRq`31{_I*rsl!n(#cPhp!ls=zsf$~(yfF4 z@6UjXjwX3AM+7X!Z~VzWc$9)M$>^^_Q%j)lKeJ2y2z*u<6R ze7aifx!cK*i@8FnP>z`-$i894T8jXOZL;BYhwwc%&UjTa^ZZo~N#oTYEMTP)0wFx!xP4->m_oapY%0J2Tjl@^~Q zwUCZo8|#&h&gu)*AenkkA5*>(_1EpJDX}XL{n1p9Odv6fa9r=bqwTVGJ05_{Ph}zd zob{bR37Rr9dc<}Z0K(^b#&Wsn{>WpgB$LjHjnCo(@APncqLFPM&a@Iy$I;ytm3+76 zqGv}&uW?nMGD_}P(Df)WxRvvEQ%Gh?7^Ybd=Td8zjOqn$sg>=vk0MVX|91$eHGQW- z>I4jD%KZFR2`8Hin7=Tcj$M1CvNUm6Fsj_Dno5Q0W*Ud;=TV}pn2M6*{mR-7QWU*5 zj&(`=Tu9#V=w%wMqj;k1&g^eg|y_j*iXmhN)M|Q7riA z$+>au!GPUKR0El5i;7FJemfjWd_O|Z3%|%*90707te3~Hv*lW9ZGn1;P`1Y?yIlU` zjVm_q5uDGKszA)ZP%hIe5#)4*)y5FI0d=2&V?xm<)wQJP%%}a4(=$W7TsJ;vt8UEU zfpFi`&_t@vP^bF=FAP5eX z_dUWpE!`K*KB^A@XDY_BSy=IVVqW3@A#j&_-%@_C^f!~oZvAmWl|LDteut>VHXUzg zRWk9{y&=0!S4#|`N;bEg!yYSP{B1^T#(Rax(DP1DE{=*FUwk;;a}UMU2kSNpjY3J0 zxT)ov&dZ5~U#^H!7zbP*?3Q3 zW)38cbXl=#6zkxwSZ>-nU% z(E<2AIbG2NwLP90Is)LtbbIn`_)it^Esv= z-VtGbf0?S+^zke8BldD&5oG5sE&B+%*=~#TXhT}mT9CG;;MByf^=ZSLxVxji*-;wDWCkXLdy(o6eDk{eAO1GB>5TzpTkXN09gDx9Yd=>L)A7aY5me+>iXQ zrVr>swZz*S)%ZHfBehw^8LUitK$U_d;5-@(^?s64b)#%|;;COHZF%WA5P8TuG?1yl zj*!OZ(Qt9vey?kf@OC(RNQu@zo+~3<;t_D5`iUbUnI);fQG;}oZ}KeQRHqdqv*GJe z?yQNfgq28UU-~j_b3&K0oT?dv`e%{Y!M%96QCf0=D>npASY>9dB}pOg>2%K2qz~nk z34K3Vxmud9^r0kL0JyRt()`OF1zAfX#z{$`kC5U!lf>$JKevo+{=C0#y#H}GYiQg} zHHz-83t2ceTDW0_8M;2^>$n4h4#359eYxzbRV0UmKF*xgV{ATw)l`cumnR7^6#QYP zaqy6k!WEw?i^c~lDi;qPPp)|N*)rx6{vx2<(+*CIf<4)nTKkFD92on z^HfS^dg1BVy**-Xl3MxGwvEKd7`pF7agfW7z^QQe4gv7Vu;k+YPytSNP&+d^E7@YL z4WaqbEk@S7^ zVr(3Ob6e9|hBOPp#B!}qw#yQ_ceKuWIX z<4&4I2xJh{1-&xM4EoH{7&;!%=6U#4#r(_E<6dzlAB$QoeVA+!sVW%ukVEjDQ+ubv 
zUa<0%N}8d0NYWxk9GW3~e$o}zW}Sb05BXuVoEjtWraoVlOB$KAy2{u~*pw+$5*y=@ zO~Wv-;-V+KEb`b8ba;a-e_a+`ZA2us_@#Gi=~Ka$Dd%b@?**lEf2QlH_QE{YOaOc{ zdo)FLw{iUo$p9P)@x&OhBq1C`LcLxvpTRqyP8`uv^Ds6(kvz#C`K=1tNyg|iWvf05jtibD;xE1bRHp?=K^&hQ(TnTvkcU;`4k3*gSDR z{*u$QSj39>mSdVjx5YZ7MT3Sc5X6>pkXo8PFrEA8Ufi0gJ zmqxKb$F<>}bpF4vSpDbar zBC9SZ-rb#LOIo~ljRYIoP$x}0$TBD4>1rY`B#T+`wkpTA2iB?P=UVOWmX7DMR_?FRiqdK`MehcF24r!R1IhmF$8D+*pzt)Wi^;x)fho9?`CZT|&8gc7H; z@xa6fH$=)TJZBZeM+T&tXN!oPT+UXTbK%6ZgMutiOj9#G2d1d!0Ule*iYHBc^*c^t zzF12&8>){=8CLwZ38YYWc3H z+Gk)O^zq^(v09V@O50FPTPs4M+LW_pyZ`f5%pyacrk3TStoOc@t2R$kRn8Cu?7$oL z&fkT!-DK4|n7u~#nnbbW#UDPb38@r&GB)q%4YNt)ZN`+HQ-eo+oXO)V5gHO22oZTh z#or{;Gf4DP9eRz7u{1n=E=GrH^3aTk?3tKY+2uL@?AoNQ(%QRio}hDIH3t(wq-YxO z@XqKj4DL5FrnJDpzsdO(;R@M(8wWr$G9i#2nl1afH9l%P9|KVW7falzEL3(|VPa?= zi-~~+m%6=0gOJmFmUR5-H^K#xLY*zK>}I&Fga`-n_MqWQ8i%TvqPh6s-&oA5H1m#) z_Dj+=Q$OP7RfW()I<4`cg0zS9!u9>X$VE%stGVwR)RM$(P$@RdeU-Z$oWz0k*1_7j zY;CzncWdz(jf=D9R$}F>fu1FNyZESaum_|r(097CmFQ&v&!f3#J-&7VK=edifHAg# z6^B@JoQYECr{>F6xwagcvHbGE_N)Ir?Fx6$as;Q+M0+2ZcjfrQQPnJ0Wqibk;*dn6 zQsg~mb#Jbe)Ice+YD8p7Gy-a0>9m}H(>STQ(gAHArz2X@m)y3l>)%#FG~&4SkeK3d zt4<3)Y&f=nD6P8l(Y8=eyT>{4v7dT{IDwl;!Lq|qP3NWlOmDQOhQlsqp^kk3f!s5h zfVgrVP8J3QCDvfJ*csBm6n!R6n}M*&4|$_lhLh!W;X4|8GFiq{Q*Nw`H*VZCg{p9& z3^}D}vTbE1_Bz)X?`vEZy3yz}Tp;=VS7UlpSzlJ*9NBFm6Pnl6vL-Ji}fUbY{hJwvu0IR`5CM>J#g}#PfYF5l_+;Lp(D?vok z&sW#dMF7ods^RohCXun(zz&}~HE!g@c>k=w8K#ZH0L)^%@_QTiOV8vVQ#_hWPcwhFwab+g~Vk9+nA-RYX=aR>eZa=>BB2zv;kcw9t zuG1I#FI%!G<2_xP()MN*z#TSurQv>A?Ai&nNMP@hr$5k6iGY_YOwYx{c<07$Ld82A z>(V`ba;9<8XcOkoMwfRT3Txj`a9`|oK0`vys*@2^cW<|=$Vy5M!Ll9kgV$inUKmi- zWT9bK86q2CP9@E?EF1b{ZFFz-91>_4uoiS6O7P~oqyz_BJuWmj>;xoYxu)jXs8lvd9TG3lYPO^+&dHj*}?CD?ixIGHOUkTy7KA0F3 zLr1TA=|POE8+KZ&pWt(;G@%q}(HAc%_-!^^xAAt@$n2jr&L$_#x9JOEY=l(dFd|nX z%PyJSu#n15v4YCd$FgwW*5P5C6XsbL_K?Zu<_q@A>*woc%-0GtXV2jL%7s|cbL#$>z1l|Sigrl6J(*d_wAm_ln> ztE73~8Dm+=7zs-&V?!lS6+;F~;GJZ|IAJ>F^>QswJICU0_O$~OwEV|XiWPNzf+I&ijmE!#Y%@VSU`T#XloD|9A1>&Hr2Hx=Sq1{K zjVt4xzkP1!;`1JYwJ+8Kp?L9+F5n?3a_khXG>X49TS6Im7jSEdL(D{eu7Rjf-VU@Y zcEX@d(v<%$oXIF?Z9DKfJe%A?>Kl}^a zo#&<{49%t_NExBgT*Ao!N@|visNY=(dSut%^a!g~de+DLCv!G0nJEglcT>v?#rmv8 z!mnQ=gL*4UKp}B4xVRuw;QvS0S4L&Ib?qV|B8}25-QBHpOCu@W-Q8W%T~gBB@z5#V z-QC@A?#KOpXPkKVF9r<8x>wC>&Aeu*TS6%L>bc!+$~#X^AwJ9om?t0&E$H)8A_w4j zej~EIylb8Kjg2A@i2$FZ{5EBT^2ER2gbQdKN;i|=B%2nrbnI|g%1u@p7AK?1#v8G} zHDBX?MP*3M467_>Z1KG-65zLwLzyw3vvI{2{bjpFBKkTXn7X|wf)8W~C2L%G;ybU= z=Mo1*(v>aRKI|LirpfCX=)2ZL8b@a7hBTlwJ|d5r`Pp=%{Rfo4d;DB%f1yFJp5YJ&W~)+I50* z&*i7x`-WZ5d!5}Gbab^;iV_-!4Go;PL*73Y@^nGYq;(>?_kFbixr|etpM^MbEIxzM zCSFMITJMlk&D*>*=X<^7N}|a-txtNxXnPxz7$?M9DqnkFqC2XvJ**Apg}oY)uOrHy zFtd0hEno@d1z=soPz#kY$QmEcrf4hr#Wctky>iel8LCK(*e3+U-<=7j4Fqzb80ihQ zIv?o5_>XiqRe2qb6sP#o8^%HE)T|mMzDq znm>}8Bd8>#=bSWf(;3x8V9L#|_c`2RHaSQpq{fN&_W;>)RF&K(4T5+^1fPzAwH%Qb ze{lCT>LbshUq!tT4+H9%o_K5|Zf+NY!5}J@W_sBqVF=@}FkpJQQ6lm$*AOE(H%eS! 
zEAjbEw0E*na~vuWB~k0CN=GpF1~^QX<~Fzmu5RlO+sBsRVk5uA;Fbo_f%aEM z;(^iAolk#=W@jCzC<=yhVQ=P#?1f~hI2*&ue={7`lFn0VLod<%xI5Ef%DX`Fy5nd0 zYN(qs`D2%y|2`y&YJu`}-t+M|EZP%ESLH;oJETi^f*GMBL!_**8yeDKLncgzHzK)z zM6_I2&_JYv3f2M>U!&mkr{Eth51?!V+tJ)pz2BFhF?>r=n@s}9^K%dHZj+c%$o5Mb ztxLB=XGELWiDKz&jH0>+oS0b8-iU*(5n@AIEL@h5x-2VwA&m!vwi+>R+ztVz&B?e(!Q z4wl*8!8{?-+`xG+*fXI}8g&09_$}6b z)S*lgm;D`&jY^kueFu7H)K-mWVJd@7Z2mCP^8OAE=p8c|XBNu*ryYrj-%+VZ$IWSK zZvxdPB4}=3LO6&=f37XXM{~GRT8`wFJ*gc!C+?y1nlkFZ=WIbO50i+Uxh^^t9fgEza4lCz4-l^~B+v{fW zoR-$Vu?Z?E`KinD^g$ZcY`?mmR!hn&LfP?i}OGD><1q|t$r8a5DS>nOGVy2fFTG%h--jio<7 zXi3$UIV1KXkGyzzNJ1EuczZ3~O{ANZj%c;NMr6{qe|)=sm!=@(Yq-6v>VXiOt0D5* zdwn{=Cg4@tQK+dp`#pGhNKZ(h&GRDvcFFE^I^0aXtG{zs>!7jpQ?3!Rp+eBsg7j`< zse)}JfBwVw!W-j5`Va6?1wR4*?8kQ$R94wWvXjKuJE>NKLzE45cy613bC7o+lfG)_ zqwDnnWn>7Y!V570i;io$!_4=po3-}@?u0TxgQ=wA_5i2G5B)%zUyM-xVb7oZg6Phn z!9Og7ksMjlFpd-~H7esC!X)uI*-X!!@;ciEmAi7rv`A5aM}Giu*5V1s=s2{dZCi_s zOPHbSZ#yjGua#RF_dI)ab8Qv6%}vNzXNHff22zLy5tGfgY)VzURI3dmz;qE3ab6_f z+A=aqGGfkHPojqenp^ z&HXrPDIUfS^f4{=*v%RfLt-46=xf=`^3f3kb*IUB{f%Kw=flTOVXaJ9=Gx1u{olvq%|FygILL>5tYe7t<2_X zb2fc=!?quB?>nPbvLZYlOsdCWM87@?@|`qld(OFJ*M)Qr%9D@OesHh)cr#fmRyKU@ zEMRs-pqE6U@zp#-s(U=)d<}X~ipgrlk7(vkOfs=aKqgO_kXtB=`1OXSCVWtCa*HvX zCZ||zgZ?`b`q<|2=^~OM3DkQO>xwE8Xs^y={#rJ+BsI85qW0lcLvN0CjAya69pa@n zNq)ndrRkmpukna33R29Hr9@wxzPB|6BbMQN2Iqpfo17ncX{Ji`DCaMOAdBF~%ZOmVlLIF0G92d4(# zKRBvYP5K?uO!D#F%xy-3>;iN0WO=U@zf%RKUQ4GxWN&eMy+8)xAkU)=>D+7C4v?z) znk98vQI)|p_u;pW7p{tPc{EzGJ4x+w1=Vd*O9k@NO}BP@0z!F92=o`bMZ1(JKdHJy z0|vWcO{sCCO6=u7kfz7W=$3kX!uesWs<0`8z_(}qYm8{&f|IFdm(R%LTyv`DeS+tLFyH@#FVvj4_ zD8$ytx7qgw&&54i3n1uba$nx*Xh^ke`o3{DVP3TCT*~*dsS|`pmk4D0SClOGMv-}C zF>eE@KsI|vdT;&r0h%0>s_dZ5vNpR&M_BddpOc7~Tn-Q7F~Rn&N%L1M@*K1HI%~SU znO-lAX^*e^i^azLb@RkHxF!kz<9=kFcFE*(>$l?pGF)t+f;_fjW>?-?R&3EHO(jy#qw_U zHqAM?fMY-f$u0DVDJt+y?J+*KG-F6bEK9sCspE90-KIE64UY}IGYJws3OtB&H=uGT zP+N@0Q}SrRD{*z3WrO0+bnJHqG38*O0k=MVWk?I}cQ$-c6-F0w!sUzDsSSaTmoi^T zKKdslGX>L~$Pr=RA{FSUiApo<0$HU32`WFH)`Q5M+NW-x7F*AvUqb$=Q0cY7yU z@tUdRPKUvnWzdx3lE&PG@~03m3iKk5(_syfi!RWOo|EVY_VqLG5_La!J@t5=w{uix zet4umfctRFLYd?e4EH8g0e(&?0PR^*Im?PDtj|&`Zy&{DNA~+S)qFI{ z6K*06ww++v<2Tu-fnX^~da<~a2_4lAJ*+gX%|_KHd0iq~iewQs)HmvtrMT)?c^kh1 zFBX)UBn7*4HnEH%B6D*PZEX}E2Ta31J=Mhte#JFG$F#yi01X5TWaxbKdD5GBYLXA_ z8@2zOJ>{LHo<`Ljs}&c|=WY^b)P zMPJmkW^gS!R@k|c=DvZ>I+w!T`{NHZ+<=;em%VU;S9Y2wPhvhy`~-ZCvVp4}3~jF0 z$JN)|`shgma5QEYK+kxj@=00#<$HkaCyFwCcN)t8Ie!t)=}8G2S7WRd;FDu&I?0LR zi-`spbvS|I`q;UNYhA<1NwIaBNw!6EVqSm0_pfq}2rnniR-~(-r)_%Mnp*~ma-z!h z%O0(H0Q4ns4bzm}can_^N&i}&!pZP2PbW^~192ZMe|BW7RkSSw)6F-fX^41>H$L=i{ zr@jf%@Z{{Mt?4;xhA{DL)N=b^FYfq#OBIOE@-~mCw&9p_j}9e_-x+OcGT}=he_iio zjr{URf0}r-=*5=W%VFC(7)@LrD_4gr++lLDXa&OKLoTiuIv~2Y{)PWU$g>&F9HUtD zKiYt2iw$SO%QI38QZ*c1!#6%-AS|z94l%8a>yZXMQ*Wg`uf!p~?n-xO2{_Lddm7p8 zcBBd<6X_^kA*ECv31%>!(YV66^gQMt4HlBT6f(^%B9wN44EH!vSdMF&X}azCg`|fS zEajlJ=4O9#d@T(bo$Ao;Nelvx~TA!UC}B$ytr~V{3&{8i`9aF zDKglE&sPhzsVtl`GgyyCM@TU}C($a##!>EdOgat-tz#h`zC2nmplqg_(d`0MA=Q^$ z$I>BL#}A6WZ_d9OK}}aq1_tZT&@l&cHo*U4KQ7C@T6D=~DZ1Yx@_nWC(_!`sn1Ny9 zOya?cb$<1pt$o$k#h%B^UCPLa2@!ACPEF4aXsdYDa#V)U{SE3F79de1Bc>;(p2@{} z3SUtsurt{IQ>klNRZY#wYU6MHNFjT-OnmvNl85Py?GdB6N}|-A319zdZwR~&`%_?4rKJ`} zEDz1+a#j`?pTMgG7K%;uPIE2C3#gp#n~t7wMSL>CB<%L4o`v?^m0~{2qEfi2w`CRMuMP%2<7Dq7SWM z7l|msRz)V2zPQ>58zqHwhSzy6`H70n$Xc{0A|FSCyBB}f;95S^rxnB=%zTV>0_HofgMDL?PU&;Yc$G|svIv@Z2! 
zjdP==4!dmGyftS_R5^sPFaxF-JBPFZCC-oI;({P{;M5ieEvfftfZWSE4o0IvR6+S8 zi_BWTsbzv-s?^wsNZrOUREYjsp=}xBBN287{zs)HSV5R6svTF;vZ(HAyz+LYeX9G&z>M!d-5w~0>92=v7?kjew zqI1!wZC}kl1;huS_YjmwSBU0gHR~Ql%rRARk|pE|LDG|kqMVGf3IG97&k5|*ZL>q) zs1o>2%C9CMYf@W_qU`MF3-h7^$*K%gA~z!1g`nahY9GZ@u~)#`2=4~n^n5Sa*( zK1CCE-!bJdj1Nr?NGB{sINOp0grdsikr1uE6J4qVil3D@adH9d#HuYciRH)%&M|I$ z5}*pgs@}L|IY8BQ96* zjY1jK_?-uZlB;Cs`!?^2VYWNW$6uTYZke2;fFc;qUm1`?#3P*_dw*^9OR-RZZ;1ewY%LJFN9TLe9RDRS%`xE z*ioU068XR5V&NPPiO80Ei=(l%xqyRT(B+ddx9*Qns#ofxcoFUvHyKC66Gl@XKf(Ej3mr{#nsJ=yFtG*t!IK zTWBpKH9@@niO1IHGJHO(#yl$Wa_OPkXy_&+XCF35MFTDOXDr*{qnbp8>*Fe}sk~CjrfaQMj@Ni~JWq#;(eF@!>6Y(ycVXPk zyU*TOBe*_(?BcLZknDmg>?+r}qja)F`4*&Yd9G*@>P?gd!@>r6a5jXrA-#S!BrC31 z1pF!Ys{o#7;m>jBFo70rkTSujMjPU+LxShw^)*XD%`I-S5drT$WlF}2+N*N=kLu+>R=^%Y7)nRGTLjiGkK)868Vga4F7j4f;gE@K?JjnAId6! zFpu7dC1b-wTAz71j`H4~$(#n&QbTWWh*Zk`x9|R)3?VF}O~DmHiL$%ePorU+^^rV6 zP06Mmv9>Ox)w+Suw1bQ!H{+=dk#ZM=IUSk6kNJcCVOn}RHdfg`G71(e{A>?mlc~ye zBl;{EWcBvgl_z%^Ls9V*h?Ls{^Kn(&W-L>F0IzF8A~*Et2F!1DSyL ztuL^fcc(flc%GOM^+Zx1dTq7{CRB7(Xsr72N3jwu6@PFg7hX{9+yP0C0nWUzqI-D5 z4)rU#z1l?(&k>v52S-ywwxyoKsg!DaJv7eGDxe3$P^Ru3V0|%{Cuf3e z)e6Q$+==Zj^*`pT3|3C6c#LqJSj9id4@z|@Oo>}xwFa%G0Uc4zL$z;z(UqXf{P)xR z24x#5P(p&vHE7kdIl9F|kCuc7NuA-$7RTs(dU0~5-e|k6V!bvDA33*q4Y1dM=%}ev z*#Tl0(_-3Dta{*pNp*OXl$5F|CMOOVeIRBo7C$PuSwUDDNhdL6YZ3{M{@bn!p!|OC zqk7cC02@hF{NngsHKv!92(slprRd$e6HQ=G?bm887H{5`4BZ+U7;Hx+RpA6pA{NAF zuHE$l6>!a5!3diI{CU5A>0+A<2(AG9IjsS#FDtj)Fn%qx_pWUOJ*flgnn$9{f>l=ZcAdb+{kc0BTG2(t=wm-v?hrhufy^qGsJ zoSs~;_k`yz&cN|s4-(CA z2Bfl_Ce!Ug^V(>N_+P-_FEo6Ehlz_zc;PLIKKg@Mz# zmBRDX5kqsv&F2LLJ3J}>!c0E}oB-)?EgVH{|F%CYA0b<3XwyQY1+z2q^1OilRW)Pg zzA!Yc22u&&-Lz0pr1sK#G zWR`QGRqq6nnrPso5bkvIvE(+W0OJW4pAiF8^h|IgZS;T@>T`p+U9Ym9C?@$Ya7Y0e z;UmyoSNVub-ka8So_?XZs%r56AB_G7Tl<+Iaq3O4RZ5WR*^;DN>Sn&% zo+ES^w~l;*>`qB~|Mqm4taM=sE4+U8;H+86-wkf#Wza;n&yYsgdIj$oDmbrV!n zT8anJw_1qmi2`-LTTrNiY5lzKucxStUIbtI5Nj}uUS|pdfQX+W^J2+eP6s7 zuxX(PK3!VL8mNvfg(pMr)g7mAMHPI?@2McA!`rp^|0q4qci(0;8oloKF4q_T{@yn% zzz%z`@IcPZv0Tu45NEDIPAOJ2`Wa&5T#X#Htqej zsq{lB+JHU*<&*P4a-Qm?s+GFCL{us0!vA`1{t$Q)l}UCANzPoq5>|M)Xv8;dy&)>4 z*N^O+WdW5lsg>)=*7{RQGmMka>gw7Rj?2txfTYC zw&u$>M$3-`LBQpGi%gi`%|>&kb(Z0ucfUFKgz<6-_{o36=X5mbD5}zzAT%(i2^Uws zJwuv%U5`y>=1++mK{*S@pLoSSkH~?Rn96ik`nfB!i7o&$!j=V^z{39q$|yNn7V_hySON=ryyg~ z-tAPlz&^~eL%($d(kF#L5&a#~oPd3@aPlH2lnHBE$x00r&rbbgUy#zLC zcL?Xs$J%%WH#&WVDbVj%Nj*Pjxk10Ko@SadphG=4j#cKpy zHZ#gPvuzZRgo%5+JCORTR(E^n%)-D9ml7bYZtG1r)YZu@K` zoOBdV-?Bl|Y~~Kgmau>~=EtE5fg=~B&WX;A7+k(2SGV^fo9ReFD_k&5_5ewb-jjk; z7ZgL8Xw(I}4F^PIhn~@&r1S91NSVBqR<~QxCAEIH(af2-U28)vUt3L>__7pMGuJ!) 
zyxGlhs4aVE88~eXO?$79{cKCnl%+ z-Px%+4+Sh(wWWX@Mt)N}JA9V^TS7Xty(Yqmk4j775_iT~@`7BhP_#+MZ^{WFqzEjF4ws?#meR_F$eMgm+<8a9dqd4n9a#kLU$2Yi4os^DDxK5bE ziK+$k3O--B2EB7muk#&m8n-vpd>5e{%*FqC7^+Vx6B=`Kw>Y>U)aCDMOCqA$TW?nu zfh^`z<*?PeF|$ZO>c+xRFxkxK62G0kt&vpQ1latTm6enfz)E43FmcvRlklo1tvd{4 zS=au}PZAqNTE24{qvfDXe^N6IY^Y$YN;tclM)P`6o15r9ySt7LEZ7kv@*IyVFp(3I zISL~&93PR1;hlOPe1bOSq9((b(b@pNQe9BjMWVgh3F!4D)nIT|e!s$}&&|4|)Vd-^U*A@s z{}KG8AR5|L4s_2dWhk>Ei0RmM5;VQoqXbHTWVxm8BQDvk*V<>kd;r*yXgUn>6B83} zH3Ig#;VM1XhL#}o0p2J{hIhBJQn#1hH)?|ilLezQ022}){O+Z4soFCy8=?yWsF44q z;lgYzZF|=z4P(IXO#PF#8-Mw|U|aU(=<`#Lz7AU171vWIi?S4DLlRs!Me~Z2dKU?UuyTX}um3yT z%I4ZbK8Hc*(4-~UYgxd$=(%0(ZC=N9fod^XCZp>@W31c!XP>f{sxfCLI)Tdjb6RbU z&EAN`cicDls}qCe+ZH@fHKH}=6*Fnz%XWs$aadBFpT&ED=2?sBu-bMek%dhS2Cy+E zH5azWoZC+?8OyLqh!z3a?V;BH|$4Hdmw=(fciF&p1UJSj2;*q zXEHxz#6V=`DRdiAA{Unyi*@@j8QP<1oS!D4^*rQ4qXsa-lm=Y=c|;c&#;!!}wRTwz zk)pO(y{Zj|pEx(wbk`m{Fy*Wf(=viO_f=LtGLA6;3gt1%Fn@1~9{>obbddH}ItZZ3 zJIx>m$6A~gYpXodG5{xpBZ0e11nN+0DM8-Tth^gmE$pfdHtU2^^n5|XReNhM4MMhV zFeIoHKR&WE4W?y8)ET81bhztjZk&Q+-WWuYkuG@9qu^e<9y`oEU*cw zKT_kmEGenxFV~qW5fPsrzy^;R;fiD*p3vP0>G=91+SU(|@C!|d ziQpHMAZ_uGuG&-Njb=>Iw8p{RRiMDqPQBl1hzDTJ*|oh4UqeC^bBc{@|EXO(I zP^CX|>-BRQs<4lc6!`(DcDSyOBziWdQ7H^MkTWt}dtKFLcUsfW2Lyu)>4&UJj~0kn&9k)<^nZ0kUU z%N+CapX%>Y8d(yO4alLpiIfNfyP1?+DJ&e;9H7rnEvRmqL^{4tS=*)_)CUwwdk0YzdN9`v;6Y12O8QR0DaY(b9fIY zjKhh>zJW0%8-@x5mNsTcphMtyRIfO%wW~+cnajn(q@iMie;&7QD8NL2JIP#I!b9o1 zTA3U;+-lT*Q*<(>v|BXh-IsGSfA6&m3hNA_x*lpsIVWFEyF-}UbWXX|E!Eq z8DTk#7@orBj`5Azs-`}!_4zS16_phhr)_Q`dM_q=P=_nlrGO``zEmFWm2M_F8U3t{ z>a1-3<3{#jlG%I@AyV^IU0}|sBF}rbD1gZ@Qj(MCBa)D>N>1#Z!)Kgp25Q! z)iWEXTn$(G<&Fo|_m1mjOMmx5D*kAXNw@H@uOY~2)i7J?HzK?DIu zHIs-cy}G3Jn>FY;>koaKhoJV6#;`NDBCzJYb{oB^NYh)hI`ov>X|rqjfC7SHaq1Ai zy#>s(mnqp#j*+$Z8E;Q7F^{Wlq@|C$@p%VV-=4k|5gC0YJc;%H#3c2#h9jINMFcU< z0{OxP5yVKb2+x7zz>*3_DuUYbKFvDHj2&RMUXC6|RyGwTv;$;_#jYd{tX-(#L#BQ3 zxsl_PSb39f-B$;^+1-CP02Lb2v>a}euP{M6NWlDHs)!GVj{t?5mKXXZ6K$WRgQ#}| z=~JfD&XG&Qk#X~hV1s{(OIGBPMxUT_N(2Llv*}LaB5R7ftHvw(zKjB@1D|u*T=ET)mPi9aXSI7Kq@)r@>s3n zvpDtE_$QG@zs;H#SZ>tOu6$C;`f$YAnmfI7o%8o`Rn#Xv&K!p5=w2bU{?Y*uX@eN; z9FBf|_q5>*&!P(Z*?=PfSo;_*O8eto{n#PQTz~lp*I`MjwbZJ`5-wae*I%o03=+Hp zkd{^Gj7Bi-Q;xfWy5|dwHso#U`Cl2SM&^%xW^y0PK9yF_cf98?z^fmOV4>GHPyZZjq)V!*avaxT$#UJVs4-w{fN56E@6GoTP02t|HF^RaClOv5@X zrz)(O{-FKiR{sj%JsxbB2o)qHO44+W=;2_1naGbHOYwvY6r?(#{t->52j&Z2{6^J& zH+F2iKM25q21<(Ru(I#xIn_I*w`tP~FS{i!n_eRi^Ow~qHTVk7R;FOz#B~zu(DEY@ z4&(g!?=E`oB{e7v5zU_72%KAHRL50-FLyOWnwQB*ryjgB%wd2n#h2h>%hm4{V^tC9VM{m?f6?Dtq>)WSzZv=`twt(N`D-`Ge}a3dq@ zv5c{JDXp7dFsWV~kx_qXI@zeXH^C?<=}M3p$WQGrx388?j~NcXTbMRWPnkALTVOTY zR(XC#suHyBSqhNpX%QZQek#Id2*8Z2tF3Ze@rzWn)=4)aL0<;L9`=E zue1+lvZ3W;s_U>n5l@mpRc)>ehAS5~Qv^S=a_cjH!1{nUih$_S4yFgF$~!Ep=pUV( z2z)(=EUVF(f?)nEo7b1lC|htKJziq(z6Pb=E>@uU7t0aPcXrj17rcb!Ur}84avE zhYI)*c}EkA>LP5Uo}Z&lyI0i{=(m2BPl$GfZixJaqELcJPztAQ5*YMlh*JSva_k35UL;jRU+_HV*Jpk;FdQ6k zhf_GC;UAT&DR$IWNc(!<1haiON+|>@RS=NRkGJJ?w6q@KMH}hvTL6*KayG=2GTL83 zGq!>G-D+u5uK|~41Q?_FinrhE_cAes*-K6XN1N(tmz^I~ORnN#l9>5+x_5{k4|slm zlb(-xf1#W4Czb0P#z9;Vj~$hKf!`MD$Of9SvhvN-&(DGx9q&bs0gh0~vtrtn3Dbd; zk8f^*b+=%hg6fD>wM->ESgiiaOv)$7xe7f*N48f8JR3Kiq{>SUU>s@lv$c*LAPTI<31nO zJ)blDN)T2a$ZW;Br_9Z0gs`E!UJqq&eWnAYQ^;IDYk7>(lP^46r&%BX&})J~jFgYziiH=_`mU#< zZ6S}tvEy`Grz?;*K0{Ce&F2WE9VTJ2qZPwb0cdJlM66mBB+m()QC2WYd*7C9-IDE4 z-qLYPa9E-6KN|5GSR+2G{{{yfGA?9OpmxV{RlWjdPU%MfvyG|ur69vXx!$Bn4krdT zq-udF5$@0_pfkA2S0*YA75tS2HkhnWCn~D9SE1WOi3G_Apo_m+AfO{60?Go;d!u3S zImCd&zfAb9N=T4Ey(06^CnQkR$#D+^1P`0I&=*B{4n61@!Lor+ff<+|%dzp8!dw}X z({scRfEHLcqL#$;ccZ@f!?e%WBcJMy%y8KMNn%_?*FmWj`$sc*2Yf7SgU$3as*f*t 
[GIT binary patch payload omitted: this commit updates a binary file (likely a documentation image); the base85-encoded blobs (new literal: 22368 bytes) are not reproducible as text.]

diff --git a/docs/source/core_concepts/er_model.rst b/docs/source/core_concepts/er_model.rst
index 9f781c4108..91e5a43a1b 100644
--- a/docs/source/core_concepts/er_model.rst
+++ b/docs/source/core_concepts/er_model.rst
@@ -54,7
+54,7 @@ Account - domain_id — identifier of domain where the account was created, references existing domain - quorum — number of signatories required for creation of valid transaction from this account - transaction_count – counter of transactions created by this account - - data — key-value storage for any information, related to the account + - data — key-value storage for any information, related to the account. Size is limited to 268435455 bytes (0x0FFFFFFF) (PostgreSQL JSONB field). AccountHasSignatory ^^^^^^^^^^^^^^^^^^^ diff --git a/docs/source/core_concepts/glossary.rst b/docs/source/core_concepts/glossary.rst index 12e03e2f4b..947468d81c 100644 --- a/docs/source/core_concepts/glossary.rst +++ b/docs/source/core_concepts/glossary.rst @@ -35,16 +35,15 @@ Signable content is called payload, so the structure of a block looks like this: *Outside payload* - - hash — SHA3-512 hash of block protobuf payload - signatures — signatures of peers, which voted for the block during consensus round *Inside payload* - height — a number of blocks in the chain up to the block - timestamp — Unix time (in milliseconds) of block forming by a peer - - body — transactions, which successfully passed validation and consensus step - - transactions quantity - - previous hash of a block + - array of transactions, which successfully passed validation and consensus step + - hash of a previous block in the chain + - rejected transactions hashes — array of transaction hashes, which did not pass stateful validation step; this field is optional Block Creator ============= @@ -259,12 +258,15 @@ Transaction Status Set ^^^^^^^^^^^^^^^^^^^^^^ - NOT_RECEIVED: requested peer does not have this transaction. - - MST_EXPIRED: this transactions is a part of MST pipeline and has expired. + - ENOUGH_SIGNATURES_COLLECTED: this is a multisignature transaction which has enough signatures and is going to be validated by the peer. + - MST_PENDING: this transaction is a multisignature transaction which has to be signed by more keys (as requested in quorum field). + - MST_EXPIRED: this transaction is a multisignature transaction which is no longer valid and is going to be deleted by this peer. - STATELESS_VALIDATION_FAILED: the transaction was formed with some fields, not meeting stateless validation constraints. This status is returned to a client, who formed transaction, right after the transaction was sent. It would also return the reason — what rule was violated. - STATELESS_VALIDATION_SUCCESS: the transaction has successfully passed stateless validation. This status is returned to a client, who formed transaction, right after the transaction was sent. - STATEFUL_VALIDATION_FAILED: the transaction has commands, which violate validation rules, checking state of the chain (e.g. asset balance, account permissions, etc.). It would also return the reason — what rule was violated. - STATEFUL_VALIDATION_SUCCESS: the transaction has successfully passed stateful validation. - COMMITTED: the transaction is the part of a block, which gained enough votes and is in the block store at the moment. + - REJECTED: this exact transaction was rejected by the peer during stateful validation step in previous consensus rounds. Rejected transactions' hashes are stored in `block <#block>`__ store. This is required in order to prevent `replay attacks `__. 
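
The status set above maps directly onto the client-side API used later in this
patch series. A minimal illustrative sketch of consuming these statuses with
torii::CommandSyncClient follows; the TxStatus enum value names, the include
paths and the waitForCommit helper are assumptions for illustration and are
not part of the patches.

// Illustrative sketch, not part of the patch series.
#include <string>
#include <vector>

#include "endpoint.pb.h"             // iroha::protocol::TxStatusRequest / ToriiResponse (path assumed)
#include "torii/command_client.hpp"  // torii::CommandSyncClient

bool waitForCommit(torii::CommandSyncClient &client,
                   const std::string &tx_hash_hex) {
  iroha::protocol::TxStatusRequest request;
  request.set_tx_hash(tx_hash_hex);  // hex hash of the previously sent transaction

  // StatusStream yields every status transition up to a final one
  // (COMMITTED, REJECTED, MST_EXPIRED or a validation failure).
  std::vector<iroha::protocol::ToriiResponse> statuses;
  client.StatusStream(request, statuses);

  // The last emitted status is final, per the definitions above; the enum
  // value name COMMITTED is assumed to match the glossary entry.
  return not statuses.empty()
      and statuses.back().tx_status() == iroha::protocol::TxStatus::COMMITTED;
}
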
Pending Transactions ^^^^^^^^^^^^^^^^^^^^ @@ -278,8 +280,6 @@ when the transaction is a part of `batch of transactions`_ and there is a not fu Batch of Transactions ===================== -*The feature is to be released.* - Transactions batch is a feature that allows sending several transactions to Iroha at once preserving their order. Each transaction within a batch includes batch meta information. diff --git a/docs/source/locale.yaml b/docs/source/locale.yaml index f7cac564e7..cbb677d62e 100644 --- a/docs/source/locale.yaml +++ b/docs/source/locale.yaml @@ -1,2 +1 @@ language: "en" -locale_dirs: "../locale/" From 69f9206aa7a3542b9862ea2d04c0ea3a20ebda59 Mon Sep 17 00:00:00 2001 From: Igor Egorov Date: Tue, 15 Jan 2019 17:26:06 +0300 Subject: [PATCH 07/41] Make command client testable (#2014) * Make command client testable * Remove unnecessary MockCommandServiceTransport from torii mocks Signed-off-by: Igor Egorov --- iroha-cli/client.cpp | 6 +- irohad/torii/command_client.hpp | 8 +- irohad/torii/impl/command_client.cpp | 14 +-- .../integration_test_framework.cpp | 5 +- .../irohad/torii/command_sync_client_test.cpp | 109 +++++++++++------- test/module/irohad/torii/torii_mocks.hpp | 22 ---- test/system/irohad_test.cpp | 5 +- 7 files changed, 89 insertions(+), 80 deletions(-) diff --git a/iroha-cli/client.cpp b/iroha-cli/client.cpp index 3777228b7d..5a603f89b1 100644 --- a/iroha-cli/client.cpp +++ b/iroha-cli/client.cpp @@ -11,11 +11,15 @@ #include "model/converters/json_transaction_factory.hpp" #include "model/converters/pb_query_factory.hpp" #include "model/converters/pb_transaction_factory.hpp" +#include "network/impl/grpc_channel_builder.hpp" namespace iroha_cli { CliClient::CliClient(std::string target_ip, int port) - : command_client_(target_ip, port), query_client_(target_ip, port) {} + : command_client_( + iroha::network::createClient( + target_ip + ":" + std::to_string(port))), + query_client_(target_ip, port) {} CliClient::Response CliClient::sendTx( const shared_model::interface::Transaction &tx) { diff --git a/irohad/torii/command_client.hpp b/irohad/torii/command_client.hpp index 85aaa8ad2d..23d71f4608 100644 --- a/irohad/torii/command_client.hpp +++ b/irohad/torii/command_client.hpp @@ -20,9 +20,9 @@ namespace torii { */ class CommandSyncClient { public: - CommandSyncClient(const std::string &ip, - size_t port, - logger::Logger log = logger::log("CommandSyncClient")); + CommandSyncClient( + std::unique_ptr stub, + logger::Logger log = logger::log("CommandSyncClient")); /** * requests tx to a torii server and returns response (blocking, sync) @@ -57,7 +57,7 @@ namespace torii { std::vector &response) const; private: - std::unique_ptr stub_; + std::unique_ptr stub_; logger::Logger log_; }; diff --git a/irohad/torii/impl/command_client.cpp b/irohad/torii/impl/command_client.cpp index 17d1dd3590..847efc7baa 100644 --- a/irohad/torii/impl/command_client.cpp +++ b/irohad/torii/impl/command_client.cpp @@ -8,7 +8,6 @@ #include #include "common/byteutils.hpp" -#include "network/impl/grpc_channel_builder.hpp" #include "torii/command_client.hpp" #include "transaction.pb.h" @@ -17,12 +16,10 @@ namespace torii { using iroha::protocol::ToriiResponse; using iroha::protocol::Transaction; - CommandSyncClient::CommandSyncClient(const std::string &ip, - size_t port, - logger::Logger log) - : stub_(iroha::network::createClient( - ip + ":" + std::to_string(port))), - log_(std::move(log)) {} + CommandSyncClient::CommandSyncClient( + std::unique_ptr stub, + logger::Logger log) + : 
stub_(std::move(stub)), log_(std::move(log)) {} grpc::Status CommandSyncClient::Torii(const Transaction &tx) const { google::protobuf::Empty a; @@ -49,8 +46,7 @@ namespace torii { std::vector &response) const { grpc::ClientContext context; ToriiResponse resp; - std::unique_ptr > reader( - stub_->StatusStream(&context, tx)); + auto reader = stub_->StatusStream(&context, tx); while (reader->Read(&resp)) { log_->debug("received new status: {}, hash {}", resp.tx_status(), diff --git a/test/framework/integration_framework/integration_test_framework.cpp b/test/framework/integration_framework/integration_test_framework.cpp index 0f2bf86e42..41c779be41 100644 --- a/test/framework/integration_framework/integration_test_framework.cpp +++ b/test/framework/integration_framework/integration_test_framework.cpp @@ -39,6 +39,7 @@ #include "module/shared_model/validators/always_valid_validators.hpp" #include "multi_sig_transactions/transport/mst_transport_grpc.hpp" #include "network/impl/async_grpc_client.hpp" +#include "network/impl/grpc_channel_builder.hpp" #include "synchronizer/synchronizer_common.hpp" using namespace shared_model::crypto; @@ -89,7 +90,9 @@ namespace integration_framework { torii_port_, internal_port_, dbname)), - command_client_(kLocalHost, torii_port_), + command_client_( + iroha::network::createClient( + kLocalHost + ":" + std::to_string(torii_port_))), query_client_(kLocalHost, torii_port_), async_call_(std::make_shared()), proposal_waiting(proposal_waiting), diff --git a/test/module/irohad/torii/command_sync_client_test.cpp b/test/module/irohad/torii/command_sync_client_test.cpp index b2d1518810..5b3094d463 100644 --- a/test/module/irohad/torii/command_sync_client_test.cpp +++ b/test/module/irohad/torii/command_sync_client_test.cpp @@ -8,8 +8,7 @@ #include #include #include "endpoint_mock.grpc.pb.h" -#include "main/server_runner.hpp" -#include "module/irohad/torii/torii_mocks.hpp" +#include "framework/mock_stream.h" using testing::_; using testing::Invoke; @@ -18,83 +17,109 @@ using testing::Return; class CommandSyncClientTest : public testing::Test { public: void SetUp() override { - runner = std::make_unique(ip + ":0"); - server = std::make_shared(); - runner->append(server).run().match( - [this](iroha::expected::Value port) { this->port = port.value; }, - [](iroha::expected::Error err) { FAIL() << err.error; }); + auto ustub = std::make_unique(); + stub = ustub.get(); + client = std::make_shared(std::move(ustub)); } - std::unique_ptr runner; - std::shared_ptr server; + iroha::protocol::MockCommandService_v1Stub *stub; + std::shared_ptr client; - const std::string ip = "127.0.0.1"; const size_t kHashLength = 32; - int port; + const std::string kTxHash = std::string(kHashLength, '1'); }; /** * @given command client * @when Status is called - * @then the same method of the server is called and client successfully return + * @then the stub handles passed data correctly (no corruptions in both + * directions) */ TEST_F(CommandSyncClientTest, Status) { - iroha::protocol::TxStatusRequest tx_request; - tx_request.set_tx_hash(std::string(kHashLength, '1')); - iroha::protocol::ToriiResponse toriiResponse; - - torii::CommandSyncClient client(ip, port); - EXPECT_CALL(*server, Status(_, _, _)).WillOnce(Return(grpc::Status::OK)); - auto stat = client.Status(tx_request, toriiResponse); + iroha::protocol::TxStatusRequest tx_request, intermediary_tx_request; + tx_request.set_tx_hash(kTxHash); + iroha::protocol::ToriiResponse torii_response, intermediary_response; + 
intermediary_response.set_tx_hash(kTxHash); + EXPECT_CALL(*stub, Status(_, _, _)) + .WillOnce( + ::testing::DoAll(::testing::SaveArg<1>(&intermediary_tx_request), + ::testing::SetArgPointee<2>(intermediary_response), + Return(::grpc::Status::OK))); + auto stat = client->Status(tx_request, torii_response); + ASSERT_EQ(kTxHash, intermediary_tx_request.tx_hash()); + ASSERT_EQ(kTxHash, torii_response.tx_hash()); ASSERT_TRUE(stat.ok()); } /** * @given command client * @when Torii is called - * @then the same method of the server is called and client successfully return + * @then the stub handles passed data correctly (no corruptions in both + * directions) */ TEST_F(CommandSyncClientTest, Torii) { - iroha::protocol::Transaction tx; - EXPECT_CALL(*server, Torii(_, _, _)).WillOnce(Return(grpc::Status())); - torii::CommandSyncClient client(ip, port); - auto stat = client.Torii(tx); + iroha::protocol::Transaction tx, intermediary_tx; + tx.mutable_payload()->mutable_reduced_payload()->set_creator_account_id( + kTxHash); + EXPECT_CALL(*stub, Torii(_, _, _)) + .WillOnce(::testing::DoAll(::testing::SaveArg<1>(&intermediary_tx), + ::testing::Return(::grpc::Status::OK))); + auto stat = client->Torii(tx); + ASSERT_EQ(kTxHash, + intermediary_tx.payload().reduced_payload().creator_account_id()); ASSERT_TRUE(stat.ok()); } /** * @given command client * @when ListTorii is called - * @then the same method of the server is called and client successfully return + * @then the stub handles passed data correctly (no corruptions in both + * directions) */ TEST_F(CommandSyncClientTest, ListTorii) { - iroha::protocol::TxList tx; - EXPECT_CALL(*server, ListTorii(_, _, _)).WillOnce(Return(grpc::Status())); - torii::CommandSyncClient client(ip, port); - auto stat = client.ListTorii(tx); + iroha::protocol::TxList tx_list, intermediary_tx_list; + tx_list.add_transactions() + ->mutable_payload() + ->mutable_reduced_payload() + ->set_creator_account_id(kTxHash); + EXPECT_CALL(*stub, ListTorii(_, _, _)) + .WillOnce(::testing::DoAll(::testing::SaveArg<1>(&intermediary_tx_list), + ::testing::Return(::grpc::Status::OK))); + auto stat = client->ListTorii(tx_list); + ASSERT_EQ(kTxHash, + intermediary_tx_list.transactions()[0] + .payload() + .reduced_payload() + .creator_account_id()); ASSERT_TRUE(stat.ok()); } /** * @given command client * @when StatusStream is called - * @then the same method of the server is called and client successfully return + * @then the stub handles passed data correctly (no corruptions in both + * directions) */ TEST_F(CommandSyncClientTest, StatusStream) { - iroha::protocol::TxStatusRequest tx; + iroha::protocol::TxStatusRequest tx, intermediary_tx; iroha::protocol::ToriiResponse resp; - resp.set_tx_hash(std::string(kHashLength, '1')); + auto hash = std::string(kHashLength, '1'); + tx.set_tx_hash(hash); + resp.set_tx_hash(hash); std::vector responses; - EXPECT_CALL(*server, StatusStream(_, _, _)) - .WillOnce(Invoke([&](auto, - auto, - grpc::ServerWriter - *response_writer) { - response_writer->Write(resp); - return grpc::Status(); - })); - torii::CommandSyncClient client(ip, port); - client.StatusStream(tx, responses); + auto reader = std::make_unique< + grpc::testing::MockClientReader<::iroha::protocol::ToriiResponse>>(); + + EXPECT_CALL(*reader, Read(_)) + .WillOnce(DoAll(::testing::SetArgPointee<0>(resp), Return(true))) + .WillOnce(Return(false)); + EXPECT_CALL(*reader, Finish()).WillOnce(Return(::grpc::Status::OK)); + + EXPECT_CALL(*stub, StatusStreamRaw(_, _)) + 
.WillOnce(::testing::DoAll(::testing::SaveArg<1>(&intermediary_tx), + Return(reader.release()))); + client->StatusStream(tx, responses); + ASSERT_EQ(intermediary_tx.tx_hash(), resp.tx_hash()); ASSERT_EQ(responses.size(), 1); ASSERT_EQ(responses[0].tx_hash(), resp.tx_hash()); } diff --git a/test/module/irohad/torii/torii_mocks.hpp b/test/module/irohad/torii/torii_mocks.hpp index 2bdfbcbded..5bd4f8fb63 100644 --- a/test/module/irohad/torii/torii_mocks.hpp +++ b/test/module/irohad/torii/torii_mocks.hpp @@ -37,28 +37,6 @@ namespace iroha { MOCK_METHOD0(statuses, rxcpp::observable()); }; - class MockCommandServiceTransport - : public iroha::protocol::CommandService_v1::Service { - public: - MOCK_METHOD3(Torii, - grpc::Status(grpc::ServerContext *, - const iroha::protocol::Transaction *, - google::protobuf::Empty *)); - MOCK_METHOD3(ListTorii, - grpc::Status(grpc::ServerContext *, - const iroha::protocol::TxList *, - google::protobuf::Empty *)); - MOCK_METHOD3(Status, - grpc::Status(grpc::ServerContext *, - const iroha::protocol::TxStatusRequest *, - iroha::protocol::ToriiResponse *)); - MOCK_METHOD3( - StatusStream, - grpc::Status(grpc::ServerContext *, - const iroha::protocol::TxStatusRequest *, - grpc::ServerWriter *)); - }; - class MockCommandService : public ::torii::CommandService { public: MOCK_METHOD1(handleTransactionBatch, diff --git a/test/system/irohad_test.cpp b/test/system/irohad_test.cpp index 5e94424c86..ca37e0a248 100644 --- a/test/system/irohad_test.cpp +++ b/test/system/irohad_test.cpp @@ -21,6 +21,7 @@ #include "integration/acceptance/acceptance_fixture.hpp" #include "interfaces/query_responses/roles_response.hpp" #include "main/iroha_conf_loader.hpp" +#include "network/impl/grpc_channel_builder.hpp" #include "torii/command_client.hpp" #include "torii/query_client.hpp" @@ -146,7 +147,9 @@ class IrohadTest : public AcceptanceFixture { complete(baseTx(kAdminId).setAccountQuorum(kAdminId, 1), key_pair); tx_request.set_tx_hash(tx.hash().hex()); - torii::CommandSyncClient client(kAddress, kPort); + torii::CommandSyncClient client( + iroha::network::createClient( + kAddress + ":" + std::to_string(kPort))); client.Torii(tx.getTransport()); auto resub_counter(resubscribe_attempts); From 4f6536b3fb1752f338c625e9cdb69102c4689b09 Mon Sep 17 00:00:00 2001 From: Akvinikym Date: Wed, 16 Jan 2019 13:53:33 +0300 Subject: [PATCH 08/41] Rework headers in application.hpp (#2023) * Reworked the headers and torii namespaces Signed-off-by: Akvinikym --- irohad/main/application.cpp | 16 +- irohad/main/application.hpp | 76 ++-- irohad/main/irohad.cpp | 1 + irohad/torii/command_service.hpp | 75 ++-- irohad/torii/impl/command_service_impl.cpp | 300 +++++++------ irohad/torii/impl/command_service_impl.hpp | 141 +++--- .../impl/command_service_transport_grpc.cpp | 423 +++++++++--------- .../impl/command_service_transport_grpc.hpp | 213 ++++----- irohad/torii/impl/query_service.cpp | 228 +++++----- irohad/torii/query_service.hpp | 99 ++-- .../integration_test_framework.cpp | 3 + .../integration_test_framework.hpp | 1 + .../integration_framework/iroha_instance.cpp | 3 + .../integration_framework/test_irohad.hpp | 1 + .../ametsuchi/tx_presence_cache_stub.hpp | 1 + .../irohad/torii/query_service_test.cpp | 2 - test/module/irohad/torii/torii_mocks.hpp | 2 +- .../irohad/torii/torii_queries_test.cpp | 4 +- .../irohad/torii/torii_service_query_test.cpp | 6 +- .../torii/torii_transport_command_test.cpp | 20 +- 20 files changed, 839 insertions(+), 776 deletions(-) diff --git a/irohad/main/application.cpp 
b/irohad/main/application.cpp index fbfce0118d..f36cde6abd 100644 --- a/irohad/main/application.cpp +++ b/irohad/main/application.cpp @@ -5,6 +5,7 @@ #include "main/application.hpp" +#include "ametsuchi/impl/storage_impl.hpp" #include "ametsuchi/impl/tx_presence_cache_impl.hpp" #include "ametsuchi/impl/wsv_restorer_impl.hpp" #include "backend/protobuf/common_objects/proto_common_objects_factory.hpp" @@ -16,9 +17,10 @@ #include "backend/protobuf/proto_tx_status_factory.hpp" #include "common/bind.hpp" #include "consensus/yac/impl/supermajority_checker_impl.hpp" +#include "cryptography/crypto_provider/crypto_model_signer.hpp" #include "interfaces/iroha_internal/transaction_batch_factory_impl.hpp" #include "interfaces/iroha_internal/transaction_batch_parser_impl.hpp" -#include "interfaces/permission_to_string.hpp" +#include "main/server_runner.hpp" #include "multi_sig_transactions/gossip_propagation_strategy.hpp" #include "multi_sig_transactions/mst_processor_impl.hpp" #include "multi_sig_transactions/mst_propagation_strategy_stub.hpp" @@ -26,9 +28,21 @@ #include "multi_sig_transactions/storage/mst_storage_impl.hpp" #include "multi_sig_transactions/transport/mst_transport_grpc.hpp" #include "multi_sig_transactions/transport/mst_transport_stub.hpp" +#include "network/impl/block_loader_impl.hpp" +#include "network/impl/peer_communication_service_impl.hpp" #include "ordering/impl/on_demand_common.hpp" +#include "ordering/impl/on_demand_ordering_gate.hpp" +#include "pending_txs_storage/impl/pending_txs_storage_impl.hpp" +#include "simulator/impl/simulator.hpp" +#include "synchronizer/impl/synchronizer_impl.hpp" #include "torii/impl/command_service_impl.hpp" +#include "torii/impl/command_service_transport_grpc.hpp" #include "torii/impl/status_bus_impl.hpp" +#include "torii/processor/query_processor_impl.hpp" +#include "torii/processor/transaction_processor_impl.hpp" +#include "torii/query_service.hpp" +#include "validation/impl/chain_validator_impl.hpp" +#include "validation/impl/stateful_validator_impl.hpp" #include "validators/default_validator.hpp" #include "validators/field_validator.hpp" #include "validators/protobuf/proto_block_validator.hpp" diff --git a/irohad/main/application.hpp b/irohad/main/application.hpp index 83ff43837d..2151753e34 100644 --- a/irohad/main/application.hpp +++ b/irohad/main/application.hpp @@ -6,48 +6,61 @@ #ifndef IROHA_APPLICATION_HPP #define IROHA_APPLICATION_HPP -#include "ametsuchi/impl/storage_impl.hpp" -#include "ametsuchi/tx_presence_cache.hpp" #include "consensus/consensus_block_cache.hpp" #include "cryptography/crypto_provider/crypto_model_signer.hpp" -#include "cryptography/keypair.hpp" -#include "interfaces/common_objects/common_objects_factory.hpp" -#include "interfaces/iroha_internal/query_response_factory.hpp" -#include "interfaces/iroha_internal/transaction_batch_factory.hpp" #include "logger/logger.hpp" #include "main/impl/block_loader_init.hpp" #include "main/impl/consensus_init.hpp" #include "main/impl/on_demand_ordering_init.hpp" -#include "main/server_runner.hpp" #include "multi_sig_transactions/gossip_propagation_strategy_params.hpp" -#include "multi_sig_transactions/mst_processor.hpp" -#include "network/block_loader.hpp" -#include "network/consensus_gate.hpp" -#include "network/impl/peer_communication_service_impl.hpp" -#include "network/mst_transport.hpp" -#include "network/ordering_gate.hpp" -#include "network/peer_communication_service.hpp" -#include "pending_txs_storage/impl/pending_txs_storage_impl.hpp" -#include 
"simulator/block_creator.hpp" -#include "simulator/impl/simulator.hpp" -#include "synchronizer/impl/synchronizer_impl.hpp" -#include "synchronizer/synchronizer.hpp" -#include "torii/command_service.hpp" -#include "torii/impl/command_service_transport_grpc.hpp" -#include "torii/processor/query_processor_impl.hpp" -#include "torii/processor/transaction_processor_impl.hpp" -#include "torii/query_service.hpp" -#include "validation/chain_validator.hpp" -#include "validation/impl/chain_validator_impl.hpp" -#include "validation/impl/stateful_validator_impl.hpp" -#include "validation/stateful_validator.hpp" namespace iroha { + class PendingTransactionStorage; + class MstProcessor; namespace ametsuchi { class WsvRestorer; + class TxPresenceCache; + class Storage; + } // namespace ametsuchi + namespace network { + class BlockLoader; + class ConsensusGate; + class PeerCommunicationService; + class MstTransport; + class OrderingGate; + } // namespace network + namespace simulator { + class Simulator; } + namespace synchronizer { + class Synchronizer; + } + namespace torii { + class QueryProcessor; + class StatusBus; + class CommandService; + class CommandServiceTransportGrpc; + class QueryService; + } // namespace torii + namespace validation { + class ChainValidator; + class StatefulValidator; + } // namespace validation } // namespace iroha +namespace shared_model { + namespace crypto { + class Keypair; + } + namespace interface { + class CommonObjectsFactory; + class QueryResponseFactory; + class TransactionBatchFactory; + } // namespace interface +} // namespace shared_model + +class ServerRunner; + class Irohad { public: using RunResult = iroha::expected::Result; @@ -243,11 +256,12 @@ class Irohad { std::shared_ptr status_bus_; // transaction service - std::shared_ptr command_service; - std::shared_ptr command_service_transport; + std::shared_ptr command_service; + std::shared_ptr + command_service_transport; // query service - std::shared_ptr query_service; + std::shared_ptr query_service; std::unique_ptr torii_server; std::unique_ptr internal_server; diff --git a/irohad/main/irohad.cpp b/irohad/main/irohad.cpp index 97c76791a8..dd80069307 100644 --- a/irohad/main/irohad.cpp +++ b/irohad/main/irohad.cpp @@ -9,6 +9,7 @@ #include #include +#include "ametsuchi/storage.hpp" #include "common/result.hpp" #include "crypto/keys_manager_impl.hpp" #include "main/application.hpp" diff --git a/irohad/torii/command_service.hpp b/irohad/torii/command_service.hpp index 0fadc46fff..315076eeaa 100644 --- a/irohad/torii/command_service.hpp +++ b/irohad/torii/command_service.hpp @@ -20,41 +20,44 @@ namespace shared_model { } // namespace crypto } // namespace shared_model -namespace torii { - - class CommandService { - public: - virtual ~CommandService() = default; - - /** - * Actual implementation of sync Torii in CommandService - * @param batch - transactions we've received - */ - virtual void handleTransactionBatch( - std::shared_ptr batch) = 0; - - /** - * Request to retrieve a status of any particular transaction - * @param request - TxStatusRequest object which identifies transaction - * uniquely - * @return response which contains a current state of requested transaction - */ - virtual std::shared_ptr - getStatus(const shared_model::crypto::Hash &request) = 0; - - /** - * Streaming call which will repeatedly send all statuses of requested - * transaction from its status at the moment of receiving this request to - * the some final transaction status (which cannot change anymore) - * @param request- 
TxStatusRequest object which identifies transaction - * uniquely - * @return observable with transaction statuses - */ - virtual rxcpp::observable< - std::shared_ptr> - getStatusStream(const shared_model::crypto::Hash &hash) = 0; - }; - -} // namespace torii +namespace iroha { + namespace torii { + + class CommandService { + public: + virtual ~CommandService() = default; + + /** + * Actual implementation of sync Torii in CommandService + * @param batch - transactions we've received + */ + virtual void handleTransactionBatch( + std::shared_ptr batch) = 0; + + /** + * Request to retrieve a status of any particular transaction + * @param request - TxStatusRequest object which identifies transaction + * uniquely + * @return response which contains a current state of requested + * transaction + */ + virtual std::shared_ptr + getStatus(const shared_model::crypto::Hash &request) = 0; + + /** + * Streaming call which will repeatedly send all statuses of requested + * transaction from its status at the moment of receiving this request to + * the some final transaction status (which cannot change anymore) + * @param request- TxStatusRequest object which identifies transaction + * uniquely + * @return observable with transaction statuses + */ + virtual rxcpp::observable< + std::shared_ptr> + getStatusStream(const shared_model::crypto::Hash &hash) = 0; + }; + + } // namespace torii +} // namespace iroha #endif // TORII_COMMAND_SERVICE_HPP diff --git a/irohad/torii/impl/command_service_impl.cpp b/irohad/torii/impl/command_service_impl.cpp index ca8293e949..9b79750dbc 100644 --- a/irohad/torii/impl/command_service_impl.cpp +++ b/irohad/torii/impl/command_service_impl.cpp @@ -13,157 +13,167 @@ #include "common/visitor.hpp" #include "interfaces/iroha_internal/transaction_batch.hpp" -namespace torii { - - CommandServiceImpl::CommandServiceImpl( - std::shared_ptr tx_processor, - std::shared_ptr storage, - std::shared_ptr status_bus, - std::shared_ptr status_factory, - logger::Logger log) - : tx_processor_(std::move(tx_processor)), - storage_(std::move(storage)), - status_bus_(std::move(status_bus)), - cache_(std::make_shared()), - status_factory_(std::move(status_factory)), - log_(std::move(log)) { - // Notifier for all clients - status_bus_->statuses().subscribe([this](auto response) { - // find response for this tx in cache; if status of received response - // isn't "greater" than cached one, dismiss received one - auto tx_hash = response->transactionHash(); - auto cached_tx_state = cache_->findItem(tx_hash); - if (cached_tx_state - and response->comparePriorities(**cached_tx_state) - != shared_model::interface::TransactionResponse:: - PrioritiesComparisonResult::kGreater) { - return; - } - cache_->addItem(tx_hash, response); - }); - } - - void CommandServiceImpl::handleTransactionBatch( - std::shared_ptr batch) { - processBatch(batch); - } - - std::shared_ptr - CommandServiceImpl::getStatus(const shared_model::crypto::Hash &request) { - auto cached = cache_->findItem(request); - if (cached) { - return cached.value(); +namespace iroha { + namespace torii { + + CommandServiceImpl::CommandServiceImpl( + std::shared_ptr tx_processor, + std::shared_ptr storage, + std::shared_ptr status_bus, + std::shared_ptr + status_factory, + logger::Logger log) + : tx_processor_(std::move(tx_processor)), + storage_(std::move(storage)), + status_bus_(std::move(status_bus)), + cache_(std::make_shared()), + status_factory_(std::move(status_factory)), + log_(std::move(log)) { + // Notifier for all clients + 
status_bus_->statuses().subscribe([this](auto response) { + // find response for this tx in cache; if status of received response + // isn't "greater" than cached one, dismiss received one + auto tx_hash = response->transactionHash(); + auto cached_tx_state = cache_->findItem(tx_hash); + if (cached_tx_state + and response->comparePriorities(**cached_tx_state) + != shared_model::interface::TransactionResponse:: + PrioritiesComparisonResult::kGreater) { + return; + } + cache_->addItem(tx_hash, response); + }); } - auto block_query = storage_->getBlockQuery(); - if (not block_query) { - // TODO andrei 30.11.18 IR-51 Handle database error - log_->warn("Could not create block query. Tx: {}", request.hex()); - return status_factory_->makeNotReceived(request); + void CommandServiceImpl::handleTransactionBatch( + std::shared_ptr batch) { + processBatch(batch); } - auto status = block_query->checkTxPresence(request); - if (not status) { - // TODO andrei 30.11.18 IR-51 Handle database error - log_->warn("Check tx presence database error. Tx: {}", request.hex()); - return status_factory_->makeNotReceived(request); + std::shared_ptr + CommandServiceImpl::getStatus(const shared_model::crypto::Hash &request) { + auto cached = cache_->findItem(request); + if (cached) { + return cached.value(); + } + + auto block_query = storage_->getBlockQuery(); + if (not block_query) { + // TODO andrei 30.11.18 IR-51 Handle database error + log_->warn("Could not create block query. Tx: {}", request.hex()); + return status_factory_->makeNotReceived(request); + } + + auto status = block_query->checkTxPresence(request); + if (not status) { + // TODO andrei 30.11.18 IR-51 Handle database error + log_->warn("Check tx presence database error. Tx: {}", request.hex()); + return status_factory_->makeNotReceived(request); + } + + return iroha::visit_in_place( + *status, + [this, &request]( + const iroha::ametsuchi::tx_cache_status_responses::Missing &) + -> std::shared_ptr { + log_->warn("Asked non-existing tx: {}", request.hex()); + return status_factory_->makeNotReceived(request); + }, + [this, &request](const auto &) { + std::shared_ptr + response = status_factory_->makeCommitted(request); + cache_->addItem(request, response); + return response; + }); } - return iroha::visit_in_place( - *status, - [this, - &request](const iroha::ametsuchi::tx_cache_status_responses::Missing &) - -> std::shared_ptr { - log_->warn("Asked non-existing tx: {}", request.hex()); - return status_factory_->makeNotReceived(request); - }, - [this, &request](const auto &) { - std::shared_ptr - response = status_factory_->makeCommitted(request); - cache_->addItem(request, response); - return response; - }); - } - - /** - * Statuses considered final for streaming. 
Observable stops value emission - * after receiving a value of one of the following types - * @tparam T concrete response type - */ - template - constexpr bool FinalStatusValue = - iroha::is_any, - shared_model::interface::StatelessFailedTxResponse, - shared_model::interface::StatefulFailedTxResponse, - shared_model::interface::CommittedTxResponse, - shared_model::interface::MstExpiredResponse>::value; - - rxcpp::observable< - std::shared_ptr> - CommandServiceImpl::getStatusStream(const shared_model::crypto::Hash &hash) { - using ResponsePtrType = - std::shared_ptr; - auto initial_status = cache_->findItem(hash).value_or([&] { - log_->debug("tx is not received: {}", hash); - return status_factory_->makeNotReceived(hash); - }()); - return status_bus_ - ->statuses() - // prepend initial status - .start_with(initial_status) - // select statuses with requested hash - .filter( - [&](auto response) { return response->transactionHash() == hash; }) - // successfully complete the observable if final status is received. - // final status is included in the observable - .template lift([](rxcpp::subscriber - dest) { - return rxcpp::make_subscriber( - dest, [=](ResponsePtrType response) { - dest.on_next(response); - iroha::visit_in_place( - response->get(), - [dest](const auto &resp) - -> std::enable_if_t> { - dest.on_completed(); - }, - [](const auto &resp) - -> std::enable_if_t< - not FinalStatusValue>{}); + /** + * Statuses considered final for streaming. Observable stops value emission + * after receiving a value of one of the following types + * @tparam T concrete response type + */ + template + constexpr bool FinalStatusValue = + iroha::is_any, + shared_model::interface::StatelessFailedTxResponse, + shared_model::interface::StatefulFailedTxResponse, + shared_model::interface::CommittedTxResponse, + shared_model::interface::MstExpiredResponse>::value; + + rxcpp::observable< + std::shared_ptr> + CommandServiceImpl::getStatusStream( + const shared_model::crypto::Hash &hash) { + using ResponsePtrType = + std::shared_ptr; + auto initial_status = cache_->findItem(hash).value_or([&] { + log_->debug("tx is not received: {}", hash); + return status_factory_->makeNotReceived(hash); + }()); + return status_bus_ + ->statuses() + // prepend initial status + .start_with(initial_status) + // select statuses with requested hash + .filter([&](auto response) { + return response->transactionHash() == hash; + }) + // successfully complete the observable if final status is received. 
+ // final status is included in the observable + .template lift( + [](rxcpp::subscriber dest) { + return rxcpp::make_subscriber( + dest, [=](ResponsePtrType response) { + dest.on_next(response); + iroha::visit_in_place( + response->get(), + [dest](const auto &resp) + -> std::enable_if_t< + FinalStatusValue> { + dest.on_completed(); + }, + [](const auto &resp) + -> std::enable_if_t< + not FinalStatusValue>{}); + }); }); - }); - } - - void CommandServiceImpl::pushStatus( - const std::string &who, - std::shared_ptr response) { - log_->debug("{}: adding item to cache: {}", who, *response); - status_bus_->publish(response); - } - - void CommandServiceImpl::processBatch( - std::shared_ptr batch) { - tx_processor_->batchHandle(batch); - const auto &txs = batch->transactions(); - std::for_each(txs.begin(), txs.end(), [this](const auto &tx) { - const auto &tx_hash = tx->hash(); - auto found = cache_->findItem(tx_hash); - // StatlessValid status goes only after EnoughSignaturesCollectedResponse - // So doesn't skip publishing status after it - if (found - and iroha::visit_in_place( - found.value()->get(), - [](const shared_model::interface:: - EnoughSignaturesCollectedResponse &) { return false; }, - [](auto &) { return true; }) - and tx->quorum() < 2) { - log_->warn("Found transaction {} in cache, ignoring", tx_hash.hex()); - return; - } + } - this->pushStatus("ToriiBatchProcessor", - status_factory_->makeStatelessValid(tx_hash)); - }); - } + void CommandServiceImpl::pushStatus( + const std::string &who, + std::shared_ptr + response) { + log_->debug("{}: adding item to cache: {}", who, *response); + status_bus_->publish(response); + } + + void CommandServiceImpl::processBatch( + std::shared_ptr batch) { + tx_processor_->batchHandle(batch); + const auto &txs = batch->transactions(); + std::for_each(txs.begin(), txs.end(), [this](const auto &tx) { + const auto &tx_hash = tx->hash(); + auto found = cache_->findItem(tx_hash); + // StatlessValid status goes only after + // EnoughSignaturesCollectedResponse So doesn't skip publishing status + // after it + if (found + and iroha::visit_in_place( + found.value()->get(), + [](const shared_model::interface:: + EnoughSignaturesCollectedResponse &) { + return false; + }, + [](auto &) { return true; }) + and tx->quorum() < 2) { + log_->warn("Found transaction {} in cache, ignoring", tx_hash.hex()); + return; + } + + this->pushStatus("ToriiBatchProcessor", + status_factory_->makeStatelessValid(tx_hash)); + }); + } -} // namespace torii + } // namespace torii +} // namespace iroha diff --git a/irohad/torii/impl/command_service_impl.hpp b/irohad/torii/impl/command_service_impl.hpp index 505331d07b..5a68529c55 100644 --- a/irohad/torii/impl/command_service_impl.hpp +++ b/irohad/torii/impl/command_service_impl.hpp @@ -16,86 +16,89 @@ #include "torii/processor/transaction_processor.hpp" #include "torii/status_bus.hpp" -namespace torii { - /** - * Actual implementation of sync CommandServiceImpl. - */ - class CommandServiceImpl : public CommandService { - public: +namespace iroha { + namespace torii { /** - * Creates a new instance of CommandService - * @param tx_processor - processor of received transactions - * @param storage - to query transactions outside the cache - * @param status_bus is a common notifier for tx statuses - * @param log to print progress + * Actual implementation of sync CommandServiceImpl. 
*/ - CommandServiceImpl( - std::shared_ptr tx_processor, - std::shared_ptr storage, - std::shared_ptr status_bus, - std::shared_ptr - status_factory, - logger::Logger log = logger::log("CommandServiceImpl")); + class CommandServiceImpl : public CommandService { + public: + /** + * Creates a new instance of CommandService + * @param tx_processor - processor of received transactions + * @param storage - to query transactions outside the cache + * @param status_bus is a common notifier for tx statuses + * @param log to print progress + */ + CommandServiceImpl( + std::shared_ptr tx_processor, + std::shared_ptr storage, + std::shared_ptr status_bus, + std::shared_ptr + status_factory, + logger::Logger log = logger::log("CommandServiceImpl")); - /** - * Disable copying in any way to prevent potential issues with common - * storage/tx_processor - */ - CommandServiceImpl(const CommandServiceImpl &) = delete; - CommandServiceImpl &operator=(const CommandServiceImpl &) = delete; + /** + * Disable copying in any way to prevent potential issues with common + * storage/tx_processor + */ + CommandServiceImpl(const CommandServiceImpl &) = delete; + CommandServiceImpl &operator=(const CommandServiceImpl &) = delete; - void handleTransactionBatch( - std::shared_ptr batch) - override; + void handleTransactionBatch( + std::shared_ptr batch) + override; - std::shared_ptr getStatus( - const shared_model::crypto::Hash &request) override; - rxcpp::observable< - std::shared_ptr> - getStatusStream(const shared_model::crypto::Hash &hash) override; + std::shared_ptr getStatus( + const shared_model::crypto::Hash &request) override; + rxcpp::observable< + std::shared_ptr> + getStatusStream(const shared_model::crypto::Hash &hash) override; - private: - /** - * Execute events scheduled in run loop until it is not empty and the - * subscriber is active - * @param subscription - tx status subscription - * @param run_loop - gRPC thread run loop - */ - inline void handleEvents(rxcpp::composite_subscription &subscription, - rxcpp::schedulers::run_loop &run_loop); + private: + /** + * Execute events scheduled in run loop until it is not empty and the + * subscriber is active + * @param subscription - tx status subscription + * @param run_loop - gRPC thread run loop + */ + inline void handleEvents(rxcpp::composite_subscription &subscription, + rxcpp::schedulers::run_loop &run_loop); - /** - * Share tx status and log it - * @param who identifier for the logging - * @param response to be shared - */ - void pushStatus( - const std::string &who, - std::shared_ptr response); + /** + * Share tx status and log it + * @param who identifier for the logging + * @param response to be shared + */ + void pushStatus( + const std::string &who, + std::shared_ptr + response); - /** - * Forward batch to transaction processor and set statuses of all - * transactions inside it - * @param batch to be processed - */ - void processBatch( - std::shared_ptr batch); + /** + * Forward batch to transaction processor and set statuses of all + * transactions inside it + * @param batch to be processed + */ + void processBatch( + std::shared_ptr batch); - private: - using CacheType = iroha::cache::Cache< - shared_model::crypto::Hash, - std::shared_ptr, - shared_model::crypto::Hash::Hasher>; + private: + using CacheType = iroha::cache::Cache< + shared_model::crypto::Hash, + std::shared_ptr, + shared_model::crypto::Hash::Hasher>; - std::shared_ptr tx_processor_; - std::shared_ptr storage_; - std::shared_ptr status_bus_; - std::shared_ptr cache_; - 
std::shared_ptr status_factory_; + std::shared_ptr tx_processor_; + std::shared_ptr storage_; + std::shared_ptr status_bus_; + std::shared_ptr cache_; + std::shared_ptr status_factory_; - logger::Logger log_; - }; + logger::Logger log_; + }; -} // namespace torii + } // namespace torii +} // namespace iroha #endif // TORII_COMMAND_SERVICE_IMPL_HPP diff --git a/irohad/torii/impl/command_service_transport_grpc.cpp b/irohad/torii/impl/command_service_transport_grpc.cpp index cadd2a7b0b..610308d6ce 100644 --- a/irohad/torii/impl/command_service_transport_grpc.cpp +++ b/irohad/torii/impl/command_service_transport_grpc.cpp @@ -22,220 +22,225 @@ #include "network/consensus_gate.hpp" #include "torii/status_bus.hpp" -namespace torii { - - CommandServiceTransportGrpc::CommandServiceTransportGrpc( - std::shared_ptr command_service, - std::shared_ptr status_bus, - std::shared_ptr status_factory, - std::shared_ptr transaction_factory, - std::shared_ptr - batch_parser, - std::shared_ptr - transaction_batch_factory, - std::shared_ptr consensus_gate, - int maximum_rounds_without_update, - logger::Logger log) - : command_service_(std::move(command_service)), - status_bus_(std::move(status_bus)), - status_factory_(std::move(status_factory)), - transaction_factory_(std::move(transaction_factory)), - batch_parser_(std::move(batch_parser)), - batch_factory_(std::move(transaction_batch_factory)), - log_(std::move(log)), - consensus_gate_(std::move(consensus_gate)), - maximum_rounds_without_update_(maximum_rounds_without_update) {} - - grpc::Status CommandServiceTransportGrpc::Torii( - grpc::ServerContext *context, - const iroha::protocol::Transaction *request, - google::protobuf::Empty *response) { - iroha::protocol::TxList single_tx_list; - *single_tx_list.add_transactions() = *request; - return ListTorii(context, &single_tx_list, response); - } - - namespace { - /** - * Form an error message, which is to be shared between all transactions, if - * there are several of them, or individual message, if there's only one - * @param tx_hashes is non empty hash list to form error message from - * @param error of those tx(s) - * @return message - */ - std::string formErrorMessage( - const std::vector &tx_hashes, - const std::string &error) { - if (tx_hashes.size() == 1) { - return (boost::format("Stateless invalid tx, error: %s, hash: %s") - % error % tx_hashes[0].hex()) +namespace iroha { + namespace torii { + + CommandServiceTransportGrpc::CommandServiceTransportGrpc( + std::shared_ptr command_service, + std::shared_ptr status_bus, + std::shared_ptr + status_factory, + std::shared_ptr transaction_factory, + std::shared_ptr + batch_parser, + std::shared_ptr + transaction_batch_factory, + std::shared_ptr consensus_gate, + int maximum_rounds_without_update, + logger::Logger log) + : command_service_(std::move(command_service)), + status_bus_(std::move(status_bus)), + status_factory_(std::move(status_factory)), + transaction_factory_(std::move(transaction_factory)), + batch_parser_(std::move(batch_parser)), + batch_factory_(std::move(transaction_batch_factory)), + log_(std::move(log)), + consensus_gate_(std::move(consensus_gate)), + maximum_rounds_without_update_(maximum_rounds_without_update) {} + + grpc::Status CommandServiceTransportGrpc::Torii( + grpc::ServerContext *context, + const iroha::protocol::Transaction *request, + google::protobuf::Empty *response) { + iroha::protocol::TxList single_tx_list; + *single_tx_list.add_transactions() = *request; + return ListTorii(context, &single_tx_list, response); + } + + 
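    // A minimal sketch of client-side usage of the two RPCs above, assuming a
    // torii endpoint on 127.0.0.1:50051 and a transaction built and signed
    // elsewhere; the names and address are illustrative, not taken from the
    // patch. Torii wraps the single transaction into a one-element TxList and
    // forwards it to ListTorii, so both calls share the same path.
    //
    //   auto stub = iroha::protocol::CommandService_v1::NewStub(
    //       grpc::CreateChannel("127.0.0.1:50051",
    //                           grpc::InsecureChannelCredentials()));
    //   grpc::ClientContext context;
    //   google::protobuf::Empty reply;
    //   iroha::protocol::Transaction tx;  // assumed filled and signed
    //   grpc::Status status = stub->Torii(&context, tx, &reply);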
namespace { + /** + * Form an error message, which is to be shared between all transactions, + * if there are several of them, or individual message, if there's only + * one + * @param tx_hashes is non empty hash list to form error message from + * @param error of those tx(s) + * @return message + */ + std::string formErrorMessage( + const std::vector &tx_hashes, + const std::string &error) { + if (tx_hashes.size() == 1) { + return (boost::format("Stateless invalid tx, error: %s, hash: %s") + % error % tx_hashes[0].hex()) + .str(); + } + + std::string folded_hashes = + std::accumulate(std::next(tx_hashes.begin()), + tx_hashes.end(), + tx_hashes[0].hex(), + [](auto &&acc, const auto &h) -> std::string { + return acc + ", " + h.hex(); + }); + + return (boost::format( + "Stateless invalid tx in transaction sequence, error: %s\n" + "Hash list: [%s]") + % error % folded_hashes) .str(); } - - std::string folded_hashes = - std::accumulate(std::next(tx_hashes.begin()), - tx_hashes.end(), - tx_hashes[0].hex(), - [](auto &&acc, const auto &h) -> std::string { - return acc + ", " + h.hex(); - }); - - return (boost::format( - "Stateless invalid tx in transaction sequence, error: %s\n" - "Hash list: [%s]") - % error % folded_hashes) - .str(); + } // namespace + + shared_model::interface::types::SharedTxsCollectionType + CommandServiceTransportGrpc::deserializeTransactions( + const iroha::protocol::TxList *request) { + shared_model::interface::types::SharedTxsCollectionType tx_collection; + for (const auto &tx : request->transactions()) { + transaction_factory_->build(tx).match( + [&tx_collection]( + iroha::expected::Value< + std::unique_ptr> &v) { + tx_collection.emplace_back(std::move(v).value); + }, + [this](iroha::expected::Error &error) { + status_bus_->publish(status_factory_->makeStatelessFail( + error.error.hash, + shared_model::interface::TxStatusFactory::TransactionError{ + error.error.error, 0, 0})); + }); + } + return tx_collection; } - } // namespace - - shared_model::interface::types::SharedTxsCollectionType - CommandServiceTransportGrpc::deserializeTransactions( - const iroha::protocol::TxList *request) { - shared_model::interface::types::SharedTxsCollectionType tx_collection; - for (const auto &tx : request->transactions()) { - transaction_factory_->build(tx).match( - [&tx_collection]( - iroha::expected::Value< - std::unique_ptr> &v) { - tx_collection.emplace_back(std::move(v).value); - }, - [this](iroha::expected::Error &error) { - status_bus_->publish(status_factory_->makeStatelessFail( - error.error.hash, - shared_model::interface::TxStatusFactory::TransactionError{ - error.error.error, 0, 0})); - }); + + grpc::Status CommandServiceTransportGrpc::ListTorii( + grpc::ServerContext *context, + const iroha::protocol::TxList *request, + google::protobuf::Empty *response) { + auto transactions = deserializeTransactions(request); + + auto batches = batch_parser_->parseBatches(transactions); + + for (auto &batch : batches) { + batch_factory_->createTransactionBatch(batch).match( + [&](iroha::expected::Value> &value) { + this->command_service_->handleTransactionBatch( + std::move(value).value); + }, + [&](iroha::expected::Error &error) { + std::vector hashes; + + std::transform(batch.begin(), + batch.end(), + std::back_inserter(hashes), + [](const auto &tx) { return tx->hash(); }); + + auto error_msg = formErrorMessage(hashes, error.error); + // set error response for each transaction in a batch candidate + std::for_each( + hashes.begin(), hashes.end(), [this, &error_msg](auto &hash) { + 
status_bus_->publish(status_factory_->makeStatelessFail( + hash, + shared_model::interface::TxStatusFactory:: + TransactionError{error_msg, 0, 0})); + }); + }); + } + + return grpc::Status::OK; } - return tx_collection; - } - - grpc::Status CommandServiceTransportGrpc::ListTorii( - grpc::ServerContext *context, - const iroha::protocol::TxList *request, - google::protobuf::Empty *response) { - auto transactions = deserializeTransactions(request); - - auto batches = batch_parser_->parseBatches(transactions); - - for (auto &batch : batches) { - batch_factory_->createTransactionBatch(batch).match( - [&](iroha::expected::Value> &value) { - this->command_service_->handleTransactionBatch( - std::move(value).value); - }, - [&](iroha::expected::Error &error) { - std::vector hashes; - - std::transform(batch.begin(), - batch.end(), - std::back_inserter(hashes), - [](const auto &tx) { return tx->hash(); }); - - auto error_msg = formErrorMessage(hashes, error.error); - // set error response for each transaction in a batch candidate - std::for_each( - hashes.begin(), hashes.end(), [this, &error_msg](auto &hash) { - status_bus_->publish(status_factory_->makeStatelessFail( - hash, - shared_model::interface::TxStatusFactory:: - TransactionError{error_msg, 0, 0})); - }); - }); + + grpc::Status CommandServiceTransportGrpc::Status( + grpc::ServerContext *context, + const iroha::protocol::TxStatusRequest *request, + iroha::protocol::ToriiResponse *response) { + *response = + std::static_pointer_cast( + command_service_->getStatus( + shared_model::crypto::Hash::fromHexString( + request->tx_hash()))) + ->getTransport(); + return grpc::Status::OK; } - return grpc::Status::OK; - } - - grpc::Status CommandServiceTransportGrpc::Status( - grpc::ServerContext *context, - const iroha::protocol::TxStatusRequest *request, - iroha::protocol::ToriiResponse *response) { - *response = - std::static_pointer_cast( - command_service_->getStatus( - shared_model::crypto::Hash::fromHexString(request->tx_hash()))) - ->getTransport(); - return grpc::Status::OK; - } - - namespace { - void handleEvents(rxcpp::composite_subscription &subscription, - rxcpp::schedulers::run_loop &run_loop) { - while (subscription.is_subscribed() or not run_loop.empty()) { - run_loop.dispatch(); + namespace { + void handleEvents(rxcpp::composite_subscription &subscription, + rxcpp::schedulers::run_loop &run_loop) { + while (subscription.is_subscribed() or not run_loop.empty()) { + run_loop.dispatch(); + } } + } // namespace + + grpc::Status CommandServiceTransportGrpc::StatusStream( + grpc::ServerContext *context, + const iroha::protocol::TxStatusRequest *request, + grpc::ServerWriter *response_writer) { + rxcpp::schedulers::run_loop rl; + + auto current_thread = + rxcpp::observe_on_one_worker(rxcpp::schedulers::make_run_loop(rl)); + + rxcpp::composite_subscription subscription; + + auto hash = shared_model::crypto::Hash::fromHexString(request->tx_hash()); + + auto client_id_format = boost::format("Peer: '%s', %s"); + std::string client_id = + (client_id_format % context->peer() % hash.toString()).str(); + + // in each round, increment the round counter, showing number of + // consecutive rounds without status update; if it becomes greater than + // some predefined value, stop the status streaming + std::atomic_int round_counter{0}; + consensus_gate_->onOutcome().subscribe( + [this, &subscription, &round_counter](const auto &) { + auto new_val = round_counter.load() + 1; + if (new_val >= maximum_rounds_without_update_) { + subscription.unsubscribe(); + } 
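                // Note: the branch above closes the stream once the configured
                // number of consecutive rounds passes without a status update;
                // the branch below merely counts another idle round.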
else { + round_counter++; + } + }); + + command_service_ + ->getStatusStream(hash) + // convert to transport objects + .map([&](auto response) { + log_->info("mapped {}, {}", *response, client_id); + return std::static_pointer_cast< + shared_model::proto::TransactionResponse>(response) + ->getTransport(); + }) + // complete the observable if client is disconnected + .take_while([=](const auto &) { + auto is_cancelled = context->IsCancelled(); + if (is_cancelled) { + log_->debug("client unsubscribed, {}", client_id); + } + return not is_cancelled; + }) + .subscribe(subscription, + [this, &response_writer, &client_id, &round_counter]( + iroha::protocol::ToriiResponse response) { + if (response_writer->Write(response)) { + log_->debug("status written, {}", client_id); + // reset consecutive rounds counter for this tx + round_counter.store(0); + } + }, + [&](std::exception_ptr ep) { + log_->error("something bad happened, client_id {}", + client_id); + }, + [&] { log_->debug("stream done, {}", client_id); }); + + // run loop while subscription is active or there are pending events in + // the queue + handleEvents(subscription, rl); + + log_->debug("status stream done, {}", client_id); + return grpc::Status::OK; } - } // namespace - - grpc::Status CommandServiceTransportGrpc::StatusStream( - grpc::ServerContext *context, - const iroha::protocol::TxStatusRequest *request, - grpc::ServerWriter *response_writer) { - rxcpp::schedulers::run_loop rl; - - auto current_thread = - rxcpp::observe_on_one_worker(rxcpp::schedulers::make_run_loop(rl)); - - rxcpp::composite_subscription subscription; - - auto hash = shared_model::crypto::Hash::fromHexString(request->tx_hash()); - - auto client_id_format = boost::format("Peer: '%s', %s"); - std::string client_id = - (client_id_format % context->peer() % hash.toString()).str(); - - // in each round, increment the round counter, showing number of consecutive - // rounds without status update; if it becomes greater than some predefined - // value, stop the status streaming - std::atomic_int round_counter{0}; - consensus_gate_->onOutcome().subscribe( - [this, &subscription, &round_counter](const auto &) { - auto new_val = round_counter.load() + 1; - if (new_val >= maximum_rounds_without_update_) { - subscription.unsubscribe(); - } else { - round_counter++; - } - }); - - command_service_ - ->getStatusStream(hash) - // convert to transport objects - .map([&](auto response) { - log_->info("mapped {}, {}", *response, client_id); - return std::static_pointer_cast< - shared_model::proto::TransactionResponse>(response) - ->getTransport(); - }) - // complete the observable if client is disconnected - .take_while([=](const auto &) { - auto is_cancelled = context->IsCancelled(); - if (is_cancelled) { - log_->debug("client unsubscribed, {}", client_id); - } - return not is_cancelled; - }) - .subscribe(subscription, - [this, &response_writer, &client_id, &round_counter]( - iroha::protocol::ToriiResponse response) { - if (response_writer->Write(response)) { - log_->debug("status written, {}", client_id); - // reset consecutive rounds counter for this tx - round_counter.store(0); - } - }, - [&](std::exception_ptr ep) { - log_->error("something bad happened, client_id {}", - client_id); - }, - [&] { log_->debug("stream done, {}", client_id); }); - - // run loop while subscription is active or there are pending events in - // the queue - handleEvents(subscription, rl); - - log_->debug("status stream done, {}", client_id); - return grpc::Status::OK; - } -} // namespace torii + } // 
namespace torii +} // namespace iroha diff --git a/irohad/torii/impl/command_service_transport_grpc.hpp b/irohad/torii/impl/command_service_transport_grpc.hpp index 07b889bec9..4cf3038ed8 100644 --- a/irohad/torii/impl/command_service_transport_grpc.hpp +++ b/irohad/torii/impl/command_service_transport_grpc.hpp @@ -32,112 +32,113 @@ namespace iroha { namespace network { class ConsensusGate; } -} // namespace iroha -namespace torii { - class CommandServiceTransportGrpc - : public iroha::protocol::CommandService_v1::Service { - public: - using TransportFactoryType = - shared_model::interface::AbstractTransportFactory< - shared_model::interface::Transaction, - iroha::protocol::Transaction>; - - /** - * Creates a new instance of CommandServiceTransportGrpc - * @param command_service - to delegate logic work - * @param status_bus is a common notifier for tx statuses - * @param status_factory - factory of statuses - * @param transaction_factory - factory of transactions - * @param batch_parser - parses of batches - * @param transaction_batch_factory - factory of batches - * @param initial_timeout - streaming timeout when tx is not received - * @param nonfinal_timeout - streaming timeout when tx is being processed - * @param log to print progress - */ - CommandServiceTransportGrpc( - std::shared_ptr command_service, - std::shared_ptr status_bus, - std::shared_ptr - status_factory, - std::shared_ptr transaction_factory, - std::shared_ptr - batch_parser, - std::shared_ptr - transaction_batch_factory, - std::shared_ptr consensus_gate, - int maximum_rounds_without_update, - logger::Logger log = logger::log("CommandServiceTransportGrpc")); - - /** - * Torii call via grpc - * @param context - call context (see grpc docs for details) - * @param request - transaction received - * @param response - no actual response (grpc stub for empty answer) - * @return status - */ - grpc::Status Torii(grpc::ServerContext *context, - const iroha::protocol::Transaction *request, - google::protobuf::Empty *response) override; - - /** - * Torii call for transactions list via grpc - * @param context - call context (see grpc docs for details) - * @param request - list of transactions received - * @param response - no actual response (grpc stub for empty answer) - * @return status - */ - grpc::Status ListTorii(grpc::ServerContext *context, - const iroha::protocol::TxList *request, - google::protobuf::Empty *response) override; - - /** - * Status call via grpc - * @param context - call context - * @param request - TxStatusRequest object which identifies transaction - * uniquely - * @param response - ToriiResponse which contains a current state of - * requested transaction - * @return status - */ - grpc::Status Status(grpc::ServerContext *context, - const iroha::protocol::TxStatusRequest *request, - iroha::protocol::ToriiResponse *response) override; - - /** - * StatusStream call via grpc - * @param context - call context - * @param request - TxStatusRequest object which identifies transaction - * uniquely - * @param response_writer - grpc::ServerWriter which can repeatedly send - * transaction statuses back to the client - * @return status - */ - grpc::Status StatusStream(grpc::ServerContext *context, - const iroha::protocol::TxStatusRequest *request, - grpc::ServerWriter - *response_writer) override; - - private: - /** - * Flat map transport transactions to shared model - */ - shared_model::interface::types::SharedTxsCollectionType - deserializeTransactions(const iroha::protocol::TxList *request); - - std::shared_ptr 
command_service_; - std::shared_ptr status_bus_; - std::shared_ptr status_factory_; - std::shared_ptr transaction_factory_; - std::shared_ptr - batch_parser_; - std::shared_ptr - batch_factory_; - logger::Logger log_; - - std::shared_ptr consensus_gate_; - const int maximum_rounds_without_update_; - }; -} // namespace torii + namespace torii { + class CommandServiceTransportGrpc + : public iroha::protocol::CommandService_v1::Service { + public: + using TransportFactoryType = + shared_model::interface::AbstractTransportFactory< + shared_model::interface::Transaction, + iroha::protocol::Transaction>; + + /** + * Creates a new instance of CommandServiceTransportGrpc + * @param command_service - to delegate logic work + * @param status_bus is a common notifier for tx statuses + * @param status_factory - factory of statuses + * @param transaction_factory - factory of transactions + * @param batch_parser - parses of batches + * @param transaction_batch_factory - factory of batches + * @param initial_timeout - streaming timeout when tx is not received + * @param nonfinal_timeout - streaming timeout when tx is being processed + * @param log to print progress + */ + CommandServiceTransportGrpc( + std::shared_ptr command_service, + std::shared_ptr status_bus, + std::shared_ptr + status_factory, + std::shared_ptr transaction_factory, + std::shared_ptr + batch_parser, + std::shared_ptr + transaction_batch_factory, + std::shared_ptr consensus_gate, + int maximum_rounds_without_update, + logger::Logger log = logger::log("CommandServiceTransportGrpc")); + + /** + * Torii call via grpc + * @param context - call context (see grpc docs for details) + * @param request - transaction received + * @param response - no actual response (grpc stub for empty answer) + * @return status + */ + grpc::Status Torii(grpc::ServerContext *context, + const iroha::protocol::Transaction *request, + google::protobuf::Empty *response) override; + + /** + * Torii call for transactions list via grpc + * @param context - call context (see grpc docs for details) + * @param request - list of transactions received + * @param response - no actual response (grpc stub for empty answer) + * @return status + */ + grpc::Status ListTorii(grpc::ServerContext *context, + const iroha::protocol::TxList *request, + google::protobuf::Empty *response) override; + + /** + * Status call via grpc + * @param context - call context + * @param request - TxStatusRequest object which identifies transaction + * uniquely + * @param response - ToriiResponse which contains a current state of + * requested transaction + * @return status + */ + grpc::Status Status(grpc::ServerContext *context, + const iroha::protocol::TxStatusRequest *request, + iroha::protocol::ToriiResponse *response) override; + + /** + * StatusStream call via grpc + * @param context - call context + * @param request - TxStatusRequest object which identifies transaction + * uniquely + * @param response_writer - grpc::ServerWriter which can repeatedly send + * transaction statuses back to the client + * @return status + */ + grpc::Status StatusStream( + grpc::ServerContext *context, + const iroha::protocol::TxStatusRequest *request, + grpc::ServerWriter *response_writer) + override; + + private: + /** + * Flat map transport transactions to shared model + */ + shared_model::interface::types::SharedTxsCollectionType + deserializeTransactions(const iroha::protocol::TxList *request); + + std::shared_ptr command_service_; + std::shared_ptr status_bus_; + std::shared_ptr status_factory_; + 
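      // Note: the factories below convert protobuf transactions into shared
      // model objects (transaction_factory_), split them into batch candidates
      // (batch_parser_) and build validated batches (batch_factory_); see
      // deserializeTransactions and ListTorii in the corresponding .cpp.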
std::shared_ptr transaction_factory_; + std::shared_ptr + batch_parser_; + std::shared_ptr + batch_factory_; + logger::Logger log_; + + std::shared_ptr consensus_gate_; + const int maximum_rounds_without_update_; + }; + } // namespace torii +} // namespace iroha #endif // TORII_COMMAND_SERVICE_TRANSPORT_GRPC_HPP diff --git a/irohad/torii/impl/query_service.cpp b/irohad/torii/impl/query_service.cpp index 5a145745b9..3228e84acb 100644 --- a/irohad/torii/impl/query_service.cpp +++ b/irohad/torii/impl/query_service.cpp @@ -11,124 +11,126 @@ #include "interfaces/iroha_internal/abstract_transport_factory.hpp" #include "validators/default_validator.hpp" -namespace torii { +namespace iroha { + namespace torii { - QueryService::QueryService( - std::shared_ptr query_processor, - std::shared_ptr query_factory, - logger::Logger log) - : query_processor_{std::move(query_processor)}, - query_factory_{std::move(query_factory)}, - log_{std::move(log)} {} + QueryService::QueryService( + std::shared_ptr query_processor, + std::shared_ptr query_factory, + logger::Logger log) + : query_processor_{std::move(query_processor)}, + query_factory_{std::move(query_factory)}, + log_{std::move(log)} {} - void QueryService::Find(iroha::protocol::Query const &request, - iroha::protocol::QueryResponse &response) { - shared_model::crypto::Hash hash; - auto blobPayload = shared_model::proto::makeBlob(request.payload()); - hash = shared_model::crypto::DefaultHashProvider::makeHash(blobPayload); + void QueryService::Find(iroha::protocol::Query const &request, + iroha::protocol::QueryResponse &response) { + shared_model::crypto::Hash hash; + auto blobPayload = shared_model::proto::makeBlob(request.payload()); + hash = shared_model::crypto::DefaultHashProvider::makeHash(blobPayload); - if (cache_.findItem(hash)) { - // Query was already processed - response.mutable_error_response()->set_reason( - iroha::protocol::ErrorResponse::STATELESS_INVALID); - return; - } + if (cache_.findItem(hash)) { + // Query was already processed + response.mutable_error_response()->set_reason( + iroha::protocol::ErrorResponse::STATELESS_INVALID); + return; + } - query_factory_->build(request).match( - [this, &hash, &response]( - const iroha::expected::Value< - std::unique_ptr> &query) { - // Send query to iroha - response = static_cast( - *query_processor_->queryHandle(*query.value)) - .getTransport(); - cache_.addItem(hash, response); - }, - [&hash, &response]( - const iroha::expected::Error &error) { - response.set_query_hash(hash.hex()); - response.mutable_error_response()->set_reason( - iroha::protocol::ErrorResponse::STATELESS_INVALID); - response.mutable_error_response()->set_message( - std::move(error.error.error)); - }); - } + query_factory_->build(request).match( + [this, &hash, &response]( + const iroha::expected::Value< + std::unique_ptr> &query) { + // Send query to iroha + response = static_cast( + *query_processor_->queryHandle(*query.value)) + .getTransport(); + cache_.addItem(hash, response); + }, + [&hash, &response]( + const iroha::expected::Error &error) { + response.set_query_hash(hash.hex()); + response.mutable_error_response()->set_reason( + iroha::protocol::ErrorResponse::STATELESS_INVALID); + response.mutable_error_response()->set_message( + std::move(error.error.error)); + }); + } - grpc::Status QueryService::Find(grpc::ServerContext *context, - const iroha::protocol::Query *request, - iroha::protocol::QueryResponse *response) { - Find(*request, *response); - return grpc::Status::OK; - } + grpc::Status 
QueryService::Find(grpc::ServerContext *context, + const iroha::protocol::Query *request, + iroha::protocol::QueryResponse *response) { + Find(*request, *response); + return grpc::Status::OK; + } - grpc::Status QueryService::FetchCommits( - grpc::ServerContext *context, - const iroha::protocol::BlocksQuery *request, - grpc::ServerWriter *writer) { - log_->debug("Fetching commits"); - shared_model::proto::TransportBuilder< - shared_model::proto::BlocksQuery, - shared_model::validation::DefaultSignedBlocksQueryValidator>() - .build(*request) - .match( - [this, context, request, writer]( - const iroha::expected::Value - &query) { - rxcpp::composite_subscription sub; - query_processor_->blocksQueryHandle(query.value) - .as_blocking() - .subscribe( - sub, - [this, context, &sub, request, writer]( - const std::shared_ptr< - shared_model::interface::BlockQueryResponse> - response) { - if (context->IsCancelled()) { - log_->debug("Unsubscribed"); - sub.unsubscribe(); - } else { - iroha::visit_in_place( - response->get(), - [this, writer, request]( - const shared_model::interface::BlockResponse - &block_response) { - log_->debug( - "{} receives committed block", - request->meta().creator_account_id()); - auto proto_block_response = static_cast< - const shared_model::proto::BlockResponse &>( - block_response); - writer->Write( - proto_block_response.getTransport()); - }, - [this, writer, request]( - const shared_model::interface:: - BlockErrorResponse - &block_error_response) { - log_->debug( - "{} received error with message: {}", - request->meta().creator_account_id(), - block_error_response.message()); - auto proto_block_error_response = - static_cast( - block_error_response); - writer->WriteLast( - proto_block_error_response.getTransport(), - grpc::WriteOptions()); - }); - } - }); - }, - [this, writer](const auto &error) { - log_->debug("Stateless invalid: {}", error.error); - iroha::protocol::BlockQueryResponse response; - response.mutable_block_error_response()->set_message( - std::move(error.error)); - writer->WriteLast(response, grpc::WriteOptions()); - }); + grpc::Status QueryService::FetchCommits( + grpc::ServerContext *context, + const iroha::protocol::BlocksQuery *request, + grpc::ServerWriter *writer) { + log_->debug("Fetching commits"); + shared_model::proto::TransportBuilder< + shared_model::proto::BlocksQuery, + shared_model::validation::DefaultSignedBlocksQueryValidator>() + .build(*request) + .match( + [this, context, request, writer]( + const iroha::expected::Value + &query) { + rxcpp::composite_subscription sub; + query_processor_->blocksQueryHandle(query.value) + .as_blocking() + .subscribe( + sub, + [this, context, &sub, request, writer]( + const std::shared_ptr< + shared_model::interface::BlockQueryResponse> + response) { + if (context->IsCancelled()) { + log_->debug("Unsubscribed"); + sub.unsubscribe(); + } else { + iroha::visit_in_place( + response->get(), + [this, writer, request]( + const shared_model::interface::BlockResponse + &block_response) { + log_->debug( + "{} receives committed block", + request->meta().creator_account_id()); + auto proto_block_response = static_cast< + const shared_model::proto::BlockResponse + &>(block_response); + writer->Write( + proto_block_response.getTransport()); + }, + [this, writer, request]( + const shared_model::interface:: + BlockErrorResponse + &block_error_response) { + log_->debug( + "{} received error with message: {}", + request->meta().creator_account_id(), + block_error_response.message()); + auto proto_block_error_response = + 
static_cast( + block_error_response); + writer->WriteLast( + proto_block_error_response.getTransport(), + grpc::WriteOptions()); + }); + } + }); + }, + [this, writer](const auto &error) { + log_->debug("Stateless invalid: {}", error.error); + iroha::protocol::BlockQueryResponse response; + response.mutable_block_error_response()->set_message( + std::move(error.error)); + writer->WriteLast(response, grpc::WriteOptions()); + }); - return grpc::Status::OK; - } + return grpc::Status::OK; + } -} // namespace torii + } // namespace torii +} // namespace iroha diff --git a/irohad/torii/query_service.hpp b/irohad/torii/query_service.hpp index c8388a800a..57a069742a 100644 --- a/irohad/torii/query_service.hpp +++ b/irohad/torii/query_service.hpp @@ -26,55 +26,58 @@ namespace shared_model { } } // namespace shared_model -namespace torii { - /** - * Actual implementation of async QueryService. - * ToriiServiceHandler::(SomeMethod)Handler calls a corresponding method in - * this class. - */ - class QueryService : public iroha::protocol::QueryService_v1::Service { - public: - using QueryFactoryType = shared_model::interface::AbstractTransportFactory< - shared_model::interface::Query, - iroha::protocol::Query>; - - QueryService(std::shared_ptr query_processor, - std::shared_ptr query_factory, - logger::Logger log = logger::log("Query Service")); - - QueryService(const QueryService &) = delete; - QueryService &operator=(const QueryService &) = delete; - +namespace iroha { + namespace torii { /** - * actual implementation of async Find in QueryService - * @param request - Query - * @param response - QueryResponse + * Actual implementation of async QueryService. + * ToriiServiceHandler::(SomeMethod)Handler calls a corresponding method in + * this class. */ - void Find(iroha::protocol::Query const &request, - iroha::protocol::QueryResponse &response); - - grpc::Status Find(grpc::ServerContext *context, - const iroha::protocol::Query *request, - iroha::protocol::QueryResponse *response) override; - - grpc::Status FetchCommits( - grpc::ServerContext *context, - const iroha::protocol::BlocksQuery *request, - grpc::ServerWriter<::iroha::protocol::BlockQueryResponse> *writer) - override; - - private: - std::shared_ptr query_processor_; - std::shared_ptr query_factory_; - - iroha::cache::Cache - cache_; - - logger::Logger log_; - }; - -} // namespace torii + class QueryService : public iroha::protocol::QueryService_v1::Service { + public: + using QueryFactoryType = + shared_model::interface::AbstractTransportFactory< + shared_model::interface::Query, + iroha::protocol::Query>; + + QueryService( + std::shared_ptr query_processor, + std::shared_ptr query_factory, + logger::Logger log = logger::log("Query Service")); + + QueryService(const QueryService &) = delete; + QueryService &operator=(const QueryService &) = delete; + + /** + * actual implementation of async Find in QueryService + * @param request - Query + * @param response - QueryResponse + */ + void Find(iroha::protocol::Query const &request, + iroha::protocol::QueryResponse &response); + + grpc::Status Find(grpc::ServerContext *context, + const iroha::protocol::Query *request, + iroha::protocol::QueryResponse *response) override; + + grpc::Status FetchCommits( + grpc::ServerContext *context, + const iroha::protocol::BlocksQuery *request, + grpc::ServerWriter<::iroha::protocol::BlockQueryResponse> *writer) + override; + + private: + std::shared_ptr query_processor_; + std::shared_ptr query_factory_; + + iroha::cache::Cache + cache_; + + logger::Logger log_; 
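        // Note: cache_ above stores the response for each processed query,
        // keyed by the hash of the query payload; Find() checks it first and
        // rejects a repeated query as stateless invalid instead of running it
        // again.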
+ }; + } // namespace torii +} // namespace iroha #endif // TORII_QUERY_SERVICE_HPP diff --git a/test/framework/integration_framework/integration_test_framework.cpp b/test/framework/integration_framework/integration_test_framework.cpp index 41c779be41..fc441620f4 100644 --- a/test/framework/integration_framework/integration_test_framework.cpp +++ b/test/framework/integration_framework/integration_test_framework.cpp @@ -10,6 +10,7 @@ #include #include +#include "ametsuchi/storage.hpp" #include "backend/protobuf/block.hpp" #include "backend/protobuf/common_objects/proto_common_objects_factory.hpp" #include "backend/protobuf/proto_transport_factory.hpp" @@ -37,10 +38,12 @@ #include "module/shared_model/builders/protobuf/block.hpp" #include "module/shared_model/builders/protobuf/proposal.hpp" #include "module/shared_model/validators/always_valid_validators.hpp" +#include "multi_sig_transactions/mst_processor.hpp" #include "multi_sig_transactions/transport/mst_transport_grpc.hpp" #include "network/impl/async_grpc_client.hpp" #include "network/impl/grpc_channel_builder.hpp" #include "synchronizer/synchronizer_common.hpp" +#include "torii/status_bus.hpp" using namespace shared_model::crypto; using namespace std::literals::string_literals; diff --git a/test/framework/integration_framework/integration_test_framework.hpp b/test/framework/integration_framework/integration_test_framework.hpp index 06b1a47c3b..b0d2349347 100644 --- a/test/framework/integration_framework/integration_test_framework.hpp +++ b/test/framework/integration_framework/integration_test_framework.hpp @@ -17,6 +17,7 @@ #include #include +#include "backend/protobuf/queries/proto_query.hpp" #include "backend/protobuf/query_responses/proto_query_response.hpp" #include "backend/protobuf/transaction_responses/proto_tx_response.hpp" #include "framework/integration_framework/iroha_instance.hpp" diff --git a/test/framework/integration_framework/iroha_instance.cpp b/test/framework/integration_framework/iroha_instance.cpp index da9ff7913e..e3e3e95e5c 100644 --- a/test/framework/integration_framework/iroha_instance.cpp +++ b/test/framework/integration_framework/iroha_instance.cpp @@ -4,8 +4,11 @@ */ #include "framework/integration_framework/iroha_instance.hpp" + #include #include + +#include "ametsuchi/storage.hpp" #include "cryptography/keypair.hpp" #include "framework/config_helper.hpp" #include "framework/integration_framework/test_irohad.hpp" diff --git a/test/framework/integration_framework/test_irohad.hpp b/test/framework/integration_framework/test_irohad.hpp index f586c2fc21..6dfd3eccbd 100644 --- a/test/framework/integration_framework/test_irohad.hpp +++ b/test/framework/integration_framework/test_irohad.hpp @@ -8,6 +8,7 @@ #include "cryptography/keypair.hpp" #include "main/application.hpp" +#include "main/server_runner.hpp" namespace integration_framework { /** diff --git a/test/module/irohad/ametsuchi/tx_presence_cache_stub.hpp b/test/module/irohad/ametsuchi/tx_presence_cache_stub.hpp index 41590f8aad..1759deaf8a 100644 --- a/test/module/irohad/ametsuchi/tx_presence_cache_stub.hpp +++ b/test/module/irohad/ametsuchi/tx_presence_cache_stub.hpp @@ -7,6 +7,7 @@ #define IROHA_TX_PRESENCE_CACHE_STUB_HPP #include "ametsuchi/tx_presence_cache.hpp" +#include "interfaces/iroha_internal/transaction_batch.hpp" namespace iroha { namespace ametsuchi { diff --git a/test/module/irohad/torii/query_service_test.cpp b/test/module/irohad/torii/query_service_test.cpp index 462d7d3e3a..284488352f 100644 --- 
a/test/module/irohad/torii/query_service_test.cpp +++ b/test/module/irohad/torii/query_service_test.cpp @@ -12,8 +12,6 @@ #include "utils/query_error_response_visitor.hpp" #include "validators/protobuf/proto_query_validator.hpp" -using namespace torii; - using namespace iroha; using namespace iroha::torii; diff --git a/test/module/irohad/torii/torii_mocks.hpp b/test/module/irohad/torii/torii_mocks.hpp index 5bd4f8fb63..13e8a817c1 100644 --- a/test/module/irohad/torii/torii_mocks.hpp +++ b/test/module/irohad/torii/torii_mocks.hpp @@ -37,7 +37,7 @@ namespace iroha { MOCK_METHOD0(statuses, rxcpp::observable()); }; - class MockCommandService : public ::torii::CommandService { + class MockCommandService : public iroha::torii::CommandService { public: MOCK_METHOD1(handleTransactionBatch, void(std::shared_ptr< diff --git a/test/module/irohad/torii/torii_queries_test.cpp b/test/module/irohad/torii/torii_queries_test.cpp index 8980f20d3d..7419b212a8 100644 --- a/test/module/irohad/torii/torii_queries_test.cpp +++ b/test/module/irohad/torii/torii_queries_test.cpp @@ -74,7 +74,7 @@ class ToriiQueriesTest : public testing::Test { //----------- Server run ---------------- initQueryFactory(); - runner->append(std::make_unique(qpi, query_factory)) + runner->append(std::make_unique(qpi, query_factory)) .run() .match( [this](iroha::expected::Value port) { @@ -115,7 +115,7 @@ class ToriiQueriesTest : public testing::Test { std::shared_ptr pending_txs_storage; std::shared_ptr query_response_factory; - std::shared_ptr query_factory; + std::shared_ptr query_factory; const std::string ip = "127.0.0.1"; int port; diff --git a/test/module/irohad/torii/torii_service_query_test.cpp b/test/module/irohad/torii/torii_service_query_test.cpp index bdea9e4b27..59e80817d6 100644 --- a/test/module/irohad/torii/torii_service_query_test.cpp +++ b/test/module/irohad/torii/torii_service_query_test.cpp @@ -39,8 +39,8 @@ class ToriiQueryServiceTest : public ::testing::Test { //----------- Server run ---------------- initQueryFactory(); runner - ->append(std::make_unique(query_processor, - query_factory)) + ->append(std::make_unique(query_processor, + query_factory)) .run() .match( [this](iroha::expected::Value port) { @@ -70,7 +70,7 @@ class ToriiQueryServiceTest : public ::testing::Test { std::unique_ptr runner; std::shared_ptr query_processor; - std::shared_ptr query_factory; + std::shared_ptr query_factory; iroha::protocol::Block block; diff --git a/test/module/irohad/torii/torii_transport_command_test.cpp b/test/module/irohad/torii/torii_transport_command_test.cpp index 8bd73f89bf..a6fbeb98d5 100644 --- a/test/module/irohad/torii/torii_transport_command_test.cpp +++ b/test/module/irohad/torii/torii_transport_command_test.cpp @@ -86,15 +86,15 @@ class CommandServiceTransportGrpcTest : public testing::Test { status_bus = std::make_shared(); command_service = std::make_shared(); - transport_grpc = std::make_shared( - command_service, - status_bus, - status_factory, - transaction_factory, - batch_parser, - batch_factory, - mock_consensus_gate, - 2); + transport_grpc = + std::make_shared(command_service, + status_bus, + status_factory, + transaction_factory, + batch_parser, + batch_factory, + mock_consensus_gate, + 2); } std::shared_ptr status_bus; @@ -108,7 +108,7 @@ class CommandServiceTransportGrpcTest : public testing::Test { std::shared_ptr status_factory; std::shared_ptr command_service; - std::shared_ptr transport_grpc; + std::shared_ptr transport_grpc; std::shared_ptr mock_consensus_gate; From 
4216d142c998ddbd15aec95b2c6933a5fde57ba9 Mon Sep 17 00:00:00 2001 From: Nikita Alekseev Date: Wed, 16 Jan 2019 15:28:17 +0300 Subject: [PATCH 09/41] Fix initialization of observable, to resolve memory sanitizer issue (#2021) Signed-off-by: Nikita Alekseev --- .../impl/gossip_propagation_strategy.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/irohad/multi_sig_transactions/impl/gossip_propagation_strategy.cpp b/irohad/multi_sig_transactions/impl/gossip_propagation_strategy.cpp index 70ea2ede69..39953a7965 100644 --- a/irohad/multi_sig_transactions/impl/gossip_propagation_strategy.cpp +++ b/irohad/multi_sig_transactions/impl/gossip_propagation_strategy.cpp @@ -27,7 +27,8 @@ namespace iroha { non_visited({}), emit_worker(emit_worker), emitent(rxcpp::observable<>::interval(steady_clock::now(), - params.emission_period) + params.emission_period, + emit_worker) .map([this, params](int) { PropagationData vec; auto range = boost::irange(0u, params.amount_per_once); @@ -40,8 +41,7 @@ namespace iroha { }; }); return vec; - }) - .subscribe_on(emit_worker)) {} + })) {} rxcpp::observable GossipPropagationStrategy::emitter() { return emitent; From 4e53e9fffe27e932cec5fda591caf69e8e9ecdf3 Mon Sep 17 00:00:00 2001 From: Konstantin Munichev Date: Wed, 16 Jan 2019 16:16:48 +0300 Subject: [PATCH 10/41] Irohad field reorder (#2013) * Irohad field reorder * Fix race condition with ordering_init destructor Signed-off-by: Konstantin Munichev --- irohad/main/application.cpp | 8 +--- irohad/main/application.hpp | 79 ++++++++++++++++++------------------- 2 files changed, 39 insertions(+), 48 deletions(-) diff --git a/irohad/main/application.cpp b/irohad/main/application.cpp index f36cde6abd..0dd2342861 100644 --- a/irohad/main/application.cpp +++ b/irohad/main/application.cpp @@ -442,8 +442,8 @@ void Irohad::initMstProcessor() { mst_propagation = std::make_shared( storage, rxcpp::observe_on_new_thread(), *opt_mst_gossip_params_); } else { - mst_propagation = std::make_shared(); mst_transport = std::make_shared(); + mst_propagation = std::make_shared(); } auto mst_time = std::make_shared(); @@ -569,9 +569,3 @@ Irohad::RunResult Irohad::run() { return e; }); } - -Irohad::~Irohad() { - // TODO andrei 17.09.18: IR-1710 Verify that all components' destructors are - // called in irohad destructor - storage->freeConnections(); -} diff --git a/irohad/main/application.hpp b/irohad/main/application.hpp index 2151753e34..b0153f1adf 100644 --- a/irohad/main/application.hpp +++ b/irohad/main/application.hpp @@ -117,7 +117,7 @@ class Irohad { */ RunResult run(); - virtual ~Irohad(); + virtual ~Irohad() = default; protected: // -----------------------| component initialization |------------------------ @@ -179,6 +179,24 @@ class Irohad { opt_mst_gossip_params_; // ------------------------| internal dependencies |------------------------- + public: + shared_model::crypto::Keypair keypair; + std::shared_ptr storage; + + protected: + logger::Logger log_; + + // initialization objects + iroha::network::OnDemandOrderingInit ordering_init; + iroha::consensus::yac::YacInit yac_init; + iroha::network::BlockLoaderInit loader_init; + + // common objects factory + std::shared_ptr + common_objects_factory_; + + // WSV restorer + std::shared_ptr wsv_restorer_; // crypto provider std::shared_ptr> crypto_signer_; @@ -190,21 +208,30 @@ class Irohad { std::shared_ptr stateful_validator; std::shared_ptr chain_validator; - // WSV restorer - std::shared_ptr wsv_restorer_; - // async call std::shared_ptr> async_call_; - // 
common objects factory - std::shared_ptr - common_objects_factory_; - // transaction batch factory std::shared_ptr transaction_batch_factory_; + // transaction factory + std::shared_ptr> + transaction_factory; + + // query response factory + std::shared_ptr + query_response_factory_; + + // query factory + std::shared_ptr> + query_factory; + // persistent cache std::shared_ptr persistent_cache; @@ -230,31 +257,16 @@ class Irohad { // pcs std::shared_ptr pcs; - // transaction factory - std::shared_ptr> - transaction_factory; - - // query factory - std::shared_ptr> - query_factory; - - // query response factory - std::shared_ptr - query_response_factory_; + // status bus + std::shared_ptr status_bus_; // mst + std::shared_ptr mst_transport; std::shared_ptr mst_processor; // pending transactions storage std::shared_ptr pending_txs_storage_; - // status bus - std::shared_ptr status_bus_; - // transaction service std::shared_ptr command_service; std::shared_ptr @@ -265,21 +277,6 @@ class Irohad { std::unique_ptr torii_server; std::unique_ptr internal_server; - - // initialization objects - iroha::network::OnDemandOrderingInit ordering_init; - iroha::consensus::yac::YacInit yac_init; - iroha::network::BlockLoaderInit loader_init; - - std::shared_ptr mst_transport; - - logger::Logger log_; - - public: - std::shared_ptr storage; - - shared_model::crypto::Keypair keypair; - grpc::ServerBuilder builder; }; #endif // IROHA_APPLICATION_HPP From d8c8c1fc82f6cb24c2e4af2fc0c172394e53c53f Mon Sep 17 00:00:00 2001 From: Victor Drobny Date: Wed, 16 Jan 2019 22:57:01 +0100 Subject: [PATCH 11/41] Feature/remove common object builders (#1999) * remove common object builders from postgres executor test Signed-off-by: Victor Drobny --- irohad/main/application.cpp | 2 + irohad/main/application.hpp | 2 +- .../acceptance/query_permission_test_ast.cpp | 34 +++----- .../acceptance/query_permission_test_ast.hpp | 4 +- .../consensus/consensus_sunny_day.cpp | 12 ++- .../ametsuchi/wsv_query_command_test.cpp | 10 ++- .../gossip_propagation_strategy_test.cpp | 3 +- .../mst_processor_test.cpp | 7 +- .../mst_test_helpers.hpp | 10 --- .../multi_sig_transactions/transport_test.cpp | 8 +- .../irohad/network/block_loader_test.cpp | 20 ++--- .../irohad/ordering/ordering_service_test.cpp | 22 ++--- .../torii/processor/query_processor_test.cpp | 1 - .../processor/transaction_processor_test.cpp | 1 - .../shared_model/builders/CMakeLists.txt | 1 - .../builders/common_objects/CMakeLists.txt | 49 ----------- .../common_objects/account_asset_builder.hpp | 69 --------------- .../account_asset_builder_test.cpp | 79 ----------------- .../common_objects/account_builder.hpp | 74 ---------------- .../common_objects/account_builder_test.cpp | 85 ------------------- .../builders/common_objects/asset_builder.hpp | 66 -------------- .../common_objects/asset_builder_test.cpp | 76 ----------------- .../common_objects/builders_test_fixture.hpp | 39 --------- .../builders/common_objects/common.hpp | 84 ------------------ .../common_objects/domain_builder.hpp | 51 ----------- .../builders/common_objects/peer_builder.hpp | 57 ------------- .../common_objects/peer_builder_test.cpp | 63 -------------- .../common_objects/signature_builder.hpp | 62 -------------- .../common_objects/signature_builder_test.cpp | 72 ---------------- .../builders/protobuf/CMakeLists.txt | 40 --------- .../proto_account_asset_builder.hpp | 51 ----------- .../proto_account_asset_builder_test.cpp | 55 ------------ .../common_objects/proto_account_builder.hpp | 56 ------------ 
.../proto_account_builder_test.cpp | 62 -------------- .../common_objects/proto_asset_builder.hpp | 49 ----------- .../proto_asset_builder_test.cpp | 54 ------------ .../common_objects/proto_domain_builder.hpp | 39 --------- .../common_objects/proto_peer_builder.hpp | 42 --------- .../proto_peer_builder_test.cpp | 69 --------------- .../proto_signature_builder.hpp | 46 ---------- .../proto_signature_builder_test.cpp | 49 ----------- .../protobuf/test_account_asset_builder.hpp | 17 ---- .../protobuf/test_account_builder.hpp | 17 ---- .../builders/protobuf/test_asset_builder.hpp | 17 ---- .../builders/protobuf/test_domain_builder.hpp | 17 ---- .../builders/protobuf/test_peer_builder.hpp | 17 ---- .../protobuf/test_signature_builder.hpp | 17 ---- .../cryptography/security_signatures_test.cpp | 22 +++-- test/module/shared_model/interface_mocks.hpp | 26 ++++++ .../irohad_test_data/config.sample.copy | 10 +++ 50 files changed, 113 insertions(+), 1722 deletions(-) delete mode 100644 test/module/shared_model/builders/common_objects/CMakeLists.txt delete mode 100644 test/module/shared_model/builders/common_objects/account_asset_builder.hpp delete mode 100644 test/module/shared_model/builders/common_objects/account_asset_builder_test.cpp delete mode 100644 test/module/shared_model/builders/common_objects/account_builder.hpp delete mode 100644 test/module/shared_model/builders/common_objects/account_builder_test.cpp delete mode 100644 test/module/shared_model/builders/common_objects/asset_builder.hpp delete mode 100644 test/module/shared_model/builders/common_objects/asset_builder_test.cpp delete mode 100644 test/module/shared_model/builders/common_objects/builders_test_fixture.hpp delete mode 100644 test/module/shared_model/builders/common_objects/common.hpp delete mode 100644 test/module/shared_model/builders/common_objects/domain_builder.hpp delete mode 100644 test/module/shared_model/builders/common_objects/peer_builder.hpp delete mode 100644 test/module/shared_model/builders/common_objects/peer_builder_test.cpp delete mode 100644 test/module/shared_model/builders/common_objects/signature_builder.hpp delete mode 100644 test/module/shared_model/builders/common_objects/signature_builder_test.cpp delete mode 100644 test/module/shared_model/builders/protobuf/common_objects/proto_account_asset_builder.hpp delete mode 100644 test/module/shared_model/builders/protobuf/common_objects/proto_account_asset_builder_test.cpp delete mode 100644 test/module/shared_model/builders/protobuf/common_objects/proto_account_builder.hpp delete mode 100644 test/module/shared_model/builders/protobuf/common_objects/proto_account_builder_test.cpp delete mode 100644 test/module/shared_model/builders/protobuf/common_objects/proto_asset_builder.hpp delete mode 100644 test/module/shared_model/builders/protobuf/common_objects/proto_asset_builder_test.cpp delete mode 100644 test/module/shared_model/builders/protobuf/common_objects/proto_domain_builder.hpp delete mode 100644 test/module/shared_model/builders/protobuf/common_objects/proto_peer_builder.hpp delete mode 100644 test/module/shared_model/builders/protobuf/common_objects/proto_peer_builder_test.cpp delete mode 100644 test/module/shared_model/builders/protobuf/common_objects/proto_signature_builder.hpp delete mode 100644 test/module/shared_model/builders/protobuf/common_objects/proto_signature_builder_test.cpp delete mode 100644 test/module/shared_model/builders/protobuf/test_account_asset_builder.hpp delete mode 100644 
test/module/shared_model/builders/protobuf/test_account_builder.hpp delete mode 100644 test/module/shared_model/builders/protobuf/test_asset_builder.hpp delete mode 100644 test/module/shared_model/builders/protobuf/test_domain_builder.hpp delete mode 100644 test/module/shared_model/builders/protobuf/test_peer_builder.hpp delete mode 100644 test/module/shared_model/builders/protobuf/test_signature_builder.hpp create mode 100644 test/system/irohad_test_data/config.sample.copy diff --git a/irohad/main/application.cpp b/irohad/main/application.cpp index 0dd2342861..6d9820e123 100644 --- a/irohad/main/application.cpp +++ b/irohad/main/application.cpp @@ -92,6 +92,8 @@ Irohad::Irohad(const std::string &block_store_dir, initStorage(); } +Irohad::~Irohad() = default; + /** * Initializing iroha daemon */ diff --git a/irohad/main/application.hpp b/irohad/main/application.hpp index b0153f1adf..421153bcda 100644 --- a/irohad/main/application.hpp +++ b/irohad/main/application.hpp @@ -117,7 +117,7 @@ class Irohad { */ RunResult run(); - virtual ~Irohad() = default; + virtual ~Irohad(); protected: // -----------------------| component initialization |------------------------ diff --git a/test/integration/acceptance/query_permission_test_ast.cpp b/test/integration/acceptance/query_permission_test_ast.cpp index dfc19e2b86..dc049068c8 100644 --- a/test/integration/acceptance/query_permission_test_ast.cpp +++ b/test/integration/acceptance/query_permission_test_ast.cpp @@ -6,28 +6,18 @@ #include "integration/acceptance/query_permission_test_ast.hpp" #include "interfaces/query_responses/account_asset_response.hpp" -#include "module/shared_model/builders/common_objects/account_asset_builder.hpp" -#include "module/shared_model/builders/protobuf/common_objects/proto_account_asset_builder.hpp" using shared_model::interface::Amount; -using shared_model::proto::AccountAsset; -using shared_model::proto::AccountAssetBuilder; using namespace common_constants; QueryPermissionAssets::QueryPermissionAssets() : QueryPermissionTestBase({Role::kGetMyAccAst}, {Role::kGetDomainAccAst}, {Role::kGetAllAccAst}), - account_assets_({AccountAssetBuilder() - .accountId(kUserId) - .assetId(std::string("asset1#") + kDomain) - .balance(Amount("100.0")) - .build(), - AccountAssetBuilder() - .accountId(kUserId) - .assetId(std::string("asset2#") + kDomain) - .balance(Amount("200.0")) - .build()}) {} + account_assets_( + {std::make_pair(std::string("asset1#") + kDomain, Amount("100.0")), + std::make_pair(std::string("asset2#") + kDomain, Amount("200.0"))}) { +} IntegrationTestFramework &QueryPermissionAssets::prepareState( AcceptanceFixture &fixture, @@ -43,17 +33,15 @@ IntegrationTestFramework &QueryPermissionAssets::prepareState( // Add assets to target user for (const auto &asset : account_assets_) { - const std::string asset_id = asset.assetId(); + const std::string asset_id = asset.first; const auto domain_sep = std::find(asset_id.cbegin(), asset_id.cend(), '#'); const std::string asset_name(asset_id.cbegin(), domain_sep); const std::string asset_domain(domain_sep + 1, asset_id.cend()); itf.sendTxAwait( fixture.complete( fixture.baseTx(kUserId) - .createAsset( - asset_name, asset_domain, asset.balance().precision()) - .addAssetQuantity(asset.assetId(), - asset.balance().toStringRepr()), + .createAsset(asset_name, asset_domain, asset.second.precision()) + .addAssetQuantity(asset.first, asset.second.toStringRepr()), kUserKeypair), getBlockTransactionsAmountChecker(1)); } @@ -72,7 +60,13 @@ 
QueryPermissionAssets::getGeneralResponseChecker() { ASSERT_EQ(boost::size(resp_assets), account_assets_.size()); // check that every initially created asset is present in the result for (const auto &asset : account_assets_) { - ASSERT_NE(std::find(resp_assets.begin(), resp_assets.end(), asset), + ASSERT_NE(std::find_if(resp_assets.begin(), + resp_assets.end(), + [&asset](const auto &a) { + return asset.first == a.assetId() + and asset.second == a.balance() + and kUserId == a.accountId(); + }), resp_assets.end()); } }) << "Actual response: " diff --git a/test/integration/acceptance/query_permission_test_ast.hpp b/test/integration/acceptance/query_permission_test_ast.hpp index 5c461688a4..78b98f2076 100644 --- a/test/integration/acceptance/query_permission_test_ast.hpp +++ b/test/integration/acceptance/query_permission_test_ast.hpp @@ -47,7 +47,9 @@ class QueryPermissionAssets final : public QueryPermissionTestBase { std::function getGeneralResponseChecker() override; - std::vector account_assets_; + std::vector> + account_assets_; }; #endif /* QUERY_PERMISSION_TEST_AST_HPP_ */ diff --git a/test/integration/consensus/consensus_sunny_day.cpp b/test/integration/consensus/consensus_sunny_day.cpp index 2154e552b5..46e21bbc2f 100644 --- a/test/integration/consensus/consensus_sunny_day.cpp +++ b/test/integration/consensus/consensus_sunny_day.cpp @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ +#include #include #include "consensus/yac/impl/timer_impl.hpp" #include "consensus/yac/storage/yac_proposal_storage.hpp" @@ -10,7 +11,7 @@ #include "cryptography/crypto_provider/crypto_defaults.hpp" #include "framework/test_subscriber.hpp" #include "module/irohad/consensus/yac/yac_mocks.hpp" -#include "module/shared_model/builders/protobuf/test_signature_builder.hpp" +#include "module/shared_model/interface_mocks.hpp" using ::testing::_; using ::testing::An; @@ -34,15 +35,22 @@ class FixedCryptoProvider : public MockYacCryptoProvider { shared_model::crypto::DefaultCryptoAlgorithmType::kPublicKeyLength, 0); std::copy(public_key.begin(), public_key.end(), key.begin()); pubkey = clone(shared_model::crypto::PublicKey(key)); + data = std::make_unique(""); } VoteMessage getVote(YacHash hash) override { auto vote = MockYacCryptoProvider::getVote(hash); - vote.signature = clone(TestSignatureBuilder().publicKey(*pubkey).build()); + auto signature = std::make_shared(); + EXPECT_CALL(*signature, publicKey()) + .WillRepeatedly(testing::ReturnRef(*pubkey)); + EXPECT_CALL(*signature, signedData()) + .WillRepeatedly(testing::ReturnRef(*data)); + vote.signature = signature; return vote; } std::unique_ptr pubkey; + std::unique_ptr data; }; class ConsensusSunnyDayTest : public ::testing::Test { diff --git a/test/module/irohad/ametsuchi/wsv_query_command_test.cpp b/test/module/irohad/ametsuchi/wsv_query_command_test.cpp index 6be942d964..6ce40e2c56 100644 --- a/test/module/irohad/ametsuchi/wsv_query_command_test.cpp +++ b/test/module/irohad/ametsuchi/wsv_query_command_test.cpp @@ -3,11 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ +#include + #include "ametsuchi/impl/postgres_wsv_command.hpp" #include "ametsuchi/impl/postgres_wsv_query.hpp" #include "framework/result_fixture.hpp" #include "module/irohad/ametsuchi/ametsuchi_fixture.hpp" -#include "module/shared_model/builders/protobuf/test_peer_builder.hpp" +#include "module/shared_model/interface_mocks.hpp" namespace iroha { namespace ametsuchi { @@ -49,9 +51,11 @@ namespace iroha { void SetUp() override { WsvQueryCommandTest::SetUp(); - peer = 
clone(TestPeerBuilder().build()); + peer = makePeer(address, pk); } - std::unique_ptr peer; + std::shared_ptr peer; + shared_model::interface::types::AddressType address{""}; + shared_model::interface::types::PubkeyType pk{""}; }; /** diff --git a/test/module/irohad/multi_sig_transactions/gossip_propagation_strategy_test.cpp b/test/module/irohad/multi_sig_transactions/gossip_propagation_strategy_test.cpp index de0148e7a5..cbfc946152 100644 --- a/test/module/irohad/multi_sig_transactions/gossip_propagation_strategy_test.cpp +++ b/test/module/irohad/multi_sig_transactions/gossip_propagation_strategy_test.cpp @@ -20,6 +20,7 @@ #include "ametsuchi/peer_query_factory.hpp" #include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" #include "module/irohad/multi_sig_transactions/mst_test_helpers.hpp" +#include "module/shared_model/interface_mocks.hpp" using namespace iroha; @@ -39,7 +40,7 @@ PropagationData generate(std::vector &ids, size_t num) { PropagationData peers; std::transform( ids.begin(), ids.end(), std::back_inserter(peers), [](auto &s) { - return makePeer(s, ""); + return makePeer(s, shared_model::interface::types::PubkeyType("")); }); return peers; } diff --git a/test/module/irohad/multi_sig_transactions/mst_processor_test.cpp b/test/module/irohad/multi_sig_transactions/mst_processor_test.cpp index 80fc68f523..ecaf0021bf 100644 --- a/test/module/irohad/multi_sig_transactions/mst_processor_test.cpp +++ b/test/module/irohad/multi_sig_transactions/mst_processor_test.cpp @@ -11,6 +11,7 @@ #include "logger/logger.hpp" #include "module/irohad/multi_sig_transactions/mst_mocks.hpp" #include "module/irohad/multi_sig_transactions/mst_test_helpers.hpp" +#include "module/shared_model/interface_mocks.hpp" #include "multi_sig_transactions/mst_processor_impl.hpp" #include "multi_sig_transactions/storage/mst_storage_impl.hpp" @@ -291,7 +292,8 @@ TEST_F(MstProcessorTest, onNewPropagationUsecase) { // ---------------------------------| when |---------------------------------- std::vector> peers{ - makePeer("one", "sign_one"), makePeer("two", "sign_two")}; + makePeer("one", shared_model::interface::types::PubkeyType("sign_one")), + makePeer("two", shared_model::interface::types::PubkeyType("sign_two"))}; propagation_subject.get_subscriber().on_next(peers); } @@ -309,7 +311,8 @@ TEST_F(MstProcessorTest, emptyStatePropagation) { EXPECT_CALL(*transport, sendState(_, _)).Times(0); // ---------------------------------| given |--------------------------------- - auto another_peer = makePeer("another", "another_pubkey"); + auto another_peer = makePeer( + "another", shared_model::interface::types::PubkeyType("sign_one")); auto another_peer_state = MstState::empty(); another_peer_state += makeTestBatch(txBuilder(1)); diff --git a/test/module/irohad/multi_sig_transactions/mst_test_helpers.hpp b/test/module/irohad/multi_sig_transactions/mst_test_helpers.hpp index e875aa9ed0..6c2141249a 100644 --- a/test/module/irohad/multi_sig_transactions/mst_test_helpers.hpp +++ b/test/module/irohad/multi_sig_transactions/mst_test_helpers.hpp @@ -12,7 +12,6 @@ #include "datetime/time.hpp" #include "framework/batch_helper.hpp" #include "interfaces/common_objects/types.hpp" -#include "module/shared_model/builders/protobuf/common_objects/proto_peer_builder.hpp" #include "module/shared_model/builders/protobuf/test_transaction_builder.hpp" #include "multi_sig_transactions/mst_types.hpp" @@ -103,13 +102,4 @@ inline auto makeTx(const shared_model::interface::types::CounterType &counter, .finish()); } -inline auto makePeer(const std::string 
&address, const std::string &pub_key) { - return std::make_shared( - shared_model::proto::PeerBuilder() - .address(address) - .pubkey(shared_model::crypto::PublicKey( - shared_model::crypto::Hash::fromHexString(pub_key))) - .build()); -} - #endif // IROHA_MST_TEST_HELPERS_HPP diff --git a/test/module/irohad/multi_sig_transactions/transport_test.cpp b/test/module/irohad/multi_sig_transactions/transport_test.cpp index 331349b956..076b67c48e 100644 --- a/test/module/irohad/multi_sig_transactions/transport_test.cpp +++ b/test/module/irohad/multi_sig_transactions/transport_test.cpp @@ -13,6 +13,7 @@ #include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" #include "module/irohad/multi_sig_transactions/mst_mocks.hpp" #include "module/irohad/multi_sig_transactions/mst_test_helpers.hpp" +#include "module/shared_model/interface_mocks.hpp" #include "module/shared_model/validators/validators.hpp" #include "multi_sig_transactions/state/mst_state.hpp" #include "validators/field_validator.hpp" @@ -121,8 +122,11 @@ TEST_F(TransportTest, SendAndReceive) { ASSERT_TRUE(server); ASSERT_NE(port, 0); - std::shared_ptr peer = - makePeer(addr + std::to_string(port), "abcdabcdabcdabcdabcdabcdabcdabcd"); + std::string address = addr + std::to_string(port); + shared_model::interface::types::PubkeyType pk( + shared_model::crypto::Hash::fromHexString( + "abcdabcdabcdabcdabcdabcdabcdabcd")); + std::shared_ptr peer = makePeer(address, pk); // we want to ensure that server side will call onNewState() // with same parameters as on the client side EXPECT_CALL(*mst_notification_transport_, onNewState(_, _)) diff --git a/test/module/irohad/network/block_loader_test.cpp b/test/module/irohad/network/block_loader_test.cpp index b07ae3be41..54f9379824 100644 --- a/test/module/irohad/network/block_loader_test.cpp +++ b/test/module/irohad/network/block_loader_test.cpp @@ -15,10 +15,9 @@ #include "datetime/time.hpp" #include "framework/test_subscriber.hpp" #include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" -#include "module/shared_model/builders/common_objects/peer_builder.hpp" -#include "module/shared_model/builders/protobuf/common_objects/proto_peer_builder.hpp" #include "module/shared_model/builders/protobuf/test_block_builder.hpp" #include "module/shared_model/builders/protobuf/test_transaction_builder.hpp" +#include "module/shared_model/interface_mocks.hpp" #include "network/impl/block_loader_impl.hpp" #include "network/impl/block_loader_service.hpp" #include "validators/default_validator.hpp" @@ -68,18 +67,8 @@ class BlockLoaderTest : public testing::Test { builder.RegisterService(service.get()); server = builder.BuildAndStart(); - shared_model::builder::PeerBuilder< - shared_model::proto::PeerBuilder, - shared_model::validation::FieldValidator>() - .address("0.0.0.0:" + std::to_string(port)) - .pubkey(peer_key) - .build() - .match( - [&](iroha::expected::Value< - std::shared_ptr> &v) { - peer = std::move(v.value); - }, - [](iroha::expected::Error>) {}); + address = "0.0.0.0:" + std::to_string(port); + peer = makePeer(address, peer_key); ASSERT_TRUE(server); ASSERT_NE(port, 0); @@ -130,7 +119,8 @@ class BlockLoaderTest : public testing::Test { const Hash kPrevHash = Hash(std::string(DefaultCryptoAlgorithmType::kHashLength, '0')); - std::shared_ptr peer; + std::shared_ptr peer; + std::string address; PublicKey peer_key = DefaultCryptoAlgorithmType::generateKeypair().publicKey(); Keypair key = DefaultCryptoAlgorithmType::generateKeypair(); diff --git a/test/module/irohad/ordering/ordering_service_test.cpp 
b/test/module/irohad/ordering/ordering_service_test.cpp index 3a5e0986fd..b32c8d63e5 100644 --- a/test/module/irohad/ordering/ordering_service_test.cpp +++ b/test/module/irohad/ordering/ordering_service_test.cpp @@ -13,8 +13,8 @@ #include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" #include "module/irohad/network/network_mocks.hpp" #include "module/irohad/ordering/mock_ordering_service_persistent_state.hpp" -#include "module/shared_model/builders/protobuf/common_objects/proto_peer_builder.hpp" #include "module/shared_model/builders/protobuf/test_proposal_builder.hpp" +#include "module/shared_model/interface_mocks.hpp" #include "ordering/impl/ordering_service_transport_grpc.hpp" #include "ordering/impl/single_peer_ordering_service.hpp" @@ -54,11 +54,9 @@ class MockOrderingServiceTransport : public network::OrderingServiceTransport { class OrderingServiceTest : public ::testing::Test { public: OrderingServiceTest() { - peer = clone(shared_model::proto::PeerBuilder() - .address(address) - .pubkey(shared_model::interface::types::PubkeyType( - std::string(32, '0'))) - .build()); + peer = std::make_unique(); + EXPECT_CALL(*peer, address()).WillRepeatedly(testing::ReturnRef(address)); + EXPECT_CALL(*peer, pubkey()).WillRepeatedly(testing::ReturnRef(pk)); } void SetUp() override { @@ -101,7 +99,8 @@ class OrderingServiceTest : public ::testing::Test { std::condition_variable cv; std::mutex m; std::string address{"0.0.0.0:50051"}; - std::shared_ptr peer; + shared_model::interface::types::PubkeyType pk{std::string(32, '0')}; + std::shared_ptr peer; std::shared_ptr wsv; std::shared_ptr pqfactory; std::unique_ptr factory; @@ -155,7 +154,8 @@ TEST_F(OrderingServiceTest, ValidWhenProposalSizeStrategy) { EXPECT_CALL(*fake_transport, publishProposalProxy(_, _)) .Times(tx_num / max_proposal); EXPECT_CALL(*wsv, getLedgerPeers()) - .WillRepeatedly(Return(std::vector{peer})); + .WillRepeatedly(Return( + std::vector>{peer})); auto ordering_service = initOs(max_proposal); fake_transport->subscribe(ordering_service); @@ -185,7 +185,8 @@ TEST_F(OrderingServiceTest, ValidWhenTimerStrategy) { .Times(1) .WillOnce(Return(boost::optional(2))); EXPECT_CALL(*wsv, getLedgerPeers()) - .WillRepeatedly(Return(std::vector{peer})); + .WillRepeatedly(Return( + std::vector>{peer})); EXPECT_CALL(*fake_transport, publishProposalProxy(_, _)).Times(2); auto ordering_service = initOs(max_proposal); @@ -284,7 +285,8 @@ TEST_F(OrderingServiceTest, BatchesProceed) { boost::optional(first_batch_size + second_batch_size))); EXPECT_CALL(*fake_transport, publishProposalProxy(_, _)).Times(1); EXPECT_CALL(*wsv, getLedgerPeers()) - .WillRepeatedly(Return(std::vector{peer})); + .WillRepeatedly(Return( + std::vector>{peer})); auto ordering_service = initOs(max_proposal); fake_transport->subscribe(ordering_service); diff --git a/test/module/irohad/torii/processor/query_processor_test.cpp b/test/module/irohad/torii/processor/query_processor_test.cpp index a1ae7bd614..e6715218e5 100644 --- a/test/module/irohad/torii/processor/query_processor_test.cpp +++ b/test/module/irohad/torii/processor/query_processor_test.cpp @@ -13,7 +13,6 @@ #include "interfaces/query_responses/block_query_response.hpp" #include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" #include "module/irohad/validation/validation_mocks.hpp" -#include "module/shared_model/builders/protobuf/common_objects/proto_account_builder.hpp" #include "module/shared_model/builders/protobuf/test_block_builder.hpp" #include "module/shared_model/builders/protobuf/test_query_builder.hpp" #include 
"module/shared_model/builders/protobuf/test_transaction_builder.hpp" diff --git a/test/module/irohad/torii/processor/transaction_processor_test.cpp b/test/module/irohad/torii/processor/transaction_processor_test.cpp index 9e6a982e90..5293132ddb 100644 --- a/test/module/irohad/torii/processor/transaction_processor_test.cpp +++ b/test/module/irohad/torii/processor/transaction_processor_test.cpp @@ -17,7 +17,6 @@ #include "module/irohad/multi_sig_transactions/mst_mocks.hpp" #include "module/irohad/network/network_mocks.hpp" #include "module/irohad/torii/torii_mocks.hpp" -#include "module/shared_model/builders/protobuf/common_objects/proto_signature_builder.hpp" #include "module/shared_model/builders/protobuf/proposal.hpp" #include "module/shared_model/builders/protobuf/test_block_builder.hpp" #include "module/shared_model/builders/protobuf/test_proposal_builder.hpp" diff --git a/test/module/shared_model/builders/CMakeLists.txt b/test/module/shared_model/builders/CMakeLists.txt index ac3e366944..badc262626 100644 --- a/test/module/shared_model/builders/CMakeLists.txt +++ b/test/module/shared_model/builders/CMakeLists.txt @@ -4,5 +4,4 @@ # add_subdirectory(protobuf) -add_subdirectory(common_objects) add_subdirectory(transaction_responses) diff --git a/test/module/shared_model/builders/common_objects/CMakeLists.txt b/test/module/shared_model/builders/common_objects/CMakeLists.txt deleted file mode 100644 index dea49f4e72..0000000000 --- a/test/module/shared_model/builders/common_objects/CMakeLists.txt +++ /dev/null @@ -1,49 +0,0 @@ -# -# Copyright Soramitsu Co., Ltd. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 -# - -addtest(peer_builder_test - peer_builder_test.cpp - ) - -target_link_libraries(peer_builder_test - shared_model_proto_builders - shared_model_stateless_validation - ) - -addtest(account_builder_test - account_builder_test.cpp - ) - -target_link_libraries(account_builder_test - shared_model_proto_builders - shared_model_stateless_validation - ) - -addtest(signature_builder_test - signature_builder_test.cpp - ) - -target_link_libraries(signature_builder_test - shared_model_proto_builders - shared_model_stateless_validation - ) - -addtest(asset_builder_test - asset_builder_test.cpp - ) - -target_link_libraries(asset_builder_test - shared_model_proto_builders - shared_model_stateless_validation - ) - -addtest(account_asset_builder_test - account_asset_builder_test.cpp - ) - -target_link_libraries(account_asset_builder_test - shared_model_proto_builders - shared_model_stateless_validation - ) diff --git a/test/module/shared_model/builders/common_objects/account_asset_builder.hpp b/test/module/shared_model/builders/common_objects/account_asset_builder.hpp deleted file mode 100644 index 16b882b57e..0000000000 --- a/test/module/shared_model/builders/common_objects/account_asset_builder.hpp +++ /dev/null @@ -1,69 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_ACCOUNT_ASSET_BUILDER_HPP -#define IROHA_ACCOUNT_ASSET_BUILDER_HPP - -#include "interfaces/common_objects/account_asset.hpp" -#include "module/shared_model/builders/common_objects/common.hpp" - -// TODO: 14.02.2018 nickaleks Add check for uninitialized fields IR-972 - -namespace shared_model { - namespace builder { - /** - * AccountAssetBuilder is a class, used for construction of AccountAsset - * objects - * @tparam BuilderImpl is a type, which defines builder for implementation - * of shared_model. 
Since we return abstract classes, it is necessary for - * them to be instantiated with some concrete implementation - * @tparam Validator is a type, whose responsibility is - * to perform stateless validation on model fields - */ - template - class DEPRECATED AccountAssetBuilder - : public CommonObjectBuilder { - public: - AccountAssetBuilder accountId( - const interface::types::AccountIdType &account_id) { - AccountAssetBuilder copy(*this); - copy.builder_ = this->builder_.accountId(account_id); - return copy; - } - - AccountAssetBuilder assetId( - const interface::types::AssetIdType &asset_id) { - AccountAssetBuilder copy(*this); - copy.builder_ = this->builder_.assetId(asset_id); - return copy; - } - - AccountAssetBuilder balance(const interface::Amount &amount) { - AccountAssetBuilder copy(*this); - copy.builder_ = this->builder_.balance(amount); - return copy; - } - - protected: - virtual std::string builderName() const override { - return "Account Asset Builder"; - } - - virtual validation::ReasonsGroupType validate( - const interface::AccountAsset &object) override { - validation::ReasonsGroupType reasons; - this->validator_.validateAccountId(reasons, object.accountId()); - this->validator_.validateAssetId(reasons, object.assetId()); - // Do not validate balance, since its amount can be 0, which is - // forbidden by validation - - return reasons; - } - }; - } // namespace builder -} // namespace shared_model -#endif // IROHA_ACCOUNT_ASSET_BUILDER_HPP diff --git a/test/module/shared_model/builders/common_objects/account_asset_builder_test.cpp b/test/module/shared_model/builders/common_objects/account_asset_builder_test.cpp deleted file mode 100644 index f00ecfa7b6..0000000000 --- a/test/module/shared_model/builders/common_objects/account_asset_builder_test.cpp +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include - -#include "builders_test_fixture.hpp" -#include "module/shared_model/builders/common_objects/account_asset_builder.hpp" -#include "module/shared_model/builders/protobuf/common_objects/proto_account_asset_builder.hpp" -#include "validators/field_validator.hpp" - -// TODO: 14.02.2018 nickaleks mock builder implementation IR-970 -// TODO: 14.02.2018 nickaleks mock field validator IR-971 - -/** - * @given field values which pass stateless validation - * @when AccountAssetBuilder is invoked - * @then AccountAsset object is successfully constructed and has valid fields - */ -TEST(AccountAssetBuilderTest, StatelessValidAllFields) { - shared_model::builder::AccountAssetBuilder< - shared_model::proto::AccountAssetBuilder, - shared_model::validation::FieldValidator> - builder; - - auto valid_account_id = "account@name"; - auto valid_asset_id = "asset#coin"; - auto valid_balance = shared_model::interface::Amount("1.00"); - - auto account_asset = builder.accountId(valid_account_id) - .assetId(valid_asset_id) - .balance(valid_balance) - .build(); - - account_asset.match( - [&](shared_model::builder::BuilderResult< - shared_model::interface::AccountAsset>::ValueType &v) { - EXPECT_EQ(v.value->accountId(), valid_account_id); - EXPECT_EQ(v.value->assetId(), valid_asset_id); - EXPECT_EQ(v.value->balance(), valid_balance); - }, - [](shared_model::builder::BuilderResult< - shared_model::interface::AccountAsset>::ErrorType &e) { - FAIL() << *e.error; - }); -} - -/** - * @given field values which pass stateless validation - * @when AccountAssetBuilder is invoked twice - * @then Two identical (==) AccountAsset objects are constructed - */ -TEST(AccountAssetBuilderTest, SeveralObjectsFromOneBuilder) { - shared_model::builder::AccountAssetBuilder< - shared_model::proto::AccountAssetBuilder, - shared_model::validation::FieldValidator> - builder; - - auto valid_account_id = "account@name"; - auto valid_asset_id = "asset#coin"; - auto valid_balance = shared_model::interface::Amount("1.00"); - - auto state = builder.accountId(valid_account_id) - .assetId(valid_asset_id) - .balance(valid_balance); - - auto account_asset = state.build(); - auto account_asset2 = state.build(); - - testResultObjects(account_asset, account_asset2, [](auto &a, auto &b) { - // pointer points to different objects - ASSERT_TRUE(a != b); - - EXPECT_EQ(a->accountId(), b->accountId()); - EXPECT_EQ(a->assetId(), b->assetId()); - EXPECT_EQ(a->balance(), b->balance()); - }); -} diff --git a/test/module/shared_model/builders/common_objects/account_builder.hpp b/test/module/shared_model/builders/common_objects/account_builder.hpp deleted file mode 100644 index f6501374dd..0000000000 --- a/test/module/shared_model/builders/common_objects/account_builder.hpp +++ /dev/null @@ -1,74 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_ACCOUNT_BUILDER_HPP -#define IROHA_ACCOUNT_BUILDER_HPP - -#include "interfaces/common_objects/account.hpp" -#include "module/shared_model/builders/common_objects/common.hpp" - -// TODO: 14.02.2018 nickaleks Add check for uninitialized fields IR-972 - -namespace shared_model { - namespace builder { - - /** - * AccountBuilder is a class, used for construction of Account objects - * @tparam BuilderImpl is a type, which defines builder for implementation - * of shared_model. 
Since we return abstract classes, it is necessary for - * them to be instantiated with some concrete implementation - * @tparam Validator is a type, whose responsibility is - * to perform stateless validation on model fields - */ - template - class DEPRECATED AccountBuilder - : public CommonObjectBuilder { - public: - AccountBuilder accountId( - const interface::types::AccountIdType &account_id) { - AccountBuilder copy(*this); - copy.builder_ = this->builder_.accountId(account_id); - return copy; - } - - AccountBuilder domainId(const interface::types::DomainIdType &domain_id) { - AccountBuilder copy(*this); - copy.builder_ = this->builder_.domainId(domain_id); - return copy; - } - - AccountBuilder quorum(const interface::types::QuorumType &quorum) { - AccountBuilder copy(*this); - copy.builder_ = this->builder_.quorum(quorum); - return copy; - } - - AccountBuilder jsonData(const interface::types::JsonType &json_data) { - AccountBuilder copy(*this); - copy.builder_ = this->builder_.jsonData(json_data); - return copy; - } - - protected: - virtual std::string builderName() const override { - return "Account Builder"; - } - - virtual validation::ReasonsGroupType validate( - const interface::Account &object) override { - validation::ReasonsGroupType reasons; - this->validator_.validateAccountId(reasons, object.accountId()); - this->validator_.validateDomainId(reasons, object.domainId()); - this->validator_.validateQuorum(reasons, object.quorum()); - - return reasons; - } - }; - } // namespace builder -} // namespace shared_model - -#endif // IROHA_ACCOUNT_BUILDER_HPP diff --git a/test/module/shared_model/builders/common_objects/account_builder_test.cpp b/test/module/shared_model/builders/common_objects/account_builder_test.cpp deleted file mode 100644 index 69e49df3e8..0000000000 --- a/test/module/shared_model/builders/common_objects/account_builder_test.cpp +++ /dev/null @@ -1,85 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include - -#include "builders_test_fixture.hpp" -#include "module/shared_model/builders/common_objects/account_builder.hpp" -#include "module/shared_model/builders/protobuf/common_objects/proto_account_builder.hpp" -#include "validators/field_validator.hpp" - -// TODO: 14.02.2018 nickaleks mock builder implementation IR-970 -// TODO: 14.02.2018 nickaleks mock field validator IR-971 - -/** - * @given field values which pass stateless validation - * @when AccountBuilder is invoked - * @then Account object is successfully constructed and has valid fields - */ -TEST(AccountBuilderTest, StatelessValidAllFields) { - shared_model::builder::AccountBuilder< - shared_model::proto::AccountBuilder, - shared_model::validation::FieldValidator> - builder; - - auto valid_account_id = "name@domain"; - auto valid_domain_id = "america"; - auto valid_quorum = 3; - auto valid_json_data = "{}"; - - auto account = builder.accountId(valid_account_id) - .domainId(valid_domain_id) - .quorum(valid_quorum) - .jsonData(valid_json_data) - .build(); - - account.match( - [&](shared_model::builder::BuilderResult< - shared_model::interface::Account>::ValueType &v) { - EXPECT_EQ(v.value->accountId(), valid_account_id); - EXPECT_EQ(v.value->domainId(), valid_domain_id); - EXPECT_EQ(v.value->quorum(), valid_quorum); - EXPECT_EQ(v.value->jsonData(), valid_json_data); - }, - [](shared_model::builder::BuilderResult< - shared_model::interface::Account>::ErrorType &e) { - FAIL() << *e.error; - }); -} - -/** - * @given field values which pass stateless validation - * @when AccountBuilder is invoked twice - * @then Two identical (==) Account objects are constructed - */ -TEST(AccountBuilderTest, SeveralObjectsFromOneBuilder) { - shared_model::builder::AccountBuilder< - shared_model::proto::AccountBuilder, - shared_model::validation::FieldValidator> - builder; - - auto valid_account_id = "name@domain"; - auto valid_domain_id = "america"; - auto valid_quorum = 3; - auto valid_json_data = "{}"; - - auto state = builder.accountId(valid_account_id) - .domainId(valid_domain_id) - .quorum(valid_quorum) - .jsonData(valid_json_data); - - auto account = state.build(); - auto account2 = state.build(); - - testResultObjects(account, account2, [](auto &a, auto &b) { - // pointer points to different objects - ASSERT_TRUE(a != b); - - EXPECT_EQ(a->accountId(), b->accountId()); - EXPECT_EQ(a->domainId(), b->domainId()); - EXPECT_EQ(a->quorum(), b->quorum()); - EXPECT_EQ(a->jsonData(), b->jsonData()); - }); -} diff --git a/test/module/shared_model/builders/common_objects/asset_builder.hpp b/test/module/shared_model/builders/common_objects/asset_builder.hpp deleted file mode 100644 index e73c862921..0000000000 --- a/test/module/shared_model/builders/common_objects/asset_builder.hpp +++ /dev/null @@ -1,66 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_ASSET_BUILDER_HPP -#define IROHA_ASSET_BUILDER_HPP - -#include "interfaces/common_objects/asset.hpp" -#include "interfaces/common_objects/types.hpp" -#include "module/shared_model/builders/common_objects/common.hpp" - -// TODO: 14.02.2018 nickaleks Add check for uninitialized fields IR-972 - -namespace shared_model { - namespace builder { - - /** - * AssetBuilder is a class, used for construction of Asset objects - * @tparam BuilderImpl is a type, which defines builder for implementation - * of shared_model. 
Since we return abstract classes, it is necessary for - * them to be instantiated with some concrete implementation - * @tparam Validator is a type, whose responsibility is - * to perform stateless validation on model fields - */ - template - class DEPRECATED AssetBuilder - : public CommonObjectBuilder { - public: - AssetBuilder assetId(const interface::types::AccountIdType &asset_id) { - AssetBuilder copy(*this); - copy.builder_ = this->builder_.assetId(asset_id); - return copy; - } - - AssetBuilder domainId(const interface::types::DomainIdType &domain_id) { - AssetBuilder copy(*this); - copy.builder_ = this->builder_.domainId(domain_id); - return copy; - } - - AssetBuilder precision(const interface::types::PrecisionType &precision) { - AssetBuilder copy(*this); - copy.builder_ = this->builder_.precision(precision); - return copy; - } - - protected: - virtual std::string builderName() const override { - return "Asset Builder"; - } - - virtual validation::ReasonsGroupType validate( - const interface::Asset &object) override { - validation::ReasonsGroupType reasons; - this->validator_.validateAssetId(reasons, object.assetId()); - this->validator_.validateDomainId(reasons, object.domainId()); - this->validator_.validatePrecision(reasons, object.precision()); - - return reasons; - } - }; - } // namespace builder -} // namespace shared_model - -#endif // IROHA_ASSET_BUILDER_HPP diff --git a/test/module/shared_model/builders/common_objects/asset_builder_test.cpp b/test/module/shared_model/builders/common_objects/asset_builder_test.cpp deleted file mode 100644 index b2fb132791..0000000000 --- a/test/module/shared_model/builders/common_objects/asset_builder_test.cpp +++ /dev/null @@ -1,76 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include - -#include "builders_test_fixture.hpp" -#include "module/shared_model/builders/common_objects/asset_builder.hpp" -#include "module/shared_model/builders/protobuf/common_objects/proto_asset_builder.hpp" -#include "validators/field_validator.hpp" - -// TODO: 14.02.2018 nickaleks mock builder implementation IR-970 -// TODO: 14.02.2018 nickaleks mock field validator IR-971 - -/** - * @given field values which pass stateless validation - * @when AssetBuilderTest is invoked - * @then Asset object is successfully constructed and has valid fields - */ -TEST(AssetBuilderTest, StatelessValidAllFields) { - shared_model::builder::AssetBuilder - builder; - - auto valid_asset_id = "bit#connect"; - auto valid_domain_id = "domain"; - auto valid_precision = 2; - - auto asset = builder.assetId(valid_asset_id) - .domainId(valid_domain_id) - .precision(valid_precision) - .build(); - - asset.match( - [&](shared_model::builder::BuilderResult< - shared_model::interface::Asset>::ValueType &v) { - EXPECT_EQ(v.value->assetId(), valid_asset_id); - EXPECT_EQ(v.value->domainId(), valid_domain_id); - EXPECT_EQ(v.value->precision(), valid_precision); - }, - [](shared_model::builder::BuilderResult< - shared_model::interface::Asset>::ErrorType &e) { - FAIL() << *e.error; - }); -} - -/** - * @given field values which pass stateless validation - * @when AssetBuilder is invoked twice - * @then Two identical (==) Asset objects are constructed - */ -TEST(AssetBuilderTest, SeveralObjectsFromOneBuilder) { - shared_model::builder::AssetBuilder - builder; - - auto valid_asset_id = "bit#connect"; - auto valid_domain_id = "domain"; - auto valid_precision = 2; - - auto state = builder.assetId(valid_asset_id) - .domainId(valid_domain_id) - .precision(valid_precision); - auto asset = state.build(); - auto asset2 = state.build(); - - testResultObjects(asset, asset2, [](auto &a, auto &b) { - // pointer points to different objects - ASSERT_TRUE(a != b); - - EXPECT_EQ(a->assetId(), b->assetId()); - EXPECT_EQ(a->domainId(), b->domainId()); - EXPECT_EQ(a->precision(), b->precision()); - }); -} diff --git a/test/module/shared_model/builders/common_objects/builders_test_fixture.hpp b/test/module/shared_model/builders/common_objects/builders_test_fixture.hpp deleted file mode 100644 index cef4010738..0000000000 --- a/test/module/shared_model/builders/common_objects/builders_test_fixture.hpp +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_BUILDERS_TEST_FIXTURE_HPP -#define IROHA_BUILDERS_TEST_FIXTURE_HPP - -#include -#include "module/shared_model/builders/common_objects/common.hpp" - -/** - * Perform testFunc on two objects of type std::shared_ptr which are taken - * from Result. 
If at least one of the results contains error then test fails - * with error message - */ -template -void testResultObjects(shared_model::builder::BuilderResult &a, - shared_model::builder::BuilderResult &b, - TestFunc t) { - auto result = a | [&](auto object1) { - return b.match( - [&](typename shared_model::builder::BuilderResult::ValueType &v) { - t(object1, v.value); - return shared_model::builder::BuilderResult(v); - }, - [](typename shared_model::builder::BuilderResult::ErrorType &e) { - return shared_model::builder::BuilderResult(e); - }); - }; - result.match( - [&](const typename shared_model::builder::BuilderResult::ValueType - &v) {}, - [](const typename shared_model::builder::BuilderResult::ErrorType &e) { - FAIL() << *e.error; - }); -} - -#endif // IROHA_BUILDERS_TEST_FIXTURE_HPP diff --git a/test/module/shared_model/builders/common_objects/common.hpp b/test/module/shared_model/builders/common_objects/common.hpp deleted file mode 100644 index c9c5f00435..0000000000 --- a/test/module/shared_model/builders/common_objects/common.hpp +++ /dev/null @@ -1,84 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_BUILDERS_COMMON_HPP -#define IROHA_BUILDERS_COMMON_HPP - -#include "common/result.hpp" -#include "utils/swig_keyword_hider.hpp" -#include "validators/answer.hpp" - -// TODO: 16.02.2018 nickaleks: Add validators for common_objects IR-986 - -namespace shared_model { - namespace builder { - - /** - * BuilderResult represents return value of a builder, - * it either contains a value of ModelType, an error, - * which indicates why construction of an object went wrong. - */ - template - using BuilderResult = - iroha::expected::PolymorphicResult; - - /** - * CommonObjectBuilder is a base class for all builders of common objects. - * It encapsulates common logic - * @tparam ModelType - type of object to be built - * @tparam BuilderImpl - underlying implementation - * @tparam Validator - validation object - */ - template - class DEPRECATED CommonObjectBuilder { - public: - /** - * build() constructs specified object and performs stateless validation - * on fields. - * @return Result which contains either object, or error with explanation, - * why object construction is unsuccessful. 
- */ - BuilderResult build() { - std::shared_ptr model_impl = - std::move(clone(builder_.build())); - - auto reasons = validate(*model_impl); - reasons.first = builderName(); - - shared_model::validation::Answer answer; - if (not reasons.second.empty()) { - answer.addReason(std::move(reasons)); - } - - if (answer) { - // TODO 15.04.2018 x3medima17 IR-1240: rework with std::string instead - // of pointer to string - return iroha::expected::makeError( - std::make_shared(answer.reason())); - } - - return iroha::expected::makeValue(std::move(model_impl)); - } - - protected: - /** - * @return string name of the builder, used in result error message - */ - virtual std::string builderName() const = 0; - - /** - * Perform stateless validation on an object - * @param reasons - list of reasons, which will be populated by validator - * @param object to be validated - */ - virtual shared_model::validation::ReasonsGroupType validate( - const ModelType &object) = 0; - - Validator validator_; - BuilderImpl builder_; - }; - } // namespace builder -} // namespace shared_model -#endif // IROHA_BUILDERS_COMMON_HPP diff --git a/test/module/shared_model/builders/common_objects/domain_builder.hpp b/test/module/shared_model/builders/common_objects/domain_builder.hpp deleted file mode 100644 index fa18839e03..0000000000 --- a/test/module/shared_model/builders/common_objects/domain_builder.hpp +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_DOMAIN_BUILDER_HPP -#define IROHA_DOMAIN_BUILDER_HPP - -#include "interfaces/common_objects/domain.hpp" -#include "interfaces/common_objects/types.hpp" -#include "module/shared_model/builders/common_objects/common.hpp" - -namespace shared_model { - namespace builder { - template - class DEPRECATED DomainBuilder - : public CommonObjectBuilder { - public: - DomainBuilder defaultRole( - const interface::types::RoleIdType &default_role) { - DomainBuilder copy(*this); - copy.builder_ = this->builder_.defaultRole(default_role); - return copy; - } - - DomainBuilder domainId(const interface::types::DomainIdType &domain_id) { - DomainBuilder copy(*this); - copy.builder_ = this->builder_.domainId(domain_id); - return copy; - } - - protected: - virtual std::string builderName() const override { - return "Domain Builder"; - } - - virtual validation::ReasonsGroupType validate( - const interface::Domain &object) override { - validation::ReasonsGroupType reasons; - this->validator_.validateDomainId(reasons, object.domainId()); - this->validator_.validateRoleId(reasons, object.defaultRole()); - - return reasons; - } - }; - } // namespace builder -} // namespace shared_model - -#endif // IROHA_DOMAIN_BUILDER_HPP diff --git a/test/module/shared_model/builders/common_objects/peer_builder.hpp b/test/module/shared_model/builders/common_objects/peer_builder.hpp deleted file mode 100644 index 907f8b3a91..0000000000 --- a/test/module/shared_model/builders/common_objects/peer_builder.hpp +++ /dev/null @@ -1,57 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_PEER_BUILDER_HPP -#define IROHA_PEER_BUILDER_HPP - -#include "interfaces/common_objects/peer.hpp" -#include "module/shared_model/builders/common_objects/common.hpp" - -// TODO: 14.02.2018 nickaleks Add check for uninitialized fields IR-972 - -namespace shared_model { - namespace builder { - - /** - * PeerBuilder is a class, used for construction of Peer objects - * @tparam BuilderImpl is a type, which defines builder for implementation - * of shared_model. Since we return abstract classes, it is necessary for - * them to be instantiated with some concrete implementation - * @tparam Validator is a type, whose responsibility is - * to perform stateless validation on model fields - */ - template - class DEPRECATED PeerBuilder - : public CommonObjectBuilder { - public: - PeerBuilder address(const interface::types::AddressType &address) { - PeerBuilder copy(*this); - copy.builder_ = this->builder_.address(address); - return copy; - } - - PeerBuilder pubkey(const interface::types::PubkeyType &key) { - PeerBuilder copy(*this); - copy.builder_ = this->builder_.pubkey(key); - return copy; - } - - protected: - virtual std::string builderName() const override { - return "Peer Builder"; - } - - virtual validation::ReasonsGroupType validate( - const interface::Peer &object) override { - validation::ReasonsGroupType reasons; - this->validator_.validatePeer(reasons, object); - - return reasons; - } - }; - } // namespace builder -} // namespace shared_model - -#endif // IROHA_PEER_BUILDER_HPP diff --git a/test/module/shared_model/builders/common_objects/peer_builder_test.cpp b/test/module/shared_model/builders/common_objects/peer_builder_test.cpp deleted file mode 100644 index 9ba4d8db9a..0000000000 --- a/test/module/shared_model/builders/common_objects/peer_builder_test.cpp +++ /dev/null @@ -1,63 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include - -#include "builders_test_fixture.hpp" -#include "module/shared_model/builders/common_objects/peer_builder.hpp" -#include "module/shared_model/builders/protobuf/common_objects/proto_peer_builder.hpp" -#include "validators/field_validator.hpp" - -// TODO: 14.02.2018 nickaleks mock builder implementation IR-970 -// TODO: 14.02.2018 nickaleks mock field validator IR-971 - -/** - * @given field values which pass stateless validation - * @when PeerBuilder is invoked - * @then Peer object is successfully constructed and has valid fields - */ -TEST(PeerBuilderTest, StatelessValidAddressCreation) { - shared_model::builder::PeerBuilder - builder; - - auto valid_address = "127.0.0.1:1337"; - shared_model::interface::types::PubkeyType key(std::string(32, '0')); - auto peer = builder.address(valid_address).pubkey(key).build(); - peer.match( - [&](shared_model::builder::BuilderResult< - shared_model::interface::Peer>::ValueType &v) { - EXPECT_EQ(v.value->address(), valid_address); - }, - [](shared_model::builder::BuilderResult< - shared_model::interface::Peer>::ErrorType &e) { - FAIL() << *e.error; - }); -} - -/** - * @given field values which pass stateless validation - * @when PeerBuilder is invoked twice - * @then Two identical (==) Peer objects are constructed - */ -TEST(PeerBuilderTest, SeveralObjectsFromOneBuilder) { - shared_model::builder::PeerBuilder - builder; - - auto valid_address = "127.0.0.1:1337"; - shared_model::interface::types::PubkeyType key(std::string(32, '0')); - - auto state = builder.address(valid_address).pubkey(key); - auto peer1 = state.build(); - auto peer2 = state.build(); - testResultObjects(peer1, peer2, [](auto &a, auto &b) { - // pointer points to different objects - ASSERT_TRUE(a != b); - - EXPECT_EQ(a->address(), b->address()); - EXPECT_EQ(a->pubkey(), b->pubkey()); - }); -} diff --git a/test/module/shared_model/builders/common_objects/signature_builder.hpp b/test/module/shared_model/builders/common_objects/signature_builder.hpp deleted file mode 100644 index fac639054f..0000000000 --- a/test/module/shared_model/builders/common_objects/signature_builder.hpp +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_SIGNATURE_BUILDER_HPP -#define IROHA_SIGNATURE_BUILDER_HPP - -#include "interfaces/common_objects/signature.hpp" -#include "interfaces/common_objects/types.hpp" -#include "module/shared_model/builders/common_objects/common.hpp" - -// TODO: 14.02.2018 nickaleks Add check for uninitialized fields IR-972 - -namespace shared_model { - namespace builder { - - /** - * SignatureBuilder is a class, used for construction of Signature objects - * @tparam BuilderImpl is a type, which defines builder for implementation - * of shared_model. 
Since we return abstract classes, it is necessary for - * them to be instantiated with some concrete implementation - * @tparam Validator is a type, whose responsibility is - * to perform stateless validation on model fields - */ - template - class DEPRECATED SignatureBuilder - : public CommonObjectBuilder { - public: - SignatureBuilder publicKey( - const shared_model::interface::types::PubkeyType &key) { - SignatureBuilder copy(*this); - copy.builder_ = this->builder_.publicKey(key); - return copy; - } - - SignatureBuilder signedData( - const interface::Signature::SignedType &signed_data) { - SignatureBuilder copy(*this); - copy.builder_ = this->builder_.signedData(signed_data); - return copy; - } - - protected: - virtual std::string builderName() const override { - return "Signature Builder"; - } - - virtual validation::ReasonsGroupType validate( - const interface::Signature &object) override { - validation::ReasonsGroupType reasons; - this->validator_.validatePubkey(reasons, object.publicKey()); - - return reasons; - } - }; - } // namespace builder -} // namespace shared_model - -#endif // IROHA_SIGNATURE_BUILDER_HPP diff --git a/test/module/shared_model/builders/common_objects/signature_builder_test.cpp b/test/module/shared_model/builders/common_objects/signature_builder_test.cpp deleted file mode 100644 index 6dba07acae..0000000000 --- a/test/module/shared_model/builders/common_objects/signature_builder_test.cpp +++ /dev/null @@ -1,72 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#include - -#include "builders_test_fixture.hpp" -#include "module/shared_model/builders/common_objects/signature_builder.hpp" -#include "module/shared_model/builders/protobuf/common_objects/proto_signature_builder.hpp" -#include "validators/field_validator.hpp" - -// TODO: 14.02.2018 nickaleks mock builder implementation IR-970 -// TODO: 14.02.2018 nickaleks mock field validator IR-971 - -/** - * @given field values which pass stateless validation - * @when PeerBuilder is invoked - * @then Peer object is successfully constructed and has valid fields - */ -TEST(PeerBuilderTest, StatelessValidAddressCreation) { - shared_model::builder::SignatureBuilder< - shared_model::proto::SignatureBuilder, - shared_model::validation::FieldValidator> - builder; - - shared_model::interface::types::PubkeyType expected_key(std::string(32, '1')); - shared_model::interface::Signature::SignedType expected_signed( - "signed object"); - - auto signature = - builder.publicKey(expected_key).signedData(expected_signed).build(); - - signature.match( - [&](shared_model::builder::BuilderResult< - shared_model::interface::Signature>::ValueType &v) { - EXPECT_EQ(v.value->publicKey(), expected_key); - EXPECT_EQ(v.value->signedData(), expected_signed); - }, - [](shared_model::builder::BuilderResult< - shared_model::interface::Signature>::ErrorType &e) { - FAIL() << *e.error; - }); -} - -/** - * @given field values which pass stateless validation - * @when SignatureBuilder is invoked twice - * @then Two identical (==) Signature objects are constructed - */ -TEST(SignatureBuilderTest, SeveralObjectsFromOneBuilder) { - shared_model::builder::SignatureBuilder< - shared_model::proto::SignatureBuilder, - shared_model::validation::FieldValidator> - builder; - - shared_model::interface::types::PubkeyType expected_key(std::string(32, '1')); - shared_model::interface::Signature::SignedType expected_signed( - "signed object"); - - auto state = 
builder.publicKey(expected_key).signedData(expected_signed); - auto signature = state.build(); - auto signature2 = state.build(); - - testResultObjects(signature, signature2, [](auto &a, auto &b) { - // pointer points to different objects - ASSERT_TRUE(a != b); - - EXPECT_EQ(a->publicKey(), b->publicKey()); - EXPECT_EQ(a->signedData(), b->signedData()); - }); -} diff --git a/test/module/shared_model/builders/protobuf/CMakeLists.txt b/test/module/shared_model/builders/protobuf/CMakeLists.txt index 0e7491b68a..2ccb1b251b 100644 --- a/test/module/shared_model/builders/protobuf/CMakeLists.txt +++ b/test/module/shared_model/builders/protobuf/CMakeLists.txt @@ -14,46 +14,6 @@ if (IROHA_ROOT_PROJECT) ) endif () -addtest(proto_peer_builder_test - common_objects/proto_peer_builder_test.cpp - ) - -target_link_libraries(proto_peer_builder_test - shared_model_proto_builders - ) - -addtest(proto_account_builder_test - common_objects/proto_account_builder_test.cpp - ) - -target_link_libraries(proto_account_builder_test - shared_model_proto_builders - ) - -addtest(proto_signature_builder_test - common_objects/proto_signature_builder_test.cpp - ) - -target_link_libraries(proto_signature_builder_test - shared_model_proto_builders - ) - -addtest(proto_asset_builder_test - common_objects/proto_asset_builder_test.cpp - ) - -target_link_libraries(proto_asset_builder_test - shared_model_proto_builders - ) - -addtest(proto_account_asset_builder_test - common_objects/proto_account_asset_builder_test.cpp - ) - -target_link_libraries(proto_account_asset_builder_test - shared_model_proto_builders - ) - addtest(proto_transaction_responses_builder_test transaction_responses/proto_transaction_response_builder.cpp ) diff --git a/test/module/shared_model/builders/protobuf/common_objects/proto_account_asset_builder.hpp b/test/module/shared_model/builders/protobuf/common_objects/proto_account_asset_builder.hpp deleted file mode 100644 index f7cf76474a..0000000000 --- a/test/module/shared_model/builders/protobuf/common_objects/proto_account_asset_builder.hpp +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_PROTO_ACCOUNT_ASSET_BUILDER_HPP -#define IROHA_PROTO_ACCOUNT_ASSET_BUILDER_HPP - -#include "backend/protobuf/common_objects/account_asset.hpp" -#include "qry_responses.pb.h" - -namespace shared_model { - namespace proto { - /** - * AccountAssetBuilder is used to construct AccountAsset proto objects with - * initialized protobuf implementation - */ - class DEPRECATED AccountAssetBuilder { - public: - shared_model::proto::AccountAsset build() { - return shared_model::proto::AccountAsset( - iroha::protocol::AccountAsset(account_asset_)); - } - - AccountAssetBuilder accountId( - const interface::types::AccountIdType &account_id) { - AccountAssetBuilder copy(*this); - copy.account_asset_.set_account_id(account_id); - return copy; - } - - AccountAssetBuilder assetId( - const interface::types::AssetIdType &asset_id) { - AccountAssetBuilder copy(*this); - copy.account_asset_.set_asset_id(asset_id); - return copy; - } - - AccountAssetBuilder balance(const interface::Amount &amount) { - AccountAssetBuilder copy(*this); - *copy.account_asset_.mutable_balance() = amount.toStringRepr(); - return copy; - } - - private: - iroha::protocol::AccountAsset account_asset_; - }; - } // namespace proto -} // namespace shared_model - -#endif // IROHA_PROTO_ACCOUNT_ASSET_BUILDER_HPP diff --git a/test/module/shared_model/builders/protobuf/common_objects/proto_account_asset_builder_test.cpp b/test/module/shared_model/builders/protobuf/common_objects/proto_account_asset_builder_test.cpp deleted file mode 100644 index 4939e5a405..0000000000 --- a/test/module/shared_model/builders/protobuf/common_objects/proto_account_asset_builder_test.cpp +++ /dev/null @@ -1,55 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include - -#include "module/shared_model/builders/protobuf/common_objects/proto_account_asset_builder.hpp" - -/** - * @given fields for AccountAsset object - * @when AccountAssetBuilder is invoked - * @then AccountAsset object is successfully constructed and has the same fields - * as provided - */ -TEST(ProtoAccountAssetBuilder, AllFieldsBuild) { - shared_model::proto::AccountAssetBuilder builder; - - auto expected_account_id = "account@name"; - auto expected_asset_id = "asset#coin"; - auto expected_balance = shared_model::interface::Amount("1.00"); - - auto account_asset = builder.accountId(expected_account_id) - .assetId(expected_asset_id) - .balance(expected_balance) - .build(); - - EXPECT_EQ(account_asset.accountId(), expected_account_id); - EXPECT_EQ(account_asset.assetId(), expected_asset_id); - EXPECT_EQ(account_asset.balance(), expected_balance); -} - -/** - * @given fields for AccountAsset object - * @when AccountAssetBuilder is invoked twice with the same configuration - * @then Two constructed AccountAsset objects are identical - */ -TEST(ProtoAccountAssetBuilderTest, SeveralObjectsFromOneBuilder) { - shared_model::proto::AccountAssetBuilder builder; - - auto expected_account_id = "account@name"; - auto expected_asset_id = "asset#coin"; - auto expected_balance = shared_model::interface::Amount("1.00"); - - auto state = builder.accountId(expected_account_id) - .assetId(expected_asset_id) - .balance(expected_balance); - - auto account_asset = state.build(); - auto account_asset2 = state.build(); - - EXPECT_EQ(account_asset.accountId(), account_asset2.accountId()); - EXPECT_EQ(account_asset.assetId(), account_asset2.assetId()); - EXPECT_EQ(account_asset.balance(), account_asset2.balance()); -} diff --git a/test/module/shared_model/builders/protobuf/common_objects/proto_account_builder.hpp b/test/module/shared_model/builders/protobuf/common_objects/proto_account_builder.hpp deleted file mode 100644 index b07d66d19d..0000000000 --- a/test/module/shared_model/builders/protobuf/common_objects/proto_account_builder.hpp +++ /dev/null @@ -1,56 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_PROTO_ACCOUNT_BUILDER_HPP -#define IROHA_PROTO_ACCOUNT_BUILDER_HPP - -#include "backend/protobuf/common_objects/account.hpp" -#include "qry_responses.pb.h" - -namespace shared_model { - namespace proto { - - /** - * AccountBuilder is used to construct Account proto objects with - * initialized protobuf implementation - */ - class DEPRECATED AccountBuilder { - public: - shared_model::proto::Account build() { - return shared_model::proto::Account(iroha::protocol::Account(account_)); - } - - AccountBuilder accountId( - const interface::types::AccountIdType &account_id) { - AccountBuilder copy(*this); - copy.account_.set_account_id(account_id); - return copy; - } - - AccountBuilder domainId(const interface::types::DomainIdType &domain_id) { - AccountBuilder copy(*this); - copy.account_.set_domain_id(domain_id); - return copy; - } - - AccountBuilder quorum(const interface::types::QuorumType &quorum) { - AccountBuilder copy(*this); - copy.account_.set_quorum(quorum); - return copy; - } - - AccountBuilder jsonData(const interface::types::JsonType &json_data) { - AccountBuilder copy(*this); - copy.account_.set_json_data(json_data); - return copy; - } - - private: - iroha::protocol::Account account_; - }; - } // namespace proto -} // namespace shared_model - -#endif // IROHA_PROTO_ACCOUNT_BUILDER_HPP diff --git a/test/module/shared_model/builders/protobuf/common_objects/proto_account_builder_test.cpp b/test/module/shared_model/builders/protobuf/common_objects/proto_account_builder_test.cpp deleted file mode 100644 index c876d02015..0000000000 --- a/test/module/shared_model/builders/protobuf/common_objects/proto_account_builder_test.cpp +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include - -#include "module/shared_model/builders/protobuf/common_objects/proto_account_builder.hpp" - -/** - * @given fields for Account object - * @when AccountBuilder is invoked - * @then Account object is successfully constructed and has the same fields as - * provided - */ -TEST(ProtoAccountBuilderTest, AllFieldsBuild) { - shared_model::proto::AccountBuilder builder; - - auto expected_account_id = "Steve Irwin"; - auto expected_domain_id = "australia.com"; - auto expected_quorum = 3; - auto expected_json_data = "{}"; - - auto account = builder.accountId(expected_account_id) - .domainId(expected_domain_id) - .quorum(expected_quorum) - .jsonData(expected_json_data) - .build(); - - EXPECT_EQ(account.accountId(), expected_account_id); - EXPECT_EQ(account.domainId(), expected_domain_id); - EXPECT_EQ(account.quorum(), expected_quorum); - EXPECT_EQ(account.jsonData(), expected_json_data); -} - -/** - * @given fields for Account object - * @when AccountBuilder is invoked twice with the same configuration - * @then Two constructed Account objects are identical - */ -TEST(ProtoAccountBuilderTest, SeveralObjectsFromOneBuilder) { - shared_model::proto::AccountBuilder builder; - - auto expected_account_id = "Steve Irwin"; - auto expected_domain_id = "australia.com"; - auto expected_quorum = 3; - auto expected_json_data = "{}"; - - auto state = builder.accountId(expected_account_id) - .domainId(expected_domain_id) - .quorum(expected_quorum) - .jsonData(expected_json_data); - - auto account = state.build(); - - auto account2 = state.build(); - - EXPECT_EQ(account.accountId(), account2.accountId()); - EXPECT_EQ(account.domainId(), account2.domainId()); - EXPECT_EQ(account.quorum(), account2.quorum()); - EXPECT_EQ(account.jsonData(), account2.jsonData()); -} diff --git a/test/module/shared_model/builders/protobuf/common_objects/proto_asset_builder.hpp b/test/module/shared_model/builders/protobuf/common_objects/proto_asset_builder.hpp deleted file mode 100644 index c5ef02924a..0000000000 --- a/test/module/shared_model/builders/protobuf/common_objects/proto_asset_builder.hpp +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_PROTO_ASSET_BUILDER_HPP -#define IROHA_PROTO_ASSET_BUILDER_HPP - -#include "backend/protobuf/common_objects/asset.hpp" -#include "qry_responses.pb.h" - -namespace shared_model { - namespace proto { - - /** - * AssetBuilder is used to construct Asset proto objects with initialized - * protobuf implementation - */ - class DEPRECATED AssetBuilder { - public: - shared_model::proto::Asset build() { - return shared_model::proto::Asset(iroha::protocol::Asset(asset_)); - } - - AssetBuilder assetId(const interface::types::AssetIdType &asset_id) { - AssetBuilder copy(*this); - copy.asset_.set_asset_id(asset_id); - return copy; - } - - AssetBuilder domainId(const interface::types::DomainIdType &domain_id) { - AssetBuilder copy(*this); - copy.asset_.set_domain_id(domain_id); - return copy; - } - - AssetBuilder precision(const interface::types::PrecisionType &precision) { - AssetBuilder copy(*this); - copy.asset_.set_precision(precision); - return copy; - } - - private: - iroha::protocol::Asset asset_; - }; - } // namespace proto -} // namespace shared_model - -#endif // IROHA_PROTO_ASSET_BUILDER_HPP diff --git a/test/module/shared_model/builders/protobuf/common_objects/proto_asset_builder_test.cpp b/test/module/shared_model/builders/protobuf/common_objects/proto_asset_builder_test.cpp deleted file mode 100644 index f5824afd31..0000000000 --- a/test/module/shared_model/builders/protobuf/common_objects/proto_asset_builder_test.cpp +++ /dev/null @@ -1,54 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#include - -#include "module/shared_model/builders/protobuf/common_objects/proto_asset_builder.hpp" - -/** - * @given fields for Asset object - * @when AssetBuilder is invoked - * @then Asset object is successfully constructed and has the same fields as - * provided - */ -TEST(ProtoAssetBuilderTest, AllFieldsBuild) { - shared_model::proto::AssetBuilder builder; - - auto expected_asset_id = "asset@coin"; - auto expected_domain_id = "domain"; - auto expected_precision = 2; - - auto asset = builder.assetId(expected_asset_id) - .domainId(expected_domain_id) - .precision(expected_precision) - .build(); - - EXPECT_EQ(asset.assetId(), expected_asset_id); - EXPECT_EQ(asset.domainId(), expected_domain_id); - EXPECT_EQ(asset.precision(), expected_precision); -} - -/** - * @given fields for Asset object - * @when AssetBuilder is invoked twice with the same configuration - * @then Two constructed Asset objects are identical - */ -TEST(ProtoAssetBuilderTest, SeveralObjectsFromOneBuilder) { - shared_model::proto::AssetBuilder builder; - - auto expected_asset_id = "asset@coin"; - auto expected_domain_id = "domain"; - auto expected_precision = 2; - - auto state = builder.assetId(expected_asset_id) - .domainId(expected_domain_id) - .precision(expected_precision); - auto asset = state.build(); - auto asset2 = state.build(); - - EXPECT_EQ(asset.assetId(), asset2.assetId()); - EXPECT_EQ(asset.domainId(), asset2.domainId()); - EXPECT_EQ(asset.precision(), asset2.precision()); -} diff --git a/test/module/shared_model/builders/protobuf/common_objects/proto_domain_builder.hpp b/test/module/shared_model/builders/protobuf/common_objects/proto_domain_builder.hpp deleted file mode 100644 index f199bc22de..0000000000 --- a/test/module/shared_model/builders/protobuf/common_objects/proto_domain_builder.hpp +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_PROTO_DOMAIN_BUILDER_HPP -#define IROHA_PROTO_DOMAIN_BUILDER_HPP - -#include "backend/protobuf/common_objects/domain.hpp" -#include "qry_responses.pb.h" - -namespace shared_model { - namespace proto { - class DEPRECATED DomainBuilder { - public: - shared_model::proto::Domain build() { - return shared_model::proto::Domain(iroha::protocol::Domain(domain_)); - } - - DomainBuilder defaultRole( - const interface::types::RoleIdType &default_role) { - DomainBuilder copy(*this); - copy.domain_.set_default_role(default_role); - return copy; - } - - DomainBuilder domainId(const interface::types::DomainIdType &domain_id) { - DomainBuilder copy(*this); - copy.domain_.set_domain_id(domain_id); - return copy; - } - - private: - iroha::protocol::Domain domain_; - }; - } // namespace proto -} // namespace shared_model - -#endif // IROHA_PROTO_DOMAIN_BUILDER_HPP diff --git a/test/module/shared_model/builders/protobuf/common_objects/proto_peer_builder.hpp b/test/module/shared_model/builders/protobuf/common_objects/proto_peer_builder.hpp deleted file mode 100644 index 9917a4770e..0000000000 --- a/test/module/shared_model/builders/protobuf/common_objects/proto_peer_builder.hpp +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_PROTO_PEER_BUILDER_HPP -#define IROHA_PROTO_PEER_BUILDER_HPP - -#include "backend/protobuf/common_objects/peer.hpp" -#include "primitive.pb.h" - -namespace shared_model { - namespace proto { - - /** - * PeerBuilder is used to construct Peer proto objects with initialized - * protobuf implementation - */ - class DEPRECATED PeerBuilder { - public: - shared_model::proto::Peer build() { - return shared_model::proto::Peer(iroha::protocol::Peer(peer_)); - } - - PeerBuilder address(const interface::types::AddressType &address) { - PeerBuilder copy(*this); - copy.peer_.set_address(address); - return copy; - } - - PeerBuilder pubkey(const interface::types::PubkeyType &key) { - PeerBuilder copy(*this); - copy.peer_.set_peer_key(key.hex()); - return copy; - } - - private: - iroha::protocol::Peer peer_; - }; - } // namespace proto -} // namespace shared_model -#endif // IROHA_PROTO_PEER_BUILDER_HPP diff --git a/test/module/shared_model/builders/protobuf/common_objects/proto_peer_builder_test.cpp b/test/module/shared_model/builders/protobuf/common_objects/proto_peer_builder_test.cpp deleted file mode 100644 index 3ab1d87236..0000000000 --- a/test/module/shared_model/builders/protobuf/common_objects/proto_peer_builder_test.cpp +++ /dev/null @@ -1,69 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include - -#include "module/shared_model/builders/protobuf/common_objects/proto_peer_builder.hpp" - -/** - * @given fields for Peer object - * @when PeerBuilder is invoked - * @then Peer object is successfully constructed and has the same address - */ -TEST(ProtoPeerBuilderTest, AddressFieldBuild) { - shared_model::proto::PeerBuilder builder; - - auto expected_address = "127.0.0.1"; - auto peer = builder.address(expected_address).build(); - - EXPECT_EQ(peer.address(), expected_address); -} - -/** - * @given fields for Peer object - * @when PeerBuilder is invoked - * @then Peer object is successfully constructed and has the same key - */ -TEST(ProtoPeerBuilderTest, KeyFieldBuild) { - shared_model::proto::PeerBuilder builder; - - auto expected_key = shared_model::crypto::PublicKey("very_secure_key"); - auto peer = builder.pubkey(expected_key).build(); - - EXPECT_EQ(peer.pubkey(), expected_key); -} - -/** - * @given fields for Peer object - * @when PeerBuilder is invoked - * @then Peer object is successfully constructed and has the same fields - */ -TEST(ProtoPeerBuilderTest, AllFieldsBuild) { - shared_model::proto::PeerBuilder builder; - - auto expected_address = "127.0.0.1"; - auto expected_key = shared_model::crypto::PublicKey("very_secure_key"); - auto peer = builder.pubkey(expected_key).address(expected_address).build(); - - EXPECT_EQ(peer.address(), expected_address); - EXPECT_EQ(peer.pubkey(), expected_key); -} - -/** - * @given fields for Peer object - * @when PeerBuilder is invoked twice with the same configuration - * @then Two constructed Peer objects are identical - */ -TEST(ProtoPeerBuilderTest, SeveralObjectsFromOneBuilder) { - shared_model::proto::PeerBuilder builder; - auto expected_address = "127.0.0.1"; - auto expected_key = shared_model::crypto::PublicKey("very_secure_key"); - auto state = builder.address(expected_address).pubkey(expected_key); - auto peer = state.build(); - auto peer2 = state.build(); - - EXPECT_EQ(peer.address(), peer2.address()); - EXPECT_EQ(peer.pubkey(), peer2.pubkey()); -} diff --git a/test/module/shared_model/builders/protobuf/common_objects/proto_signature_builder.hpp b/test/module/shared_model/builders/protobuf/common_objects/proto_signature_builder.hpp deleted file mode 100644 index 08d4123525..0000000000 --- a/test/module/shared_model/builders/protobuf/common_objects/proto_signature_builder.hpp +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_PROTO_SIGNATURE_BUILDER_HPP -#define IROHA_PROTO_SIGNATURE_BUILDER_HPP - -#include "backend/protobuf/common_objects/signature.hpp" -#include "interfaces/common_objects/types.hpp" -#include "primitive.pb.h" - -namespace shared_model { - namespace proto { - - /** - * SignatureBuilder is used to construct Signature proto objects with - * initialized protobuf implementation - */ - class DEPRECATED SignatureBuilder { - public: - shared_model::proto::Signature build() { - return shared_model::proto::Signature( - iroha::protocol::Signature(signature_)); - } - - SignatureBuilder publicKey( - const shared_model::interface::types::PubkeyType &key) { - SignatureBuilder copy(*this); - copy.signature_.set_public_key(key.hex()); - return copy; - } - - SignatureBuilder signedData( - const interface::Signature::SignedType &signed_data) { - SignatureBuilder copy(*this); - copy.signature_.set_signature(signed_data.hex()); - return copy; - } - - private: - iroha::protocol::Signature signature_; - }; - } // namespace proto -} // namespace shared_model -#endif // IROHA_PROTO_SIGNATURE_BUILDER_HPP diff --git a/test/module/shared_model/builders/protobuf/common_objects/proto_signature_builder_test.cpp b/test/module/shared_model/builders/protobuf/common_objects/proto_signature_builder_test.cpp deleted file mode 100644 index 1730b38340..0000000000 --- a/test/module/shared_model/builders/protobuf/common_objects/proto_signature_builder_test.cpp +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#include - -#include "module/shared_model/builders/protobuf/common_objects/proto_signature_builder.hpp" - -/** - * @given fields for Signature object - * @when SignatureBuilder is invoked - * @then Signature object is successfully constructed and has the same fields as - * provided - */ -TEST(ProtoSignatureBuilderTest, AllFieldsBuild) { - shared_model::proto::SignatureBuilder builder; - - shared_model::interface::types::PubkeyType expected_key(std::string(32, '1')); - shared_model::interface::Signature::SignedType expected_signed( - "signed object"); - - auto signature = - builder.publicKey(expected_key).signedData(expected_signed).build(); - - EXPECT_EQ(signature.publicKey(), expected_key); - EXPECT_EQ(signature.signedData(), expected_signed); -} - -/** - * @given fields for Signature object - * @when SignatureBuilder is invoked twice with the same configuration - * @then Two constructed Signature objects are identical - */ -TEST(ProtoSignatureBuilderTest, SeveralObjectsFromOneBuilder) { - shared_model::proto::SignatureBuilder builder; - - shared_model::interface::types::PubkeyType expected_key(std::string(32, '1')); - shared_model::interface::Signature::SignedType expected_signed( - "signed object"); - - auto state = builder.publicKey(expected_key).signedData(expected_signed); - - auto signature = state.build(); - auto signature2 = state.build(); - - EXPECT_EQ(signature.publicKey(), signature2.publicKey()); - EXPECT_EQ(signature.signedData(), signature2.signedData()); -} diff --git a/test/module/shared_model/builders/protobuf/test_account_asset_builder.hpp b/test/module/shared_model/builders/protobuf/test_account_asset_builder.hpp deleted file mode 100644 index 16e0dcc318..0000000000 --- a/test/module/shared_model/builders/protobuf/test_account_asset_builder.hpp +++ /dev/null @@ -1,17 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include "module/shared_model/builders/protobuf/common_objects/proto_account_asset_builder.hpp" - -#ifndef IROHA_TEST_ACCOUNT_BUILDER_HPP -#define IROHA_TEST_ACCOUNT_BUILDER_HPP - -/** - * Builder alias, to build shared model proto block object avoiding validation - * and "required fields" check - */ -using TestAccountAssetBuilder = shared_model::proto::AccountAssetBuilder; - -#endif // IROHA_TEST_ACCOUNT_BUILDER_HPP diff --git a/test/module/shared_model/builders/protobuf/test_account_builder.hpp b/test/module/shared_model/builders/protobuf/test_account_builder.hpp deleted file mode 100644 index 1bef117fab..0000000000 --- a/test/module/shared_model/builders/protobuf/test_account_builder.hpp +++ /dev/null @@ -1,17 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#include "module/shared_model/builders/protobuf/common_objects/proto_account_builder.hpp" - -#ifndef IROHA_TEST_ACCOUNT_BUILDER_HPP -#define IROHA_TEST_ACCOUNT_BUILDER_HPP - -/** - * Builder alias, to build shared model proto block object avoiding validation - * and "required fields" check - */ -using TestAccountBuilder = shared_model::proto::AccountBuilder; - -#endif // IROHA_TEST_ACCOUNT_BUILDER_HPP diff --git a/test/module/shared_model/builders/protobuf/test_asset_builder.hpp b/test/module/shared_model/builders/protobuf/test_asset_builder.hpp deleted file mode 100644 index f6e10c5e1b..0000000000 --- a/test/module/shared_model/builders/protobuf/test_asset_builder.hpp +++ /dev/null @@ -1,17 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_TEST_ASSET_BUILDER_HPP -#define IROHA_TEST_ASSET_BUILDER_HPP - -#include "module/shared_model/builders/protobuf/common_objects/proto_asset_builder.hpp" - -/** - * Builder alias, to build shared model proto block object avoiding validation - * and "required fields" check - */ -using TestAccountAssetBuilder = shared_model::proto::AssetBuilder; - -#endif // IROHA_TEST_ASSET_BUILDER_HPP diff --git a/test/module/shared_model/builders/protobuf/test_domain_builder.hpp b/test/module/shared_model/builders/protobuf/test_domain_builder.hpp deleted file mode 100644 index c5d116fff3..0000000000 --- a/test/module/shared_model/builders/protobuf/test_domain_builder.hpp +++ /dev/null @@ -1,17 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#include "module/shared_model/builders/protobuf/common_objects/proto_domain_builder.hpp" - -#ifndef IROHA_TEST_DOMAIN_BUILDER_HPP -#define IROHA_TEST_DOMAIN_BUILDER_HPP - -/** - * Builder alias, to build shared model proto block object avoiding validation - * and "required fields" check - */ -using TestDomainBuilder = shared_model::proto::DomainBuilder; - -#endif // IROHA_TEST_DOMAIN_BUILDER_HPP diff --git a/test/module/shared_model/builders/protobuf/test_peer_builder.hpp b/test/module/shared_model/builders/protobuf/test_peer_builder.hpp deleted file mode 100644 index 69502a048a..0000000000 --- a/test/module/shared_model/builders/protobuf/test_peer_builder.hpp +++ /dev/null @@ -1,17 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include "module/shared_model/builders/protobuf/common_objects/proto_peer_builder.hpp" - -#ifndef IROHA_TEST_PEER_BUILDER_HPP -#define IROHA_TEST_PEER_BUILDER_HPP - -/** - * Builder alias, to build shared model proto block object avoiding validation - * and "required fields" check - */ -using TestPeerBuilder = shared_model::proto::PeerBuilder; - -#endif // IROHA_TEST_PEER_BUILDER_HPP diff --git a/test/module/shared_model/builders/protobuf/test_signature_builder.hpp b/test/module/shared_model/builders/protobuf/test_signature_builder.hpp deleted file mode 100644 index ad801f90ca..0000000000 --- a/test/module/shared_model/builders/protobuf/test_signature_builder.hpp +++ /dev/null @@ -1,17 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_TEST_SIGNATURE_BUILDER_HPP -#define IROHA_TEST_SIGNATURE_BUILDER_HPP - -#include "module/shared_model/builders/protobuf/common_objects/proto_signature_builder.hpp" - -/** - * Builder alias, for building shared model proto block object avoiding - * validation and "required fields" check - */ -using TestSignatureBuilder = shared_model::proto::SignatureBuilder; - -#endif // IROHA_TEST_SIGNATURE_BUILDER_HPP diff --git a/test/module/shared_model/cryptography/security_signatures_test.cpp b/test/module/shared_model/cryptography/security_signatures_test.cpp index 6be8824a5b..fdd8097e6a 100644 --- a/test/module/shared_model/cryptography/security_signatures_test.cpp +++ b/test/module/shared_model/cryptography/security_signatures_test.cpp @@ -7,8 +7,8 @@ #include #include "module/shared_model/builders/protobuf/test_block_builder.hpp" -#include "module/shared_model/builders/protobuf/test_signature_builder.hpp" #include "module/shared_model/builders/protobuf/test_transaction_builder.hpp" +#include "module/shared_model/interface_mocks.hpp" /** * @given Two signatures with same pub key but different signed @@ -16,19 +16,17 @@ * @then Expect true */ TEST(SecuritySignature, SignatureOperatorEqual) { - auto first_signature = - TestSignatureBuilder() - .publicKey(shared_model::crypto::PublicKey("one")) - .signedData(shared_model::crypto::Signed("signed_one")) - .build(); + shared_model::crypto::PublicKey pk1("one"), pk2("one"); + shared_model::crypto::Signed data1("signed_one"), data2("signed_two"); + auto first_signature = std::make_unique(); + auto second_signature = std::make_unique(); - auto second_signature = - TestSignatureBuilder() - .publicKey(shared_model::crypto::PublicKey("one")) - .signedData(shared_model::crypto::Signed("signed_two")) - .build(); + EXPECT_CALL(*first_signature, publicKey()).WillRepeatedly(testing::ReturnRef(pk1)); + EXPECT_CALL(*second_signature, publicKey()).WillRepeatedly(testing::ReturnRef(pk2)); + EXPECT_CALL(*first_signature, signedData()).WillRepeatedly(testing::ReturnRef(data1)); + EXPECT_CALL(*second_signature, signedData()).WillRepeatedly(testing::ReturnRef(data2)); - ASSERT_TRUE(first_signature == second_signature); + ASSERT_TRUE(*first_signature == *second_signature); } /** diff --git a/test/module/shared_model/interface_mocks.hpp b/test/module/shared_model/interface_mocks.hpp index ac65792f15..85d42c2fb8 100644 --- a/test/module/shared_model/interface_mocks.hpp +++ b/test/module/shared_model/interface_mocks.hpp @@ -175,6 +175,14 @@ struct MockPeer : public shared_model::interface::Peer { MOCK_CONST_METHOD0(clone, MockPeer *()); }; +inline auto makePeer(const std::string &address, + const 
shared_model::crypto::PublicKey &pub_key) { + auto peer = std::make_shared(); + EXPECT_CALL(*peer, address()).WillRepeatedly(testing::ReturnRefOfCopy(address)); + EXPECT_CALL(*peer, pubkey()).WillRepeatedly(testing::ReturnRefOfCopy(pub_key)); + return peer; +} + struct MockUnsafeProposalFactory : public shared_model::interface::UnsafeProposalFactory { MOCK_METHOD3(unsafeCreateProposal, @@ -223,4 +231,22 @@ struct MockCommonObjectsFactory const shared_model::interface::Signature::SignedType &)); }; +struct MockDomain : public shared_model::interface::Domain { + MOCK_CONST_METHOD0(domainId, + shared_model::interface::types::DomainIdType &()); + MOCK_CONST_METHOD0(defaultRole, + shared_model::interface::types::RoleIdType &()); + MOCK_CONST_METHOD0(clone, MockDomain *()); +}; + +struct MockAccount : public shared_model::interface::Account { + MOCK_CONST_METHOD0(accountId, + shared_model::interface::types::AccountIdType &()); + MOCK_CONST_METHOD0(domainId, + shared_model::interface::types::DomainIdType &()); + MOCK_CONST_METHOD0(quorum, shared_model::interface::types::QuorumType()); + MOCK_CONST_METHOD0(jsonData, shared_model::interface::types::JsonType &()); + MOCK_CONST_METHOD0(clone, MockAccount *()); +}; + #endif // IROHA_SHARED_MODEL_INTERFACE_MOCKS_HPP diff --git a/test/system/irohad_test_data/config.sample.copy b/test/system/irohad_test_data/config.sample.copy new file mode 100644 index 0000000000..9e7e8c1594 --- /dev/null +++ b/test/system/irohad_test_data/config.sample.copy @@ -0,0 +1,10 @@ +{ + "block_store_path": "/var/folders/w0/pxq3kms13hl5nvy7d50_hjr40000gn/T/abe6-cbd3-b6b1-1ff3", + "torii_port": 50051, + "internal_port": 10001, + "pg_opt": "host=localhost port=5432 user=postgres password=mysecretpassword", + "max_proposal_size": 10, + "proposal_delay": 5000, + "vote_delay": 5000, + "mst_enable": false +} \ No newline at end of file From 2eac1985ba25808a4e7f784b5c335ca27a959393 Mon Sep 17 00:00:00 2001 From: Victor Drobny Date: Thu, 17 Jan 2019 08:06:24 +0100 Subject: [PATCH 12/41] Feature/signatories check (#1874) * move signatories check from query processor to postgres query executor Signed-off-by: Victor Drobny --- .../impl/postgres_query_executor.cpp | 56 +++++++++++++++++-- .../impl/postgres_query_executor.hpp | 9 ++- irohad/ametsuchi/query_executor.hpp | 7 ++- .../processor/impl/query_processor_impl.cpp | 42 +------------- .../torii/processor/query_processor_impl.hpp | 8 --- .../irohad/ametsuchi/ametsuchi_mocks.hpp | 8 ++- .../postgres_query_executor_test.cpp | 6 +- .../torii/processor/query_processor_test.cpp | 24 ++++---- 8 files changed, 84 insertions(+), 76 deletions(-) diff --git a/irohad/ametsuchi/impl/postgres_query_executor.cpp b/irohad/ametsuchi/impl/postgres_query_executor.cpp index 26f401de4c..8a0d3658b0 100644 --- a/irohad/ametsuchi/impl/postgres_query_executor.cpp +++ b/irohad/ametsuchi/impl/postgres_query_executor.cpp @@ -233,6 +233,38 @@ namespace iroha { } } + template + bool PostgresQueryExecutor::validateSignatures(const Q &query) { + auto keys_range = + query.signatures() | boost::adaptors::transformed([](const auto &s) { + return s.publicKey().hex(); + }); + + if (boost::size(keys_range) != 1) { + return false; + } + std::string keys = *std::begin(keys_range); + // not using bool since it is not supported by SOCI + boost::optional signatories_valid; + + auto qry = R"( + SELECT count(public_key) = 1 + FROM account_has_signatory + WHERE account_id = :account_id AND public_key = :pk + )"; + + try { + *sql_ << qry, soci::into(signatories_valid), + 
soci::use(query.creatorAccountId(), "account_id"), + soci::use(keys, "pk"); + } catch (const std::exception &e) { + log_->error(e.what()); + return false; + } + + return signatories_valid and *signatories_valid; + } + PostgresQueryExecutor::PostgresQueryExecutor( std::unique_ptr sql, KeyValueStorage &block_store, @@ -256,14 +288,30 @@ namespace iroha { log_(std::move(log)) {} QueryExecutorResult PostgresQueryExecutor::validateAndExecute( - const shared_model::interface::Query &query) { + const shared_model::interface::Query &query, + const bool validate_signatories = true) { visitor_.setCreatorId(query.creatorAccountId()); visitor_.setQueryHash(query.hash()); + if (validate_signatories and not validateSignatures(query)) { + // TODO [IR-1816] Akvinikym 03.12.18: replace magic number 3 + // with a named constant + return query_response_factory_->createErrorQueryResponse( + shared_model::interface::QueryResponseFactory::ErrorQueryType:: + kStatefulFailed, + "query signatories did not pass validation", + 3, + query.hash()); + } return boost::apply_visitor(visitor_, query.get()); } bool PostgresQueryExecutor::validate( - const shared_model::interface::BlocksQuery &query) { + const shared_model::interface::BlocksQuery &query, + const bool validate_signatories = true) { + if (validate_signatories and not validateSignatures(query)) { + log_->error("query signatories did not pass validation"); + return false; + } using T = boost::tuple; boost::format cmd(R"(%s)"); try { @@ -331,8 +379,8 @@ namespace iroha { error = "no asset with such name in account with such id: " + error_body; break; - // other error are either handled by generic response or do not appear - // yet + // other errors are either handled by generic response or do not + // appear yet default: error = "failed to execute query: " + error_body; break; diff --git a/irohad/ametsuchi/impl/postgres_query_executor.hpp b/irohad/ametsuchi/impl/postgres_query_executor.hpp index 0937561cd1..57fff9aa57 100644 --- a/irohad/ametsuchi/impl/postgres_query_executor.hpp +++ b/irohad/ametsuchi/impl/postgres_query_executor.hpp @@ -239,11 +239,16 @@ namespace iroha { logger::Logger log = logger::log("PostgresQueryExecutor")); QueryExecutorResult validateAndExecute( - const shared_model::interface::Query &query) override; + const shared_model::interface::Query &query, + const bool validate_signatories) override; - bool validate(const shared_model::interface::BlocksQuery &query) override; + bool validate(const shared_model::interface::BlocksQuery &query, + const bool validate_signatories) override; private: + template + bool validateSignatures(const Q &query); + std::unique_ptr sql_; KeyValueStorage &block_store_; std::shared_ptr pending_txs_storage_; diff --git a/irohad/ametsuchi/query_executor.hpp b/irohad/ametsuchi/query_executor.hpp index df5e1f673c..c2764ecd2b 100644 --- a/irohad/ametsuchi/query_executor.hpp +++ b/irohad/ametsuchi/query_executor.hpp @@ -29,14 +29,15 @@ namespace iroha { * Execute and validate query. 
*/ virtual QueryExecutorResult validateAndExecute( - const shared_model::interface::Query &query) = 0; + const shared_model::interface::Query &query, + const bool validate_signatories) = 0; /** * Perform BlocksQuery validation * @return true if valid, false otherwise */ - virtual bool validate( - const shared_model::interface::BlocksQuery &query) = 0; + virtual bool validate(const shared_model::interface::BlocksQuery &query, + const bool validate_signatories) = 0; }; } // namespace ametsuchi } // namespace iroha diff --git a/irohad/torii/processor/impl/query_processor_impl.cpp b/irohad/torii/processor/impl/query_processor_impl.cpp index da67d6a7a9..1dd14a1625 100644 --- a/irohad/torii/processor/impl/query_processor_impl.cpp +++ b/irohad/torii/processor/impl/query_processor_impl.cpp @@ -7,7 +7,6 @@ #include -#include "ametsuchi/wsv_query.hpp" #include "common/bind.hpp" #include "interfaces/queries/blocks_query.hpp" #include "interfaces/queries/query.hpp" @@ -39,38 +38,8 @@ namespace iroha { }); } - template - bool QueryProcessorImpl::checkSignatories(const Q &qry) { - const auto &wsv_query = storage_->getWsvQuery(); - - auto signatories = wsv_query->getSignatories(qry.creatorAccountId()); - const auto &sig = qry.signatures(); - - return boost::size(sig) == 1 - and signatories | [&sig](const auto &signatories) { - return validation::signaturesSubset(sig, signatories); - }; - } - - template bool QueryProcessorImpl::checkSignatories< - shared_model::interface::Query>(const shared_model::interface::Query &); - template bool - QueryProcessorImpl::checkSignatories( - const shared_model::interface::BlocksQuery &); - std::unique_ptr QueryProcessorImpl::queryHandle(const shared_model::interface::Query &qry) { - if (not checkSignatories(qry)) { - // TODO [IR-1816] Akvinikym 03.12.18: replace magic number 3 - // with a named constant - return response_factory_->createErrorQueryResponse( - shared_model::interface::QueryResponseFactory::ErrorQueryType:: - kStatefulFailed, - "query signatories did not pass validation", - 3, - qry.hash()); - } - auto executor = qry_exec_->createQueryExecutor(pending_transactions_, response_factory_); if (not executor) { @@ -78,24 +47,17 @@ namespace iroha { return nullptr; } - return executor.value()->validateAndExecute(qry); + return executor.value()->validateAndExecute(qry, true); } rxcpp::observable< std::shared_ptr> QueryProcessorImpl::blocksQueryHandle( const shared_model::interface::BlocksQuery &qry) { - if (not checkSignatories(qry)) { - std::shared_ptr response = - response_factory_->createBlockQueryResponse( - "query signatories did not pass validation"); - return rxcpp::observable<>::just(std::move(response)); - } - auto exec = qry_exec_->createQueryExecutor(pending_transactions_, response_factory_); if (not exec or not(exec | [&qry](const auto &executor) { - return executor->validate(qry); + return executor->validate(qry, true); })) { std::shared_ptr response = response_factory_->createBlockQueryResponse("stateful invalid"); diff --git a/irohad/torii/processor/query_processor_impl.hpp b/irohad/torii/processor/query_processor_impl.hpp index 84c714a8a5..e7e40f1bef 100644 --- a/irohad/torii/processor/query_processor_impl.hpp +++ b/irohad/torii/processor/query_processor_impl.hpp @@ -28,14 +28,6 @@ namespace iroha { response_factory, logger::Logger log = logger::log("QueryProcessorImpl")); - /** - * Checks if query has needed signatures - * @param qry arrived query - * @return true if passes stateful validation - */ - template - bool checkSignatories(const Q &qry); 
- std::unique_ptr queryHandle( const shared_model::interface::Query &qry) override; diff --git a/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp b/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp index 2a9123d063..7eab64f31a 100644 --- a/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp +++ b/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp @@ -284,11 +284,13 @@ namespace iroha { shared_model::interface::QueryResponse *( const shared_model::interface::Query &)); QueryExecutorResult validateAndExecute( - const shared_model::interface::Query &q) override { + const shared_model::interface::Query &q, + bool validate_signatories = true) override { return QueryExecutorResult(validateAndExecute_(q)); } - MOCK_METHOD1(validate, - bool(const shared_model::interface::BlocksQuery &)); + MOCK_METHOD2(validate, + bool(const shared_model::interface::BlocksQuery &, + const bool validate_signatories)); }; class MockTxPresenceCache : public iroha::ametsuchi::TxPresenceCache { diff --git a/test/module/irohad/ametsuchi/postgres_query_executor_test.cpp b/test/module/irohad/ametsuchi/postgres_query_executor_test.cpp index cb30ee020e..632cd6a03e 100644 --- a/test/module/irohad/ametsuchi/postgres_query_executor_test.cpp +++ b/test/module/irohad/ametsuchi/postgres_query_executor_test.cpp @@ -167,7 +167,7 @@ namespace iroha { return query_executor->createQueryExecutor(pending_txs_storage, query_response_factory) | [&query](const auto &executor) { - return executor->validateAndExecute(query); + return executor->validateAndExecute(query, false); }; } @@ -273,7 +273,7 @@ namespace iroha { ASSERT_TRUE(query_executor->createQueryExecutor(pending_txs_storage, query_response_factory) | [&blocks_query](const auto &executor) { - return executor->validate(blocks_query); + return executor->validate(blocks_query, false); }); } @@ -288,7 +288,7 @@ namespace iroha { ASSERT_FALSE(query_executor->createQueryExecutor(pending_txs_storage, query_response_factory) | [&blocks_query](const auto &executor) { - return executor->validate(blocks_query); + return executor->validate(blocks_query, false); }); } diff --git a/test/module/irohad/torii/processor/query_processor_test.cpp b/test/module/irohad/torii/processor/query_processor_test.cpp index e6715218e5..4b022d875a 100644 --- a/test/module/irohad/torii/processor/query_processor_test.cpp +++ b/test/module/irohad/torii/processor/query_processor_test.cpp @@ -40,8 +40,6 @@ class QueryProcessorTest : public ::testing::Test { std::make_shared(); qpi = std::make_shared( storage, storage, nullptr, query_response_factory); - wsv_queries = std::make_shared(); - EXPECT_CALL(*storage, getWsvQuery()).WillRepeatedly(Return(wsv_queries)); EXPECT_CALL(*storage, getBlockQuery()) .WillRepeatedly(Return(block_queries)); EXPECT_CALL(*storage, createQueryExecutor(_, _)) @@ -68,7 +66,6 @@ class QueryProcessorTest : public ::testing::Test { std::vector signatories = { keypair.publicKey()}; std::shared_ptr qry_exec; - std::shared_ptr wsv_queries; std::shared_ptr block_queries; std::shared_ptr storage; std::shared_ptr @@ -92,8 +89,6 @@ TEST_F(QueryProcessorTest, QueryProcessorWhereInvokeInvalidQuery) { query_response_factory->createAccountDetailResponse("", qry.hash()) .release(); - EXPECT_CALL(*wsv_queries, getSignatories(kAccountId)) - .WillRepeatedly(Return(signatories)); EXPECT_CALL(*qry_exec, validateAndExecute_(_)).WillOnce(Return(qry_resp)); auto response = qpi->queryHandle(qry); @@ -117,9 +112,16 @@ TEST_F(QueryProcessorTest, QueryProcessorWithWrongKey) { 
shared_model::crypto::DefaultCryptoAlgorithmType:: generateKeypair()) .finish(); + auto *qry_resp = query_response_factory + ->createErrorQueryResponse( + shared_model::interface::QueryResponseFactory:: + ErrorQueryType::kStatefulFailed, + "query signatories did not pass validation", + 3, + query.hash()) + .release(); - EXPECT_CALL(*wsv_queries, getSignatories(kAccountId)) - .WillRepeatedly(Return(signatories)); + EXPECT_CALL(*qry_exec, validateAndExecute_(_)).WillOnce(Return(qry_resp)); auto response = qpi->queryHandle(query); ASSERT_TRUE(response); @@ -139,9 +141,7 @@ TEST_F(QueryProcessorTest, GetBlocksQuery) { auto block_number = 5; auto block_query = getBlocksQuery(kAccountId); - EXPECT_CALL(*wsv_queries, getSignatories(kAccountId)) - .WillOnce(Return(signatories)); - EXPECT_CALL(*qry_exec, validate(_)).WillOnce(Return(true)); + EXPECT_CALL(*qry_exec, validate(_, _)).WillOnce(Return(true)); auto wrapper = make_test_subscriber( qpi->blocksQueryHandle(block_query), block_number); @@ -167,9 +167,7 @@ TEST_F(QueryProcessorTest, GetBlocksQueryNoPerms) { auto block_number = 5; auto block_query = getBlocksQuery(kAccountId); - EXPECT_CALL(*wsv_queries, getSignatories(kAccountId)) - .WillRepeatedly(Return(signatories)); - EXPECT_CALL(*qry_exec, validate(_)).WillOnce(Return(false)); + EXPECT_CALL(*qry_exec, validate(_, _)).WillOnce(Return(false)); auto wrapper = make_test_subscriber(qpi->blocksQueryHandle(block_query), 1); From 46c93383ef59ebd9c282fa3a69667687219857f3 Mon Sep 17 00:00:00 2001 From: Artyom Bakhtin Date: Fri, 18 Jan 2019 10:21:39 +0300 Subject: [PATCH 13/41] Deployment scripts rework (#1986) * rework ansible playbook Signed-off-by: Artyom Bakhtin * cleanup Signed-off-by: Artyom Bakhtin * fixes Signed-off-by: Artyom Bakhtin * fixes Signed-off-by: Artyom Bakhtin * readme + cleanup Signed-off-by: Artyom Bakhtin * fix sha3 for python > 3.5 * bind iroha_torii_port on localhost * README.md cleanup Signed-off-by: Artyom Bakhtin * more fixes - update readme - fix docker role to support Python2/3 - cleanup Signed-off-by: Artyom Bakhtin * fix deployment scripts to support RC2 Signed-off-by: Artyom Bakhtin * replace base64-encoded keys with hex-encoded ones Signed-off-by: Artyom Bakhtin * fix comments Signed-off-by: Artyom Bakhtin * fix packages installation Signed-off-by: Artyom Bakhtin --- deploy/ansible/.gitignore | 5 +- deploy/ansible/README.md | 218 ------------------ .../inventory/hosts_docker_cluster.list | 28 --- .../inventory/hosts_standalone_nodes.list | 15 -- .../iroha-docker-cluster/group_vars/all.yml | 8 - .../group_vars/iroha-east.yml | 1 - .../group_vars/iroha-north.yml | 1 - .../group_vars/iroha-south.yml | 1 - .../group_vars/iroha-west.yml | 1 - .../iroha-docker-cluster/iroha-deploy.yml | 14 -- .../ansible/playbooks/iroha-docker/main.yml | 8 + .../iroha-standalone-nodes/group_vars/all.yml | 6 - .../iroha-standalone-nodes/iroha-deploy.yml | 15 -- .../playbooks/iroha-standalone-nodes/main.yml | 5 - deploy/ansible/roles/docker/README.md | 11 +- deploy/ansible/roles/docker/defaults/main.yml | 6 +- .../ansible/roles/docker/defaults/ubuntu.yml | 4 - .../roles/docker/tasks/install_compose.yml | 25 ++ .../roles/docker/tasks/install_docker.yml | 39 ++++ deploy/ansible/roles/docker/tasks/main.yml | 50 +++- deploy/ansible/roles/docker/tasks/ubuntu.yml | 57 ----- .../roles/iroha-cluster-config-gen/README.md | 32 --- .../defaults/main.yml | 3 - .../iroha-cluster-config-gen/tasks/main.yml | 3 - .../iroha-cluster-config-gen/tasks/ubuntu.yml | 20 -- .../templates/peers.list.j2 | 5 - 
.../roles/iroha-cluster-deploy-node/README.md | 61 ----- .../defaults/main.yml | 13 -- .../iroha-cluster-deploy-node/tasks/main.yml | 3 - .../tasks/ubuntu.yml | 69 ------ .../templates/config.j2 | 9 - .../templates/docker-compose.yml.j2 | 34 --- deploy/ansible/roles/iroha-docker/README.md | 98 ++++++++ .../roles/iroha-docker/defaults/main.yml | 62 +++++ .../roles/iroha-docker/files/config_gen.sh | 73 ++++++ .../roles/iroha-docker/files/ed25519.py | 167 ++++++++++++++ .../iroha-docker/files/genesis-add-peers.py | 92 ++++++++ .../roles/iroha-docker/files/genesis.block | 1 + .../iroha-docker/files/old-keys-format.patch | 13 ++ .../roles/iroha-docker/tasks/config-gen.yml | 81 +++++++ .../roles/iroha-docker/tasks/deploy.yml | 44 ++++ .../ansible/roles/iroha-docker/tasks/main.yml | 8 + .../iroha-docker/templates/config.docker.j2 | 10 + .../templates/docker-compose.yml.j2 | 56 +++++ .../iroha-standalone-config-gen/README.md | 31 --- .../defaults/main.yml | 3 - .../tasks/main.yml | 3 - .../tasks/ubuntu.yml | 20 -- .../templates/peers.list.j2 | 3 - .../iroha-standalone-deploy-node/README.md | 46 ---- .../defaults/main.yml | 13 -- .../tasks/main.yml | 3 - .../tasks/ubuntu.yml | 73 ------ .../templates/config.j2 | 9 - 54 files changed, 836 insertions(+), 843 deletions(-) delete mode 100644 deploy/ansible/README.md delete mode 100644 deploy/ansible/inventory/hosts_docker_cluster.list delete mode 100644 deploy/ansible/inventory/hosts_standalone_nodes.list delete mode 100644 deploy/ansible/playbooks/iroha-docker-cluster/group_vars/all.yml delete mode 100644 deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-east.yml delete mode 100644 deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-north.yml delete mode 100644 deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-south.yml delete mode 100644 deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-west.yml delete mode 100644 deploy/ansible/playbooks/iroha-docker-cluster/iroha-deploy.yml create mode 100644 deploy/ansible/playbooks/iroha-docker/main.yml delete mode 100644 deploy/ansible/playbooks/iroha-standalone-nodes/group_vars/all.yml delete mode 100644 deploy/ansible/playbooks/iroha-standalone-nodes/iroha-deploy.yml delete mode 100644 deploy/ansible/playbooks/iroha-standalone-nodes/main.yml delete mode 100644 deploy/ansible/roles/docker/defaults/ubuntu.yml create mode 100644 deploy/ansible/roles/docker/tasks/install_compose.yml create mode 100644 deploy/ansible/roles/docker/tasks/install_docker.yml delete mode 100644 deploy/ansible/roles/docker/tasks/ubuntu.yml delete mode 100644 deploy/ansible/roles/iroha-cluster-config-gen/README.md delete mode 100644 deploy/ansible/roles/iroha-cluster-config-gen/defaults/main.yml delete mode 100644 deploy/ansible/roles/iroha-cluster-config-gen/tasks/main.yml delete mode 100644 deploy/ansible/roles/iroha-cluster-config-gen/tasks/ubuntu.yml delete mode 100644 deploy/ansible/roles/iroha-cluster-config-gen/templates/peers.list.j2 delete mode 100644 deploy/ansible/roles/iroha-cluster-deploy-node/README.md delete mode 100644 deploy/ansible/roles/iroha-cluster-deploy-node/defaults/main.yml delete mode 100644 deploy/ansible/roles/iroha-cluster-deploy-node/tasks/main.yml delete mode 100644 deploy/ansible/roles/iroha-cluster-deploy-node/tasks/ubuntu.yml delete mode 100644 deploy/ansible/roles/iroha-cluster-deploy-node/templates/config.j2 delete mode 100644 deploy/ansible/roles/iroha-cluster-deploy-node/templates/docker-compose.yml.j2 create mode 100644 
deploy/ansible/roles/iroha-docker/README.md create mode 100644 deploy/ansible/roles/iroha-docker/defaults/main.yml create mode 100755 deploy/ansible/roles/iroha-docker/files/config_gen.sh create mode 100644 deploy/ansible/roles/iroha-docker/files/ed25519.py create mode 100644 deploy/ansible/roles/iroha-docker/files/genesis-add-peers.py create mode 100644 deploy/ansible/roles/iroha-docker/files/genesis.block create mode 100644 deploy/ansible/roles/iroha-docker/files/old-keys-format.patch create mode 100644 deploy/ansible/roles/iroha-docker/tasks/config-gen.yml create mode 100644 deploy/ansible/roles/iroha-docker/tasks/deploy.yml create mode 100644 deploy/ansible/roles/iroha-docker/tasks/main.yml create mode 100644 deploy/ansible/roles/iroha-docker/templates/config.docker.j2 create mode 100644 deploy/ansible/roles/iroha-docker/templates/docker-compose.yml.j2 delete mode 100644 deploy/ansible/roles/iroha-standalone-config-gen/README.md delete mode 100644 deploy/ansible/roles/iroha-standalone-config-gen/defaults/main.yml delete mode 100644 deploy/ansible/roles/iroha-standalone-config-gen/tasks/main.yml delete mode 100644 deploy/ansible/roles/iroha-standalone-config-gen/tasks/ubuntu.yml delete mode 100644 deploy/ansible/roles/iroha-standalone-config-gen/templates/peers.list.j2 delete mode 100644 deploy/ansible/roles/iroha-standalone-deploy-node/README.md delete mode 100644 deploy/ansible/roles/iroha-standalone-deploy-node/defaults/main.yml delete mode 100644 deploy/ansible/roles/iroha-standalone-deploy-node/tasks/main.yml delete mode 100644 deploy/ansible/roles/iroha-standalone-deploy-node/tasks/ubuntu.yml delete mode 100644 deploy/ansible/roles/iroha-standalone-deploy-node/templates/config.j2 diff --git a/deploy/ansible/.gitignore b/deploy/ansible/.gitignore index 44ef051a1a..e5f57df78e 100644 --- a/deploy/ansible/.gitignore +++ b/deploy/ansible/.gitignore @@ -5,4 +5,7 @@ config/tmp/ansible.log **/ansible-users/README.md **/ansible-users/tests **/ansible-users/meta/.galaxy_install_info -.travis.yml \ No newline at end of file +.travis.yml +ansible.cfg +**/iroha-docker/group_vars +**/iroha-docker/host_vars diff --git a/deploy/ansible/README.md b/deploy/ansible/README.md deleted file mode 100644 index 92b47d330f..0000000000 --- a/deploy/ansible/README.md +++ /dev/null @@ -1,218 +0,0 @@ -# Using ansible for iroha deployment -There are 2 deployment scenarios supported which are implemented as separated playbooks. - -1. `iroha-docker-cluster` -2. `iroha-standalone-nodes` - -These playbooks use different roles and inventories. Each role and inventory file is well-documented, sometimes they -are referred in the role's README file. -## 1. Iroha-docker-cluster -### 1.1 Main ideas -This playbook allows to deploy multiple iroha peers on one node. For example, you want to run 21 nodes of iroha, -but you have only 4 hosts that are in the same network. Thus, you might decide that: -- 8 `iroha` nodes should be launched on the 1st host (or you can vary amount of `iroha` nodes per each host). -- 5 `iroha` nodes on the 2nd host -- 4 `iroha` nodes on the 3rd host -- 4 `iroha` nodes on the 4th host -Actually, you can run as many iroha peers on one host as you want -(but no more that 30 - *due to docker networks max amount per host*) - this is just the example to show you the -flexibility of this playbook. 
- -It works in the following way: -- [pre-generation phase] - `peers.list` is generated and stored locally at `{{ filesDir }}` directory -- [generation phase] all configs are generated using `iroha-cli` and also stored at `{{ filesDir }}`: - - `genesis.block` - - `node$KEY.priv` - - `node$KEY.pub`, -where `$KEY` is a iroha node ID in the P2P network -- [deliver phase] - `config.sample` file is generated from the template and delivered to the `{{ confPath }}` which is -set to ` /opt/docker/iroha/conf$KEY` by default. Files from the `generation phase` are also delivered to these endpoints. -Then, `docker-compose.yml` file is generated and stored at `{{ composeDir }}`(see section 1.4 of this file) -location for each host (nodes amount for each host is set by variable `nodes_in_region` in -`playbooks/iroha/group_vars/.yml` - see `inventory/hosts_docker_cluster.list` file for more instructions). -- [deploy phase] all previously launched `iroha` and `postgres` containers are stopped and removed using -`docker-compose down` command. After all operations `iroha` and `postgres` nodes are started using `docker-compose up -d` -command. - -> NOTE: During the [deploy phase] one can see the error messages during execution of task -`stop and remove all docker-compose containers before operations`. That means that you don't have launched `iroha` -and `postgres` containers. This error is handled and will not affect playbook execution. - -Let's discuss how it works in details. - -### 1.2 Inventory -``` -[iroha-east] -iroha-bench1 ansible_host=0.0.0.0 ansible_user=root key=0 - -[iroha-west] -iroha-bench2 ansible_host=0.0.0.0 ansible_user=root key=8 - -[iroha-south] -iroha-bench3 ansible_host=0.0.0.0 ansible_user=root key=13 - -[iroha-north] -iroha-bench4 ansible_host=0.0.0.0 ansible_user=root key=17 -``` - -As you can see, basic host field in group contains `hostname`, `ansible_host `, `ansible_user`, and `key` field. - -`key` is a node ID in a iroha network. This value is used for passing only node-specific keypair to the `iroha` node to start. -In this particular playbook this value is used to the start of the count. -`nodes_in_region` is an amount of `iroha` nodes running on each host. -Values `key` and `nodes_in_region` are used in the following manner: -- for host iroha-bench1 we have 8 iroha peers. First peer ID will be `key=0`, for second `key=1`, and so on up to `key=7` -- for host iroha-bench2 we have another 5 iroha peers. Their IDs will start from 8 to 12. -- for host iroha-bench3 we want to run 4 iroha peers. Their IDs will start from 13 to 16. -- for host iroha-bench4 we want to run 4 iroha peers. Their IDs will start from 17 to 20. - -> `nodes_in_region` variable could be set at `playbooks/iroha-docker-cluster/group_vars/.yml`, or default value from -`playbooks/iroha-docker-cluster/group_vars/all.yml` will be used. - -### 1.3 Peer configs - -There is no need to describe them as they are generated automatically. Port management is also automated. -If you want to see how it works --> you can see `roles/iroha-cluster-deploy-node/tasks/ubuntu.yml` and templates `roles/iroha-cluster-deploy-node/templates/` - -### 1.4 Variables to be set -This section provides full list of variables that are used in the playbook. 
-- `playbooks/iroha-docker-cluster/group_vars/all.yml` - - `confPath` - config files directory on target host - - `filesDir`- local directory with files generated by `iroha-cli` - - `composeDir` - `docker-compose.yml` file location on target - - `torii_port` - torii port start value - - `internal_port` - iroha port start value - - `nodes_in_region` - default 4 nodes of iroha on each target host - -If you want everything to work from scratch, these variables should not be changed. -- `roles/iroha-cluster-config-gen/defaults/main.yml` - - `filesDir` - local directory on localhost where keys and genesis.block files will be stored after `pre-generation` - and `generation` phases (if you want to change it - do it here, it has higher precedence) -- ` roles/iroha-cluster-deploy-node/defaults/main.yml` - - `postgresName` - docker container name of postgres - - `postgresPort` - docker container port exposed by postgres - - `postgresUser` - postgres username - - `postgresPassword` - postgres password - - `iroha_net` - prefix name of docker network - - `containerConfPath` - path to folder with config files inside docker container (mount point for docker volume) - - `irohaDockerImage` - `iroha` docker image name - - `irohaDockerImageTag` - `iroha` docker image tag - - `dbDockerImage` - image name for `postgres` - - `dbDockerImageTag` - image tag for `postgres` - -### 1.5 How to launch - -After `hosts_docker_cluster.list` inventory file is configured one could launch the playbook. - -``` -ansible-playbook -i inventory/hosts_docker_cluster.list playbooks/iroha-docker-cluster/iroha-deploy.yml --private-key=~/.ssh/ -``` -, where you should specify your SSH key. - -> NOTE: you might see the tags property defined in the `playbooks/iroha-docker-cluster/iroha-deploy.yml` in -``` -- { role: iroha-cluster-deploy-node, tags: ["deliver", "deploy"] } -``` -> Tags are used to separate tasks in case you want to run only few of them without changing the role. -In this case, if you exclude `"deploy"` tag, then only configuration files will be delivered. After that is you run the playbook -with only -``` - - hosts: all - gather_facts: True - roles: - - { role: iroha-cluster-deploy-node, tags: ["deploy"] } -``` -> will allow you to only run `iroha` nodes without changing the configuration. It is just an option for flexibility, -there is no need to use it. - -### 1.6 Requirements -1) **iroha-cli** must be installed and could be accessed using PATH variable (e.g. /usr/bin). -This is required because keys and genesis.block are generated on your local host and stored in `{{ filesDir }}` folder. - -## 2. Iroha-standalone-nodes -### 2.1 Main ideas - -This playbook allows to run iroha cluster by delivering previously generated `genesis.block`, -keypair for each node, and `config.sample` to target hosts (single `iroha` node for each host). -It runs `iroha` and `postgres:9.5` in docker containers. - -It works in the following way: -- [pre-generation phase] - `peers.list` is generated and stored locally at `{{ filesDir }}` directory -- [generation phase] all configs are generated using `iroha-cli` and also stored at `{{ filesDir }}`: - - `genesis.block` - - `node$KEY.priv` - - `node$KEY.pub`, -where `$KEY` is a iroha node ID in the P2P network -- [deliver phase] - `config.sample` file is generated from the template and delivered to the `{{ confPath }}` which is -set to ` /opt/docker/iroha/conf` by default. Files from the `generation phase` are also delivered to these endpoints. 
-- [deploy phase] all previously launched `iroha` and `postgres` containers are stopped and removed, then images are updated -using `docker pull` command and after that `iroha` and `postgres` are started using `docker run` command. - -> NOTE: During the [deploy phase] one can see the error messages during execution of task -`Stop and remove previous running docker containers`. That means that you have neither launched `iroha` -and `postgres` containers nor existing `docker-compose.yml` file. This error is handled and will not affect playbook -execution. - -Let's discuss how it works in details. - -### 2.2 Inventory -``` -[iroha-nodes] -iroha-1 ansible_host=0.0.0.0 ansible_user=root key=0 -iroha-2 ansible_host=0.0.0.0 ansible_user=root key=1 -iroha-3 ansible_host=0.0.0.0 ansible_user=root key=2 -``` - -As you can see, basic host field in group contains `hostname`, `ansible_host `, `ansible_user`, and `key` field. - -`key` is a node ID in a iroha network. -Values `key` is used in the following manner: -- for host iroha-1 host peer ID will be `key=0` -- for host iroha-2 host peer ID will be `key=1` -- the following hosts in the list should increment this value - -> you can use multiple group of hosts with different names. The only requirement is that value `key` -should be increased throughout the all list on hosts. - -### 2.3 Peer configs - -There is no need to describe them as they are generated automaitally. Port management is also automated. -If you want to see how it works --> you can see `roles/iroha-standalone-deploy-node/tasks/ubuntu.yml` and templates -`roles/iroha-standalone-deploy-node/templates/` - -### 2.4 Variables to be set -This section provides full list of variables that are used in the playbook. -- `playbooks/iroha-docker-cluster/group_vars/all.yml` - - `confPath` - config files directory on target host - - `filesDir`- local directory with files generated by `iroha-cli` - - `torii_port` - torii port start value - - `internal_port` - iroha port start value - -If you want everything to work from scratch, these variables should not be changed. -- `roles/iroha-cluster-config-gen/defaults/main.yml` - - `filesDir` - local directory on localhost where keys and genesis.block files will be stored after `pre-generation` - and `generation` phases (if you want to change it - do it here, it has higher precedence) -- ` roles/iroha-cluster-deploy-node/defaults/main.yml` - - `postgresName` - docker container name of postgres - - `postgresPort` - docker container port exposed by postgres - - `postgresUser` - postgres username - - `postgresPassword` - postgres password - - `iroha_net` - prefix name of docker network - - `containerConfPath` - path to folder with config files inside docker container (mount point for docker volume) - - `irohaDockerImage` - `iroha` docker image name - - `irohaDockerImageTag` - `iroha` docker image tag - - `dbDockerImage` - image name for `postgres` - - `dbDockerImageTag` - image tag for `postgres` - -### 2.5 How to launch - -After `hosts_standalone_nodes.list` inventory file is configured one could launch the playbook. - -``` -ansible-playbook -i inventory/hosts_standalone_nodes.list playbooks/iroha-standalone-nodes/iroha-deploy.yml --private-key=~/.ssh/ -``` -, where you should specify your SSH key. - - -### 2.6 Requirements -1) **iroha-cli** must be installed and could be accessed using PATH variable (e.g. /usr/bin). 
This is due to keys and genesis.block is generated on your local host and stored on /tmp/iroha-bench diff --git a/deploy/ansible/inventory/hosts_docker_cluster.list b/deploy/ansible/inventory/hosts_docker_cluster.list deleted file mode 100644 index 6d6acfe45c..0000000000 --- a/deploy/ansible/inventory/hosts_docker_cluster.list +++ /dev/null @@ -1,28 +0,0 @@ -# This is sample inventory file for deployment using docker-compose (multiple nodes of iroha on target hosts) -# To start with: -# 1) set group name (not mandatory, you can use already predefined): e.g. [iroha-east]. -# 2) change inventory hostname (not mandatory, you can use already predefined): e.g. iroha-bench1. -# 3) set IP address to the address of your server ("ansible_host" variable) -# 4) set "ansible_user", or leave "root" -# 5) set "key" variable in the following way: -# key=0 for the first host of the first node in the inventory -# in playbook//group_vars/all.yml or playbook//group_vars/.yml -# you can find parameter "nodes_in_group" -# NOTE: that .yml values have higher priority than in all.yml -# for next host set key=key[prev_host] + nodes_in_group -# repeat this procedure for all other hosts -# NOTE: 1) you can add more than one host to each group -# NOTE: 2) you can set unique value of "nodes_in_group" for each host by setting it in -# playbook//host_vars//all.yml - -[iroha-east] -iroha-bench1 ansible_host=0.0.0.0 ansible_user=root key=0 - -[iroha-west] -iroha-bench2 ansible_host=0.0.0.0 ansible_user=root key=8 - -[iroha-south] -iroha-bench3 ansible_host=0.0.0.0 ansible_user=root key=13 - -[iroha-north] -iroha-bench4 ansible_host=0.0.0.0 ansible_user=root key=17 diff --git a/deploy/ansible/inventory/hosts_standalone_nodes.list b/deploy/ansible/inventory/hosts_standalone_nodes.list deleted file mode 100644 index e22fa29790..0000000000 --- a/deploy/ansible/inventory/hosts_standalone_nodes.list +++ /dev/null @@ -1,15 +0,0 @@ -# This is sample inventory file for deployment using docker-compose (multiple nodes of iroha on target hosts) -# To start with: -# 1) set group name (not mandatory, you can use already predefined): e.g. [iroha-nodes]. -# 2) change inventory hostname (not mandatory, you can use already predefined): e.g. iroha-1. -# 3) set IP address to the address of your server ("ansible_host" variable) -# 4) set "ansible_user", or leave "root" -# 5) set "key" variable in the following way: -# key=0 for the first host of the first node in the inventory -# for next host set key=key[prev_host] + 1 -# NOTE: 1) you can create more than one group, but you still have to increment it throughout the inventory file. 
- -[iroha-nodes] -iroha-1 ansible_host=0.0.0.0 ansible_user=root key=0 -iroha-2 ansible_host=0.0.0.0 ansible_user=root key=1 -iroha-3 ansible_host=0.0.0.0 ansible_user=root key=2 diff --git a/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/all.yml b/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/all.yml deleted file mode 100644 index c672fce95b..0000000000 --- a/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/all.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- - # iroha docker cluster playbook group vars - confPath: /opt/docker/iroha/conf # config files directory on target host - filesDir: /tmp/iroha-files # local directory with files generated by iroha-cli - composeDir: /tmp # docker-compose.yml file location on target - torii_port: 50051 # torii port start value - internal_port: 10001 # iroha port start value - nodes_in_region: 4 # default 4 nodes of iroha on each target host diff --git a/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-east.yml b/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-east.yml deleted file mode 100644 index 613df7cf5b..0000000000 --- a/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-east.yml +++ /dev/null @@ -1 +0,0 @@ -nodes_in_region: 8 diff --git a/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-north.yml b/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-north.yml deleted file mode 100644 index 85c3521cc8..0000000000 --- a/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-north.yml +++ /dev/null @@ -1 +0,0 @@ -nodes_in_region: 4 diff --git a/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-south.yml b/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-south.yml deleted file mode 100644 index 85c3521cc8..0000000000 --- a/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-south.yml +++ /dev/null @@ -1 +0,0 @@ -nodes_in_region: 4 diff --git a/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-west.yml b/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-west.yml deleted file mode 100644 index af7858cfd2..0000000000 --- a/deploy/ansible/playbooks/iroha-docker-cluster/group_vars/iroha-west.yml +++ /dev/null @@ -1 +0,0 @@ -nodes_in_region: 5 diff --git a/deploy/ansible/playbooks/iroha-docker-cluster/iroha-deploy.yml b/deploy/ansible/playbooks/iroha-docker-cluster/iroha-deploy.yml deleted file mode 100644 index cc61f3b4e9..0000000000 --- a/deploy/ansible/playbooks/iroha-docker-cluster/iroha-deploy.yml +++ /dev/null @@ -1,14 +0,0 @@ - - hosts: localhost - connection: local - roles: - - { role: iroha-cluster-config-gen } - - - hosts: all - gather_facts: False - pre_tasks: - - name: install python 2 # python installation (ansible hosts gather_facts requirement) - raw: test -e /usr/bin/python || (apt -y update && apt install -y python) - changed_when: False - roles: - - { role: docker, tags: docker } - - { role: iroha-cluster-deploy-node, tags: ["deliver", "deploy"] } diff --git a/deploy/ansible/playbooks/iroha-docker/main.yml b/deploy/ansible/playbooks/iroha-docker/main.yml new file mode 100644 index 0000000000..f25745dd85 --- /dev/null +++ b/deploy/ansible/playbooks/iroha-docker/main.yml @@ -0,0 +1,8 @@ +- hosts: all + strategy: linear + roles: + # docker role only works for Linux hosts + - { role: docker, tags: docker } + # - { role: iroha-docker, tags: iroha-docker } + vars: + hostnames: [] \ No newline at end of file diff --git a/deploy/ansible/playbooks/iroha-standalone-nodes/group_vars/all.yml 
b/deploy/ansible/playbooks/iroha-standalone-nodes/group_vars/all.yml deleted file mode 100644 index 10bc348560..0000000000 --- a/deploy/ansible/playbooks/iroha-standalone-nodes/group_vars/all.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- - # iroha standalone playbook group vars - confPath: /opt/docker/iroha/conf # config files directory on target host - filesDir: /tmp/iroha-files # local directory with files generated by iroha-cli - torii_port: 50051 # torii port default value - internal_port: 10001 # iroha port default value diff --git a/deploy/ansible/playbooks/iroha-standalone-nodes/iroha-deploy.yml b/deploy/ansible/playbooks/iroha-standalone-nodes/iroha-deploy.yml deleted file mode 100644 index 42a106b882..0000000000 --- a/deploy/ansible/playbooks/iroha-standalone-nodes/iroha-deploy.yml +++ /dev/null @@ -1,15 +0,0 @@ ---- - - hosts: localhost - connection: local - roles: - - { role: iroha-standalone-config-gen } - - - hosts: all - gather_facts: False - pre_tasks: - - name: install python 2 # python installation (ansible hosts gather_facts requirement) - raw: test -e /usr/bin/python || (apt -y update && apt install -y python) - changed_when: False - roles: - - { role: docker, tags: docker } - - { role: iroha-standalone-deploy-node, tags: iroha-standalone-deploy-node } diff --git a/deploy/ansible/playbooks/iroha-standalone-nodes/main.yml b/deploy/ansible/playbooks/iroha-standalone-nodes/main.yml deleted file mode 100644 index ffb7593c13..0000000000 --- a/deploy/ansible/playbooks/iroha-standalone-nodes/main.yml +++ /dev/null @@ -1,5 +0,0 @@ ---- - - hosts: all - gather_facts: true - roles: - - { role: ansible-users, tags: ansible-users } diff --git a/deploy/ansible/roles/docker/README.md b/deploy/ansible/roles/docker/README.md index 83be832ba6..4a03ffdf1d 100644 --- a/deploy/ansible/roles/docker/README.md +++ b/deploy/ansible/roles/docker/README.md @@ -1,7 +1,7 @@ -docker +Docker ========= -A role that installs docker engine and docker-compose including python modules. +A role that installs Docker CE and Docker Compose including Python modules. 
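For illustration, a minimal playbook applying this role could look like the sketch below. The role name, tag, and variable values simply mirror `defaults/main.yml` and the `iroha-docker` playbook from this patch set; adjust the versions to whatever you actually need. Note that `tasks/main.yml` probes the installed Docker and Docker Compose versions first and only runs the install steps when they differ from these variables, so re-applying the role is safe.

```
- hosts: all
  roles:
    - { role: docker, tags: docker }
  vars:
    dockerVersion: 18.06.1~ce~3-0~ubuntu
    dockerComposeVersion: 1.22.0
```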
Requirements ------------ @@ -13,10 +13,9 @@ Role Variables - variables defined by this role: - `defaults/main.yml` list of variables: - - `dockerVersion` - version of docker that is going to be installed - - `dockerComposeVersion` - version of docker compose that is going to be installed - - `execPath` - path for `docker-compose` binary to be placed + `defaults/main.yml` list of variables: + - `dockerVersion` - version of Docker that is going to be installed + - `dockerComposeVersion` - version of Docker Compose that is going to be installed Example Playbook diff --git a/deploy/ansible/roles/docker/defaults/main.yml b/deploy/ansible/roles/docker/defaults/main.yml index aaf4ffe06b..04a9914cc0 100644 --- a/deploy/ansible/roles/docker/defaults/main.yml +++ b/deploy/ansible/roles/docker/defaults/main.yml @@ -1,4 +1,2 @@ -dockerVersion: 17.12.0~ce-0~ubuntu -dockerComposeVersion: 1.19.0 -execPath: /usr/local/bin/ -arch: amd64 +dockerVersion: 18.06.1~ce~3-0~ubuntu +dockerComposeVersion: 1.22.0 \ No newline at end of file diff --git a/deploy/ansible/roles/docker/defaults/ubuntu.yml b/deploy/ansible/roles/docker/defaults/ubuntu.yml deleted file mode 100644 index aaf4ffe06b..0000000000 --- a/deploy/ansible/roles/docker/defaults/ubuntu.yml +++ /dev/null @@ -1,4 +0,0 @@ -dockerVersion: 17.12.0~ce-0~ubuntu -dockerComposeVersion: 1.19.0 -execPath: /usr/local/bin/ -arch: amd64 diff --git a/deploy/ansible/roles/docker/tasks/install_compose.yml b/deploy/ansible/roles/docker/tasks/install_compose.yml new file mode 100644 index 0000000000..116f3b4eaa --- /dev/null +++ b/deploy/ansible/roles/docker/tasks/install_compose.yml @@ -0,0 +1,25 @@ +- name: Install packages + apt: + update_cache: yes + name: "{{ packages }}" + vars: + packages: + - python-setuptools + - python-pip + when: ansible_python_version is search("^2\..*$") + +- name: Install packages + apt: + update_cache: yes + name: "{{ packages }}" + vars: + packages: + - python3-setuptools + - python3-pip + when: ansible_python_version is search("^3\..*$") + +- name: Install docker python modules + pip: + name: + - docker + - docker-compose=={{ dockerComposeVersion }} diff --git a/deploy/ansible/roles/docker/tasks/install_docker.yml b/deploy/ansible/roles/docker/tasks/install_docker.yml new file mode 100644 index 0000000000..99e1201d4a --- /dev/null +++ b/deploy/ansible/roles/docker/tasks/install_docker.yml @@ -0,0 +1,39 @@ +- name: Purge old Docker versions if any + apt: + name: "{{ packages }}" + purge: yes + state: absent + vars: + packages: + - docker + - docker-engine + - docker.io + +- name: Install packages + apt: + name: "{{ packages }}" + update_cache: yes + vars: + packages: + - apt-transport-https + - ca-certificates + - curl + - software-properties-common + +- name: Add GPG key + apt_key: + url: https://download.docker.com/linux/ubuntu/gpg + +- name: Get distro type + command: lsb_release -cs + register: distro_type + changed_when: False + +- name: Add repository + apt_repository: + repo: "deb [arch=amd64] https://download.docker.com/linux/ubuntu {{ distro_type.stdout }} stable" + update_cache: yes + +- name: Install docker + apt: + name: docker-ce={{ dockerVersion }} diff --git a/deploy/ansible/roles/docker/tasks/main.yml b/deploy/ansible/roles/docker/tasks/main.yml index 44b6052457..5618a6b3e3 100644 --- a/deploy/ansible/roles/docker/tasks/main.yml +++ b/deploy/ansible/roles/docker/tasks/main.yml @@ -1,3 +1,49 @@ --- - - include_tasks: ubuntu.yml - when: ansible_distribution == 'Ubuntu' + - name: check Docker binary + stat: + path: 
"/usr/bin/docker" + register: docker_binary + + - name: check Docker version + command: "/usr/bin/docker --version" + register: docker_version + when: "docker_binary.stat.exists" + changed_when: False + failed_when: False + + - name: process docker_version.stdout var + set_fact: + docker_version: "{{ docker_version.stdout | regex_search('[0-9]+\\.[0-9]+\\.[0-9]+') }}" + when: "docker_binary.stat.exists" + + - debug: + var: docker_version + + - name: process dockerVersion + set_fact: + docker_version_desired: "{{ dockerVersion | regex_search('[0-9]+\\.[0-9]+\\.[0-9]+') }}" + + - name: check Docker Compose binary + stat: + path: "/usr/local/bin/docker-compose" + register: docker_compose_binary + + - name: check Docker Compose version + command: "docker-compose --version" + register: docker_compose_version + when: "docker_compose_binary.stat.exists" + changed_when: False + failed_when: False + + - name: process docker_compose_version.stdout var + set_fact: + docker_compose_version: "{{ docker_compose_version.stdout | regex_search('[0-9]+\\.[0-9]+\\.[0-9]+') }}" + when: "docker_compose_binary.stat.exists" + + - name: install/upgrade Docker, if needed + include: install_docker.yml + when: not docker_binary.stat.exists or dockerVersion is not defined or docker_version_desired is not version(docker_version, operator="eq") + + - name: install/upgrade Docker Compose, if needed + include: install_compose.yml + when: not docker_compose_binary.stat.exists or dockerComposeVersion is not defined or dockerComposeVersion is not version(docker_compose_version, operator="eq") diff --git a/deploy/ansible/roles/docker/tasks/ubuntu.yml b/deploy/ansible/roles/docker/tasks/ubuntu.yml deleted file mode 100644 index f4bbc4c75d..0000000000 --- a/deploy/ansible/roles/docker/tasks/ubuntu.yml +++ /dev/null @@ -1,57 +0,0 @@ ---- - - name: install python 2 - raw: test -e /usr/bin/python || (apt -y update && apt install -y python-minimal) - - - name: Purge old Docker versions if any - apt: purge=yes state=absent name={{ item }} - with_items: - - docker - - docker-engine - - docker.io - - - name: Install packages - apt: pkg={{ item }} state=installed update_cache=yes - with_items: - - apt-transport-https - - ca-certificates - - curl - - software-properties-common - - python-setuptools - - python-pip - - - name: Add GPG key - shell: 'curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -' - - - name: Add repository - shell: 'add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"' - - - name: Install docker - apt: - name: docker-ce={{ dockerVersion }} - state: present - update_cache: yes - -# purge if anything installed to resolve conflicts - - name: remove existing python modules - pip: - name: "{{ item }}" - state: absent - with_items: - - docker - - docker-py - - docker-compose - - - name: install docker python modules - pip: - name: "{{ item }}" - state: latest - with_items: - - docker - - - name: download docker-compose bin - raw: curl -L https://github.com/docker/compose/releases/download/{{ dockerComposeVersion }}/docker-compose-`uname -s`-`uname -m` -o {{ execPath }}/docker-compose - - - name: make docker-compose executable - file: - path: "{{ execPath }}/docker-compose" - mode: "a+x" diff --git a/deploy/ansible/roles/iroha-cluster-config-gen/README.md b/deploy/ansible/roles/iroha-cluster-config-gen/README.md deleted file mode 100644 index e9ef6ee444..0000000000 --- a/deploy/ansible/roles/iroha-cluster-config-gen/README.md +++ /dev/null @@ -1,32 
+0,0 @@ -iroha-cluster-config-gen -========= - -A role that generates `genesis.block` and keypair for each node in deployable iroha cluster based on inventory. - -Requirements ------------- - -`iroha-cli` binary should be accessible via `PATH` variable on your system. - -Role Variables --------------- - -- variables defined by this role: - - `defaults/main.yml` list of variables: - - `filesDir`: directory to store generated files - -- variables required by playbook (see description in playbook's `group_vars` files): - - `nodes_in_region` - - `internal_port` - - -Example Playbook ----------------- - -```yaml - - hosts: locals - gather_facts: true - roles: - - { role: iroha-cluster-config-gen } -``` diff --git a/deploy/ansible/roles/iroha-cluster-config-gen/defaults/main.yml b/deploy/ansible/roles/iroha-cluster-config-gen/defaults/main.yml deleted file mode 100644 index 19ffb10d40..0000000000 --- a/deploy/ansible/roles/iroha-cluster-config-gen/defaults/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -# defaults file for iroha-cluster-config-gen -filesDir: /tmp/iroha-files diff --git a/deploy/ansible/roles/iroha-cluster-config-gen/tasks/main.yml b/deploy/ansible/roles/iroha-cluster-config-gen/tasks/main.yml deleted file mode 100644 index 44b6052457..0000000000 --- a/deploy/ansible/roles/iroha-cluster-config-gen/tasks/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- - - include_tasks: ubuntu.yml - when: ansible_distribution == 'Ubuntu' diff --git a/deploy/ansible/roles/iroha-cluster-config-gen/tasks/ubuntu.yml b/deploy/ansible/roles/iroha-cluster-config-gen/tasks/ubuntu.yml deleted file mode 100644 index 49e39495a7..0000000000 --- a/deploy/ansible/roles/iroha-cluster-config-gen/tasks/ubuntu.yml +++ /dev/null @@ -1,20 +0,0 @@ ---- -# tasks file for iroha-standalone-config-gen -- name: create dir for temporary files - file: - path: "{{ filesDir }}" - state: "{{ item }}" - mode: 0755 - with_items: - - absent - - directory - -- name: generate peers list - template: - src: peers.list.j2 - dest: "{{ filesDir }}/peers.list" - -- name: generate keys and genesis block - command: "iroha-cli --genesis_block --peers_address peers.list" - args: - chdir: "{{ filesDir }}" diff --git a/deploy/ansible/roles/iroha-cluster-config-gen/templates/peers.list.j2 b/deploy/ansible/roles/iroha-cluster-config-gen/templates/peers.list.j2 deleted file mode 100644 index c2e7e3a784..0000000000 --- a/deploy/ansible/roles/iroha-cluster-config-gen/templates/peers.list.j2 +++ /dev/null @@ -1,5 +0,0 @@ -{% for host in groups['all'] | difference([inventory_hostname]) | sort(reverse=False) %} -{% for i in range(0, hostvars[host].nodes_in_region) %} -{{ hostvars[host].ansible_host }}:{{ hostvars[host].internal_port + i | int }} -{% endfor %} -{% endfor %} diff --git a/deploy/ansible/roles/iroha-cluster-deploy-node/README.md b/deploy/ansible/roles/iroha-cluster-deploy-node/README.md deleted file mode 100644 index 8a592b98ef..0000000000 --- a/deploy/ansible/roles/iroha-cluster-deploy-node/README.md +++ /dev/null @@ -1,61 +0,0 @@ -iroha-cluster-deploy-node -========= - -A role that runs iroha cluster by delivering previously generated `genesis.block`, -keypair for each node, and newly generated `config.sample` to target hosts. It creates `docker-compose.yml` file -with iroha docker and `postgres:9.5` configured in separated docker networks, but in one P2P network. 
- -This role allows you to run X nodes of iroha on each target host, where X [0,1,..,31], e.g: - -- target host1: node-0, node-1, node-2, node-3, node-4 -- target host2: node-5, node-6, node-7 -- target host3: node-8, node-9, node-10, node-11, node-12, node-13 - -Requirements ------------- - -1. Pre-generated files (role `iroha-standalone-config-gen`): - - `genesis.block` - - `nodeX.pub`, `nodeX.priv` keypair for each node -stored at `filesDir` folder. - -2. `docker-compose` binary available via `PATH` variable (role `docker`) - -Role Variables --------------- - -- variables defined by this role: - - `defaults/main.yml` list of variables: - - `postgresName`: name of `postgres` docker container after running by `docker-compose` - - `postgresUser`: username on postgresql - - `postgresPassword`: password on postgresql - - `iroha_net`: name prefix of docker network - - `containerConfPath`: config dir prefix on target host (this directory is attached to running iroha docker container as a docker volume) - -- variables required by playbook (see description in playbook's `group_vars` files): - - `filesDir` - - `composeDir` - - `confPath` - - `nodes_in_region` - - `internal_port` - - `torii_port` - - -Example Playbook ----------------- - -```yaml - - hosts: - - iroha-east - - iroha-west - gather_facts: False - pre_tasks: - - name: install python 2 - raw: test -e /usr/bin/python || (apt -y update && apt install -y python) - changed_when: False - roles: - - { role: iroha-cluster-deploy-node, tags: ["deliver", "deploy"] } -``` -tags can be used for separaring the execution, e.g. if you exclude tag "deploy", command -`docker-compose up -d` will not be executed and your iroha cluster will not start, but all files will be deployed. diff --git a/deploy/ansible/roles/iroha-cluster-deploy-node/defaults/main.yml b/deploy/ansible/roles/iroha-cluster-deploy-node/defaults/main.yml deleted file mode 100644 index fb05595bad..0000000000 --- a/deploy/ansible/roles/iroha-cluster-deploy-node/defaults/main.yml +++ /dev/null @@ -1,13 +0,0 @@ ---- - # docker container postgres settings - postgresName: tmp_postgres - postgresPort: 5432 - postgresUser: psql - postgresPassword: psql - iroha_net: iroha_network # name of docker network - containerConfPath: /opt/iroha_data # path to folder with config files inside docker container - - irohaDockerImage: hyperledger/iroha # docker image name - irohaDockerImageTag: latest # docker image tag - dbDockerImage: postgres - dbDockerImageTag: 9.5 diff --git a/deploy/ansible/roles/iroha-cluster-deploy-node/tasks/main.yml b/deploy/ansible/roles/iroha-cluster-deploy-node/tasks/main.yml deleted file mode 100644 index 44b6052457..0000000000 --- a/deploy/ansible/roles/iroha-cluster-deploy-node/tasks/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- - - include_tasks: ubuntu.yml - when: ansible_distribution == 'Ubuntu' diff --git a/deploy/ansible/roles/iroha-cluster-deploy-node/tasks/ubuntu.yml b/deploy/ansible/roles/iroha-cluster-deploy-node/tasks/ubuntu.yml deleted file mode 100644 index d9625733d8..0000000000 --- a/deploy/ansible/roles/iroha-cluster-deploy-node/tasks/ubuntu.yml +++ /dev/null @@ -1,69 +0,0 @@ ---- - - setup: - - name: stop and remove all docker-compose containers before operations - command: docker-compose down - args: - chdir: "{{ composeDir }}" - ignore_errors: yes - tag: deliver - - - name: Make conf dir - file: - path: "{{ confPath }}{{ item }}" - state: directory - mode: 0755 - owner: root - with_sequence: start=0 end={{ nodes_in_region - 1 | int }} - tag: deliver - - - 
name: generate configs - template: - src: config.j2 - dest: "{{ confPath }}{{ item }}/config.sample" - with_sequence: start=0 end={{ nodes_in_region - 1 | int }} - tag: deliver - - - name: copy genesis block - copy: - src: "{{ filesDir }}/genesis.block" - dest: "{{ confPath }}{{ item }}/genesis.block" - with_sequence: start=0 end={{ nodes_in_region - 1 | int }} - tag: deliver - - - name: copy public keys - copy: - src: "{{ filesDir }}/node{{ key + item | int}}.pub" - dest: "{{ confPath }}{{ item }}/" - with_sequence: start=0 end={{ nodes_in_region - 1 | int }} - tag: deliver - - - name: copy private keys - copy: - src: "{{ filesDir }}/node{{ key + item | int}}.priv" - dest: "{{ confPath }}{{ item }}/" - with_sequence: start=0 end={{ nodes_in_region - 1 | int }} - tag: deliver - - - name: generate compose - template: - src: docker-compose.yml.j2 - dest: "{{ composeDir }}/docker-compose.yml" - tag: deliver - - - name: pull fresh docker image of postgres - docker_image: - name: "{{ dbDockerImage }}" - tag: "{{ dbDockerImageTag }}" - tag: deploy - - - name: pull fresh docker image of iroha - docker_image: - name: "{{ irohaDockerImage }}" - tag: "{{ irohaDockerImageTag }}" - tag: deploy - - - name: run docker compose - command: docker-compose up -d - args: - chdir: "{{ composeDir }}" - tag: deploy diff --git a/deploy/ansible/roles/iroha-cluster-deploy-node/templates/config.j2 b/deploy/ansible/roles/iroha-cluster-deploy-node/templates/config.j2 deleted file mode 100644 index d400764c07..0000000000 --- a/deploy/ansible/roles/iroha-cluster-deploy-node/templates/config.j2 +++ /dev/null @@ -1,9 +0,0 @@ -{ - "block_store_path" : "/tmp/block_store/", - "torii_port" : {{ torii_port + item | int }}, - "internal_port" : {{ internal_port + item | int }}, - "pg_opt" : "host={{ postgresName }}_{{ key + item | int }}_1 port={{ postgresPort }} user={{ postgresUser }} password='{{ postgresPassword }}'", - "max_proposal_size" : 10, - "proposal_delay" : 5000, - "vote_delay" : 5000 -} diff --git a/deploy/ansible/roles/iroha-cluster-deploy-node/templates/docker-compose.yml.j2 b/deploy/ansible/roles/iroha-cluster-deploy-node/templates/docker-compose.yml.j2 deleted file mode 100644 index 83e84427e2..0000000000 --- a/deploy/ansible/roles/iroha-cluster-deploy-node/templates/docker-compose.yml.j2 +++ /dev/null @@ -1,34 +0,0 @@ -version: '3' - -services: -{% for i in range(nodes_in_region | int) %} - node_{{ key + i }}: - image: "{{ irohaDockerImage }}:{{ irohaDockerImageTag }}" - ports: - - "{{ torii_port + i }}:{{ torii_port + i }}" - - "{{ internal_port + i }}:{{ internal_port + i }}" - environment: - - KEY=node{{ key + i }} - depends_on: - - postgres_{{ key + i }} - tty: true - restart: always - volumes: - - "{{ confPath }}{{ i }}:{{ containerConfPath }}" - networks: - - "{{ iroha_net }}_{{ i }}" - - postgres_{{ key + i }}: - image: "{{ dbDockerImage }}:{{ dbDockerImageTag }}" - environment: - - POSTGRES_USER={{ postgresUser }} - - POSTGRES_PASSWORD={{ postgresPassword }} - networks: - - "{{ iroha_net }}_{{ i }}" - -{% endfor %} - -networks: -{% for i in range(nodes_in_region | int) %} - {{ iroha_net }}_{{ i }}: -{% endfor %} diff --git a/deploy/ansible/roles/iroha-docker/README.md b/deploy/ansible/roles/iroha-docker/README.md new file mode 100644 index 0000000000..b5a8aa2536 --- /dev/null +++ b/deploy/ansible/roles/iroha-docker/README.md @@ -0,0 +1,98 @@ +## Description +This role deploys multiple replicas of Iroha containers (one Iroha peer per container) on remote hosts. 
Each Iroha peer can communicate with others in two ways: + - using public IP addresses or hostnames set in the inventory list OR + - using private IP addresses of the Docker overlay network + +The first one is easier to implement since it does not require preliminary configuration of the remote hosts. Just make sure that network ports are not firewalled. You can check the port list in the generated Docker Compose file (`docker-compose.yml`) after deployment. + +This option is enabled by default. + +The second one can be used when an overlay network exists between the hosts. In short, an overlay network allows Docker containers to communicate using a single subnet, so that each container has a unique IP address in that subnet. Learn more in the official Docker documentation (https://docs.docker.com/network/overlay). We recommend using Calico for setting up the Docker overlay network since it can be used as a network plugin (https://docs.projectcalico.org/v1.5/getting-started/docker/tutorials/basic). + +The second way is also suitable for local-only deployments. + +## Requirements + Tested on Ubuntu 16.04, 18.04 + - Local: + - python3, python3-dev + - PIP modules: ansible(>=2.4), future, sha3(for Python<3.6) + - Remote: + - Docker (>=17.12) + - python3 + - PIP modules: docker, docker-compose + There is a role named `docker` for setting up the remote part of the dependencies. It works for Ubuntu OS only. Check the `iroha-docker` playbook. + +### Note: +> The `docker.io` package from the Ubuntu repos will not work. Either use the Ansible role or install Docker following the official instructions for your OS flavor. + +## Quick Start +1. Install Ansible + ``` + pip3 install ansible + ``` +2. Create an inventory list containing the IP address of the remote host + + **iroha.list** + ``` + [all] + 192.168.122.109 + ``` + + Put this file into the `../../inventory/` directory. + +`cd ../../ && ansible-playbook -b -e 'ansible_ssh_user=ubuntu' -i inventory/iroha.list playbooks/iroha-docker/main.yml` + +This will deploy 6 Iroha Docker containers along with 6 Postgres containers on the remote host. The remote user is `ubuntu`. The Torii port of each container is exposed on the host, so each Iroha peer can be reached over the port defined in the `iroha_torii_port` variable (50051 by default). Overall, each host will listen on the following port range: `iroha_torii_port` + *number-of-containers*. +During installation it will also install Docker along with the required Python modules. If you want to skip this step, comment out the `docker` role in the playbook (`playbooks/iroha-docker/main.yml`). + +### Note: +> This command escalates privileges on a remote host during the run. This is required to be able to spin up Docker containers. We recommend running the playbook using a passwordless remote sudo user. + +## Initial configuration + +See the `defaults/main.yml` file for more details about the available configuration options. + +## Examples +### Example 1 + +Deploying 6 Iroha peers on two remote hosts communicating using public IP addresses, with 2 and 4 replicas on each host respectively. + +1. Create an inventory list containing the IP addresses (or hostnames, if they are mutually resolvable on both hosts) of the two hosts that will run Iroha peers + + **iroha.list** + ``` + [all] + 192.168.122.109 + 192.168.122.30 + ``` + + Put this file into the `../../inventory/` directory. +2. Make sure you can SSH with a root account into either of these hosts using a private key. + + **Note** + > You can also SSH as a user other than root.
Make sure it can execute `sudo` without prompting for a password. Set the `-u` option of the `ansible-playbook` command. + +3. Create two YAML files in the `../playbooks/iroha-docker/host_vars` directory: + + **192.168.122.109.yml** + ``` + replicas: 2 + ``` + + **192.168.122.30.yml** + ``` + replicas: 4 + ``` + +4. Run the playbook +``` +ansible-playbook -i inventory/iroha.list -b playbooks/iroha-docker/main.yml +``` + +### Example 2 +Deploying 6 Iroha peers on two remote hosts communicating over an overlay network (Calico) using custom hostnames. + +**TBD** + +### Caveats +1. If `/usr/bin/python` does not exist on a remote host, Ansible will fail with the misleading message: `... Make sure this host can be reached over ssh`. This usually happens when Ansible uses Python 3. On Ubuntu systems `/usr/bin/python3` is not symlinked to `/usr/bin/python`, which Ansible expects to find. The problem can be solved by setting the `ansible_python_interpreter` variable to `/usr/bin/python3`. diff --git a/deploy/ansible/roles/iroha-docker/defaults/main.yml b/deploy/ansible/roles/iroha-docker/defaults/main.yml new file mode 100644 index 0000000000..168b0ed5ce --- /dev/null +++ b/deploy/ansible/roles/iroha-docker/defaults/main.yml @@ -0,0 +1,62 @@ +# How many Iroha peers (containers) are deployed on each host. Can be set on a per-host level. +# The minimum total number of nodes is 6 (5f+1) in order for consensus to work properly. +# +# Default: 6 +replicas: 6 + +# Whether to use a custom hostname for EACH container. +# If set to `true`, Iroha peers will communicate using these hostnames. Hostnames should be set using +# the `hostnames` variable. See the example playbook in `playbooks/iroha-docker/main.yml`. +# If set to `false`, Iroha peers will communicate by the IP addresses set by the `inventory_hostname` +# variable. Container and service names in Docker Compose files will be auto-generated. +# +# Default: false +custom_hostnames: false + +# Affects how Iroha peers communicate. If set to `true`, the Docker overlay network with that +# name will be used. It must be created beforehand. The recommended way is to use the Calico plugin +# for Docker (projectcalico.org). +# Setting it to `true` without creating the network will only work for a single-host deployment +# (Iroha peers will only be able to communicate within that host). Suitable for local-only +# deployments for testing purposes. +# +# Default: false +overlay_network: false + +## Deployment configs +container_basename: iroha +# Path on the local machine for generated configs prior to moving them to the remote +config_local_dir: /tmp/iroha-ansible-conf +# Path on the remote machine for generated configs +deploy_dir: /opt/iroha-deploy +# Overlay network name +# If using an overlay network plugin (like Calico), the network must be created prior to running this role +# The network will be created automatically if deploying locally or with `overlay_network` +# set to `false` +iroha_network_name: iroha-net + +# The role is incompatible with Iroha versions below RC2 since the key format has changed. +# Apply the patch (files/old-keys-format.patch) if you need support for previous Iroha versions. +iroha_docker_tag: 1.0.0_rc2 +postgres_docker_tag: '9.5' + +## Iroha config +# This value will be used as the base peer port for an Iroha container. E.g. setting the `replicas` option to +# 6 will generate a Docker Compose file with port numbers starting from 10001 and counting up to 10006.
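+# (A sketch for illustration, non-overlay case with the default `iroha_peer_port` and
+# `iroha_torii_port` values: a host with `replicas: 3` would publish peer ports 10001-10003
+# and Torii ports 50051-50053, one pair per container, as laid out in
+# templates/docker-compose.yml.j2.)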
+# If `custom_hostnames` is set to `true` AND the hostnames contain port numbers, this option is ignored +iroha_peer_port: 10001 +# As above but for base Torii port +iroha_torii_port: 50051 + +# Rest of the options affect Iroha configuration +# See https://iroha.readthedocs.io/en/latest/guides/configuration.html#configuration +# for more info. +iroha_max_proposal_size: 10 +iroha_proposal_delay: 5000 +iroha_vote_delay: 5000 +iroha_mst_enable: 'false' +iroha_postgres_user: postgres +iroha_postgres_password: postgres +iroha_postgres_host: iroha-postgres +iroha_postgres_port: 5432 +iroha_blockstore_path: /tmp/block_store diff --git a/deploy/ansible/roles/iroha-docker/files/config_gen.sh b/deploy/ansible/roles/iroha-docker/files/config_gen.sh new file mode 100755 index 0000000000..7afa3f29c1 --- /dev/null +++ b/deploy/ansible/roles/iroha-docker/files/config_gen.sh @@ -0,0 +1,73 @@ +#!/bin/bash + +FORCE_OVERWRITE=0 +OUT_DIR='config' + +function config_gen { + if [[ "$FORCE_OVERWRITE" == "1" || "$FORCE_OVERWRITE" == "0" && ! -d "$OUT_DIR" ]]; then + rm -rf $OUT_DIR + mkdir -p $OUT_DIR + echo 'host;port;priv_key_hex;pub_key_hex' > peers.csv + for p in $(echo "$PEERS" | tr ',' '\n'); do + KEY_PAIR=($(python3 ed25519.py)) + PRIVATE_KEY=${KEY_PAIR[0]} + PUBLIC_KEY=${KEY_PAIR[1]} + PORT=$(echo $p | cut -d':' -f2 -s) + if [ -z "$PORT" ]; then + PORT=10001 + else + p=$(echo "$p" | cut -d':' -f1) + fi + echo "$p;$PORT;$PRIVATE_KEY;$PUBLIC_KEY" >> peers.csv + done + python3 genesis-add-peers.py add_iroha_peers peers.csv genesis.block + python3 genesis-add-peers.py make_key_files peers.csv + mv *.{priv,pub} $OUT_DIR/ + cp genesis.block $OUT_DIR/ + rm -f peers.csv + else + echo "configs dir \`$OUT_DIR\` already exists. Use \`-f\` to overwrite. Exiting..." + exit 1 + fi +} + +function usage { + echo "usage: $0 -p peers_list [--force-overwrite || --out-dir]" + echo "Generates keypairs for a number of Iroha peers and prepares genesis.block filled with those peers. + Make sure 'genesis.block', 'genesis-add-peers.py' and 'ed25519.py' scripts are in the same directory." + echo "" + echo " -p | --peers : Comma-delimited list of Iroha peers (optionally, with port numbers)" + echo " -f | --force-overwrite : Forces creation of the new keypairs" + echo " -o | --out-dir : Output directory for generated files. Defaults to './config'" + echo " -h | --help : This message" +} + +if ! TEMP=$(getopt -o fp:o:: --long force-overwrite,peers:,out-dir:: -- "$@"); then + usage + exit 1 +fi +eval set -- "$TEMP" + +# extract options and their arguments into variables.
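+# A hypothetical invocation (placeholder addresses), run next to genesis.block,
+# genesis-add-peers.py and ed25519.py:
+#   ./config_gen.sh -f -p 192.168.122.109,192.168.122.30:10001
+# This writes one <host>:<port>.priv/.pub keypair per peer plus an updated genesis.block
+# into ./config (peers listed without an explicit port default to 10001).
+# Note on the parsing loop below: `getopt -o fp:o::` declares the out-dir argument as
+# optional, so the value must be attached to the flag (`-oDIR` or `--out-dir=DIR`);
+# a space-separated `-o DIR` falls back to the default `config` directory.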
+while true ; do + case "$1" in + -h|--help) usage ; shift ;; + -f|--force-overwrite) FORCE_OVERWRITE=1 ; shift ;; + -p|--peers) PEERS="$2" ; shift 2 ;; + -o|--out-dir) + case "$2" in + "") OUT_DIR='config' ; shift 2 ;; + *) OUT_DIR="$2" ; shift 2 ;; + esac ;; + --) shift ; break ;; + *) echo "Unknown argument" ; exit 1 ;; + esac +done + +if [[ -z "$PEERS" ]]; then + echo "Provide -p parameter" + usage + exit 1 +fi + +config_gen diff --git a/deploy/ansible/roles/iroha-docker/files/ed25519.py b/deploy/ansible/roles/iroha-docker/files/ed25519.py new file mode 100644 index 0000000000..4500f4e0f2 --- /dev/null +++ b/deploy/ansible/roles/iroha-docker/files/ed25519.py @@ -0,0 +1,167 @@ +#!/usr/env/python3 + +from builtins import chr +from past.utils import old_div +import sys +if sys.version_info < (3, 6): + from sha3 import sha3_512 as SHA3512 +else: + import hashlib + +import sys, os + +python_version = sys.version_info.major +b = 256 +q = 2 ** 255 - 19 +l = 2 ** 252 + 27742317777372353535851937790883648493 + + +def H(m): + if sys.version_info < (3, 6): + return SHA3512(m).digest() + else: + sha3_512 = hashlib.sha3_512() + sha3_512.update(m) + return sha3_512.digest() + +def expmod(b, e, m): + if e == 0: return 1 + t = expmod(b, old_div(e, 2), m) ** 2 % m + if e & 1: t = (t * b) % m + return t + + +def inv(x): + return expmod(x, q - 2, q) + + +d = -121665 * inv(121666) +I = expmod(2, old_div((q - 1), 4), q) + + +def xrecover(y): + xx = (y * y - 1) * inv(d * y * y + 1) + x = expmod(xx, old_div((q + 3), 8), q) + if (x * x - xx) % q != 0: x = (x * I) % q + if x % 2 != 0: x = q - x + return x + + +By = 4 * inv(5) +Bx = xrecover(By) +B = [Bx % q, By % q] + + +def edwards(P, Q): + x1 = P[0] + y1 = P[1] + x2 = Q[0] + y2 = Q[1] + x3 = (x1 * y2 + x2 * y1) * inv(1 + d * x1 * x2 * y1 * y2) + y3 = (y1 * y2 + x1 * x2) * inv(1 - d * x1 * x2 * y1 * y2) + return [x3 % q, y3 % q] + + +def scalarmult(P, e): + if e == 0: return [0, 1] + Q = scalarmult(P, old_div(e, 2)) + Q = edwards(Q, Q) + if e & 1: Q = edwards(Q, P) + return Q + + +def encodeint(y): + bits = [(y >> i) & 1 for i in range(b)] + return ''.join([chr(sum([bits[i * 8 + j] << j for j in range(8)])) for i in range(old_div(b, 8))]) + + +def encodepoint(P): + x = P[0] + y = P[1] + bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1] + return ''.join([chr(sum([bits[i * 8 + j] << j for j in range(8)])) for i in range(old_div(b, 8))]) + + +if python_version == 3: + def bit(h, i): + return ((h[old_div(i, 8)]) >> (i % 8)) & 1 +else: + def bit(h, i): + return (ord(h[old_div(i, 8)]) >> (i % 8)) & 1 + + +def publickey(sk): + h = H(sk) + a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in list(range(3, b - 2))) + A = scalarmult(B, a) + return encodepoint(A) + + +def Hint(m): + h = H(m) + return sum(2 ** i * bit(h, i) for i in list(range(2 * b))) + + +def signature(m, sk, pk): + h = H(sk) + a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in list(range(3, b - 2))) + r = Hint(''.join([h[i] for i in range(old_div(b, 8), old_div(b, 4))]) + m) + R = scalarmult(B, r) + S = (r + Hint(encodepoint(R) + pk + m) * a) % l + return encodepoint(R) + encodeint(S) + + +def isoncurve(P): + x = P[0] + y = P[1] + return (-x * x + y * y - 1 - d * x * x * y * y) % q == 0 + + +def decodeint(s): + return sum(2 ** i * bit(s, i) for i in list(range(0, b))) + + +def decodepoint(s): + y = sum(2 ** i * bit(s, i) for i in list(range(0, b - 1))) + x = xrecover(y) + if x & 1 != bit(s, b - 1): x = q - x + P = [x, y] + if not isoncurve(P): raise Exception("decoding point that is not on curve") + 
return P + + +def checkvalid(s, m, pk): + if len(s) != old_div(b, 4): raise Exception("signature length is wrong") + if len(pk) != old_div(b, 8): raise Exception("public-key length is wrong") + R = decodepoint(s[0:old_div(b, 8)]) + A = decodepoint(pk) + S = decodeint(s[old_div(b, 8):old_div(b, 4)]) + h = Hint(encodepoint(R) + pk + m) + if scalarmult(B, S) != edwards(R, scalarmult(A, h)): + raise Exception("signature does not pass verification") + + +def derive_pubkey_from_priv(priv): + return publickey(priv) + + +def sign(msg, priv, pub): + return signature(msg, priv, pub) + + +def verify(msg, sig, pub): + try: + checkvalid(sig, msg, pub) + return True + except Exception: + return False + +k = os.urandom(32) +# private key +if sys.version_info >= (3, 0): + print(k.hex()) +else: + print(k.encode('hex')) +pub = derive_pubkey_from_priv(k) +# public key +print(''.join(hex(ord(j))[2:].zfill(2) for j in pub)) diff --git a/deploy/ansible/roles/iroha-docker/files/genesis-add-peers.py b/deploy/ansible/roles/iroha-docker/files/genesis-add-peers.py new file mode 100644 index 0000000000..b8893b3ed3 --- /dev/null +++ b/deploy/ansible/roles/iroha-docker/files/genesis-add-peers.py @@ -0,0 +1,92 @@ +#!/usr/env/python3 +import json, csv, sys, base64 +''' +peers.csv +host;port;priv_key_b64_encoded;pub_key_b64_encoded +''' + +class Peer: + def __init__(self, host, port, priv_key, pub_key): + self.host = host + self.port = port + self.priv_key = priv_key + self.pub_key = pub_key + +def parse_peers(peers_csv_fp): + peers = [] + with open(peers_csv_fp) as csvfile: + peersreader = csv.reader(csvfile, delimiter=';') + next(peersreader, None) # skip the header + for peer in peersreader: + peers.append(Peer(peer[0], peer[1], peer[2], peer[3])) + return peers + +def genesis_add_peers(peers_list, genesis_block_fp): + with open(genesis_block_fp, 'r+') as genesis_json: + genesis_dict = json.load(genesis_json) + try: + genesis_dict['block_v1']['payload']['transactions'][0]['payload']['reducedPayload']['commands'] = filter(lambda c: not c.get('addPeer'), genesis_dict['block_v1']['payload']['transactions'][0]['payload']['reducedPayload']['commands']) + except KeyError: + pass + genesis_dict['block_v1']['payload']['transactions'][0]['payload']['reducedPayload']['commands'] = list(genesis_dict['block_v1']['payload']['transactions'][0]['payload']['reducedPayload']['commands']) + for p in peers_list: + p_add_command = {"addPeer": {"peer": {"address": "%s:%s" % (p.host, p.port), "peerKey": p.pub_key}}} + genesis_dict['block_v1']['payload']['transactions'][0]['payload']['reducedPayload']['commands'].append(p_add_command) + genesis_json.seek(0) + json.dump(genesis_dict, genesis_json, sort_keys=True) + genesis_json.truncate() + +def caliper_add_peers(peers_list, caliper_conf_fp): + with open(caliper_conf_fp, 'r+') as caliper_conf_json: + caliper_conf_dict = json.load(caliper_conf_json) + try: + caliper_conf_dict['iroha']['network'] = {} + except KeyError: + pass + for i, p in enumerate(peers_list): + p_node = {"node%s" % i: {"torii": "%s:%s" % (p.host, p.port)}} + caliper_conf_dict['iroha']['network'].update(p_node) + caliper_conf_json.seek(0) + json.dump(caliper_conf_dict, caliper_conf_json, sort_keys=True) + caliper_conf_json.truncate() + +def caliper_rename_keys(priv_key_name, pub_key_name, caliper_conf_fp): + with open(caliper_conf_fp, 'r+') as caliper_conf_json: + caliper_conf_dict = json.load(caliper_conf_json) + caliper_conf_dict['iroha']['admin']['key-pub'] = "network/iroha/simplenetwork/%s" % pub_key_name + 
caliper_conf_dict['iroha']['admin']['key-priv'] = "network/iroha/simplenetwork/%s" % priv_key_name + caliper_conf_json.seek(0) + json.dump(caliper_conf_dict, caliper_conf_json, sort_keys=True) + caliper_conf_json.truncate() + +def hex_to_b64(hex_string): + hex_string = base64.b64encode(bytearray.fromhex(hex_string)) + return hex_string.decode('utf-8') + +def b64_to_hex(b64_string): + return base64.b64decode(b64_string).hex() + +def make_keys(peers): + for i, p in enumerate(peers): + with open('%s:%s.priv' % (p.host, p.port), 'w+') as priv_key_file: + priv_key_file.write(p.priv_key) + with open('%s:%s.pub' % (p.host, p.port), 'w+') as pub_key_file: + pub_key_file.write(p.pub_key) + +if __name__ == "__main__": + command = sys.argv[1] + peers_csv = sys.argv[2] + try: + json_conf = sys.argv[3] + except IndexError: + pass + peers = parse_peers(peers_csv) + if command == 'add_iroha_peers': + genesis_add_peers(peers, json_conf) + elif command == 'add_caliper_peers': + caliper_add_peers(peers, json_conf) + caliper_rename_keys('admin-test.priv', 'admin-test.pub', json_conf) + elif command == 'make_key_files': + make_keys(peers) + else: + print('Invalid command') diff --git a/deploy/ansible/roles/iroha-docker/files/genesis.block b/deploy/ansible/roles/iroha-docker/files/genesis.block new file mode 100644 index 0000000000..3b101fe50b --- /dev/null +++ b/deploy/ansible/roles/iroha-docker/files/genesis.block @@ -0,0 +1 @@ +{"block_v1": {"payload": {"height": "1", "prevBlockHash": "0000000000000000000000000000000000000000000000000000000000000000", "transactions": [{"payload": {"reducedPayload": {"commands": [{"createRole": {"permissions": ["can_add_peer", "can_add_signatory", "can_create_account", "can_create_domain", "can_get_all_acc_ast", "can_get_all_acc_ast_txs", "can_get_all_acc_detail", "can_get_all_acc_txs", "can_get_all_accounts", "can_get_all_signatories", "can_get_all_txs", "can_get_blocks", "can_get_roles", "can_read_assets", "can_remove_signatory", "can_set_quorum"], "roleName": "admin"}}, {"createRole": {"permissions": ["can_add_signatory", "can_get_my_acc_ast", "can_get_my_acc_ast_txs", "can_get_my_acc_detail", "can_get_my_acc_txs", "can_get_my_account", "can_get_my_signatories", "can_get_my_txs", "can_grant_can_add_my_signatory", "can_grant_can_remove_my_signatory", "can_grant_can_set_my_account_detail", "can_grant_can_set_my_quorum", "can_grant_can_transfer_my_assets", "can_receive", "can_remove_signatory", "can_set_quorum", "can_transfer"], "roleName": "user"}}, {"createRole": {"permissions": ["can_add_asset_qty", "can_create_asset", "can_receive", "can_transfer"], "roleName": "money_creator"}}, {"createDomain": {"defaultRole": "user", "domainId": "test"}}, {"createAsset": {"assetName": "coin", "domainId": "test", "precision": 2}}, {"createAccount": {"accountName": "admin", "domainId": "test", "publicKey": "313a07e6384776ed95447710d15e59148473ccfc052a681317a72a69f2a49910"}}, {"createAccount": {"accountName": "test", "domainId": "test", "publicKey": "716fe505f69f18511a1b083915aa9ff73ef36e6688199f3959750db38b8f4bfc"}}, {"appendRole": {"accountId": "admin@test", "roleName": "admin"}}, {"appendRole": {"accountId": "admin@test", "roleName": "money_creator"}}, {"addPeer": {"peer": {"address": "iroha-0-3ada5cb:10001", "peerKey": "3e5b449f650372b04ba979b9eb87e39f3349563adffa27e4b26c8afb1ec3f7e0"}}}, {"addPeer": {"peer": {"address": "iroha-1-4deedc6:10001", "peerKey": "34efa4ed858a3c1e22b1bbdcff04385f4e67bc80086237daa2ee9b8505f5a07a"}}}, {"addPeer": {"peer": {"address": "iroha-2-2bfed1e:10001", 
"peerKey": "c66605e7b2ae2cdd2eb2aa7ed92f9263150254ddcda464937957ef216cf4237f"}}}, {"addPeer": {"peer": {"address": "iroha-3-abea4f7:10001", "peerKey": "76ab0ae0959529c7abb78e05de5ec8620426fde48fc9eae1b75783b77f9358d1"}}}, {"addPeer": {"peer": {"address": "iroha-4-da317bc:10001", "peerKey": "73995ca2c1dba061042cb995d021b24dfeec9b7a81bd5facb258ffb97e7d97b6"}}}], "quorum": 1}}}], "txNumber": 1}}} \ No newline at end of file diff --git a/deploy/ansible/roles/iroha-docker/files/old-keys-format.patch b/deploy/ansible/roles/iroha-docker/files/old-keys-format.patch new file mode 100644 index 0000000000..1eac55bfd9 --- /dev/null +++ b/deploy/ansible/roles/iroha-docker/files/old-keys-format.patch @@ -0,0 +1,13 @@ +diff --git a/deploy/ansible/roles/iroha-docker/files/genesis-add-peers.py b/deploy/ansible/roles/iroha-docker/files/genesis-add-peers.py +index ca855d7..b8893b3 100644 +--- a/deploy/ansible/roles/iroha-docker/files/genesis-add-peers.py ++++ b/deploy/ansible/roles/iroha-docker/files/genesis-add-peers.py +@@ -30,7 +30,7 @@ def genesis_add_peers(peers_list, genesis_block_fp): + pass + genesis_dict['block_v1']['payload']['transactions'][0]['payload']['reducedPayload']['commands'] = list(genesis_dict['block_v1']['payload']['transactions'][0]['payload']['reducedPayload']['commands']) + for p in peers_list: +- p_add_command = {"addPeer": {"peer": {"address": "%s:%s" % (p.host, p.port), "peerKey": hex_to_b64(p.pub_key)}}} ++ p_add_command = {"addPeer": {"peer": {"address": "%s:%s" % (p.host, p.port), "peerKey": p.pub_key}}} + genesis_dict['block_v1']['payload']['transactions'][0]['payload']['reducedPayload']['commands'].append(p_add_command) + genesis_json.seek(0) + json.dump(genesis_dict, genesis_json, sort_keys=True) diff --git a/deploy/ansible/roles/iroha-docker/tasks/config-gen.yml b/deploy/ansible/roles/iroha-docker/tasks/config-gen.yml new file mode 100644 index 0000000000..14c5fd4fe6 --- /dev/null +++ b/deploy/ansible/roles/iroha-docker/tasks/config-gen.yml @@ -0,0 +1,81 @@ +- block: + - set_fact: + iroha_nodes: [] + iroha_all_nodes: [] + + - name: generate hostnames (no overlay) + set_fact: + iroha_nodes: "{{ iroha_nodes }} + [ '{{ inventory_hostname }}:{{ iroha_peer_port | int + item | int }}' ]" + loop: "{{ range(0, replicas |int) | list }}" + when: not custom_hostnames and not overlay_network + + - shell: "cat /dev/urandom | tr -cd [:xdigit:] | tr '[:upper:]' '[:lower:]' | head -c {{ 8 * replicas | int}}" + register: random_hex + when: not custom_hostnames and overlay_network + + - name: generate hostnames (overlay) + set_fact: + iroha_nodes: "{{ iroha_nodes }} + [ '{{ container_basename }}-{{ item }}-{{ random_hex.stdout[item|int*8:(item|int+1)*8-1] }}:{{ iroha_peer_port }}' ]" + loop: "{{ range(0, replicas) | list }}" + when: not custom_hostnames and overlay_network + + - name: generate hostnames (custom_hostnames) + set_fact: + iroha_nodes: "{{ iroha_nodes }} + [ '{% if item.split(':') | length < 2 %}{{ item }}:{{ iroha_peer_port | int + idx }}{% else %}{{ item }}{% endif %}' ]" + when: custom_hostnames and hostnames is defined + loop: "{{ hostnames }}" + loop_control: + index_var: idx + + - set_fact: + iroha_peers_map: "{{ iroha_peers_map | default([]) }} + [ {{ {'hostname': item, 'human_hostname': 'c_' + item | regex_replace('\\.', '_') | regex_replace(':', '_'), 'peer_port': item.split(':')[1] } }} ]" + loop: "{{ iroha_nodes }}" + + - debug: + var: iroha_peers_map + + - set_fact: + iroha_nodes: "{{ iroha_peers_map }}" + + - set_fact: + iroha_all_nodes: "{{ iroha_all_nodes }} + {{ 
hostvars[item]['iroha_nodes'] }}" + loop: "{{ ansible_play_hosts }}" + run_once: yes + + - name: generate Iroha configs + command: "./config_gen.sh -p{{ iroha_all_nodes | join(',', attribute='hostname') }} -o{{ config_local_dir }} -f" + run_once: yes + args: + chdir: "{{ role_path }}/files" + + - name: make config dirs + file: + path: "{{ config_local_dir }}/{{ inventory_hostname }}/conf/{{ item.human_hostname }}" + state: directory + loop: "{{ iroha_nodes }}" + + - name: move keys + copy: + src: "{{ config_local_dir }}/{{ item[0].hostname }}{{ item[1] }}" + dest: "{{ config_local_dir }}/{{ inventory_hostname }}/conf/{{ item[0].human_hostname }}" + loop: "{{ iroha_nodes | product(['.priv', '.pub']) | list }}" + + - name: move genesis.block + copy: + src: "{{ config_local_dir }}/genesis.block" + dest: "{{ config_local_dir }}/{{ inventory_hostname }}/conf/{{ item.human_hostname }}" + loop: "{{ iroha_nodes }}" + + - name: cleanup + file: + path: "{{ config_local_dir }}/{{ item[0].hostname }}{{ item[1] }}" + state: absent + loop: "{{ iroha_nodes | product(['.priv', '.pub']) | list }}" + + - name: cleanup + file: + path: "{{ config_local_dir }}/genesis.block" + state: absent + run_once: yes + become: no + delegate_to: localhost diff --git a/deploy/ansible/roles/iroha-docker/tasks/deploy.yml b/deploy/ansible/roles/iroha-docker/tasks/deploy.yml new file mode 100644 index 0000000000..1894fa71c6 --- /dev/null +++ b/deploy/ansible/roles/iroha-docker/tasks/deploy.yml @@ -0,0 +1,44 @@ +- name: create deploy dir + file: + state: directory + path: "{{ deploy_dir }}/conf" + +- name: create Docker network + docker_network: + name: "{{ iroha_network_name }}" + +- stat: + path: "{{ deploy_dir }}/docker-compose.yml" + register: docker_compose_file + +- name: stop Iroha + docker_service: + project_src: "{{ deploy_dir }}" + project_name: ansible_iroha + remove_volumes: yes + state: absent + when: "docker_compose_file.stat.exists" + +- name: generate Docker compose + template: + src: docker-compose.yml.j2 + dest: "{{ deploy_dir }}/docker-compose.yml" + +- name: copy config files + synchronize: + src: "{{ config_local_dir }}/{{ inventory_hostname }}/conf/" + dest: "{{ deploy_dir }}/conf/" + recursive: yes + delete: yes + +- name: generate config files + template: + src: config.docker.j2 + dest: "{{ deploy_dir }}/conf/{{ item.human_hostname }}/config.docker" + loop: "{{ iroha_nodes }}" + +- name: run Iroha + docker_service: + project_src: "{{ deploy_dir }}" + project_name: ansible_iroha + pull: yes diff --git a/deploy/ansible/roles/iroha-docker/tasks/main.yml b/deploy/ansible/roles/iroha-docker/tasks/main.yml new file mode 100644 index 0000000000..4e357468f0 --- /dev/null +++ b/deploy/ansible/roles/iroha-docker/tasks/main.yml @@ -0,0 +1,8 @@ +- name: generate Iroha configs + include: config-gen.yml + +- name: deploy Iroha + include: deploy.yml + +- debug: + msg: "Stop Iroha containers with `docker-compose -f {{ deploy_dir }}/docker-compose.yml -p ansible_iroha down -v`" diff --git a/deploy/ansible/roles/iroha-docker/templates/config.docker.j2 b/deploy/ansible/roles/iroha-docker/templates/config.docker.j2 new file mode 100644 index 0000000000..e59a695935 --- /dev/null +++ b/deploy/ansible/roles/iroha-docker/templates/config.docker.j2 @@ -0,0 +1,10 @@ +{ + "block_store_path" : "{{ iroha_blockstore_path }}", + "torii_port" : {{ iroha_torii_port }}, + "internal_port" : {{ iroha_peer_port }}, + "pg_opt" : "host={{ item.human_hostname }}-postgres port={{ iroha_postgres_port }} user={{ iroha_postgres_user }} password={{ 
iroha_postgres_password }}", + "max_proposal_size" : {{ iroha_max_proposal_size }}, + "proposal_delay" : {{ iroha_proposal_delay }}, + "vote_delay" : {{ iroha_vote_delay }}, + "mst_enable" : {{ iroha_mst_enable }} +} diff --git a/deploy/ansible/roles/iroha-docker/templates/docker-compose.yml.j2 b/deploy/ansible/roles/iroha-docker/templates/docker-compose.yml.j2 new file mode 100644 index 0000000000..7637b73f39 --- /dev/null +++ b/deploy/ansible/roles/iroha-docker/templates/docker-compose.yml.j2 @@ -0,0 +1,56 @@ +version: "2.4" + +services: +{% for node in iroha_nodes %} + {{ node.human_hostname }}: + image: hyperledger/iroha:{{ iroha_docker_tag }} +{% if overlay_network %} + container_name: {{ node.hostname.split(':')[0] }} + expose: + - {{ iroha_peer_port }} + ports: + - 127.0.0.1:{{ iroha_torii_port | int + loop.index - 1 }}:{{ iroha_torii_port }} +{% else %} + container_name: {{ node.human_hostname }} + ports: + - {{ node.peer_port }}:{{ iroha_peer_port }} + - {{ iroha_torii_port | int + loop.index - 1 }}:{{ iroha_torii_port }} +{% endif %} + environment: + KEY: {{ node.hostname }} + IROHA_POSTGRES_HOST: {{ node.human_hostname }}-postgres + volumes: + - iroha_block_store-{{ node.human_hostname }}:/tmp/block_store + - ./conf/{{ node.human_hostname }}:/opt/iroha_data + depends_on: + - {{ node.human_hostname }}-postgres + networks: + - iroha-net + - iroha-db-net + + {{ node.human_hostname }}-postgres: + image: postgres:{{ postgres_docker_tag }} + container_name: {{ node.human_hostname }}-postgres + environment: + POSTGRES_PASSWORD: {{ iroha_postgres_password }} + expose: + - {{ iroha_postgres_port }} + volumes: + - psql_storage-{{ node.human_hostname }}:/var/lib/postgresql/data + networks: + - iroha-db-net + +{% endfor %} + +volumes: +{% for node in iroha_nodes %} + iroha_block_store-{{ node.human_hostname }}: + psql_storage-{{ node.human_hostname }}: +{% endfor %} + +networks: + iroha-net: + external: + name: iroha-net + iroha-db-net: + name: iroha-db-net diff --git a/deploy/ansible/roles/iroha-standalone-config-gen/README.md b/deploy/ansible/roles/iroha-standalone-config-gen/README.md deleted file mode 100644 index 9b3aa8afb7..0000000000 --- a/deploy/ansible/roles/iroha-standalone-config-gen/README.md +++ /dev/null @@ -1,31 +0,0 @@ -iroha-standalone-config-gen -========= - -A role that generates `genesis.block` and keypair for each node in listed in inventory. - -Requirements ------------- - -`iroha-cli` binary should be accessible via `PATH` variable on your system. 
- -Role Variables --------------- - -- variables defined by this role: - - `defaults/main.yml` list of variables: - - `filesDir`: directory to store generated files -- variables required by playbook (see description in playbook's `group_vars` files): - - `nodes_in_region` - - `internal_port` - - -Example Playbook ----------------- - -```yaml - - hosts: locals - gather_facts: true - roles: - - { role: iroha-cluster-config-gen } -``` diff --git a/deploy/ansible/roles/iroha-standalone-config-gen/defaults/main.yml b/deploy/ansible/roles/iroha-standalone-config-gen/defaults/main.yml deleted file mode 100644 index 942cc12512..0000000000 --- a/deploy/ansible/roles/iroha-standalone-config-gen/defaults/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -# defaults file for iroha-gen -filesDir: /tmp/iroha-files diff --git a/deploy/ansible/roles/iroha-standalone-config-gen/tasks/main.yml b/deploy/ansible/roles/iroha-standalone-config-gen/tasks/main.yml deleted file mode 100644 index 44b6052457..0000000000 --- a/deploy/ansible/roles/iroha-standalone-config-gen/tasks/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- - - include_tasks: ubuntu.yml - when: ansible_distribution == 'Ubuntu' diff --git a/deploy/ansible/roles/iroha-standalone-config-gen/tasks/ubuntu.yml b/deploy/ansible/roles/iroha-standalone-config-gen/tasks/ubuntu.yml deleted file mode 100644 index 49e39495a7..0000000000 --- a/deploy/ansible/roles/iroha-standalone-config-gen/tasks/ubuntu.yml +++ /dev/null @@ -1,20 +0,0 @@ ---- -# tasks file for iroha-standalone-config-gen -- name: create dir for temporary files - file: - path: "{{ filesDir }}" - state: "{{ item }}" - mode: 0755 - with_items: - - absent - - directory - -- name: generate peers list - template: - src: peers.list.j2 - dest: "{{ filesDir }}/peers.list" - -- name: generate keys and genesis block - command: "iroha-cli --genesis_block --peers_address peers.list" - args: - chdir: "{{ filesDir }}" diff --git a/deploy/ansible/roles/iroha-standalone-config-gen/templates/peers.list.j2 b/deploy/ansible/roles/iroha-standalone-config-gen/templates/peers.list.j2 deleted file mode 100644 index 8b20c708bd..0000000000 --- a/deploy/ansible/roles/iroha-standalone-config-gen/templates/peers.list.j2 +++ /dev/null @@ -1,3 +0,0 @@ -{% for host in groups['iroha-nodes'] %} -{{ hostvars[host].ansible_host }}:{{ hostvars[host].internal_port }} -{% endfor %} diff --git a/deploy/ansible/roles/iroha-standalone-deploy-node/README.md b/deploy/ansible/roles/iroha-standalone-deploy-node/README.md deleted file mode 100644 index 805f5c7400..0000000000 --- a/deploy/ansible/roles/iroha-standalone-deploy-node/README.md +++ /dev/null @@ -1,46 +0,0 @@ -iroha-standalone-deploy-node -========= - -A role that runs iroha cluster by delivering previously generated `genesis.block`, -keypair for each node, and newly generated `config.sample` to target hosts. -It runs `iroha` and `postgres:9.5` in docker containers. - -Requirements ------------- - -1. Pre-generated files (role `iroha-standalone-config-gen`): - - `genesis.block` - - `nodeX.pub`, `nodeX.priv` keypair for each node -stored at `filesDir` folder. - -2. 
`docker` engine installed on target host (role `docker`) - -Role Variables --------------- - -- variables defined by this role: - - `defaults/main.yml` list of variables: - - `postgresName`: name of `postgres` docker container after running by `docker-compose` - - `postgresUser`: username on postgresql - - `postgresPassword`: password on postgresql - - `iroha_net`: name of docker network - - `containerConfPath`: config dir prefix on target host (this directory is attached to running iroha docker container as a docker volume) - -- variables required by playbook (see description in playbook's `group_vars` and `host_vars` files): - - `filesDir` - - `composeDir` - - `confPath` - - `internal_port` - - `torii_port` - - -Example Playbook ----------------- - -```yaml - - hosts: iroha-nodes - gather_facts: true - roles: - - { role: iroha-standalone-deploy-node } -``` diff --git a/deploy/ansible/roles/iroha-standalone-deploy-node/defaults/main.yml b/deploy/ansible/roles/iroha-standalone-deploy-node/defaults/main.yml deleted file mode 100644 index 8c59e7c0d7..0000000000 --- a/deploy/ansible/roles/iroha-standalone-deploy-node/defaults/main.yml +++ /dev/null @@ -1,13 +0,0 @@ ---- - # docker container postgres settings - postgresName: iroha_postgres - postgresPort: 5432 - postgresUser: psql - postgresPassword: psql - iroha_net: iroha_network # name of docker network - containerConfPath: /opt/iroha_data - - irohaDockerImage: hyperledger/iroha # docker image name - irohaDockerImageTag: latest # docker image tag - dbDockerImage: postgres - dbDockerImageTag: 9.5 diff --git a/deploy/ansible/roles/iroha-standalone-deploy-node/tasks/main.yml b/deploy/ansible/roles/iroha-standalone-deploy-node/tasks/main.yml deleted file mode 100644 index 44b6052457..0000000000 --- a/deploy/ansible/roles/iroha-standalone-deploy-node/tasks/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- - - include_tasks: ubuntu.yml - when: ansible_distribution == 'Ubuntu' diff --git a/deploy/ansible/roles/iroha-standalone-deploy-node/tasks/ubuntu.yml b/deploy/ansible/roles/iroha-standalone-deploy-node/tasks/ubuntu.yml deleted file mode 100644 index 7e8ad28ba2..0000000000 --- a/deploy/ansible/roles/iroha-standalone-deploy-node/tasks/ubuntu.yml +++ /dev/null @@ -1,73 +0,0 @@ ---- - - setup: - - - name: Setup docker network - docker_network: - name: "{{ iroha_net }}" - - - name: Stop and remove previous running docker containers - docker_container: - name: "{{ item }}" - state: absent - with_items: - - "{{ postgresName }}" - - iroha - ignore_errors: yes - - - name: Run postgres in docker - docker_container: - name: "{{ postgresName }}" - image: "{{ dbDockerImage }}:{{ dbDockerImageTag }}" - state: started - recreate: yes - restart: yes - env: - POSTGRES_USER: '{{ postgresUser }}' - POSTGRES_PASSWORD: '{{ postgresPassword }}' - networks: - - name: "{{ iroha_net }}" - - - name: Make conf dir - file: - path: "{{ confPath }}" - state: directory - mode: 0755 - owner: root - - - name: Copy config - template: - src: config.j2 - dest: "{{ confPath }}/config.sample" - mode: 0600 - - - name: Copy keys and genesis block - copy: - src: "{{ item }}" - dest: "{{ confPath }}" - mode: 0644 - with_items: - - "{{ filesDir }}/node{{ key }}.pub" - - "{{ filesDir }}/node{{ key }}.priv" - - "{{ filesDir }}/genesis.block" - - - name: Run iroha in docker - docker_container: - name: iroha - image: "{{ irohaDockerImage }}:{{ irohaDockerImageTag }}" - state: started - restart: yes - recreate: yes - pull: yes - ports: - - "{{ torii_port }}:{{ torii_port }}" - - "{{ internal_port 
}}:{{ internal_port }}" - networks: - - name: "{{ iroha_net }}" - volumes: - - "{{ confPath }}:{{ containerConfPath }}" - env: - POSTGRES_HOST: "{{ postgresName }}" - POSTGRES_PORT: "{{ postgresPort }}" - POSTGRES_USER: "{{ postgresUser }}" - POSTGRES_PASSWORD: "{{ postgresPassword }}" - KEY: "node{{ key }}" diff --git a/deploy/ansible/roles/iroha-standalone-deploy-node/templates/config.j2 b/deploy/ansible/roles/iroha-standalone-deploy-node/templates/config.j2 deleted file mode 100644 index dbf0464df8..0000000000 --- a/deploy/ansible/roles/iroha-standalone-deploy-node/templates/config.j2 +++ /dev/null @@ -1,9 +0,0 @@ -{ - "block_store_path" : "/tmp/block_store/", - "torii_port" : {{ torii_port }}, - "internal_port" : {{ internal_port }}, - "pg_opt" : "host={{ postgresName }} port= {{ postgresPort }} user={{ postgresUser }} password='{{ postgresPassword }}'", - "max_proposal_size" : 10, - "proposal_delay" : 5000, - "vote_delay" : 5000 -} From 4ef0562f8efa04dead1363cad960134658112916 Mon Sep 17 00:00:00 2001 From: Nikita Alekseev Date: Mon, 21 Jan 2019 10:47:20 +0300 Subject: [PATCH 14/41] Change Simulator functions to return objects instead of emitting events (#2029) Signed-off-by: Nikita Alekseev --- irohad/simulator/block_creator.hpp | 8 +- irohad/simulator/impl/simulator.cpp | 46 +++-- irohad/simulator/impl/simulator.hpp | 11 +- .../simulator/verified_proposal_creator.hpp | 8 +- .../irohad/simulator/simulator_mocks.hpp | 9 +- .../irohad/simulator/simulator_test.cpp | 167 +++++++----------- 6 files changed, 108 insertions(+), 141 deletions(-) diff --git a/irohad/simulator/block_creator.hpp b/irohad/simulator/block_creator.hpp index 4ca4582665..d295b6eae2 100644 --- a/irohad/simulator/block_creator.hpp +++ b/irohad/simulator/block_creator.hpp @@ -22,12 +22,12 @@ namespace iroha { class BlockCreator { public: /** - * Creates a block from given proposal and round + * Creates a block from given proposal */ - virtual void processVerifiedProposal( + virtual boost::optional> + processVerifiedProposal( const std::shared_ptr - &verified_proposal_and_errors, - const consensus::Round &round) = 0; + &verified_proposal_and_errors) = 0; /** * Emit blocks made from proposals diff --git a/irohad/simulator/impl/simulator.cpp b/irohad/simulator/impl/simulator.cpp index 58a185e837..0c874174b9 100644 --- a/irohad/simulator/impl/simulator.cpp +++ b/irohad/simulator/impl/simulator.cpp @@ -32,7 +32,13 @@ namespace iroha { ordering_gate->onProposal().subscribe( proposal_subscription_, [this](const network::OrderingEvent &event) { if (event.proposal) { - this->processProposal(*getProposalUnsafe(event), event.round); + auto validated_proposal_and_errors = + this->processProposal(*getProposalUnsafe(event)); + + if (validated_proposal_and_errors) { + notifier_.get_subscriber().on_next(VerifiedProposalCreatorEvent{ + *validated_proposal_and_errors, event.round}); + } } else { notifier_.get_subscriber().on_next( VerifiedProposalCreatorEvent{boost::none, event.round}); @@ -43,8 +49,13 @@ namespace iroha { verified_proposal_subscription_, [this](const VerifiedProposalCreatorEvent &event) { if (event.verified_proposal_result) { - this->processVerifiedProposal(getVerifiedProposalUnsafe(event), - event.round); + auto proposal_and_errors = getVerifiedProposalUnsafe(event); + auto block = this->processVerifiedProposal(proposal_and_errors); + if (block) { + block_notifier_.get_subscriber().on_next(BlockCreatorEvent{ + RoundData{proposal_and_errors->verified_proposal, *block}, + event.round}); + } } else { 
block_notifier_.get_subscriber().on_next( BlockCreatorEvent{boost::none, event.round}); @@ -62,9 +73,9 @@ namespace iroha { return notifier_.get_observable(); } - void Simulator::processProposal( - const shared_model::interface::Proposal &proposal, - const consensus::Round &round) { + boost::optional> + Simulator::processProposal( + const shared_model::interface::Proposal &proposal) { log_->info("process proposal"); // Get last block from local ledger @@ -72,7 +83,7 @@ namespace iroha { auto block_var = block_query_opt.value()->getTopBlock(); if (auto e = boost::get>(&block_var)) { log_->warn("Could not fetch last block: " + e->error); - return; + return boost::none; } last_block = boost::getvalue; } else { log_->error("could not create block query"); - return; + return boost::none; } if (last_block->height() + 1 != proposal.height()) { log_->warn("Last block height: {}, proposal height: {}", last_block->height(), proposal.height()); - return; + return boost::none; } auto temporary_wsv_var = ametsuchi_factory_->createTemporaryWsv(); if (auto e = boost::get>(&temporary_wsv_var)) { log_->error("could not create temporary storage: {}", e->error); - return; + return boost::none; } auto storage = std::move( @@ -107,14 +118,13 @@ namespace iroha { validator_->validate(proposal, *storage); ametsuchi_factory_->prepareBlock(std::move(storage)); - notifier_.get_subscriber().on_next( - VerifiedProposalCreatorEvent{validated_proposal_and_errors, round}); + return validated_proposal_and_errors; } - void Simulator::processVerifiedProposal( + boost::optional> + Simulator::processVerifiedProposal( const std::shared_ptr - &verified_proposal_and_errors, - const consensus::Round &round) { + &verified_proposal_and_errors) { log_->info("process verified proposal"); auto height = block_query_factory_->createBlockQuery() | @@ -123,7 +133,7 @@ namespace iroha { }; if (not height) { log_->error("Unable to query top block height"); - return; + return boost::none; } const auto &proposal = verified_proposal_and_errors->verified_proposal; std::vector rejected_hashes; @@ -138,8 +148,8 @@ namespace iroha { proposal->transactions(), rejected_hashes); crypto_signer_->sign(*block); - block_notifier_.get_subscriber().on_next( - BlockCreatorEvent{RoundData{proposal, block}, round}); + + return block; } rxcpp::observable Simulator::onBlock() { diff --git a/irohad/simulator/impl/simulator.hpp b/irohad/simulator/impl/simulator.hpp index cbc3d82538..5cd4bc1421 100644 --- a/irohad/simulator/impl/simulator.hpp +++ b/irohad/simulator/impl/simulator.hpp @@ -36,16 +36,17 @@ namespace iroha { ~Simulator() override; - void processProposal(const shared_model::interface::Proposal &proposal, - const consensus::Round &round) override; + boost::optional> + processProposal( + const shared_model::interface::Proposal &proposal) override; rxcpp::observable onVerifiedProposal() override; - void processVerifiedProposal( + boost::optional> + processVerifiedProposal( const std::shared_ptr - &verified_proposal_and_errors, - const consensus::Round &round) override; + &verified_proposal_and_errors) override; rxcpp::observable onBlock() override; diff --git a/irohad/simulator/verified_proposal_creator.hpp b/irohad/simulator/verified_proposal_creator.hpp index 87e5fcdde1..e0d90f3cde 100644 --- a/irohad/simulator/verified_proposal_creator.hpp +++ b/irohad/simulator/verified_proposal_creator.hpp @@ -28,11 +28,11 @@ namespace iroha { class VerifiedProposalCreator { public: /** - * Execute stateful validation for given proposal and round + * Execute 
stateful validation for given proposal */ - virtual void processProposal( - const shared_model::interface::Proposal &proposal, - const consensus::Round &round) = 0; + virtual boost::optional< + std::shared_ptr> + processProposal(const shared_model::interface::Proposal &proposal) = 0; /** * Emit proposals which were verified by stateful validator diff --git a/test/module/irohad/simulator/simulator_mocks.hpp b/test/module/irohad/simulator/simulator_mocks.hpp index 0c1abb7cd5..398a20ec5e 100644 --- a/test/module/irohad/simulator/simulator_mocks.hpp +++ b/test/module/irohad/simulator/simulator_mocks.hpp @@ -13,10 +13,11 @@ namespace iroha { namespace simulator { class MockBlockCreator : public BlockCreator { public: - MOCK_METHOD2(processVerifiedProposal, - void(const std::shared_ptr< - iroha::validation::VerifiedProposalAndErrors> &, - const consensus::Round &)); + MOCK_METHOD1( + processVerifiedProposal, + boost::optional>( + const std::shared_ptr< + iroha::validation::VerifiedProposalAndErrors> &)); MOCK_METHOD0(onBlock, rxcpp::observable()); }; } // namespace simulator diff --git a/test/module/irohad/simulator/simulator_test.cpp b/test/module/irohad/simulator/simulator_test.cpp index 89c2cf2e45..bf683d9a7e 100644 --- a/test/module/irohad/simulator/simulator_test.cpp +++ b/test/module/irohad/simulator/simulator_test.cpp @@ -12,6 +12,7 @@ #include "backend/protobuf/proto_block_factory.hpp" #include "backend/protobuf/transaction.hpp" #include "builders/protobuf/transaction.hpp" +#include "datetime/time.hpp" #include "framework/test_subscriber.hpp" #include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" #include "module/irohad/network/network_mocks.hpp" @@ -59,13 +60,10 @@ class SimulatorTest : public ::testing::Test { shared_model::interface::Block>>(), std::make_unique< shared_model::validation::MockValidator>()); - } - void TearDown() override { - shared_model::crypto::crypto_signer_expecter.reset(); - } + EXPECT_CALL(*ordering_gate, onProposal()) + .WillOnce(Return(ordering_events.get_observable())); - void init() { simulator = std::make_shared(ordering_gate, validator, factory, @@ -74,6 +72,10 @@ class SimulatorTest : public ::testing::Test { std::move(block_factory)); } + void TearDown() override { + shared_model::crypto::crypto_signer_expecter.reset(); + } + consensus::Round round; std::shared_ptr validator; @@ -83,6 +85,7 @@ class SimulatorTest : public ::testing::Test { std::shared_ptr ordering_gate; std::shared_ptr> crypto_signer; std::unique_ptr block_factory; + rxcpp::subjects::subject ordering_events; std::shared_ptr simulator; }; @@ -112,30 +115,25 @@ auto makeProposal(int height) { .createdTime(iroha::time::now()) .transactions(txs) .build(); - return std::make_shared(std::move(proposal)); + return std::shared_ptr( + std::make_shared(std::move(proposal))); } -TEST_F(SimulatorTest, ValidWhenInitialized) { - // simulator constructor => onProposal subscription called - EXPECT_CALL(*ordering_gate, onProposal()) - .WillOnce(Return(rxcpp::observable<>::empty())); - - init(); +auto makeTx() { + return shared_model::proto::TransactionBuilder() + .createdTime(iroha::time::now()) + .creatorAccountId("admin@ru") + .addAssetQuantity("coin#coin", "1.0") + .quorum(1) + .build() + .signAndAddSignature( + shared_model::crypto::DefaultCryptoAlgorithmType::generateKeypair()) + .finish(); } TEST_F(SimulatorTest, ValidWhenPreviousBlock) { // proposal with height 2 => height 1 block present => new block generated - auto tx = shared_model::proto::TransactionBuilder() - .createdTime(iroha::time::now()) 
- .creatorAccountId("admin@ru") - .addAssetQuantity("coin#coin", "1.0") - .quorum(1) - .build() - .signAndAddSignature( - shared_model::crypto::DefaultCryptoAlgorithmType:: - generateKeypair()) - .finish(); - std::vector txs = {tx, tx}; + std::vector txs = {makeTx(), makeTx()}; auto validation_result = std::make_unique(); @@ -159,39 +157,32 @@ TEST_F(SimulatorTest, ValidWhenPreviousBlock) { return std::move(validation_result); })); - EXPECT_CALL(*ordering_gate, onProposal()) - .WillOnce(Return(rxcpp::observable<>::empty())); - EXPECT_CALL(*shared_model::crypto::crypto_signer_expecter, sign(A())) .Times(1); - init(); - auto proposal_wrapper = make_test_subscriber(simulator->onVerifiedProposal(), 1); - proposal_wrapper.subscribe([&proposal](auto event) { - auto verified_proposal = getVerifiedProposalUnsafe(event); - - ASSERT_EQ(verified_proposal->verified_proposal->height(), - proposal->height()); - ASSERT_EQ(verified_proposal->verified_proposal->transactions(), - proposal->transactions()); - ASSERT_TRUE(verified_proposal->rejected_transactions.empty()); + proposal_wrapper.subscribe([&](auto event) { + auto verification_result = getVerifiedProposalUnsafe(event); + auto verified_proposal = verification_result->verified_proposal; + EXPECT_EQ(verified_proposal->height(), proposal->height()); + EXPECT_EQ(verified_proposal->transactions(), proposal->transactions()); + EXPECT_TRUE(verification_result->rejected_transactions.empty()); }); auto block_wrapper = make_test_subscriber(simulator->onBlock(), 1); - block_wrapper.subscribe([&proposal](const auto &event) { + block_wrapper.subscribe([&](auto event) { auto block = getBlockUnsafe(event); - - ASSERT_EQ(block->height(), proposal->height()); - ASSERT_EQ(block->transactions(), proposal->transactions()); + EXPECT_EQ(block->height(), proposal->height()); + EXPECT_EQ(block->transactions(), proposal->transactions()); }); - simulator->processProposal(*proposal, round); + ordering_events.get_subscriber().on_next( + OrderingEvent{proposal, consensus::Round{}}); - ASSERT_TRUE(proposal_wrapper.validate()); - ASSERT_TRUE(block_wrapper.validate()); + EXPECT_TRUE(proposal_wrapper.validate()); + EXPECT_TRUE(block_wrapper.validate()); } TEST_F(SimulatorTest, FailWhenNoBlock) { @@ -204,15 +195,10 @@ TEST_F(SimulatorTest, FailWhenNoBlock) { EXPECT_CALL(*validator, validate(_, _)).Times(0); - EXPECT_CALL(*ordering_gate, onProposal()) - .WillOnce(Return(rxcpp::observable<>::empty())); - EXPECT_CALL(*shared_model::crypto::crypto_signer_expecter, sign(A())) .Times(0); - init(); - auto proposal_wrapper = make_test_subscriber(simulator->onVerifiedProposal(), 0); proposal_wrapper.subscribe(); @@ -220,7 +206,8 @@ TEST_F(SimulatorTest, FailWhenNoBlock) { auto block_wrapper = make_test_subscriber(simulator->onBlock(), 0); block_wrapper.subscribe(); - simulator->processProposal(*proposal, round); + ordering_events.get_subscriber().on_next( + OrderingEvent{proposal, consensus::Round{}}); ASSERT_TRUE(proposal_wrapper.validate()); ASSERT_TRUE(block_wrapper.validate()); @@ -239,15 +226,10 @@ TEST_F(SimulatorTest, FailWhenSameAsProposalHeight) { EXPECT_CALL(*validator, validate(_, _)).Times(0); - EXPECT_CALL(*ordering_gate, onProposal()) - .WillOnce(Return(rxcpp::observable<>::empty())); - EXPECT_CALL(*shared_model::crypto::crypto_signer_expecter, sign(A())) .Times(0); - init(); - auto proposal_wrapper = make_test_subscriber(simulator->onVerifiedProposal(), 0); proposal_wrapper.subscribe(); @@ -255,7 +237,8 @@ TEST_F(SimulatorTest, FailWhenSameAsProposalHeight) { auto 
block_wrapper = make_test_subscriber(simulator->onBlock(), 0); block_wrapper.subscribe(); - simulator->processProposal(*proposal, round); + ordering_events.get_subscriber().on_next( + OrderingEvent{proposal, consensus::Round{}}); ASSERT_TRUE(proposal_wrapper.validate()); ASSERT_TRUE(block_wrapper.validate()); @@ -271,21 +254,12 @@ TEST_F(SimulatorTest, FailWhenSameAsProposalHeight) { * transactions are provided as well */ TEST_F(SimulatorTest, SomeFailingTxs) { - // create a 3-height proposal, but validator returns only a 2-height verified - // proposal + // create a 3-height proposal, but validator returns only a 2-height + // verified proposal const int kNumTransactions = 3; std::vector txs; for (int i = 0; i < kNumTransactions; ++i) { - txs.emplace_back(shared_model::proto::TransactionBuilder() - .createdTime(iroha::time::now() + i) - .creatorAccountId("admin@ru") - .addAssetQuantity("coin#coin", "1.0") - .quorum(1) - .build() - .signAndAddSignature( - shared_model::crypto::DefaultCryptoAlgorithmType:: - generateKeypair()) - .finish()); + txs.push_back(makeTx()); } auto proposal = std::make_shared( shared_model::proto::ProposalBuilder() @@ -318,48 +292,29 @@ TEST_F(SimulatorTest, SomeFailingTxs) { EXPECT_CALL(*query, getTopBlock()) .WillOnce(Return(expected::makeValue(wBlock(clone(block))))); - EXPECT_CALL(*query, getTopBlockHeight()).WillOnce(Return(2)); - EXPECT_CALL(*validator, validate(_, _)) .WillOnce(Invoke([&verified_proposal_and_errors](const auto &p, auto &v) { return std::move(verified_proposal_and_errors); })); - EXPECT_CALL(*ordering_gate, onProposal()) - .WillOnce(Return(rxcpp::observable<>::empty())); - - EXPECT_CALL(*shared_model::crypto::crypto_signer_expecter, - sign(A())) - .Times(1); - - init(); - - auto proposal_wrapper = - make_test_subscriber(simulator->onVerifiedProposal(), 1); - proposal_wrapper.subscribe([&](auto event) { - auto verified_proposal_ = getVerifiedProposalUnsafe(event); - - // ensure that txs in verified proposal do not include failed ones - ASSERT_EQ(verified_proposal_->verified_proposal->height(), - verified_proposal_height); - ASSERT_EQ(verified_proposal_->verified_proposal->transactions(), - verified_proposal_transactions); - ASSERT_TRUE(verified_proposal_->rejected_transactions.size() - == kNumTransactions - 1); - const auto verified_proposal_rejected_tx_hashes = - verified_proposal_->rejected_transactions - | boost::adaptors::transformed( - [](const auto &tx_error) { return tx_error.tx_hash; }); - for (auto rejected_tx = txs.begin() + 1; rejected_tx != txs.end(); - ++rejected_tx) { - ASSERT_NE(boost::range::find(verified_proposal_rejected_tx_hashes, - rejected_tx->hash()), - boost::end(verified_proposal_rejected_tx_hashes)) - << rejected_tx->toString() << " missing in rejected transactions."; - } - }); - - simulator->processProposal(*proposal, round); - - ASSERT_TRUE(proposal_wrapper.validate()); + auto verification_result = simulator->processProposal(*proposal); + ASSERT_TRUE(verification_result); + auto verified_proposal = verification_result->get()->verified_proposal; + + // ensure that txs in verified proposal do not include failed ones + EXPECT_EQ(verified_proposal->height(), verified_proposal_height); + EXPECT_EQ(verified_proposal->transactions(), verified_proposal_transactions); + EXPECT_TRUE(verification_result->get()->rejected_transactions.size() + == kNumTransactions - 1); + const auto verified_proposal_rejected_tx_hashes = + verification_result->get()->rejected_transactions + | boost::adaptors::transformed( + [](const auto 
&tx_error) { return tx_error.tx_hash; }); + for (auto rejected_tx = txs.begin() + 1; rejected_tx != txs.end(); + ++rejected_tx) { + EXPECT_NE(boost::range::find(verified_proposal_rejected_tx_hashes, + rejected_tx->hash()), + boost::end(verified_proposal_rejected_tx_hashes)) + << rejected_tx->toString() << " missing in rejected transactions."; + } } From 0a8fd6a340eaf568fdb1fe1006acab43ce20d4fe Mon Sep 17 00:00:00 2001 From: Bulat Saifullin Date: Tue, 22 Jan 2019 14:44:29 +0300 Subject: [PATCH 15/41] clean dev -> develop (#2038) Signed-off-by: Bulat Saifullin --- .jenkinsci/debug-build.groovy | 4 ++-- .jenkinsci/docker-pull-or-build.groovy | 2 +- .jenkinsci/doxygen.groovy | 3 --- .jenkinsci/linux-post-step.groovy | 2 +- .jenkinsci/release-build.groovy | 2 +- Jenkinsfile | 20 ++++++++++---------- 6 files changed, 15 insertions(+), 18 deletions(-) diff --git a/.jenkinsci/debug-build.groovy b/.jenkinsci/debug-build.groovy index 89564aea65..4ad9038392 100644 --- a/.jenkinsci/debug-build.groovy +++ b/.jenkinsci/debug-build.groovy @@ -27,11 +27,11 @@ def doDebugBuild(coverageEnabled=false) { def iC = dPullOrBuild.dockerPullOrUpdate("${platform}-develop-build", "${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/develop/Dockerfile", "${env.GIT_RAW_BASE_URL}/${previousCommit}/docker/develop/Dockerfile", - "${env.GIT_RAW_BASE_URL}/dev/docker/develop/Dockerfile", + "${env.GIT_RAW_BASE_URL}/develop/docker/develop/Dockerfile", ['PARALLELISM': parallelism]) // push Docker image in case the current branch is develop, // or it is a commit into PR which base branch is develop (usually develop -> master) - if ((GIT_LOCAL_BRANCH == 'develop' || CHANGE_BRANCH_LOCAL == 'develop' || GIT_LOCAL_BRANCH == 'dev' || CHANGE_BRANCH_LOCAL == 'dev') && manifest.manifestSupportEnabled()) { + if ((GIT_LOCAL_BRANCH == 'develop' || CHANGE_BRANCH_LOCAL == 'develop') && manifest.manifestSupportEnabled()) { manifest.manifestCreate("${DOCKER_REGISTRY_BASENAME}:develop-build", ["${DOCKER_REGISTRY_BASENAME}:x86_64-develop-build"] ) diff --git a/.jenkinsci/docker-pull-or-build.groovy b/.jenkinsci/docker-pull-or-build.groovy index 463e330a36..0d221c35c9 100644 --- a/.jenkinsci/docker-pull-or-build.groovy +++ b/.jenkinsci/docker-pull-or-build.groovy @@ -60,7 +60,7 @@ def dockerPullOrUpdate(imageName, currentDockerfileURL, previousDockerfileURL, r } } } - if (GIT_LOCAL_BRANCH ==~ /develop|master|dev/ || CHANGE_BRANCH_LOCAL == 'develop' || CHANGE_BRANCH_LOCAL == 'dev') { + if (GIT_LOCAL_BRANCH ==~ /develop|master/ || CHANGE_BRANCH_LOCAL == 'develop') { docker.withRegistry('https://registry.hub.docker.com', 'docker-hub-credentials') { iC.push(imageName) } diff --git a/.jenkinsci/doxygen.groovy b/.jenkinsci/doxygen.groovy index b7348135c3..65d330f013 100644 --- a/.jenkinsci/doxygen.groovy +++ b/.jenkinsci/doxygen.groovy @@ -1,9 +1,6 @@ #!/usr/bin/env groovy def doDoxygen() { - // TODO: Remove this comment once dev branch will return to develop - // I will not be changing branches here. It requires some rewriting - // Hope dev branch situation will be resolved soon if (env.GIT_LOCAL_BRANCH in ["master","develop"] || env.CHANGE_BRANCH_LOCAL == 'develop') { def branch = env.CHANGE_BRANCH_LOCAL == 'develop' ? 
env.CHANGE_BRANCH_LOCAL : env.GIT_LOCAL_BRANCH sh "doxygen Doxyfile" diff --git a/.jenkinsci/linux-post-step.groovy b/.jenkinsci/linux-post-step.groovy index 2372422573..c28f6f6022 100644 --- a/.jenkinsci/linux-post-step.groovy +++ b/.jenkinsci/linux-post-step.groovy @@ -1,7 +1,7 @@ def linuxPostStep() { timeout(time: 600, unit: "SECONDS") { try { - if (currentBuild.currentResult == "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop|dev)/) { + if (currentBuild.currentResult == "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop)/) { def artifacts = load ".jenkinsci/artifacts.groovy" def commit = env.GIT_COMMIT def platform = sh(script: 'uname -m', returnStdout: true).trim() diff --git a/.jenkinsci/release-build.groovy b/.jenkinsci/release-build.groovy index c98b7ce2cf..297739b2f4 100644 --- a/.jenkinsci/release-build.groovy +++ b/.jenkinsci/release-build.groovy @@ -60,7 +60,7 @@ def doReleaseBuild() { // push Docker image in case the current branch is develop, // or it is a commit into PR which base branch is develop (usually develop -> master) checkTag = sh(script: 'git describe --tags --exact-match ${GIT_COMMIT}', returnStatus: true) - if (GIT_LOCAL_BRANCH == 'develop' || CHANGE_BRANCH_LOCAL == 'develop' || GIT_LOCAL_BRANCH == 'dev' || CHANGE_BRANCH_LOCAL == 'dev') { + if (GIT_LOCAL_BRANCH == 'develop' || CHANGE_BRANCH_LOCAL == 'develop') { docker.withRegistry('https://registry.hub.docker.com', 'docker-hub-credentials') { iCRelease.push("${platform}-develop") } diff --git a/Jenkinsfile b/Jenkinsfile index dae4a219dc..4df134819b 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -57,7 +57,7 @@ pipeline { CHANGE_BRANCH_LOCAL = env.CHANGE_BRANCH } catch(MissingPropertyException e) { } - if (GIT_LOCAL_BRANCH != "develop" && CHANGE_BRANCH_LOCAL != "develop" && GIT_LOCAL_BRANCH != "dev" && CHANGE_BRANCH_LOCAL != "dev") { + if (GIT_LOCAL_BRANCH != "develop" && CHANGE_BRANCH_LOCAL != "develop") { def builds = load ".jenkinsci/cancel-builds-same-job.groovy" builds.cancelSameJobBuilds() } @@ -88,7 +88,7 @@ pipeline { else { debugBuild.doDebugBuild() } - if (GIT_LOCAL_BRANCH ==~ /(master|develop|dev)/) { + if (GIT_LOCAL_BRANCH ==~ /(master|develop)/) { releaseBuild = load ".jenkinsci/release-build.groovy" releaseBuild.doReleaseBuild() } @@ -119,7 +119,7 @@ pipeline { else { debugBuild.doDebugBuild() } - if (GIT_LOCAL_BRANCH ==~ /(master|develop|dev)/) { + if (GIT_LOCAL_BRANCH ==~ /(master|develop)/) { releaseBuild = load ".jenkinsci/release-build.groovy" releaseBuild.doReleaseBuild() } @@ -150,7 +150,7 @@ pipeline { else { debugBuild.doDebugBuild() } - if (GIT_LOCAL_BRANCH ==~ /(master|develop|dev)/) { + if (GIT_LOCAL_BRANCH ==~ /(master|develop)/) { releaseBuild = load ".jenkinsci/release-build.groovy" releaseBuild.doReleaseBuild() } @@ -237,7 +237,7 @@ pipeline { sh "python /usr/local/bin/lcov_cobertura.py build/reports/coverage.info -o build/reports/coverage.xml" cobertura autoUpdateHealth: false, autoUpdateStability: false, coberturaReportFile: '**/build/reports/coverage.xml', conditionalCoverageTargets: '75, 50, 0', failUnhealthy: false, failUnstable: false, lineCoverageTargets: '75, 50, 0', maxNumberOfBuilds: 50, methodCoverageTargets: '75, 50, 0', onlyStable: false, zoomCoverageChart: false } - if (GIT_LOCAL_BRANCH ==~ /(master|develop|dev)/) { + if (GIT_LOCAL_BRANCH ==~ /(master|develop)/) { releaseBuild = load ".jenkinsci/mac-release-build.groovy" releaseBuild.doReleaseBuild() } @@ -248,7 +248,7 @@ pipeline { script { timeout(time: 600, unit: "SECONDS") { try { - if (currentBuild.currentResult 
== "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop|dev)/) { + if (currentBuild.currentResult == "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop)/) { def artifacts = load ".jenkinsci/artifacts.groovy" def commit = env.GIT_COMMIT filePaths = [ '\$(pwd)/build/*.tar.gz' ] @@ -355,7 +355,7 @@ pipeline { script { timeout(time: 600, unit: "SECONDS") { try { - if (currentBuild.currentResult == "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop|dev)/) { + if (currentBuild.currentResult == "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop)/) { def artifacts = load ".jenkinsci/artifacts.groovy" def commit = env.GIT_COMMIT filePaths = [ '\$(pwd)/build/*.tar.gz' ] @@ -387,7 +387,7 @@ pipeline { "$platform-develop-build", "${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/develop/Dockerfile", "${env.GIT_RAW_BASE_URL}/${env.GIT_PREVIOUS_COMMIT}/docker/develop/Dockerfile", - "${env.GIT_RAW_BASE_URL}/dev/docker/develop/Dockerfile", + "${env.GIT_RAW_BASE_URL}/develop/docker/develop/Dockerfile", ['PARALLELISM': params.PARALLELISM]) iC.inside() { doxygen.doDoxygen() @@ -425,7 +425,7 @@ pipeline { "$platform-develop-build", "${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/develop/Dockerfile", "${env.GIT_RAW_BASE_URL}/${env.GIT_PREVIOUS_COMMIT}/docker/develop/Dockerfile", - "${env.GIT_RAW_BASE_URL}/dev/docker/develop/Dockerfile", + "${env.GIT_RAW_BASE_URL}/develop/docker/develop/Dockerfile", ['PARALLELISM': params.PARALLELISM]) if (params.JavaBindings) { iC.inside("-v /tmp/${env.GIT_COMMIT}/bindings-artifact:/tmp/bindings-artifact") { @@ -443,7 +443,7 @@ pipeline { "android-${params.ABPlatform}-${params.ABBuildType}", "${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/android/Dockerfile", "${env.GIT_RAW_BASE_URL}/${env.GIT_PREVIOUS_COMMIT}/docker/android/Dockerfile", - "${env.GIT_RAW_BASE_URL}/dev/docker/android/Dockerfile", + "${env.GIT_RAW_BASE_URL}/develop/docker/android/Dockerfile", ['PARALLELISM': params.PARALLELISM, 'PLATFORM': params.ABPlatform, 'BUILD_TYPE': params.ABBuildType]) sh "curl -L -o /tmp/${env.GIT_COMMIT}/entrypoint.sh ${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/android/entrypoint.sh" sh "chmod +x /tmp/${env.GIT_COMMIT}/entrypoint.sh" From d7e7011c49c228117ef3dde50b8cd604311a241f Mon Sep 17 00:00:00 2001 From: Alexey Rodionov Date: Wed, 23 Jan 2019 15:54:21 +0300 Subject: [PATCH 16/41] Feature/iroha tests coredump (#2024) * Collecting core dumps for iroha tests and uploading it to Nexus Signed-off-by: Alexey Rodionov --- .jenkinsci/debug-build.groovy | 6 ++++++ .jenkinsci/linux-post-step.groovy | 15 +++++++++++++++ Jenkinsfile | 1 + 3 files changed, 22 insertions(+) diff --git a/.jenkinsci/debug-build.groovy b/.jenkinsci/debug-build.groovy index 4ad9038392..e0a201a6a9 100644 --- a/.jenkinsci/debug-build.groovy +++ b/.jenkinsci/debug-build.groovy @@ -23,6 +23,12 @@ def doDebugBuild(coverageEnabled=false) { parallelism = 1 } + // enable coredumps collecting + if (params.coredump) { + sh "echo %e.%p.coredump > /proc/sys/kernel/core_pattern" + sh "ulimit -c unlimited" + } + sh "docker network create ${env.IROHA_NETWORK}" def iC = dPullOrBuild.dockerPullOrUpdate("${platform}-develop-build", "${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/develop/Dockerfile", diff --git a/.jenkinsci/linux-post-step.groovy b/.jenkinsci/linux-post-step.groovy index c28f6f6022..6ea387fcd9 100644 --- a/.jenkinsci/linux-post-step.groovy +++ b/.jenkinsci/linux-post-step.groovy @@ -1,6 +1,21 @@ def linuxPostStep() { timeout(time: 600, unit: "SECONDS") { try { + // stop write core dumps + sh "ulimit -c 0" + // 
handling coredumps (if tests crashed) + if (currentBuild.currentResult != "SUCCESS" && params.coredump) { + def dumpsFileName = sprintf('coredumps-%1$s.bzip2', + [GIT_COMMIT.substring(0,8)]) + + sh(script: "find . -type f -name '*.coredump' -exec tar -cjvf ${dumpsFileName} {} \\+;") + if( fileExists(dumpsFileName)) { + withCredentials([usernamePassword(credentialsId: 'ci_nexus', passwordVariable: 'NEXUS_PASS', usernameVariable: 'NEXUS_USER')]) { + sh(script: "curl -u ${NEXUS_USER}:${NEXUS_PASS} --upload-file ${WORKSPACE}/${dumpsFileName} https://nexus.iroha.tech/repository/artifacts/iroha/coredumps/${dumpsFileName}") + } + echo "Build is not SUCCESS! See core dumps at: https://nexus.iroha.tech/repository/artifacts/iroha/coredumps/${dumpsFileName}" + } + } if (currentBuild.currentResult == "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop)/) { def artifacts = load ".jenkinsci/artifacts.groovy" def commit = env.GIT_COMMIT diff --git a/Jenkinsfile b/Jenkinsfile index 4df134819b..a9e09d891e 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -20,6 +20,7 @@ properties([parameters([ booleanParam(defaultValue: true, description: 'Build docs', name: 'Doxygen'), booleanParam(defaultValue: true, description: 'Sanitize address;leak', name: 'sanitize'), booleanParam(defaultValue: false, description: 'Build fuzzing, but do not run tests', name: 'fuzzing'), + booleanParam(defaultValue: true, description: 'Collect coredumps', name: 'coredump'), string(defaultValue: '8', description: 'Expect ~3GB memory consumtion per CPU core', name: 'PARALLELISM')])]) From 1120fe93a5074da298aa7abd77512ea4e28ceb81 Mon Sep 17 00:00:00 2001 From: Konstantin Munichev Date: Thu, 24 Jan 2019 22:44:47 +0300 Subject: [PATCH 17/41] Make MST expiration time configurable (#2046) Signed-off-by: Konstantin Munichev --- example/config-win.sample | 3 +- example/config.docker | 3 +- example/config.sample | 3 +- irohad/main/application.cpp | 5 +- irohad/main/application.hpp | 5 +- irohad/main/iroha_conf_loader.hpp | 3 + irohad/main/irohad.cpp | 1 + .../gossip_propagation_strategy.hpp | 2 +- .../state/impl/mst_state.cpp | 13 +- .../state/mst_state.hpp | 18 ++- .../transport/impl/mst_transport_grpc.cpp | 11 +- .../transport/mst_transport_grpc.hpp | 3 + .../fake_peer/fake_peer.cpp | 14 +- .../integration_framework/iroha_instance.cpp | 3 + .../integration_framework/iroha_instance.hpp | 1 + .../integration_framework/test_irohad.hpp | 2 + .../basic_mst_state_propagation.cpp | 2 +- .../multi_sig_transactions/CMakeLists.txt | 5 + .../multi_sig_transactions/completer_test.cpp | 136 ++++++++++++++++++ .../mst_processor_test.cpp | 20 +-- .../mst_test_helpers.hpp | 24 ++++ .../multi_sig_transactions/state_test.cpp | 59 ++++---- .../multi_sig_transactions/storage_test.cpp | 19 +-- .../multi_sig_transactions/transport_test.cpp | 9 +- .../pending_txs_storage_test.cpp | 30 ++-- test/system/irohad_test_data/config.sample | 3 +- .../irohad_test_data/config.sample.copy | 5 +- 27 files changed, 297 insertions(+), 105 deletions(-) create mode 100644 test/module/irohad/multi_sig_transactions/completer_test.cpp diff --git a/example/config-win.sample b/example/config-win.sample index 7100d8a9bc..8a4d3ab0e0 100644 --- a/example/config-win.sample +++ b/example/config-win.sample @@ -6,6 +6,7 @@ "max_proposal_size" : 10, "proposal_delay" : 5000, "vote_delay" : 5000, - "mst_enable" : false + "mst_enable" : false, + "mst_expiration_time" : 1440 } diff --git a/example/config.docker b/example/config.docker index d33227903e..44e45b49e1 100644 --- a/example/config.docker +++ 
b/example/config.docker @@ -6,5 +6,6 @@ "max_proposal_size" : 10, "proposal_delay" : 5000, "vote_delay" : 5000, - "mst_enable" : false + "mst_enable" : false, + "mst_expiration_time" : 1440 } diff --git a/example/config.sample b/example/config.sample index c4479444d2..cb9e1108cb 100644 --- a/example/config.sample +++ b/example/config.sample @@ -6,6 +6,7 @@ "max_proposal_size" : 10, "proposal_delay" : 5000, "vote_delay" : 5000, - "mst_enable" : false + "mst_enable" : false, + "mst_expiration_time" : 1440 } diff --git a/irohad/main/application.cpp b/irohad/main/application.cpp index 6d9820e123..8c0f22ad65 100644 --- a/irohad/main/application.cpp +++ b/irohad/main/application.cpp @@ -71,6 +71,7 @@ Irohad::Irohad(const std::string &block_store_dir, size_t max_proposal_size, std::chrono::milliseconds proposal_delay, std::chrono::milliseconds vote_delay, + std::chrono::minutes mst_expiration_time, const shared_model::crypto::Keypair &keypair, const boost::optional &opt_mst_gossip_params) @@ -83,6 +84,7 @@ Irohad::Irohad(const std::string &block_store_dir, proposal_delay_(proposal_delay), vote_delay_(vote_delay), is_mst_supported_(opt_mst_gossip_params), + mst_expiration_time_(mst_expiration_time), opt_mst_gossip_params_(opt_mst_gossip_params), keypair(keypair) { log_ = logger::log("IROHAD"); @@ -430,7 +432,7 @@ void Irohad::initStatusBus() { } void Irohad::initMstProcessor() { - auto mst_completer = std::make_shared(); + auto mst_completer = std::make_shared(mst_expiration_time_); auto mst_storage = std::make_shared(mst_completer); std::shared_ptr mst_propagation; if (is_mst_supported_) { @@ -440,6 +442,7 @@ void Irohad::initMstProcessor() { batch_parser, transaction_batch_factory_, persistent_cache, + mst_completer, keypair.publicKey()); mst_propagation = std::make_shared( storage, rxcpp::observe_on_new_thread(), *opt_mst_gossip_params_); diff --git a/irohad/main/application.hpp b/irohad/main/application.hpp index 421153bcda..38f211bafd 100644 --- a/irohad/main/application.hpp +++ b/irohad/main/application.hpp @@ -78,9 +78,10 @@ class Irohad { * @param proposal_delay - maximum waiting time util emitting new proposal * @param vote_delay - waiting time before sending vote to next peer * @param keypair - public and private keys for crypto signer + * @param mst_expiration_time - maximum time until until MST transaction is + * not considered as expired (in minutes) * @param opt_mst_gossip_params - parameters for Gossip MST propagation * (optional). If not provided, disables mst processing support - * * TODO mboldyrev 03.11.2018 IR-1844 Refactor the constructor. 
*/ Irohad(const std::string &block_store_dir, @@ -91,6 +92,7 @@ class Irohad { size_t max_proposal_size, std::chrono::milliseconds proposal_delay, std::chrono::milliseconds vote_delay, + std::chrono::minutes mst_expiration_time, const shared_model::crypto::Keypair &keypair, const boost::optional &opt_mst_gossip_params = boost::none); @@ -175,6 +177,7 @@ class Irohad { std::chrono::milliseconds proposal_delay_; std::chrono::milliseconds vote_delay_; bool is_mst_supported_; + std::chrono::minutes mst_expiration_time_; boost::optional opt_mst_gossip_params_; diff --git a/irohad/main/iroha_conf_loader.hpp b/irohad/main/iroha_conf_loader.hpp index b197de1d61..19b4221ae4 100644 --- a/irohad/main/iroha_conf_loader.hpp +++ b/irohad/main/iroha_conf_loader.hpp @@ -26,6 +26,7 @@ namespace config_members { const char *ProposalDelay = "proposal_delay"; const char *VoteDelay = "vote_delay"; const char *MstSupport = "mst_enable"; + const char *MstExpirationTime = "mst_expiration_time"; } // namespace config_members static constexpr size_t kBadJsonPrintLength = 15; @@ -103,6 +104,8 @@ inline rapidjson::Document parse_iroha_config(const std::string &conf_path) { ac::no_member_error(mbr::MstSupport)); ac::assert_fatal(doc[mbr::MstSupport].IsBool(), ac::type_error(mbr::MstSupport, kBoolType)); + ac::assert_fatal(doc[mbr::MstExpirationTime].IsUint(), + ac::type_error(mbr::MstExpirationTime, kUintType)); return doc; } diff --git a/irohad/main/irohad.cpp b/irohad/main/irohad.cpp index dd80069307..e587b15149 100644 --- a/irohad/main/irohad.cpp +++ b/irohad/main/irohad.cpp @@ -128,6 +128,7 @@ int main(int argc, char *argv[]) { config[mbr::MaxProposalSize].GetUint(), std::chrono::milliseconds(config[mbr::ProposalDelay].GetUint()), std::chrono::milliseconds(config[mbr::VoteDelay].GetUint()), + std::chrono::minutes(config[mbr::MstExpirationTime].GetUint()), *keypair, boost::make_optional(config[mbr::MstSupport].GetBool(), iroha::GossipPropagationStrategyParams{})); diff --git a/irohad/multi_sig_transactions/gossip_propagation_strategy.hpp b/irohad/multi_sig_transactions/gossip_propagation_strategy.hpp index 4158f80939..0ece08b2d7 100644 --- a/irohad/multi_sig_transactions/gossip_propagation_strategy.hpp +++ b/irohad/multi_sig_transactions/gossip_propagation_strategy.hpp @@ -18,7 +18,7 @@ namespace iroha { /** * This class provides strategy for propagation states in network - * Emits exactly (or zero iff provider is empty) amount of peers + * Emits exactly (or zero if provider is empty) amount of peers * at some period * note: it can be inconsistent with the peer provider */ diff --git a/irohad/multi_sig_transactions/state/impl/mst_state.cpp b/irohad/multi_sig_transactions/state/impl/mst_state.cpp index e2d751f36b..e2f6de8eff 100644 --- a/irohad/multi_sig_transactions/state/impl/mst_state.cpp +++ b/irohad/multi_sig_transactions/state/impl/mst_state.cpp @@ -20,6 +20,9 @@ namespace iroha { return left_tx->reducedHash() == right_tx->reducedHash(); } + DefaultCompleter::DefaultCompleter(std::chrono::minutes expiration_time) + : expiration_time_(expiration_time) {} + bool DefaultCompleter::operator()(const DataType &batch) const { return std::all_of(batch->transactions().begin(), batch->transactions().end(), @@ -28,9 +31,15 @@ namespace iroha { }); } - bool DefaultCompleter::operator()(const DataType &tx, + bool DefaultCompleter::operator()(const DataType &batch, const TimeType &time) const { - return false; + return std::any_of(batch->transactions().begin(), + batch->transactions().end(), + [&](const auto &tx) { + return 
tx->createdTime() + + expiration_time_ / std::chrono::milliseconds(1) + < time; + }); } // ------------------------------| public api |------------------------------- diff --git a/irohad/multi_sig_transactions/state/mst_state.hpp b/irohad/multi_sig_transactions/state/mst_state.hpp index ab1a00f3d4..2403e9f0a0 100644 --- a/irohad/multi_sig_transactions/state/mst_state.hpp +++ b/irohad/multi_sig_transactions/state/mst_state.hpp @@ -54,13 +54,24 @@ namespace iroha { }; /** - * Class provides the default behavior for the batch completer: - * complete, if all transactions have at least quorum number of signatures + * Class provides the default behavior for the batch completer. + * Complete, if all transactions have at least quorum number of signatures. + * Expired if at least one transaction is expired. */ class DefaultCompleter : public Completer { + /** + * Creates new Completer with a given expiration time for transactions + * @param expiration_time - expiration time in minutes + */ + public: + explicit DefaultCompleter(std::chrono::minutes expiration_time); + bool operator()(const DataType &batch) const override; bool operator()(const DataType &tx, const TimeType &time) const override; + + private: + std::chrono::minutes expiration_time_; }; using CompleterType = std::shared_ptr; @@ -74,8 +85,7 @@ namespace iroha { * @param completer - strategy for determine completed and expired batches * @return empty mst state */ - static MstState empty( - const CompleterType &completer = std::make_shared()); + static MstState empty(const CompleterType &completer); /** * Add batch to current state diff --git a/irohad/multi_sig_transactions/transport/impl/mst_transport_grpc.cpp b/irohad/multi_sig_transactions/transport/impl/mst_transport_grpc.cpp index 78b42fc8b2..f6bc4cd788 100644 --- a/irohad/multi_sig_transactions/transport/impl/mst_transport_grpc.cpp +++ b/irohad/multi_sig_transactions/transport/impl/mst_transport_grpc.cpp @@ -32,12 +32,14 @@ MstTransportGrpc::MstTransportGrpc( std::shared_ptr transaction_batch_factory, std::shared_ptr tx_presence_cache, + std::shared_ptr mst_completer, shared_model::crypto::PublicKey my_key) : async_call_(std::move(async_call)), transaction_factory_(std::move(transaction_factory)), batch_parser_(std::move(batch_parser)), batch_factory_(std::move(transaction_batch_factory)), tx_presence_cache_(std::move(tx_presence_cache)), + mst_completer_(std::move(mst_completer)), my_key_(shared_model::crypto::toBinaryString(my_key)) {} shared_model::interface::types::SharedTxsCollectionType @@ -80,7 +82,7 @@ grpc::Status MstTransportGrpc::SendState( auto batches = batch_parser_->parseBatches(transactions); - MstState new_state = MstState::empty(); + MstState new_state = MstState::empty(mst_completer_); for (auto &batch : batches) { batch_factory_->createTransactionBatch(batch).match( @@ -134,10 +136,9 @@ grpc::Status MstTransportGrpc::SendState( return grpc::Status::OK; } - if (auto subscriber =subscriber_.lock()) { - subscriber->onNewState( - source_key, - std::move(new_state));} else { + if (auto subscriber = subscriber_.lock()) { + subscriber->onNewState(source_key, std::move(new_state)); + } else { async_call_->log_->warn("No subscriber for MST SendState event is set"); } diff --git a/irohad/multi_sig_transactions/transport/mst_transport_grpc.hpp b/irohad/multi_sig_transactions/transport/mst_transport_grpc.hpp index 766ce8d00c..d75837ad24 100644 --- a/irohad/multi_sig_transactions/transport/mst_transport_grpc.hpp +++ 
b/irohad/multi_sig_transactions/transport/mst_transport_grpc.hpp @@ -15,6 +15,7 @@ #include "interfaces/iroha_internal/transaction_batch_factory.hpp" #include "interfaces/iroha_internal/transaction_batch_parser.hpp" #include "logger/logger.hpp" +#include "multi_sig_transactions/state/mst_state.hpp" #include "network/impl/async_grpc_client.hpp" namespace iroha { @@ -41,6 +42,7 @@ namespace iroha { std::shared_ptr transaction_batch_factory, std::shared_ptr tx_presence_cache, + std::shared_ptr mst_completer, shared_model::crypto::PublicKey my_key); /** @@ -78,6 +80,7 @@ namespace iroha { batch_factory_; std::shared_ptr tx_presence_cache_; /// source peer key for MST propogation messages + std::shared_ptr mst_completer_; const std::string my_key_; }; diff --git a/test/framework/integration_framework/fake_peer/fake_peer.cpp b/test/framework/integration_framework/fake_peer/fake_peer.cpp index 18cb353c4d..eccc9d0d3e 100644 --- a/test/framework/integration_framework/fake_peer/fake_peer.cpp +++ b/test/framework/integration_framework/fake_peer/fake_peer.cpp @@ -72,12 +72,14 @@ namespace integration_framework { common_objects_factory, getAddress(), keypair_->publicKey())), real_peer_(real_peer), async_call_(std::make_shared()), - mst_transport_(std::make_shared(async_call_, - transaction_factory, - batch_parser, - transaction_batch_factory, - tx_presence_cache, - keypair_->publicKey())), + mst_transport_(std::make_shared( + async_call_, + transaction_factory, + batch_parser, + transaction_batch_factory, + tx_presence_cache, + std::make_shared(std::chrono::minutes(0)), + keypair_->publicKey())), yac_transport_(std::make_shared(async_call_)), yac_network_notifier_(std::make_shared()), yac_crypto_(std::make_shared( diff --git a/test/framework/integration_framework/iroha_instance.cpp b/test/framework/integration_framework/iroha_instance.cpp index e3e3e95e5c..dceca54189 100644 --- a/test/framework/integration_framework/iroha_instance.cpp +++ b/test/framework/integration_framework/iroha_instance.cpp @@ -33,6 +33,8 @@ namespace integration_framework { proposal_delay_(1h), // not required due to solo consensus vote_delay_(0ms), + // amount of minutes in a day + mst_expiration_time_(std::chrono::minutes(24*60)), opt_mst_gossip_params_(boost::make_optional( mst_support, iroha::GossipPropagationStrategyParams{})) {} @@ -69,6 +71,7 @@ namespace integration_framework { max_proposal_size, proposal_delay_, vote_delay_, + mst_expiration_time_, key_pair, opt_mst_gossip_params_); } diff --git a/test/framework/integration_framework/iroha_instance.hpp b/test/framework/integration_framework/iroha_instance.hpp index 93eb0ae33b..4e317d6153 100644 --- a/test/framework/integration_framework/iroha_instance.hpp +++ b/test/framework/integration_framework/iroha_instance.hpp @@ -70,6 +70,7 @@ namespace integration_framework { const size_t internal_port_; const std::chrono::milliseconds proposal_delay_; const std::chrono::milliseconds vote_delay_; + const std::chrono::minutes mst_expiration_time_; boost::optional opt_mst_gossip_params_; diff --git a/test/framework/integration_framework/test_irohad.hpp b/test/framework/integration_framework/test_irohad.hpp index 6dfd3eccbd..ff1d957489 100644 --- a/test/framework/integration_framework/test_irohad.hpp +++ b/test/framework/integration_framework/test_irohad.hpp @@ -24,6 +24,7 @@ namespace integration_framework { size_t max_proposal_size, std::chrono::milliseconds proposal_delay, std::chrono::milliseconds vote_delay, + std::chrono::minutes mst_expiration_time, const 
shared_model::crypto::Keypair &keypair, const boost::optional &opt_mst_gossip_params = boost::none) @@ -35,6 +36,7 @@ namespace integration_framework { max_proposal_size, proposal_delay, vote_delay, + mst_expiration_time, keypair, opt_mst_gossip_params) {} diff --git a/test/integration/acceptance/basic_mst_state_propagation.cpp b/test/integration/acceptance/basic_mst_state_propagation.cpp index 4cdf48d898..50d7a5da92 100644 --- a/test/integration/acceptance/basic_mst_state_propagation.cpp +++ b/test/integration/acceptance/basic_mst_state_propagation.cpp @@ -77,7 +77,7 @@ class BasicMstPropagationFixture : public AcceptanceFixture { /** * Check that after sending a not fully signed transaction, an MST state - * propagtes to another peer + * propagates to another peer * @given a not fully signed transaction * @when such transaction is sent to one of two iroha peers in the network * @then that peer propagates MST state to another peer diff --git a/test/module/irohad/multi_sig_transactions/CMakeLists.txt b/test/module/irohad/multi_sig_transactions/CMakeLists.txt index 30f1b00eba..69d7c56187 100644 --- a/test/module/irohad/multi_sig_transactions/CMakeLists.txt +++ b/test/module/irohad/multi_sig_transactions/CMakeLists.txt @@ -19,6 +19,11 @@ target_link_libraries(storage_test shared_model_interfaces_factories ) +AddTest(completer_test completer_test.cpp) +target_link_libraries(completer_test + mst_state + ) + AddTest(transport_test transport_test.cpp) target_link_libraries(transport_test mst_transport diff --git a/test/module/irohad/multi_sig_transactions/completer_test.cpp b/test/module/irohad/multi_sig_transactions/completer_test.cpp new file mode 100644 index 0000000000..4efffdb540 --- /dev/null +++ b/test/module/irohad/multi_sig_transactions/completer_test.cpp @@ -0,0 +1,136 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#include +#include +#include +#include "datetime/time.hpp" +#include "module/shared_model/interface_mocks.hpp" +#include "multi_sig_transactions/state/mst_state.hpp" + +using namespace iroha; +using namespace testing; + +/** + * @given batch with 3 transactions: first one with quorum 1 and 1 signature, + * second one with quorum 2 and 2 signatures, third one with quorum 3 and 3 + * signatures + * @when completer was called for the batch + * @then batch is complete + */ +TEST(CompleterTest, BatchQuorumTestEnoughSignatures) { + auto completer = std::make_shared(std::chrono::minutes(0)); + + std::vector> sigs1{ + 1, std::make_shared()}; + std::vector> sigs2{ + 2, std::make_shared()}; + std::vector> sigs3{ + 3, std::make_shared()}; + + auto tx1 = std::make_shared(); + EXPECT_CALL(*tx1, quorum()).WillOnce(Return(1)); + EXPECT_CALL(*tx1, signatures()) + .WillOnce(Return( + sigs1 | boost::adaptors::indirected)); + + auto tx2 = std::make_shared(); + EXPECT_CALL(*tx2, quorum()).WillOnce(Return(2)); + EXPECT_CALL(*tx2, signatures()) + .WillOnce(Return( + sigs2 | boost::adaptors::indirected)); + + auto tx3 = std::make_shared(); + EXPECT_CALL(*tx3, quorum()).WillOnce(Return(3)); + EXPECT_CALL(*tx3, signatures()) + .WillOnce(Return( + sigs3 | boost::adaptors::indirected)); + + auto batch = createMockBatchWithTransactions({tx1, tx2, tx3}, ""); + ASSERT_TRUE((*completer)(batch)); +} + +/** + * @given batch with 3 transactions: first one with quorum 1 and 1 signature, + * second one with quorum 2 and 1 signature, third one with quorum 3 and 3 + * signatures + * @when completer was called for the batch + * @then batch is not complete + */ +TEST(CompleterTest, BatchQuorumTestNotEnoughSignatures) { + auto completer = std::make_shared(std::chrono::minutes(0)); + + std::vector> sigs1{ + 1, std::make_shared()}; + std::vector> sigs2{ + 1, std::make_shared()}; + std::vector> sigs3{ + 3, std::make_shared()}; + + auto tx1 = std::make_shared(); + EXPECT_CALL(*tx1, quorum()).WillOnce(Return(1)); + EXPECT_CALL(*tx1, signatures()) + .WillOnce(Return( + sigs1 | boost::adaptors::indirected)); + + auto tx2 = std::make_shared(); + EXPECT_CALL(*tx2, quorum()).WillOnce(Return(2)); + EXPECT_CALL(*tx2, signatures()) + .WillOnce(Return( + sigs2 | boost::adaptors::indirected)); + + auto tx3 = std::make_shared(); + EXPECT_CALL(*tx3, quorum()).Times(0); + EXPECT_CALL(*tx3, signatures()).Times(0); + + auto batch = createMockBatchWithTransactions({tx1, tx2, tx3}, ""); + ASSERT_FALSE((*completer)(batch)); +} + +/** + * @given batch with 3 transactions with now() creation time and completer + * with 1 minute expiration time + * @when completer with 2 minute gap was called for the batch + * @then batch is expired + */ +TEST(CompleterTest, BatchExpirationTestExpired) { + auto completer = std::make_shared(std::chrono::minutes(1)); + auto time = iroha::time::now(); + auto tx1 = std::make_shared(); + EXPECT_CALL(*tx1, createdTime()).WillOnce(Return(time)); + auto tx2 = std::make_shared(); + EXPECT_CALL(*tx2, createdTime()).Times(0); + auto tx3 = std::make_shared(); + EXPECT_CALL(*tx3, createdTime()).Times(0); + auto batch = createMockBatchWithTransactions({tx1, tx2, tx3}, ""); + ASSERT_TRUE((*completer)( + batch, time + std::chrono::minutes(2) / std::chrono::milliseconds(1))); +} + +/** + * @given batch with 3 transactions: first one in 2 minutes from now, + * second one in 3 minutes from now, third one in 4 minutes from now and + * completer with 5 minute expiration time + * @when completer 
without time gap was called for the batch + * @then batch is not expired + */ +TEST(CompleterTest, BatchExpirationTestNoExpired) { + auto completer = std::make_shared(std::chrono::minutes(5)); + auto time = iroha::time::now(); + auto tx1 = std::make_shared(); + EXPECT_CALL(*tx1, createdTime()) + .WillOnce(Return( + time + std::chrono::minutes(2) / std::chrono::milliseconds(1))); + auto tx2 = std::make_shared(); + EXPECT_CALL(*tx2, createdTime()) + .WillOnce(Return( + time + std::chrono::minutes(3) / std::chrono::milliseconds(1))); + auto tx3 = std::make_shared(); + EXPECT_CALL(*tx3, createdTime()) + .WillOnce(Return( + time + std::chrono::minutes(4) / std::chrono::milliseconds(1))); + auto batch = createMockBatchWithTransactions({tx1, tx2, tx3}, ""); + ASSERT_FALSE((*completer)(batch, time)); +} diff --git a/test/module/irohad/multi_sig_transactions/mst_processor_test.cpp b/test/module/irohad/multi_sig_transactions/mst_processor_test.cpp index ecaf0021bf..54e78d5445 100644 --- a/test/module/irohad/multi_sig_transactions/mst_processor_test.cpp +++ b/test/module/irohad/multi_sig_transactions/mst_processor_test.cpp @@ -23,23 +23,6 @@ using namespace framework::test_subscriber; using testing::_; using testing::Return; -class TestCompleter : public Completer { - bool operator()(const DataType &batch) const override { - return std::all_of(batch->transactions().begin(), - batch->transactions().end(), - [](const auto &tx) { - return boost::size(tx->signatures()) >= tx->quorum(); - }); - } - - bool operator()(const DataType &batch, const TimeType &time) const override { - return std::any_of( - batch->transactions().begin(), - batch->transactions().end(), - [&time](const auto &tx) { return tx->createdTime() < time; }); - } -}; - class MstProcessorTest : public testing::Test { public: // --------------------------------| fields |--------------------------------- @@ -314,7 +297,8 @@ TEST_F(MstProcessorTest, emptyStatePropagation) { auto another_peer = makePeer( "another", shared_model::interface::types::PubkeyType("sign_one")); - auto another_peer_state = MstState::empty(); + auto another_peer_state = MstState::empty( + std::make_shared(std::chrono::minutes(0))); another_peer_state += makeTestBatch(txBuilder(1)); storage->apply(another_peer->pubkey(), another_peer_state); diff --git a/test/module/irohad/multi_sig_transactions/mst_test_helpers.hpp b/test/module/irohad/multi_sig_transactions/mst_test_helpers.hpp index 6c2141249a..0bcf48802a 100644 --- a/test/module/irohad/multi_sig_transactions/mst_test_helpers.hpp +++ b/test/module/irohad/multi_sig_transactions/mst_test_helpers.hpp @@ -14,6 +14,7 @@ #include "interfaces/common_objects/types.hpp" #include "module/shared_model/builders/protobuf/test_transaction_builder.hpp" #include "multi_sig_transactions/mst_types.hpp" +#include "multi_sig_transactions/state/mst_state.hpp" #include "logger/logger.hpp" @@ -102,4 +103,27 @@ inline auto makeTx(const shared_model::interface::types::CounterType &counter, .finish()); } +namespace iroha { + class TestCompleter : public DefaultCompleter { + public: + explicit TestCompleter() : DefaultCompleter(std::chrono::minutes(0)) {} + + bool operator()(const DataType &batch) const override { + return std::all_of(batch->transactions().begin(), + batch->transactions().end(), + [](const auto &tx) { + return boost::size(tx->signatures()) >= tx->quorum(); + }); + } + + bool operator()(const DataType &batch, + const TimeType &time) const override { + return std::any_of( + batch->transactions().begin(), + 
batch->transactions().end(), + [&time](const auto &tx) { return tx->createdTime() < time; }); + } + }; +} // namespace iroha + #endif // IROHA_MST_TEST_HELPERS_HPP diff --git a/test/module/irohad/multi_sig_transactions/state_test.cpp b/test/module/irohad/multi_sig_transactions/state_test.cpp index f4c1049be7..5bd4998b1e 100644 --- a/test/module/irohad/multi_sig_transactions/state_test.cpp +++ b/test/module/irohad/multi_sig_transactions/state_test.cpp @@ -8,18 +8,20 @@ #include "module/irohad/multi_sig_transactions/mst_test_helpers.hpp" #include "multi_sig_transactions/state/mst_state.hpp" -auto log_ = logger::log("MstStateTest"); using namespace std; using namespace iroha; using namespace iroha::model; +auto log_ = logger::log("MstStateTest"); +auto completer_ = std::make_shared(); + /** * @given empty state * @when insert one batch * @then checks that state contains the inserted batch */ TEST(StateTest, CreateState) { - auto state = MstState::empty(); + auto state = MstState::empty(completer_); ASSERT_EQ(0, state.getBatches().size()); auto tx = addSignatures( makeTestBatch(txBuilder(1)), 0, makeSignature("1", "pub_key_1")); @@ -34,7 +36,7 @@ TEST(StateTest, CreateState) { * @then checks that signatures are merged into the state */ TEST(StateTest, UpdateExistingState) { - auto state = MstState::empty(); + auto state = MstState::empty(completer_); auto time = iroha::time::now(); auto first_signature = makeSignature("1", "pub_key_1"); @@ -58,7 +60,7 @@ TEST(StateTest, UpdateExistingState) { * @then "contains" method shows presence of the batch */ TEST(StateTest, ContainsMethodFindsInsertedBatch) { - auto state = MstState::empty(); + auto state = MstState::empty(completer_); auto first_signature = makeSignature("1", "pub_key_1"); auto batch = makeTestBatch(txBuilder(1, iroha::time::now())); @@ -74,7 +76,7 @@ TEST(StateTest, ContainsMethodFindsInsertedBatch) { * @then "contains" method shows absence of the batch */ TEST(StateTest, ContainsMethodDoesNotFindNonInsertedBatch) { - auto state = MstState::empty(); + auto state = MstState::empty(completer_); auto batch = makeTestBatch(txBuilder(1, iroha::time::now())); EXPECT_FALSE(state.contains(batch)); @@ -88,7 +90,7 @@ TEST(StateTest, ContainsMethodDoesNotFindNonInsertedBatch) { TEST(StateTest, UpdateStateWhenTransactionsSame) { log_->info("Create empty state => insert two equal transaction"); - auto state = MstState::empty(); + auto state = MstState::empty(completer_); auto time = iroha::time::now(); state += addSignatures( @@ -115,7 +117,7 @@ TEST(StateTest, UpdateStateWhenTransactionsSame) { TEST(StateTest, DifferentSignaturesUnionTest) { log_->info("Create two states => merge them"); - auto state1 = MstState::empty(); + auto state1 = MstState::empty(completer_); state1 += addSignatures(makeTestBatch(txBuilder(1)), 0, makeSignature("1", "1")); @@ -127,7 +129,7 @@ TEST(StateTest, DifferentSignaturesUnionTest) { ASSERT_EQ(3, state1.getBatches().size()); - auto state2 = MstState::empty(); + auto state2 = MstState::empty(completer_); state2 += addSignatures(makeTestBatch(txBuilder(4)), 0, makeSignature("4", "4")); state2 += @@ -152,8 +154,8 @@ TEST(StateTest, UnionStateWhenSameTransactionHaveDifferentSignatures) { auto time = iroha::time::now(); - auto state1 = MstState::empty(); - auto state2 = MstState::empty(); + auto state1 = MstState::empty(completer_); + auto state2 = MstState::empty(completer_); state1 += addSignatures( makeTestBatch(txBuilder(1, time)), 0, makeSignature("1", "1")); @@ -181,7 +183,7 @@ TEST(StateTest, 
UnionStateWhenSameTransactionHaveDifferentSignatures) { TEST(StateTest, UnionStateWhenTransactionsSame) { auto time = iroha::time::now(); - auto state1 = MstState::empty(); + auto state1 = MstState::empty(completer_); state1 += addSignatures( makeTestBatch(txBuilder(1, time)), 0, makeSignature("1", "1")); state1 += addSignatures( @@ -189,7 +191,7 @@ TEST(StateTest, UnionStateWhenTransactionsSame) { ASSERT_EQ(2, state1.getBatches().size()); - auto state2 = MstState::empty(); + auto state2 = MstState::empty(completer_); state2 += addSignatures( makeTestBatch(txBuilder(1, time)), 0, makeSignature("1", "1")); state2 += addSignatures( @@ -217,11 +219,11 @@ TEST(StateTest, DifferenceTest) { auto common_batch = makeTestBatch(txBuilder(1, time)); auto another_batch = makeTestBatch(txBuilder(3)); - auto state1 = MstState::empty(); + auto state1 = MstState::empty(completer_); state1 += addSignatures(common_batch, 0, first_signature); state1 += addSignatures(common_batch, 0, second_signature); - auto state2 = MstState::empty(); + auto state2 = MstState::empty(completer_); state2 += addSignatures(common_batch, 0, second_signature); state2 += addSignatures(common_batch, 0, third_signature); state2 += addSignatures(another_batch, 0, another_signature); @@ -243,7 +245,7 @@ TEST(StateTest, UpdateTxUntillQuorum) { auto quorum = 3u; auto time = iroha::time::now(); - auto state = MstState::empty(); + auto state = MstState::empty(completer_); auto state_after_one_tx = state += addSignatures( makeTestBatch(txBuilder(1, time, quorum)), 0, makeSignature("1", "1")); @@ -274,7 +276,7 @@ TEST(StateTest, UpdateStateWithNewStateUntilQuorum) { auto keypair = makeKey(); auto time = iroha::time::now(); - auto state1 = MstState::empty(); + auto state1 = MstState::empty(completer_); state1 += addSignatures(makeTestBatch(txBuilder(1, time, quorum)), 0, makeSignature("1_1", "1_1")); @@ -284,7 +286,7 @@ TEST(StateTest, UpdateStateWithNewStateUntilQuorum) { makeTestBatch(txBuilder(2, time)), 0, makeSignature("3", "3")); ASSERT_EQ(2, state1.getBatches().size()); - auto state2 = MstState::empty(); + auto state2 = MstState::empty(completer_); state2 += addSignatures(makeTestBatch(txBuilder(1, time, quorum)), 0, makeSignature("1_2", "1_2")); @@ -298,19 +300,6 @@ TEST(StateTest, UpdateStateWithNewStateUntilQuorum) { ASSERT_EQ(1, state1.getBatches().size()); } -/** - * Tests expired completer, which checks that all transactions in batch are not - * expired - */ -class TimeTestCompleter : public iroha::DefaultCompleter { - bool operator()(const DataType &batch, const TimeType &time) const override { - return std::all_of( - batch->transactions().begin(), - batch->transactions().end(), - [&time](const auto &tx) { return tx->createdTime() < time; }); - } -}; - /** * @given a timepoint * AND a state with an expired transaction @@ -325,7 +314,7 @@ TEST(StateTest, TimeIndexInsertionByTx) { 0, makeSignature("1_1", "1_1")); - auto state = MstState::empty(std::make_shared()); + auto state = MstState::empty(completer_); state += prepared_batch; @@ -345,7 +334,7 @@ TEST(StateTest, TimeIndexInsertionByAddState) { auto quorum = 3u; auto time = iroha::time::now(); - auto state1 = MstState::empty(std::make_shared()); + auto state1 = MstState::empty(completer_); state1 += addSignatures(makeTestBatch(txBuilder(1, time, quorum)), 0, makeSignature("1_1", "1_1")); @@ -353,7 +342,7 @@ TEST(StateTest, TimeIndexInsertionByAddState) { 0, makeSignature("1_2", "1_2")); - auto state2 = MstState::empty(std::make_shared()); + auto state2 = 
MstState::empty(completer_); state2 += addSignatures( makeTestBatch(txBuilder(2, time)), 0, makeSignature("2", "2")); state2 += addSignatures( @@ -374,13 +363,13 @@ TEST(StateTest, TimeIndexInsertionByAddState) { TEST(StateTest, RemovingTestWhenByTimeHasExpired) { auto time = iroha::time::now(); - auto state1 = MstState::empty(std::make_shared()); + auto state1 = MstState::empty(completer_); state1 += addSignatures( makeTestBatch(txBuilder(1, time)), 0, makeSignature("2", "2")); state1 += addSignatures( makeTestBatch(txBuilder(2, time)), 0, makeSignature("2", "2")); - auto state2 = MstState::empty(std::make_shared()); + auto state2 = MstState::empty(completer_); auto diff_state = state1 - state2; diff --git a/test/module/irohad/multi_sig_transactions/storage_test.cpp b/test/module/irohad/multi_sig_transactions/storage_test.cpp index cdafb2b938..4cb5af5ee9 100644 --- a/test/module/irohad/multi_sig_transactions/storage_test.cpp +++ b/test/module/irohad/multi_sig_transactions/storage_test.cpp @@ -9,27 +9,17 @@ #include "module/irohad/multi_sig_transactions/mst_test_helpers.hpp" #include "multi_sig_transactions/storage/mst_storage_impl.hpp" -auto log_ = logger::log("MstStorageTest"); - using namespace iroha; -class StorageTestCompleter : public DefaultCompleter { - public: - bool operator()(const DataType &batch, const TimeType &time) const override { - return std::all_of( - batch->transactions().begin(), - batch->transactions().end(), - [&time](const auto &tx) { return tx->createdTime() < time; }); - } -}; +auto log_ = logger::log("MstStorageTest"); class StorageTest : public testing::Test { public: StorageTest() : absent_peer_key("absent") {} void SetUp() override { - storage = std::make_shared( - std::make_shared()); + completer_ = std::make_shared(); + storage = std::make_shared(completer_); fillOwnState(); } @@ -45,6 +35,7 @@ class StorageTest : public testing::Test { const unsigned quorum = 3u; const shared_model::interface::types::TimestampType creation_time = iroha::time::now(); + std::shared_ptr completer_; }; TEST_F(StorageTest, StorageWhenApplyOtherState) { @@ -52,7 +43,7 @@ TEST_F(StorageTest, StorageWhenApplyOtherState) { "create state with default peers and other state => " "apply state"); - auto new_state = MstState::empty(std::make_shared()); + auto new_state = MstState::empty(completer_); new_state += makeTestBatch(txBuilder(5, creation_time)); new_state += makeTestBatch(txBuilder(6, creation_time)); new_state += makeTestBatch(txBuilder(7, creation_time)); diff --git a/test/module/irohad/multi_sig_transactions/transport_test.cpp b/test/module/irohad/multi_sig_transactions/transport_test.cpp index 076b67c48e..6f4b4b332a 100644 --- a/test/module/irohad/multi_sig_transactions/transport_test.cpp +++ b/test/module/irohad/multi_sig_transactions/transport_test.cpp @@ -37,6 +37,8 @@ class TransportTest : public ::testing::Test { tx_presence_cache_( std::make_shared()), my_key_(makeKey()), + completer_( + std::make_shared(std::chrono::minutes(0))), mst_notification_transport_( std::make_shared()) {} @@ -45,6 +47,7 @@ class TransportTest : public ::testing::Test { std::shared_ptr batch_factory_; std::shared_ptr tx_presence_cache_; shared_model::crypto::Keypair my_key_; + std::shared_ptr completer_; std::shared_ptr mst_notification_transport_; }; @@ -91,6 +94,7 @@ TEST_F(TransportTest, SendAndReceive) { std::move(parser_), std::move(batch_factory_), std::move(tx_presence_cache_), + completer_, my_key_.publicKey()); transport->subscribe(mst_notification_transport_); @@ -98,7 +102,7 @@ 
TEST_F(TransportTest, SendAndReceive) { std::condition_variable cv; auto time = iroha::time::now(); - auto state = iroha::MstState::empty(); + auto state = iroha::MstState::empty(completer_); state += addSignaturesFromKeyPairs( makeTestBatch(txBuilder(1, time)), 0, makeKey()); state += addSignaturesFromKeyPairs( @@ -175,12 +179,13 @@ TEST_F(TransportTest, ReplayAttack) { std::move(parser_), std::move(batch_factory_), tx_presence_cache_, + completer_, my_key_.publicKey()); transport->subscribe(mst_notification_transport_); auto batch = makeTestBatch(txBuilder(1), txBuilder(2)); - auto state = iroha::MstState::empty(); + auto state = iroha::MstState::empty(completer_); state += addSignaturesFromKeyPairs( addSignaturesFromKeyPairs(batch, 0, makeKey()), 1, makeKey()); diff --git a/test/module/irohad/pending_txs_storage/pending_txs_storage_test.cpp b/test/module/irohad/pending_txs_storage/pending_txs_storage_test.cpp index 728ba86871..dae338c09d 100644 --- a/test/module/irohad/pending_txs_storage/pending_txs_storage_test.cpp +++ b/test/module/irohad/pending_txs_storage/pending_txs_storage_test.cpp @@ -29,6 +29,9 @@ class PendingTxsStorageFixture : public ::testing::Test { return ++latest_timestamp; } } + + std::shared_ptr completer_ = + std::make_shared(std::chrono::minutes(0)); }; /** @@ -39,7 +42,8 @@ class PendingTxsStorageFixture : public ::testing::Test { * @then the transactions can be added to MST state successfully */ TEST_F(PendingTxsStorageFixture, FixutureSelfCheck) { - auto state = std::make_shared(iroha::MstState::empty()); + auto state = + std::make_shared(iroha::MstState::empty(completer_)); auto transactions = addSignatures(makeTestBatch(txBuilder(1, getUniqueTime()), @@ -60,7 +64,8 @@ TEST_F(PendingTxsStorageFixture, FixutureSelfCheck) { * @then list of pending transactions can be received for all batch creators */ TEST_F(PendingTxsStorageFixture, InsertionTest) { - auto state = std::make_shared(iroha::MstState::empty()); + auto state = + std::make_shared(iroha::MstState::empty(completer_)); auto transactions = addSignatures( makeTestBatch(txBuilder(2, getUniqueTime(), 2, "alice@iroha"), txBuilder(2, getUniqueTime(), 2, "bob@iroha")), @@ -98,8 +103,10 @@ TEST_F(PendingTxsStorageFixture, InsertionTest) { * @then pending transactions response is also updated */ TEST_F(PendingTxsStorageFixture, SignaturesUpdate) { - auto state1 = std::make_shared(iroha::MstState::empty()); - auto state2 = std::make_shared(iroha::MstState::empty()); + auto state1 = + std::make_shared(iroha::MstState::empty(completer_)); + auto state2 = + std::make_shared(iroha::MstState::empty(completer_)); auto transactions = addSignatures( makeTestBatch(txBuilder(3, getUniqueTime(), 3, "alice@iroha")), 0, @@ -131,7 +138,8 @@ TEST_F(PendingTxsStorageFixture, SignaturesUpdate) { * @then users receives correct responses */ TEST_F(PendingTxsStorageFixture, SeveralBatches) { - auto state = std::make_shared(iroha::MstState::empty()); + auto state = + std::make_shared(iroha::MstState::empty(completer_)); auto batch1 = addSignatures( makeTestBatch(txBuilder(2, getUniqueTime(), 2, "alice@iroha"), txBuilder(2, getUniqueTime(), 2, "bob@iroha")), @@ -172,14 +180,16 @@ TEST_F(PendingTxsStorageFixture, SeveralBatches) { * @then updates don't overwrite the whole storage state */ TEST_F(PendingTxsStorageFixture, SeparateBatchesDoNotOverwriteStorage) { - auto state1 = std::make_shared(iroha::MstState::empty()); + auto state1 = + std::make_shared(iroha::MstState::empty(completer_)); auto batch1 = addSignatures( 
makeTestBatch(txBuilder(2, getUniqueTime(), 2, "alice@iroha"), txBuilder(2, getUniqueTime(), 2, "bob@iroha")), 0, makeSignature("1", "pub_key_1")); *state1 += batch1; - auto state2 = std::make_shared(iroha::MstState::empty()); + auto state2 = + std::make_shared(iroha::MstState::empty(completer_)); auto batch2 = addSignatures( makeTestBatch(txBuilder(2, getUniqueTime(), 2, "alice@iroha"), txBuilder(3, getUniqueTime(), 3, "alice@iroha")), @@ -212,7 +222,8 @@ TEST_F(PendingTxsStorageFixture, SeparateBatchesDoNotOverwriteStorage) { * @then storage removes the batch */ TEST_F(PendingTxsStorageFixture, PreparedBatch) { - auto state = std::make_shared(iroha::MstState::empty()); + auto state = + std::make_shared(iroha::MstState::empty(completer_)); std::shared_ptr batch = addSignatures( makeTestBatch(txBuilder(3, getUniqueTime(), 3, "alice@iroha")), @@ -247,7 +258,8 @@ TEST_F(PendingTxsStorageFixture, PreparedBatch) { * @then storage removes the batch */ TEST_F(PendingTxsStorageFixture, ExpiredBatch) { - auto state = std::make_shared(iroha::MstState::empty()); + auto state = + std::make_shared(iroha::MstState::empty(completer_)); std::shared_ptr batch = addSignatures( makeTestBatch(txBuilder(3, getUniqueTime(), 3, "alice@iroha")), diff --git a/test/system/irohad_test_data/config.sample b/test/system/irohad_test_data/config.sample index c4479444d2..cb9e1108cb 100644 --- a/test/system/irohad_test_data/config.sample +++ b/test/system/irohad_test_data/config.sample @@ -6,6 +6,7 @@ "max_proposal_size" : 10, "proposal_delay" : 5000, "vote_delay" : 5000, - "mst_enable" : false + "mst_enable" : false, + "mst_expiration_time" : 1440 } diff --git a/test/system/irohad_test_data/config.sample.copy b/test/system/irohad_test_data/config.sample.copy index 9e7e8c1594..7b7a2ddd5b 100644 --- a/test/system/irohad_test_data/config.sample.copy +++ b/test/system/irohad_test_data/config.sample.copy @@ -6,5 +6,6 @@ "max_proposal_size": 10, "proposal_delay": 5000, "vote_delay": 5000, - "mst_enable": false -} \ No newline at end of file + "mst_enable": false, + "mst_expiration_time" : 1440 +} From 55e7184a222170ef9e46d5bb08e51f268b2e6aca Mon Sep 17 00:00:00 2001 From: Andrei Lebedev Date: Mon, 28 Jan 2019 17:37:17 +0300 Subject: [PATCH 18/41] Refactor crypto provider usage in Simulator (#2057) Signed-off-by: Andrei Lebedev --- irohad/main/application.hpp | 7 +- irohad/simulator/impl/simulator.cpp | 3 +- irohad/simulator/impl/simulator.hpp | 16 +++-- .../abstract_crypto_model_signer.hpp | 29 ++++++++ .../crypto_provider/crypto_model_signer.hpp | 16 +++-- .../irohad/simulator/simulator_test.cpp | 26 +++---- .../cryptography/crypto_model_signer_mock.hpp | 68 ------------------- .../mock_abstract_crypto_model_signer.hpp | 25 +++++++ 8 files changed, 87 insertions(+), 103 deletions(-) create mode 100644 shared_model/cryptography/crypto_provider/abstract_crypto_model_signer.hpp delete mode 100644 test/module/shared_model/cryptography/crypto_model_signer_mock.hpp create mode 100644 test/module/shared_model/cryptography/mock_abstract_crypto_model_signer.hpp diff --git a/irohad/main/application.hpp b/irohad/main/application.hpp index a43991bdec..0f5813afcc 100644 --- a/irohad/main/application.hpp +++ b/irohad/main/application.hpp @@ -7,7 +7,8 @@ #define IROHA_APPLICATION_HPP #include "consensus/consensus_block_cache.hpp" -#include "cryptography/crypto_provider/crypto_model_signer.hpp" +#include "cryptography/crypto_provider/abstract_crypto_model_signer.hpp" +#include "interfaces/queries/query.hpp" #include "logger/logger.hpp" 
#include "main/impl/block_loader_init.hpp" #include "main/impl/consensus_init.hpp" @@ -202,7 +203,9 @@ class Irohad { std::shared_ptr wsv_restorer_; // crypto provider - std::shared_ptr> crypto_signer_; + std::shared_ptr> + crypto_signer_; // batch parser std::shared_ptr batch_parser; diff --git a/irohad/simulator/impl/simulator.cpp b/irohad/simulator/impl/simulator.cpp index 0c874174b9..61614c0e39 100644 --- a/irohad/simulator/impl/simulator.cpp +++ b/irohad/simulator/impl/simulator.cpp @@ -18,8 +18,7 @@ namespace iroha { std::shared_ptr statefulValidator, std::shared_ptr factory, std::shared_ptr block_query_factory, - std::shared_ptr> - crypto_signer, + std::shared_ptr crypto_signer, std::unique_ptr block_factory, logger::Logger log) diff --git a/irohad/simulator/impl/simulator.hpp b/irohad/simulator/impl/simulator.hpp index 5cd4bc1421..5d8ee91665 100644 --- a/irohad/simulator/impl/simulator.hpp +++ b/irohad/simulator/impl/simulator.hpp @@ -6,16 +6,16 @@ #ifndef IROHA_SIMULATOR_HPP #define IROHA_SIMULATOR_HPP -#include +#include "simulator/block_creator.hpp" +#include "simulator/verified_proposal_creator.hpp" +#include #include "ametsuchi/block_query_factory.hpp" #include "ametsuchi/temporary_factory.hpp" -#include "cryptography/crypto_provider/crypto_model_signer.hpp" +#include "cryptography/crypto_provider/abstract_crypto_model_signer.hpp" #include "interfaces/iroha_internal/unsafe_block_factory.hpp" #include "logger/logger.hpp" #include "network/ordering_gate.hpp" -#include "simulator/block_creator.hpp" -#include "simulator/verified_proposal_creator.hpp" #include "validation/stateful_validator.hpp" namespace iroha { @@ -23,13 +23,15 @@ namespace iroha { class Simulator : public VerifiedProposalCreator, public BlockCreator { public: + using CryptoSignerType = shared_model::crypto::AbstractCryptoModelSigner< + shared_model::interface::Block>; + Simulator( std::shared_ptr ordering_gate, std::shared_ptr statefulValidator, std::shared_ptr factory, std::shared_ptr block_query_factory, - std::shared_ptr> - crypto_signer, + std::shared_ptr crypto_signer, std::unique_ptr block_factory, logger::Logger log = logger::log("Simulator")); @@ -61,7 +63,7 @@ namespace iroha { std::shared_ptr validator_; std::shared_ptr ametsuchi_factory_; std::shared_ptr block_query_factory_; - std::shared_ptr> crypto_signer_; + std::shared_ptr crypto_signer_; std::unique_ptr block_factory_; diff --git a/shared_model/cryptography/crypto_provider/abstract_crypto_model_signer.hpp b/shared_model/cryptography/crypto_provider/abstract_crypto_model_signer.hpp new file mode 100644 index 0000000000..7b16094d2a --- /dev/null +++ b/shared_model/cryptography/crypto_provider/abstract_crypto_model_signer.hpp @@ -0,0 +1,29 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_ABSTRACT_CRYPTO_MODEL_SIGNER_HPP +#define IROHA_ABSTRACT_CRYPTO_MODEL_SIGNER_HPP + +namespace shared_model { + namespace crypto { + + /** + * An interface that supports signing Model objects + */ + template + class AbstractCryptoModelSigner { + public: + /** + * Signs m according to implementation + */ + virtual void sign(Model &m) const = 0; + + virtual ~AbstractCryptoModelSigner() = default; + }; + + } // namespace crypto +} // namespace shared_model + +#endif // IROHA_ABSTRACT_CRYPTO_MODEL_SIGNER_HPP diff --git a/shared_model/cryptography/crypto_provider/crypto_model_signer.hpp b/shared_model/cryptography/crypto_provider/crypto_model_signer.hpp index 58f08aaf7b..40e8a8b26f 100644 --- a/shared_model/cryptography/crypto_provider/crypto_model_signer.hpp +++ b/shared_model/cryptography/crypto_provider/crypto_model_signer.hpp @@ -6,19 +6,17 @@ #ifndef IROHA_CRYPTO_MODEL_SIGNER_HPP_ #define IROHA_CRYPTO_MODEL_SIGNER_HPP_ +#include "cryptography/crypto_provider/abstract_crypto_model_signer.hpp" #include "cryptography/crypto_provider/crypto_signer.hpp" -namespace shared_model { +#include "interfaces/iroha_internal/block.hpp" - namespace interface { - class Block; - class Query; - class Transaction; - } +namespace shared_model { namespace crypto { template > - class CryptoModelSigner { + class CryptoModelSigner + : public AbstractCryptoModelSigner { public: explicit CryptoModelSigner(const shared_model::crypto::Keypair &keypair); @@ -30,6 +28,10 @@ namespace shared_model { signable.addSignature(signedBlob, keypair_.publicKey()); } + void sign(interface::Block &m) const override { + sign(m); + } + private: shared_model::crypto::Keypair keypair_; }; diff --git a/test/module/irohad/simulator/simulator_test.cpp b/test/module/irohad/simulator/simulator_test.cpp index bf683d9a7e..570b45da9d 100644 --- a/test/module/irohad/simulator/simulator_test.cpp +++ b/test/module/irohad/simulator/simulator_test.cpp @@ -20,7 +20,7 @@ #include "module/shared_model/builders/protobuf/proposal.hpp" #include "module/shared_model/builders/protobuf/test_block_builder.hpp" #include "module/shared_model/builders/protobuf/test_proposal_builder.hpp" -#include "module/shared_model/cryptography/crypto_model_signer_mock.hpp" +#include "module/shared_model/cryptography/mock_abstract_crypto_model_signer.hpp" #include "module/shared_model/validators/validators.hpp" using namespace iroha; @@ -41,16 +41,15 @@ using wBlock = std::shared_ptr; class SimulatorTest : public ::testing::Test { public: - void SetUp() override { - shared_model::crypto::crypto_signer_expecter = - std::make_shared(); + using CryptoSignerType = shared_model::crypto::MockAbstractCryptoModelSigner< + shared_model::interface::Block>; + void SetUp() override { validator = std::make_shared(); factory = std::make_shared>(); query = std::make_shared(); ordering_gate = std::make_shared(); - crypto_signer = std::make_shared>( - shared_model::crypto::DefaultCryptoAlgorithmType::generateKeypair()); + crypto_signer = std::make_shared(); block_query_factory = std::make_shared(); EXPECT_CALL(*block_query_factory, createBlockQuery()) .WillRepeatedly(testing::Return(boost::make_optional( @@ -72,10 +71,6 @@ class SimulatorTest : public ::testing::Test { std::move(block_factory)); } - void TearDown() override { - shared_model::crypto::crypto_signer_expecter.reset(); - } - consensus::Round round; std::shared_ptr validator; @@ -83,7 +78,7 @@ class SimulatorTest : public ::testing::Test { std::shared_ptr query; 
std::shared_ptr block_query_factory; std::shared_ptr ordering_gate; - std::shared_ptr> crypto_signer; + std::shared_ptr crypto_signer; std::unique_ptr block_factory; rxcpp::subjects::subject ordering_events; @@ -157,8 +152,7 @@ TEST_F(SimulatorTest, ValidWhenPreviousBlock) { return std::move(validation_result); })); - EXPECT_CALL(*shared_model::crypto::crypto_signer_expecter, - sign(A())) + EXPECT_CALL(*crypto_signer, sign(A())) .Times(1); auto proposal_wrapper = @@ -195,8 +189,7 @@ TEST_F(SimulatorTest, FailWhenNoBlock) { EXPECT_CALL(*validator, validate(_, _)).Times(0); - EXPECT_CALL(*shared_model::crypto::crypto_signer_expecter, - sign(A())) + EXPECT_CALL(*crypto_signer, sign(A())) .Times(0); auto proposal_wrapper = @@ -226,8 +219,7 @@ TEST_F(SimulatorTest, FailWhenSameAsProposalHeight) { EXPECT_CALL(*validator, validate(_, _)).Times(0); - EXPECT_CALL(*shared_model::crypto::crypto_signer_expecter, - sign(A())) + EXPECT_CALL(*crypto_signer, sign(A())) .Times(0); auto proposal_wrapper = diff --git a/test/module/shared_model/cryptography/crypto_model_signer_mock.hpp b/test/module/shared_model/cryptography/crypto_model_signer_mock.hpp deleted file mode 100644 index 45f989c6c0..0000000000 --- a/test/module/shared_model/cryptography/crypto_model_signer_mock.hpp +++ /dev/null @@ -1,68 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_CRYPTO_MODEL_SIGNER_MOCK_HPP -#define IROHA_CRYPTO_MODEL_SIGNER_MOCK_HPP - -#include - -#include "backend/protobuf/block.hpp" -#include "backend/protobuf/queries/proto_query.hpp" -#include "backend/protobuf/transaction.hpp" -#include "cryptography/crypto_provider/crypto_model_signer.hpp" - -namespace shared_model { - namespace crypto { - - /** - * Here we mock template method sign<>() from template class - * CryptoModelSigner<>. We specialize interestd methods that call - * crypto_signer_expecter which will catch interesting calls. Just use - * crypto_signer_expecter in EXPECT_CALL() instead of CryptoModelSigner - * instance. Since gtest consider global variable as memory leak, - * we wrap crypto_signer_expecter in shared_ptr. It is reaponsibility - * of the tester to make_shared and reset - * crypto_signer_expecter before the end of test. 
- * - * Usage: - * shared_model::crypto::CryptoModelSigner<> crypto_signer(keypair); - * EXPECT_CALL(crypto_signer_expecter, sign(block)); - * crypto_signer.sign(block); - */ - - class CryptoModelSignerExpecter { - public: - MOCK_CONST_METHOD1(sign, void(shared_model::interface::Block &)); - MOCK_CONST_METHOD1(sign, void(shared_model::interface::Query &)); - MOCK_CONST_METHOD1(sign, void(shared_model::interface::Transaction &)); - }; - - std::shared_ptr crypto_signer_expecter; - - template <> - template <> - void CryptoModelSigner<>::sign( - shared_model::interface::Block &signable) const noexcept { - crypto_signer_expecter->sign(signable); - } - - template <> - template <> - void CryptoModelSigner<>::sign( - shared_model::interface::Query &signable) const noexcept { - crypto_signer_expecter->sign(signable); - } - - template <> - template <> - void CryptoModelSigner<>::sign( - shared_model::interface::Transaction &signable) const noexcept { - crypto_signer_expecter->sign(signable); - } - - } // namespace crypto -} // namespace shared_model - -#endif // IROHA_CRYPTO_MODEL_SIGNER_MOCK_HPP diff --git a/test/module/shared_model/cryptography/mock_abstract_crypto_model_signer.hpp b/test/module/shared_model/cryptography/mock_abstract_crypto_model_signer.hpp new file mode 100644 index 0000000000..4771a9caaf --- /dev/null +++ b/test/module/shared_model/cryptography/mock_abstract_crypto_model_signer.hpp @@ -0,0 +1,25 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_ABSTRACT_CRYPTO_MODEL_SIGNER_HPP +#define IROHA_MOCK_ABSTRACT_CRYPTO_MODEL_SIGNER_HPP + +#include "cryptography/crypto_provider/abstract_crypto_model_signer.hpp" + +#include + +namespace shared_model { + namespace crypto { + + template + class MockAbstractCryptoModelSigner : public AbstractCryptoModelSigner { + public: + MOCK_CONST_METHOD1_T(sign, void(T &)); + }; + + } // namespace crypto +} // namespace shared_model + +#endif // IROHA_MOCK_ABSTRACT_CRYPTO_MODEL_SIGNER_HPP From 13433a73b13611bacde7b840fa45d0af2b8d8a75 Mon Sep 17 00:00:00 2001 From: Bulat Saifullin Date: Tue, 29 Jan 2019 09:25:19 +0300 Subject: [PATCH 19/41] IR-181 Migrate to new CI pipeline (#2042) * move to new folder Signed-off-by: Bulat Saifullin * rename .jenkinsci to .jenkinsci-new Signed-off-by: Bulat Saifullin * fix pr comments Signed-off-by: Bulat Saifullin * delete TODO Signed-off-by: Bulat Saifullin --- .jenkinsci-new/artifacts.groovy | 53 +++ .jenkinsci-new/build.groovy | 67 ++++ .../builders/x64-linux-build-steps.groovy | 169 ++++++++++ .../builders/x64-mac-build-steps.groovy | 108 +++++++ .jenkinsci-new/helpers/platform_tag.py | 23 ++ .jenkinsci-new/text-variables.groovy | 217 +++++++++++++ .jenkinsci-new/utils/docker-manifest.groovy | 36 +++ .../utils/docker-pull-or-build.groovy | 73 +++++ .jenkinsci-new/utils/doxygen.groovy | 30 ++ .jenkinsci-new/utils/utils.groovy | 51 +++ .jenkinsci-new/utils/vars.groovy | 21 ++ Jenkinsfile-new | 306 ++++++++++++++++++ 12 files changed, 1154 insertions(+) create mode 100644 .jenkinsci-new/artifacts.groovy create mode 100644 .jenkinsci-new/build.groovy create mode 100644 .jenkinsci-new/builders/x64-linux-build-steps.groovy create mode 100644 .jenkinsci-new/builders/x64-mac-build-steps.groovy create mode 100644 .jenkinsci-new/helpers/platform_tag.py create mode 100644 .jenkinsci-new/text-variables.groovy create mode 100644 .jenkinsci-new/utils/docker-manifest.groovy create mode 100644 .jenkinsci-new/utils/docker-pull-or-build.groovy create 
mode 100644 .jenkinsci-new/utils/doxygen.groovy create mode 100644 .jenkinsci-new/utils/utils.groovy create mode 100644 .jenkinsci-new/utils/vars.groovy create mode 100644 Jenkinsfile-new diff --git a/.jenkinsci-new/artifacts.groovy b/.jenkinsci-new/artifacts.groovy new file mode 100644 index 0000000000..148c590e0d --- /dev/null +++ b/.jenkinsci-new/artifacts.groovy @@ -0,0 +1,53 @@ +#!/usr/bin/env groovy +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +// +// Upload Artifacts to nexus +// + +def uploadArtifacts(filePaths, uploadPath, artifactServers=['nexus.iroha.tech']) { + def filePathsConverted = [] + agentType = sh(script: 'uname', returnStdout: true).trim() + filePaths.each { + fp = sh(script: "ls -d ${it} | tr '\n' ','", returnStdout: true).trim() + filePathsConverted.addAll(fp.split(',')) + } + def shaSumBinary = 'sha256sum' + def md5SumBinary = 'md5sum' + def gpgKeyBinary = 'gpg --armor --detach-sign --no-tty --batch --yes --passphrase-fd 0' + if (agentType == 'Darwin') { + shaSumBinary = 'shasum -a 256' + md5SumBinary = 'md5 -r' + gpgKeyBinary = 'GPG_TTY=\$(tty) gpg --pinentry-mode loopback --armor --detach-sign --no-tty --batch --yes --passphrase-fd 0' + } + sh "> \$(pwd)/batch.txt" + + withCredentials([file(credentialsId: 'ci_gpg_privkey', variable: 'CI_GPG_PRIVKEY'), string(credentialsId: 'ci_gpg_masterkey', variable: 'CI_GPG_MASTERKEY')]) { + if (!agentType.contains('MSYS_NT')) { + sh "gpg --yes --batch --no-tty --import ${CI_GPG_PRIVKEY} || true" + } + filePathsConverted.each { + sh "echo ${it} >> \$(pwd)/batch.txt;" + sh "$shaSumBinary ${it} | cut -d' ' -f1 > \$(pwd)/\$(basename ${it}).sha256" + sh "$md5SumBinary ${it} | cut -d' ' -f1 > \$(pwd)/\$(basename ${it}).md5" + if (!agentType.contains('MSYS_NT')) { + sh "echo \"${CI_GPG_MASTERKEY}\" | $gpgKeyBinary -o \$(pwd)/\$(basename ${it}).ascfile ${it}" + sh "echo \$(pwd)/\$(basename ${it}).ascfile >> \$(pwd)/batch.txt;" + } + sh "echo \$(pwd)/\$(basename ${it}).sha256 >> \$(pwd)/batch.txt;" + sh "echo \$(pwd)/\$(basename ${it}).md5 >> \$(pwd)/batch.txt;" + } + } + + withCredentials([usernamePassword(credentialsId: 'ci_nexus', passwordVariable: 'NEXUS_PASS', usernameVariable: 'NEXUS_USER')]) { + artifactServers.each { + sh(script: "while read line; do curl --http1.1 -u ${NEXUS_USER}:${NEXUS_PASS} --upload-file \$line https://${it}/repository/artifacts/${uploadPath}/ ; done < \$(pwd)/batch.txt") + } + } +} + +return this + diff --git a/.jenkinsci-new/build.groovy b/.jenkinsci-new/build.groovy new file mode 100644 index 0000000000..8ac859cf60 --- /dev/null +++ b/.jenkinsci-new/build.groovy @@ -0,0 +1,67 @@ +#!/usr/bin/env groovy +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +// +// functions we use when build iroha +// + +def cmakeConfigure(String buildDir, String cmakeOptions, String sourceTreeDir=".") { + sh "cmake -H${sourceTreeDir} -B${buildDir} ${cmakeOptions}" +} + +def cmakeBuild(String buildDir, String cmakeOptions, int parallelism) { + sh "cmake --build ${buildDir} ${cmakeOptions} -- -j${parallelism}" + sh "ccache --show-stats" +} + +def cmakeBuildWindows(String buildDir, String cmakeOptions) { + sh "cmake --build ${buildDir} ${cmakeOptions}" +} + +def cppCheck(String buildDir, int parallelism) { + // github.com/jenkinsci/cppcheck-plugin/pull/36 + sh "cppcheck -j${parallelism} --enable=all -i${buildDir} --template='{file},,{line},,{severity},,{id},,{message}' . 
2> cppcheck.txt" + warnings ( + parserConfigurations: [[parserName: 'Cppcheck', pattern: "cppcheck.txt"]], categoriesPattern: '', + defaultEncoding: '', excludePattern: '', healthy: '', includePattern: '', messagesPattern: '', unHealthy: '' + ) +} + +def sonarScanner(scmVars, environment) { + withEnv(environment) { + withCredentials([string(credentialsId: 'SONAR_TOKEN', variable: 'SONAR_TOKEN'), string(credentialsId: 'SORABOT_TOKEN', variable: 'SORABOT_TOKEN')]) { + sonar_option = "" + if (scmVars.CHANGE_ID != null) + sonar_option = "-Dsonar.github.pullRequest=${scmVars.CHANGE_ID}" + else + print "************** Warning No 'CHANGE_ID' Present run sonar without org.sonar.plugins.github.PullRequestProjectBuilder *****************" + + sh """ + sonar-scanner \ + -Dsonar.github.disableInlineComments \ + -Dsonar.github.repository='${env.DOCKER_REGISTRY_BASENAME}' \ + -Dsonar.analysis.mode=preview \ + -Dsonar.login=${SONAR_TOKEN} \ + -Dsonar.projectVersion=${BUILD_TAG} \ + -Dsonar.github.oauth=${SORABOT_TOKEN} ${sonar_option} + """ + } + } +} + +def initialCoverage(String buildDir) { + sh "cmake --build ${buildDir} --target coverage.init.info" +} + +def postCoverage(buildDir, String cobertura_bin) { + sh "cmake --build ${buildDir} --target coverage.info" + sh "python ${cobertura_bin} ${buildDir}/reports/coverage.info -o ${buildDir}/reports/coverage.xml" + cobertura autoUpdateHealth: false, autoUpdateStability: false, + coberturaReportFile: "**/${buildDir}/reports/coverage.xml", conditionalCoverageTargets: '75, 50, 0', + failUnhealthy: false, failUnstable: false, lineCoverageTargets: '75, 50, 0', maxNumberOfBuilds: 50, + methodCoverageTargets: '75, 50, 0', onlyStable: false, zoomCoverageChart: false +} +return this diff --git a/.jenkinsci-new/builders/x64-linux-build-steps.groovy b/.jenkinsci-new/builders/x64-linux-build-steps.groovy new file mode 100644 index 0000000000..633a27a5c9 --- /dev/null +++ b/.jenkinsci-new/builders/x64-linux-build-steps.groovy @@ -0,0 +1,169 @@ +#!/usr/bin/env groovy +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +// +// Linux Build steps +// + +def dockerManifestPush(dockerImageObj, String dockerTag, environment) { + def manifest = load ".jenkinsci-new/utils/docker-manifest.groovy" + withEnv(environment) { + if (manifest.manifestSupportEnabled()) { + manifest.manifestCreate("${env.DOCKER_REGISTRY_BASENAME}:${dockerTag}", + ["${env.DOCKER_REGISTRY_BASENAME}:x86_64-${dockerTag}"]) + manifest.manifestAnnotate("${env.DOCKER_REGISTRY_BASENAME}:${dockerTag}", + [ + [manifest: "${env.DOCKER_REGISTRY_BASENAME}:x86_64-${dockerTag}", + arch: 'amd64', os: 'linux', osfeatures: [], variant: ''], + ]) + withCredentials([usernamePassword(credentialsId: 'docker-hub-credentials', usernameVariable: 'login', passwordVariable: 'password')]) { + manifest.manifestPush("${env.DOCKER_REGISTRY_BASENAME}:${dockerTag}", login, password) + } + } + else { + echo('[WARNING] Docker CLI does not support manifest management features. 
Manifest will not be updated') + } + } +} + +def testSteps(String buildDir, List environment, String testList) { + withEnv(environment) { + sh "cd ${buildDir}; ctest --output-on-failure --no-compress-output --tests-regex '${testList}' --test-action Test || true" + sh """ python .jenkinsci-new/helpers/platform_tag.py "Linux \$(uname -m)" \$(ls ${buildDir}/Testing/*/Test.xml) """ + // Mark build as UNSTABLE if there are any failed tests (threshold <100%) + xunit testTimeMargin: '3000', thresholdMode: 2, thresholds: [passed(unstableThreshold: '100')], \ + tools: [CTest(deleteOutputFiles: true, failIfNotNew: false, \ + pattern: "${buildDir}/Testing/**/Test.xml", skipNoTestFiles: false, stopProcessingIfError: true)] + } +} + +def buildSteps(int parallelism, List compilerVersions, String build_type, boolean specialBranch, boolean coverage, + boolean testing, String testList, boolean cppcheck, boolean sonar, boolean docs, boolean packagebuild, boolean sanitize, boolean fuzzing, List environment) { + withEnv(environment) { + scmVars = checkout scm + def build = load '.jenkinsci-new/build.groovy' + def vars = load ".jenkinsci-new/utils/vars.groovy" + def utils = load ".jenkinsci-new/utils/utils.groovy" + def dockerUtils = load ".jenkinsci-new/utils/docker-pull-or-build.groovy" + def doxygen = load ".jenkinsci-new/utils/doxygen.groovy" + buildDir = 'build' + compilers = vars.compilerMapping() + cmakeBooleanOption = [ (true): 'ON', (false): 'OFF' ] + platform = sh(script: 'uname -m', returnStdout: true).trim() + cmakeBuildOptions = "" + cmakeOptions = "" + if (packagebuild){ + cmakeBuildOptions = " --target package " + } + if (sanitize){ + cmakeOptions += " -DSANITIZE='address;leak' " + } + sh "docker network create ${env.IROHA_NETWORK}" + iC = dockerUtils.dockerPullOrBuild("${platform}-develop-build", + "${env.GIT_RAW_BASE_URL}/${scmVars.GIT_COMMIT}/docker/develop/Dockerfile", + "${env.GIT_RAW_BASE_URL}/${utils.previousCommitOrCurrent(scmVars)}/docker/develop/Dockerfile", + "${env.GIT_RAW_BASE_URL}/develop/docker/develop/Dockerfile", + scmVars, + environment, + ['PARALLELISM': parallelism]) + // enable prepared transactions so that 2 phase commit works + // we set it to 100 as a safe value + sh "docker run -td -e POSTGRES_USER=${env.IROHA_POSTGRES_USER} \ + -e POSTGRES_PASSWORD=${env.IROHA_POSTGRES_PASSWORD} --name ${env.IROHA_POSTGRES_HOST} \ + --network=${env.IROHA_NETWORK} postgres:9.5 -c 'max_prepared_transactions=100'" + iC.inside("" + + " -e IROHA_POSTGRES_HOST=${env.IROHA_POSTGRES_HOST}" + + " -e IROHA_POSTGRES_PORT=${env.IROHA_POSTGRES_PORT}" + + " -e IROHA_POSTGRES_USER=${env.IROHA_POSTGRES_USER}" + + " -e IROHA_POSTGRES_PASSWORD=${env.IROHA_POSTGRES_PASSWORD}" + + " --network=${env.IROHA_NETWORK}" + + " -v /var/jenkins/ccache:${env.CCACHE_DEBUG_DIR}") { + utils.ccacheSetup(5) + for (compiler in compilerVersions) { + stage ("build ${compiler}"){ + build.cmakeConfigure(buildDir, "-DCMAKE_CXX_COMPILER=${compilers[compiler]['cxx_compiler']} \ + -DCMAKE_C_COMPILER=${compilers[compiler]['cc_compiler']} \ + -DCMAKE_BUILD_TYPE=${build_type} \ + -DCOVERAGE=${cmakeBooleanOption[coverage]} \ + -DTESTING=${cmakeBooleanOption[testing]} \ + -DFUZZING=${cmakeBooleanOption[fuzzing]} \ + -DPACKAGE_DEB=${cmakeBooleanOption[packagebuild]} \ + -DPACKAGE_TGZ=${cmakeBooleanOption[packagebuild]} ${cmakeOptions}") + build.cmakeBuild(buildDir, cmakeBuildOptions, parallelism) + } + if (testing) { + stage("Test ${compiler}") { + coverage ? 
build.initialCoverage(buildDir) : echo('Skipping initial coverage...') + testSteps(buildDir, environment, testList) + coverage ? build.postCoverage(buildDir, '/tmp/lcov_cobertura.py') : echo('Skipping post coverage...') + // We run coverage once, using the first compiler as it is enough + coverage = false + } + } + } + stage("Analysis") { + cppcheck ? build.cppCheck(buildDir, parallelism) : echo('Skipping Cppcheck...') + sonar ? build.sonarScanner(scmVars, environment) : echo('Skipping Sonar Scanner...') + } + stage('Build docs'){ + docs ? doxygen.doDoxygen(specialBranch, scmVars.GIT_LOCAL_BRANCH) : echo("Skipping Doxygen...") + } + stage ('Docker ManifestPush'){ + if (specialBranch) { + utils.dockerPush(iC, "${platform}-develop-build") + dockerManifestPush(iC, "develop-build", environment) + } + } + } + } +} + +def successPostSteps(scmVars, boolean packagePush, String dockerTag, List environment) { + stage('Linux success PostSteps') { + withEnv(environment) { + if (packagePush) { + def artifacts = load ".jenkinsci-new/artifacts.groovy" + def utils = load ".jenkinsci-new/utils/utils.groovy" + platform = sh(script: 'uname -m', returnStdout: true).trim() + def commit = scmVars.GIT_COMMIT + + // if we use several compilers only the last compiler, used for the build, will be used for iroha.deb and iroha.tar.gz archives + sh """ + ls -lah ./build + mv ./build/iroha-*.deb ./build/iroha.deb + mv ./build/iroha-*.tar.gz ./build/iroha.tar.gz + cp ./build/iroha.deb docker/release/iroha.deb + mkdir -p build/artifacts + mv ./build/iroha.deb ./build/iroha.tar.gz build/artifacts + """ + // publish docker + iCRelease = docker.build("${env.DOCKER_REGISTRY_BASENAME}:${commit}-${env.BUILD_NUMBER}-release", "--no-cache -f docker/release/Dockerfile ${WORKSPACE}/docker/release") + utils.dockerPush(iCRelease, "${platform}-${dockerTag}") + dockerManifestPush(iCRelease, dockerTag, environment) + sh "docker rmi ${iCRelease.id}" + + // publish packages + filePaths = [ './build/artifacts/iroha.deb', './build/artifacts/iroha.tar.gz' ] + artifacts.uploadArtifacts(filePaths, sprintf('/iroha/linux/%4$s/%1$s-%2$s-%3$s', [scmVars.GIT_LOCAL_BRANCH, sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6), platform])) + } else { + archiveArtifacts artifacts: 'build/iroha*.tar.gz', allowEmptyArchive: true + archiveArtifacts artifacts: 'build/iroha*.deb', allowEmptyArchive: true + } + } + } +} + +def alwaysPostSteps(List environment) { + stage('Linux always PostSteps') { + withEnv(environment) { + sh "docker rm -f ${env.IROHA_POSTGRES_HOST} || true" + sh "docker network rm ${env.IROHA_NETWORK}" + cleanWs() + } + } +} + +return this diff --git a/.jenkinsci-new/builders/x64-mac-build-steps.groovy b/.jenkinsci-new/builders/x64-mac-build-steps.groovy new file mode 100644 index 0000000000..9ea7b1177f --- /dev/null +++ b/.jenkinsci-new/builders/x64-mac-build-steps.groovy @@ -0,0 +1,108 @@ +#!/usr/bin/env groovy +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +// +// Mac Build steps +// + +def testSteps(scmVars, String buildDir, List environment, String testList) { + withEnv(environment) { + sh """ + export IROHA_POSTGRES_PASSWORD=${IROHA_POSTGRES_PASSWORD}; \ + export IROHA_POSTGRES_USER=${IROHA_POSTGRES_USER}; \ + mkdir -p /var/jenkins/${scmVars.GIT_COMMIT}-${BUILD_NUMBER}; \ + initdb -D /var/jenkins/${scmVars.GIT_COMMIT}-${BUILD_NUMBER}/ -U ${IROHA_POSTGRES_USER} --pwfile=<(echo ${IROHA_POSTGRES_PASSWORD}); \ + pg_ctl -D /var/jenkins/${scmVars.GIT_COMMIT}-${BUILD_NUMBER}/ -o '-p 5433 -c max_prepared_transactions=100' -l /var/jenkins/${scmVars.GIT_COMMIT}-${BUILD_NUMBER}/events.log start; \ + psql -h localhost -d postgres -p 5433 -U ${IROHA_POSTGRES_USER} --file=<(echo create database ${IROHA_POSTGRES_USER};) + """ + sh "cd build; IROHA_POSTGRES_HOST=localhost IROHA_POSTGRES_PORT=5433 ctest --output-on-failure --no-compress-output --tests-regex '${testList}' --test-action Test || true" + + sh 'python .jenkinsci-new/helpers/platform_tag.py "Darwin \$(uname -m)" \$(ls build/Testing/*/Test.xml)' + // Mark build as UNSTABLE if there are any failed tests (threshold <100%) + xunit testTimeMargin: '3000', thresholdMode: 2, thresholds: [passed(unstableThreshold: '100')], \ + tools: [CTest(deleteOutputFiles: true, failIfNotNew: false, \ + pattern: 'build/Testing/**/Test.xml', skipNoTestFiles: false, stopProcessingIfError: true)] + + sh """ + pg_ctl -D /var/jenkins/${scmVars.GIT_COMMIT}-${BUILD_NUMBER}/ stop && \ + rm -rf /var/jenkins/${scmVars.GIT_COMMIT}-${BUILD_NUMBER}/ + """ + } +} + +def buildSteps(int parallelism, List compilerVersions, String build_type, boolean coverage, boolean testing, String testList, boolean packagebuild, List environment) { + withEnv(environment) { + scmVars = checkout scm + def build = load '.jenkinsci-new/build.groovy' + def vars = load ".jenkinsci-new/utils/vars.groovy" + def utils = load ".jenkinsci-new/utils/utils.groovy" + buildDir = 'build' + compilers = vars.compilerMapping() + cmakeBooleanOption = [ (true): 'ON', (false): 'OFF' ] + cmakeBuildOptions = "" + + if (packagebuild){ + cmakeBuildOptions = " --target package " + } + + utils.ccacheSetup(5) + + for (compiler in compilerVersions) { + stage ("build ${compiler}"){ + build.cmakeConfigure(buildDir, + "-DCMAKE_CXX_COMPILER=${compilers[compiler]['cxx_compiler']} \ + -DCMAKE_C_COMPILER=${compilers[compiler]['cc_compiler']} \ + -DCMAKE_BUILD_TYPE=${build_type} \ + -DCOVERAGE=${cmakeBooleanOption[coverage]} \ + -DTESTING=${cmakeBooleanOption[testing]} \ + -DPACKAGE_TGZ=${cmakeBooleanOption[packagebuild]} ") + + build.cmakeBuild(buildDir, cmakeBuildOptions, parallelism) + } + if (testing) { + stage("Test ${compiler}") { + coverage ? build.initialCoverage(buildDir) : echo('Skipping initial coverage...') + testSteps(scmVars, buildDir, environment, testList) + coverage ? 
build.postCoverage(buildDir, '/usr/local/bin/lcov_cobertura.py') : echo('Skipping post coverage...') + // We run coverage once, using the first compiler as it is enough + coverage = false + } + } + } + } +} + +def successPostSteps(scmVars, boolean packagePush, List environment) { + stage('Mac success PostSteps') { + withEnv(environment) { + timeout(time: 600, unit: "SECONDS") { + if (packagePush) { + def artifacts = load ".jenkinsci-new/artifacts.groovy" + def commit = scmVars.GIT_COMMIT + // if we use several compilers only the last compiler, used for the build, will be used for iroha.deb and iroha.tar.gz archives + sh """ + ls -lah ./build + mv ./build/iroha-*.tar.gz ./build/iroha.tar.gz + """ + // publish packages + filePaths = [ '\$(pwd)/build/*.tar.gz' ] + artifacts.uploadArtifacts(filePaths, sprintf('iroha/macos/%1$s-%2$s-%3$s', [scmVars.GIT_LOCAL_BRANCH, sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6)])) + } else { + archiveArtifacts artifacts: 'build/iroha*.tar.gz', allowEmptyArchive: true + } + } + } + } +} + +def alwaysPostSteps(List environment) { + stage('Mac always PostSteps') { + withEnv(environment) { + cleanWs() + } + } +} +return this \ No newline at end of file diff --git a/.jenkinsci-new/helpers/platform_tag.py b/.jenkinsci-new/helpers/platform_tag.py new file mode 100644 index 0000000000..4f309df28e --- /dev/null +++ b/.jenkinsci-new/helpers/platform_tag.py @@ -0,0 +1,23 @@ +#!/usr/env/python +# +# Copyright Soramitsu Co., Ltd. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# + +# +# Adds prefix (can be os name) to each test report. +# + +import xml.etree.ElementTree as ET +import argparse + +parser = argparse.ArgumentParser(description='Tag test names in a JUnit report') +for arg in ['tag', 'xml_report_file']: + parser.add_argument(arg) +args = parser.parse_args() + +tree = ET.parse(args.xml_report_file) +root = tree.getroot() +for i in root.findall(".//Test/Name"): + i.text = "%s | %s" % (args.tag, i.text) +tree.write(args.xml_report_file) diff --git a/.jenkinsci-new/text-variables.groovy b/.jenkinsci-new/text-variables.groovy new file mode 100644 index 0000000000..b12d53bf83 --- /dev/null +++ b/.jenkinsci-new/text-variables.groovy @@ -0,0 +1,217 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +// +// Text variable for jenkins job description +// + +param_chose_opt = 'Default\nBranch commit\nOn open PR\nCommit in Open PR\nBefore merge to trunk\nBefore merge develop\nBefore merge master\nNightly build\nCustom command' + +param_descriptions = """ +

+ Default - will automatically chose the correct one based on branch name and build number
+ Branch commit - Linux/gcc v5; Test: Smoke, Unit;
+ On open PR - Linux/gcc v5, MacOS/appleclang; Test: Smoke, Unit; Coverage; Analysis: cppcheck, sonar;
+ Commit in Open PR - Same as Branch commit
+ Before merge to trunk - Linux/gcc v5 v7, Linux/clang v6 v7, MacOS/appleclang; Test: ALL; Coverage; Analysis: cppcheck, sonar; Build type: Debug when Release
+ Before merge develop - Not implemented
+ Before merge master - Not implemented
+ Nightly build - Not implemented
+ Custom command - enter command below, Ex: build_type='Release'; testing=false;
+

+""" + +cmd_description = """ +

List of parameters for Jenkins "Custom command" option:

+
+
    +
  • +

    <option_name> = <default_value> [(or <second_default_value> if <git branch name>)] 

    +
      +
    • +

      <descriptions>

      +
    • +
    • +

      Ex: <Example of Use>

      +
    • +
    +
  • +
+
+
    +
  • +

    x64linux_compiler_list = ['gcc5'] 

    +
      +
    • +

      Linux compiler name to build

      +
    • +
    • +

      Ex: x64linux_compiler_list = ['gcc5','gcc7', 'clang6' , 'clang7']

      +
    • +
    +
  • +
  • +

    mac_compiler_list = [ ] 

    +
      +
    • +

      Mac compiler name to build

      +
    • +
    • +

      Ex: mac_compiler_list = ['appleclang']

      +
    • +
    +
  • +
  • +

    parallelism = 0

    +
      +
    • +

      Build in parallel. 0 is choose default: 8 for Linux and 4 for Mac

      +
    • +
    • Ex: cppcheck = flase
    • +
    +
  • +
  • +

    testing = true 

    +
      +
    • +

      Run test for each selected compiler, in jenkins will be several reports

      +
    • +
    • Ex: cppcheck = flase
    • +
    +
  • +
  • +

    testList = '(module)' 

    +
      +
    • +

      Test Regex name

      +
    • +
    • +

      Ex: testList = '()'-All, testList = '(module|integration|system|cmake|regression|benchmark|framework)'

      +
    • +
    +
  • +
  • +

    sanitize = false 

    +
      +
    • +

      Adds cmakeOptions -DSANITIZE='address;leak' 

      +
    • +
    • +

      Ex: sanitize=true;

      +
    • +
    +
  • +
  • +

    cppcheck = false

    +
      +
    • +

      Runs cppcheck 

      +
    • +
    • +

      Ex: cppcheck = true

      +
    • +
    +
  • +
  • +

    fuzzing = false 

    +
      +
    • +

      builds fuzzing tests, work only with x64linux_compiler_list = ['clang6'] 

      +
    • +
    • +

      Ex: fuzzing=true; x64linux_compiler_list= ['clang6']; testing = true; testList = "(None)"

      +
    • +
    +
  • +
  • +

    sonar = false 

    +
      +
    • +

      Runs Sonar Analysis, runs only on Linux

      +
    • +
    • +

      Ex: sonar = true;x64linux_compiler_list= ['gcc5','gcc7']

      +
    • +
    +
  • +
  • +

    coverage = false 

    +
      +
    • +

      Runs coverage, will run only if testing = true 

      +
    • +
    • +

      Ex: coverage = true

      +
    • +
    +
  • +
  • +

    doxygen = false (or = true if master|develop|dev ) 

    +
      +
    • +

      Build doxygen, if specialBranch== true will publish, if not specialBranch will upload it to jenkins,

      +
    • +
    • +

      Ex: doxygen=true

      +
    • +
    +
  • +
  • +

    build_type = 'Debug' 

    +
      +
    • +

      Sets -DCMAKE_BUILD_TYPE=Debug 

      +
    • +
    • +

      Ex: build_type = 'Release';packageBuild = true;testing=false

      +
    • +
    +
  • +
  • +

    packageBuild = false 

    +
      +
    • +

      Build package Work only with build_type = 'Release' and testing=false 

      +
    • +
    • +

      Ex: packageBuild = true;build_type = 'Release';testing=false

      +
    • +
    +
  • +
  • +

    pushDockerTag = 'not-supposed-to-be-pushed'(or = latest if master, or = develop if develop|dev) -

    +
      +
    • +

      if packagePush=true it the name of docker tag that will be pushed

      +
    • +
    • +

      Ex: packageBuild = true;build_type = 'Release';testing=false;packagePush=true

      +
    • +
    +
  • +
  • +

    packagePush = false (or = true if master|develop|dev ) -

    +
      +
    • +

      push all packages and docker to the artifactory and docker hub

      +
    • +
    • +

      Ex:packagePush=true;packageBuild = true;build_type = 'Release';testing=false

      +
    • +
    +
  • +
  • +

    specialBranch = false (or = true if master|develop|dev ),

    +
      +
    • +

      Not recommended to set, it used to decide push doxygen and iroha:develop-build or not, and force to run build_type = 'Release'

      +
    • +
    +
  • +
+

Red - this options require to set additional options, or may conflict with another options 

+""" + +return this \ No newline at end of file diff --git a/.jenkinsci-new/utils/docker-manifest.groovy b/.jenkinsci-new/utils/docker-manifest.groovy new file mode 100644 index 0000000000..403febc201 --- /dev/null +++ b/.jenkinsci-new/utils/docker-manifest.groovy @@ -0,0 +1,36 @@ +#!/usr/bin/env groovy +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +// +// Helpers to work with docker manifest +// + +def manifestSupportEnabled() { + def dockerVersion = sh(script: "docker -v", returnStdout: true).trim() + def experimentalEnabled = sh(script: "grep -i experimental ~/.docker/config.json", returnStatus: true) + return experimentalEnabled == 0 && dockerVersion ==~ /^Docker version 18.*$/ + +} + +def manifestCreate(manifestListName, manifests) { + sh "docker manifest create ${manifestListName} ${manifests.join(' ')}" +} + +def manifestAnnotate(manifestListName, manifestsWithFeatures) { + manifestsWithFeatures.each { + sh """ + docker manifest annotate ${manifestListName} ${it['manifest']} --arch "${it['arch']}" \ + --os "${it['os']}" --os-features "${it['osfeatures'].join(',')}" --variant "${it['variant']}" + """ + } +} + +def manifestPush(manifestListName, dockerRegistryLogin, dockerRegistryPassword) { + sh "docker login -u '${dockerRegistryLogin}' -p '${dockerRegistryPassword}'" + sh "docker manifest push --purge ${manifestListName}" +} + +return this diff --git a/.jenkinsci-new/utils/docker-pull-or-build.groovy b/.jenkinsci-new/utils/docker-pull-or-build.groovy new file mode 100644 index 0000000000..65dab2440f --- /dev/null +++ b/.jenkinsci-new/utils/docker-pull-or-build.groovy @@ -0,0 +1,73 @@ +#!/usr/bin/env groovy +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +// +// This module helps automatically build new docker develop-build image if Dockerfile changed +// + +def buildOptionsString(options) { + def s = '' + if (options) { + options.each { k, v -> + s += "--build-arg ${k}=${v} " + } + } + return s +} + +def dockerPullOrBuild(imageName, currentDockerfileURL, previousDockerfileURL, referenceDockerfileURL, scmVars, environment, buildOptions=null) { + buildOptions = buildOptionsString(buildOptions) + withEnv(environment) { + def utils = load '.jenkinsci-new/utils/utils.groovy' + randDir = sh(script: "cat /dev/urandom | tr -dc 'a-zA-Z0-9' | head -c 10", returnStdout: true).trim() + currentDockerfile = utils.getUrl(currentDockerfileURL, "/tmp/${randDir}/currentDockerfile", true) + previousDockerfile = utils.getUrl(previousDockerfileURL, "/tmp/${randDir}/previousDockerfile") + referenceDockerfile = utils.getUrl(referenceDockerfileURL, "/tmp/${randDir}/referenceDockerfile") + if (utils.filesDiffer(currentDockerfile, previousDockerfile) && utils.filesDiffer(currentDockerfile, referenceDockerfile)) { + // Dockerfile has been changed compared to both the previous commit and reference Dockerfile + // Worst case scenario. 
We cannot count on the local cache + // because Dockerfile may contain apt-get entries that would try to update + // from invalid (stale) addresses + if(remoteFilesDiffer(currentDockerfileURL, referenceDockerfileURL)){ + // Dockerfile has been changed compared to the develop + iC = docker.build("${env.DOCKER_REGISTRY_BASENAME}:${randDir}-${BUILD_NUMBER}", "${buildOptions} --no-cache -f ${currentDockerfile} .") + } else { + // Dockerfile is same as develop, we can just pull it + def testExitCode = sh(script: "docker pull ${env.DOCKER_REGISTRY_BASENAME}:${imageName}", returnStatus: true) + if (testExitCode != 0) { + // image does not (yet) exist on Dockerhub. Build it + iC = docker.build("${env.DOCKER_REGISTRY_BASENAME}:${randDir}-${BUILD_NUMBER}", "${buildOptions} --no-cache -f ${currentDockerfile} .") + } + else { + // no difference found compared to both previous and reference Dockerfile + iC = docker.image("${env.DOCKER_REGISTRY_BASENAME}:${imageName}") + } + } + } + else { + // first commit in this branch or Dockerfile modified + if (utils.filesDiffer(currentDockerfile, referenceDockerfile)) { + // if we're lucky to build on the same agent, image will be built using cache + iC = docker.build("${env.DOCKER_REGISTRY_BASENAME}:${randDir}-${BUILD_NUMBER}", "$buildOptions -f ${currentDockerfile} .") + } + else { + // try pulling image from Dockerhub, probably image is already there + def testExitCode = sh(script: "docker pull ${env.DOCKER_REGISTRY_BASENAME}:${imageName}", returnStatus: true) + if (testExitCode != 0) { + // image does not (yet) exist on Dockerhub. Build it + iC = docker.build("${env.DOCKER_REGISTRY_BASENAME}:${randDir}-${BUILD_NUMBER}", "$buildOptions --no-cache -f ${currentDockerfile} .") + } + else { + // no difference found compared to both previous and reference Dockerfile + iC = docker.image("${env.DOCKER_REGISTRY_BASENAME}:${imageName}") + } + } + } + } + return iC +} + +return this diff --git a/.jenkinsci-new/utils/doxygen.groovy b/.jenkinsci-new/utils/doxygen.groovy new file mode 100644 index 0000000000..cc7baa9491 --- /dev/null +++ b/.jenkinsci-new/utils/doxygen.groovy @@ -0,0 +1,30 @@ +#!/usr/bin/env groovy +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +// +// Builds and push Doxygen docks +// + +def doDoxygen(boolean specialBranch, String local_branch) { + sh "doxygen Doxyfile" + if (specialBranch) { + def branch = local_branch == "master" ? local_branch : "develop" + sshagent(['jenkins-artifact']) { + sh "ssh-agent" + sh """ + rsync \ + -e 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' \ + -rzcv --delete \ + docs/doxygen/html/* \ + ubuntu@docs.iroha.tech:/var/nexus-efs/doxygen/${branch}/ + """ + } + } else { + archiveArtifacts artifacts: 'docs/doxygen/html/*', allowEmptyArchive: true + } +} + +return this diff --git a/.jenkinsci-new/utils/utils.groovy b/.jenkinsci-new/utils/utils.groovy new file mode 100644 index 0000000000..4410aa85be --- /dev/null +++ b/.jenkinsci-new/utils/utils.groovy @@ -0,0 +1,51 @@ +#!/usr/bin/env groovy +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +// +// Small utils that can be used multiple times +// + +def previousCommitOrCurrent(scmVars) { + // GIT_PREVIOUS_COMMIT is null on first PR build + // regardless Jenkins docs saying it equals the current one on first build in branch + return !scmVars.GIT_PREVIOUS_COMMIT ? 
scmVars.GIT_COMMIT : scmVars.GIT_PREVIOUS_COMMIT +} + +def selectedBranchesCoverage(List branches) { + return env.GIT_LOCAL_BRANCH in branches +} + +def ccacheSetup(int maxSize) { + sh """ + ccache --version + ccache --show-stats + ccache --zero-stats + ccache --max-size=${maxSize}G + """ +} + +def dockerPush(dockerImageObj, String imageName) { + docker.withRegistry('https://registry.hub.docker.com', 'docker-hub-credentials') { + dockerImageObj.push(imageName) + } +} + +def getUrl(String url, String savePath, boolean createDstDir=false) { + if (createDstDir) { + sh "curl -L -o ${savePath} --create-dirs ${url}" + } + else { + sh "curl -L -o ${savePath} ${url}" + } + return savePath +} + +def filesDiffer(String f1, String f2) { + diffExitCode = sh(script: "diff -q ${f1} ${f2}", returnStatus: true) + return diffExitCode != 0 +} + +return this diff --git a/.jenkinsci-new/utils/vars.groovy b/.jenkinsci-new/utils/vars.groovy new file mode 100644 index 0000000000..5be64a5463 --- /dev/null +++ b/.jenkinsci-new/utils/vars.groovy @@ -0,0 +1,21 @@ +#!/usr/bin/env groovy +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +// +// vars to map compiler versions +// + +def compilerMapping () { + return ['gcc5': ['cxx_compiler':'g++-5', 'cc_compiler':'gcc-5'], + 'gcc7' : ['cxx_compiler':'g++-7', 'cc_compiler':'gcc-7'], + 'clang6': ['cxx_compiler':'clang++-6.0', 'cc_compiler':'clang-6.0'], + 'clang7': ['cxx_compiler':'clang++-7', 'cc_compiler':'clang-7'], + 'appleclang': ['cxx_compiler':'clang++', 'cc_compiler':'clang'], + ] + } + + +return this diff --git a/Jenkinsfile-new b/Jenkinsfile-new new file mode 100644 index 0000000000..256f8f58c6 --- /dev/null +++ b/Jenkinsfile-new @@ -0,0 +1,306 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +import org.jenkinsci.plugins.workflow.steps.FlowInterruptedException + +def tasks = [:] + +class Worker { + String label + int cpusAvailable +} + +class Builder { + // can't get to work without 'static' + static class PostSteps { + List success + List failure + List unstable + List always + List aborted + } + List buildSteps + PostSteps postSteps +} + +class Build { + String name + String type + Builder builder + Worker worker +} + +def build(Build build) { + return { + node(build.worker.label) { + try { + echo "Worker: ${env.NODE_NAME}" + gitNotify ("New CI: " + build.name, "Started...", 'PENDING') + build.builder.buildSteps.each { + it() + } + if (currentBuild.currentResult == 'SUCCESS') { + build.builder.postSteps.success.each { + it() + } + } else if(currentBuild.currentResult == 'UNSTABLE') { + build.builder.postSteps.unstable.each { + it() + } + } + } catch(FlowInterruptedException e) { + print "Looks like we ABORTED" + currentBuild.result = 'ABORTED' + build.builder.postSteps.aborted.each { + it() + } + } catch(Exception e) { + print "Error was detected: " + e + currentBuild.result = 'FAILURE' + build.builder.postSteps.failure.each { + it() + } + } + // ALWAYS + finally { + if (currentBuild.currentResult == 'SUCCESS') + gitNotify ("New CI: " + build.name, "Finish", 'SUCCESS') + else + gitNotify ("New CI: " + build.name, currentBuild.currentResult, 'FAILURE') + + build.builder.postSteps.always.each { + it() + } + } + } + } +} + +// sanitise the string it should contain only 'key1=value1;key2=value2;...' 
+def cmd_sanitize(String cmd){ + if (cmd.contains("//")) + return false + + for (i in cmd.split(";")){ + if (i.split("=").size() != 2 ) + return false + for (j in i.split("=")){ + if (j.trim().contains(" ")) + return false + } + } + return true +} + +def gitNotify (context, description, status, targetUrl='' ){ + githubNotify context: context, credentialsId: 'SORABOT_TOKEN_AND_LOGIN', description: description, status: status, targetUrl: targetUrl +} + +stage('Prepare environment'){ +timestamps(){ + + +node ('master') { + scmVars = checkout scm + def textVariables = load '.jenkinsci-new/text-variables.groovy' + properties([ + parameters([ + choice(choices: textVariables.param_chose_opt, description: textVariables.param_descriptions, name: 'build_scenario'), + string(defaultValue: '', description: textVariables.cmd_description, name: 'custom_cmd', trim: true) + ]), + buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')) + ]) + environmentList = [] + environment = [:] + environment = [ + "CCACHE_DEBUG_DIR": "/opt/.ccache", + "CCACHE_RELEASE_DIR": "/opt/.ccache", + "DOCKER_REGISTRY_BASENAME": "hyperledger/iroha", + "IROHA_NETWORK": "iroha-${scmVars.CHANGE_ID}-${scmVars.GIT_COMMIT}-${env.BUILD_NUMBER}", + "IROHA_POSTGRES_HOST": "pg-${scmVars.CHANGE_ID}-${scmVars.GIT_COMMIT}-${env.BUILD_NUMBER}", + "IROHA_POSTGRES_USER": "pguser${scmVars.GIT_COMMIT}", + "IROHA_POSTGRES_PASSWORD": "${scmVars.GIT_COMMIT}", + "IROHA_POSTGRES_PORT": "5432", + "GIT_RAW_BASE_URL": "https://raw.githubusercontent.com/hyperledger/iroha" + ] + environment.each { e -> + environmentList.add("${e.key}=${e.value}") + } + + // Define variable and params + + //All variable and Default values + x64linux_compiler_list = ['gcc5'] + mac_compiler_list = [] + + testing = true + testList = '(module)' + + sanitize = false + cppcheck = false + fuzzing = false // x64linux_compiler_list= ['clang6'] testing = true testList = "(None)" + sonar = false + coverage = false + coverage_mac = false + doxygen = false + + build_type = 'Debug' + packageBuild = false + pushDockerTag = 'not-supposed-to-be-pushed' + packagePush = false + specialBranch = false + parallelism = 0 + + if (scmVars.GIT_LOCAL_BRANCH in ["master","develop","dev"] || scmVars.CHANGE_BRANCH_LOCAL in ["develop","dev"]) + specialBranch = true + else + specialBranch = false + + if (specialBranch){ + // if specialBranch == true the release build will run, so set packagePush + packagePush = true + doxygen = true + } + + if (scmVars.GIT_LOCAL_BRANCH ==~ /(develop|dev)/) + pushDockerTag = 'develop' + else if (scmVars.GIT_LOCAL_BRANCH == 'master') + pushDockerTag = 'latest' + else + pushDockerTag = 'not-supposed-to-be-pushed' + + if (params.build_scenario == 'Default') + if ( scmVars.GIT_BRANCH.startsWith('PR-')) + if (BUILD_NUMBER == '1') + build_scenario='On open PR' + else + build_scenario='Commit in Open PR' + else + build_scenario='Branch commit' + else + build_scenario = params.build_scenario + + + print("Selected Build Scenario '${build_scenario}'") + switch(build_scenario) { + case 'Branch commit': + echo "All Default" + break; + case 'On open PR': + // Just hint, not the main way to Notify about build status. 
+ gitNotify ("New CI: Merge to trunk", "Please, run: 'Before merge to trunk'", 'PENDING', env.JOB_URL + "/build") + mac_compiler_list = ['appleclang'] + coverage = true + cppcheck = true + sonar = true + break; + case 'Commit in Open PR': + gitNotify ("New CI: Merge to trunk", "Please, run: 'Before merge to trunk'", 'PENDING', env.JOB_URL + "/build") + echo "All Default" + break; + case 'Before merge to trunk': + gitNotify ("New CI: Merge to trunk", "Started...", 'PENDING') + x64linux_compiler_list = ['gcc5','gcc7', 'clang6' , 'clang7'] + mac_compiler_list = ['appleclang'] + testing = true + testList = '()' + coverage = true + cppcheck = true + sonar = true + break; + case 'Custom command': + if (cmd_sanitize(params.custom_cmd)){ + evaluate (params.custom_cmd) + // A very rare scenario when linux compiler is not selected but we still need coverage + if (x64linux_compiler_list.isEmpty() && coverage ){ + coverage_mac = true + } + } else { + println("Unable to parse '${params.custom_cmd}'") + sh "exit 1" + } + break; + default: + println("The value build_scenario='${build_scenario}' is not implemented"); + sh "exit 1" + break; + } + + echo "specialBranch=${specialBranch}, packageBuild=${packageBuild}, pushDockerTag=${pushDockerTag}, packagePush=${packagePush} " + echo "testing=${testing}, testList=${testList}, parallelism=${parallelism}" + echo "x64linux_compiler_list=${x64linux_compiler_list}" + echo "mac_compiler_list=${mac_compiler_list}" + echo "sanitize=${sanitize}, cppcheck=${cppcheck}, fuzzing=${fuzzing}, sonar=${sonar}, coverage=${coverage}, coverage_mac=${coverage_mac} doxygen=${doxygen}" + print scmVars + print environmentList + + + // Load Scripts + def x64LinuxBuildScript = load '.jenkinsci-new/builders/x64-linux-build-steps.groovy' + def x64BuildScript = load '.jenkinsci-new/builders/x64-mac-build-steps.groovy' + + + // Define Workers + x64LinuxWorker = new Worker(label: 'docker-build-agent', cpusAvailable: 8) + x64MacWorker = new Worker(label: 'mac', cpusAvailable: 4) + + + // Define all possible steps + def x64LinuxBuildSteps + def x64LinuxPostSteps = new Builder.PostSteps() + if(!x64linux_compiler_list.isEmpty()){ + x64LinuxBuildSteps = [{x64LinuxBuildScript.buildSteps( + parallelism==0 ?x64LinuxWorker.cpusAvailable : parallelism, x64linux_compiler_list, build_type, specialBranch, coverage, + testing, testList, cppcheck, sonar, doxygen, packageBuild, sanitize, fuzzing, environmentList)}] + //If "master" or "dev" also run Release build + if(specialBranch && build_type == 'Debug'){ + x64LinuxBuildSteps += [{x64LinuxBuildScript.buildSteps( + parallelism==0 ?x64LinuxWorker.cpusAvailable : parallelism, x64linux_compiler_list, 'Release', specialBranch, false, + false , testList, false, false, false, true, false, false, environmentList)}] + } + x64LinuxPostSteps = new Builder.PostSteps( + always: [{x64LinuxBuildScript.alwaysPostSteps(environmentList)}], + success: [{x64LinuxBuildScript.successPostSteps(scmVars, packagePush, pushDockerTag, environmentList)}]) + } + def x64MacBuildSteps + def x64MacBuildPostSteps = new Builder.PostSteps() + if(!mac_compiler_list.isEmpty()){ + x64MacBuildSteps = [{x64BuildScript.buildSteps(parallelism==0 ?x64MacWorker.cpusAvailable : parallelism, mac_compiler_list, build_type, coverage_mac, testing, testList, packageBuild, environmentList)}] + //If "master" or "dev" also run Release build + if(specialBranch && build_type == 'Debug'){ + x64MacBuildSteps += [{x64BuildScript.buildSteps(parallelism==0 ?x64MacWorker.cpusAvailable : parallelism, 
mac_compiler_list, 'Release', false, false, testList, true, environmentList)}] + } + x64MacBuildPostSteps = new Builder.PostSteps( + always: [{x64BuildScript.alwaysPostSteps(environmentList)}], + success: [{x64BuildScript.successPostSteps(scmVars, packagePush, environmentList)}]) + } + + // Define builders + x64LinuxBuilder = new Builder(buildSteps: x64LinuxBuildSteps, postSteps: x64LinuxPostSteps) + x64MacBuilder = new Builder(buildSteps: x64MacBuildSteps, postSteps: x64MacBuildPostSteps ) + + // Define Build + x64LinuxBuild = new Build(name: "x86_64 Linux ${build_type}", + type: build_type, + builder: x64LinuxBuilder, + worker: x64LinuxWorker) + x64MacBuild = new Build(name: "Mac ${build_type}", + type: build_type, + builder: x64MacBuilder, + worker: x64MacWorker) + if(!x64linux_compiler_list.isEmpty()) + tasks[x64LinuxBuild.name] = build(x64LinuxBuild) + if(!mac_compiler_list.isEmpty()) + tasks[x64MacBuild.name] = build(x64MacBuild) + + cleanWs() + parallel tasks + + if (build_scenario == 'Before merge to trunk') + gitNotify ("New CI: Merge to trunk", "Finish", 'SUCCESS') +} + +} +} \ No newline at end of file From 9a3a79635072ad86736931571c87cd98640ca6cd Mon Sep 17 00:00:00 2001 From: Andrei Lebedev Date: Tue, 29 Jan 2019 13:36:19 +0300 Subject: [PATCH 20/41] Tests MSVC build (#2022) Signed-off-by: Andrei Lebedev --- cmake/dependencies.cmake | 24 +- .../transport/impl/mst_transport_grpc.cpp | 1 + irohad/network/impl/async_grpc_client.hpp | 1 + libs/common/hexutils.hpp | 1 + .../protobuf/proto_transport_factory.hpp | 9 + .../crypto_provider/crypto_model_signer.hpp | 2 +- test/framework/config_helper.cpp | 1 + .../irohad/ametsuchi/ametsuchi_fixture.hpp | 3 +- .../irohad/ametsuchi/ametsuchi_mocks.hpp | 209 +----------------- .../irohad/ametsuchi/block_query_test.cpp | 4 +- .../irohad/ametsuchi/mock_block_query.hpp | 35 +++ .../ametsuchi/mock_block_query_factory.hpp | 25 +++ .../ametsuchi/mock_key_value_storage.hpp | 28 +++ .../irohad/ametsuchi/mock_mutable_factory.hpp | 35 +++ .../irohad/ametsuchi/mock_mutable_storage.hpp | 33 +++ .../irohad/ametsuchi/mock_peer_query.hpp | 26 +++ .../ametsuchi/mock_peer_query_factory.hpp | 25 +++ .../irohad/ametsuchi/mock_query_executor.hpp | 34 +++ test/module/irohad/ametsuchi/mock_storage.hpp | 71 ++++++ .../ametsuchi/mock_temporary_factory.hpp | 32 +++ .../ametsuchi/mock_tx_presence_cache.hpp | 31 +++ .../irohad/ametsuchi/mock_wsv_query.hpp | 31 +++ .../ametsuchi/postgres_executor_test.cpp | 3 +- .../postgres_query_executor_test.cpp | 7 +- .../irohad/ametsuchi/storage_init_test.cpp | 4 +- .../ametsuchi/tx_presence_cache_test.cpp | 3 +- .../ametsuchi/wsv_query_command_test.cpp | 3 +- .../irohad/consensus/yac/network_test.cpp | 7 +- .../consensus/yac/peer_orderer_test.cpp | 3 +- .../irohad/network/block_loader_test.cpp | 5 +- .../mock_on_demand_os_notification.hpp | 28 +++ .../on_demand_connection_manager_test.cpp | 4 +- .../ordering/on_demand_ordering_gate_test.cpp | 3 +- .../on_demand_os_server_grpc_test.cpp | 2 +- .../module/irohad/ordering/ordering_mocks.hpp | 8 +- .../irohad/ordering/ordering_service_test.cpp | 2 +- .../irohad/simulator/simulator_test.cpp | 6 +- .../irohad/synchronizer/synchronizer_test.cpp | 16 +- .../torii/processor/mock_query_processor.hpp | 31 +++ .../torii/processor/query_processor_test.cpp | 5 +- .../irohad/torii/query_service_test.cpp | 2 +- test/module/irohad/torii/torii_mocks.hpp | 12 - .../irohad/torii/torii_queries_test.cpp | 5 +- .../irohad/torii/torii_service_query_test.cpp | 2 +- 
.../torii/torii_transport_command_test.cpp | 2 - .../validation/chain_validation_test.cpp | 3 +- .../validation/mock_stateful_validator.hpp | 27 +++ .../irohad/validation/validation_mocks.hpp | 9 +- .../libs/cache/single_pointer_cache_test.cpp | 13 +- test/system/irohad_test.cpp | 2 +- 50 files changed, 613 insertions(+), 265 deletions(-) create mode 100644 test/module/irohad/ametsuchi/mock_block_query.hpp create mode 100644 test/module/irohad/ametsuchi/mock_block_query_factory.hpp create mode 100644 test/module/irohad/ametsuchi/mock_key_value_storage.hpp create mode 100644 test/module/irohad/ametsuchi/mock_mutable_factory.hpp create mode 100644 test/module/irohad/ametsuchi/mock_mutable_storage.hpp create mode 100644 test/module/irohad/ametsuchi/mock_peer_query.hpp create mode 100644 test/module/irohad/ametsuchi/mock_peer_query_factory.hpp create mode 100644 test/module/irohad/ametsuchi/mock_query_executor.hpp create mode 100644 test/module/irohad/ametsuchi/mock_storage.hpp create mode 100644 test/module/irohad/ametsuchi/mock_temporary_factory.hpp create mode 100644 test/module/irohad/ametsuchi/mock_tx_presence_cache.hpp create mode 100644 test/module/irohad/ametsuchi/mock_wsv_query.hpp create mode 100644 test/module/irohad/ordering/mock_on_demand_os_notification.hpp create mode 100644 test/module/irohad/torii/processor/mock_query_processor.hpp create mode 100644 test/module/irohad/validation/mock_stateful_validator.hpp diff --git a/cmake/dependencies.cmake b/cmake/dependencies.cmake index 0d4259ef3c..2e08b58fd0 100644 --- a/cmake/dependencies.cmake +++ b/cmake/dependencies.cmake @@ -15,7 +15,29 @@ find_package(Threads REQUIRED) ########################## # testing is an option. Look at the main CMakeLists.txt for details. if (TESTING) - find_package(gtest) + if (MSVC) + set(CMAKE_MODULE_PATH "") + find_package(GTest REQUIRED CONFIG) + add_library(gtest::gtest INTERFACE IMPORTED) + target_link_libraries(gtest::gtest INTERFACE + GTest::gtest + ) + add_library(gtest::main INTERFACE IMPORTED) + target_link_libraries(gtest::main INTERFACE + GTest::gtest_main + ) + add_library(gmock::gmock INTERFACE IMPORTED) + target_link_libraries(gmock::gmock INTERFACE + GTest::gmock + ) + add_library(gmock::main INTERFACE IMPORTED) + target_link_libraries(gmock::main INTERFACE + GTest::gmock_main + ) + set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules) + else () + find_package(gtest) + endif() endif () ############################# diff --git a/irohad/multi_sig_transactions/transport/impl/mst_transport_grpc.cpp b/irohad/multi_sig_transactions/transport/impl/mst_transport_grpc.cpp index f6bc4cd788..1a0c7c5b87 100644 --- a/irohad/multi_sig_transactions/transport/impl/mst_transport_grpc.cpp +++ b/irohad/multi_sig_transactions/transport/impl/mst_transport_grpc.cpp @@ -15,6 +15,7 @@ #include "interfaces/transaction.hpp" #include "validators/field_validator.hpp" +using namespace iroha; using namespace iroha::network; using iroha::ConstRefState; diff --git a/irohad/network/impl/async_grpc_client.hpp b/irohad/network/impl/async_grpc_client.hpp index 477100b4a9..5e8ea7535c 100644 --- a/irohad/network/impl/async_grpc_client.hpp +++ b/irohad/network/impl/async_grpc_client.hpp @@ -6,6 +6,7 @@ #ifndef IROHA_ASYNC_GRPC_CLIENT_HPP #define IROHA_ASYNC_GRPC_CLIENT_HPP +#include #include #include diff --git a/libs/common/hexutils.hpp b/libs/common/hexutils.hpp index 18d5167b32..1f200264ec 100644 --- a/libs/common/hexutils.hpp +++ b/libs/common/hexutils.hpp @@ -5,6 +5,7 @@ #ifndef IROHA_HEXUTILS_HPP #define 
IROHA_HEXUTILS_HPP +#include #include #include #include diff --git a/shared_model/backend/protobuf/proto_transport_factory.hpp b/shared_model/backend/protobuf/proto_transport_factory.hpp index 3e90ec405b..d1cfd2d47f 100644 --- a/shared_model/backend/protobuf/proto_transport_factory.hpp +++ b/shared_model/backend/protobuf/proto_transport_factory.hpp @@ -12,6 +12,11 @@ #include "cryptography/hash_providers/sha3_256.hpp" #include "validators/abstract_validator.hpp" +#ifdef _MSC_VER +#pragma push_macro("GetMessage") +#undef GetMessage +#endif + namespace shared_model { namespace proto { @@ -68,4 +73,8 @@ namespace shared_model { } // namespace proto } // namespace shared_model +#ifdef _MSC_VER +#pragma pop_macro("GetMessage") +#endif + #endif // IROHA_PROTO_TRANSPORT_FACTORY_HPP diff --git a/shared_model/cryptography/crypto_provider/crypto_model_signer.hpp b/shared_model/cryptography/crypto_provider/crypto_model_signer.hpp index 40e8a8b26f..9b6ec4995c 100644 --- a/shared_model/cryptography/crypto_provider/crypto_model_signer.hpp +++ b/shared_model/cryptography/crypto_provider/crypto_model_signer.hpp @@ -23,7 +23,7 @@ namespace shared_model { virtual ~CryptoModelSigner() = default; template - void sign(T &signable) const noexcept { + inline void sign(T &signable) const noexcept { auto signedBlob = Algorithm::sign(signable.payload(), keypair_); signable.addSignature(signedBlob, keypair_.publicKey()); } diff --git a/test/framework/config_helper.cpp b/test/framework/config_helper.cpp index 4cfa2fe69c..05e6fa5035 100644 --- a/test/framework/config_helper.cpp +++ b/test/framework/config_helper.cpp @@ -5,6 +5,7 @@ #include "framework/config_helper.hpp" +#include #include namespace integration_framework { diff --git a/test/module/irohad/ametsuchi/ametsuchi_fixture.hpp b/test/module/irohad/ametsuchi/ametsuchi_fixture.hpp index 1f9ec8e6a0..8d75cd2bab 100644 --- a/test/module/irohad/ametsuchi/ametsuchi_fixture.hpp +++ b/test/module/irohad/ametsuchi/ametsuchi_fixture.hpp @@ -47,7 +47,8 @@ namespace iroha { [](iroha::expected::Error &error) { FAIL() << "StorageImpl: " << error.error; }); - sql = std::make_shared(soci::postgresql, pgopt_); + sql = std::make_shared(*soci::factory_postgresql(), + pgopt_); sql_query = std::make_unique(*sql, factory); } diff --git a/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp b/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp index 7eab64f31a..a7c04dedf4 100644 --- a/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp +++ b/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp @@ -8,36 +8,26 @@ #include #include -#include "ametsuchi/block_query.hpp" -#include "ametsuchi/block_query_factory.hpp" -#include "ametsuchi/key_value_storage.hpp" -#include "ametsuchi/mutable_factory.hpp" -#include "ametsuchi/mutable_storage.hpp" #include "ametsuchi/os_persistent_state_factory.hpp" -#include "ametsuchi/peer_query.hpp" -#include "ametsuchi/peer_query_factory.hpp" -#include "ametsuchi/storage.hpp" -#include "ametsuchi/temporary_factory.hpp" #include "ametsuchi/temporary_wsv.hpp" -#include "ametsuchi/tx_presence_cache.hpp" #include "ametsuchi/wsv_command.hpp" -#include "ametsuchi/wsv_query.hpp" #include "common/result.hpp" #include "interfaces/common_objects/peer.hpp" +#include "module/irohad/ametsuchi/mock_block_query.hpp" +#include "module/irohad/ametsuchi/mock_block_query_factory.hpp" +#include "module/irohad/ametsuchi/mock_key_value_storage.hpp" +#include "module/irohad/ametsuchi/mock_mutable_factory.hpp" +#include "module/irohad/ametsuchi/mock_mutable_storage.hpp" +#include 
"module/irohad/ametsuchi/mock_peer_query.hpp" +#include "module/irohad/ametsuchi/mock_peer_query_factory.hpp" +#include "module/irohad/ametsuchi/mock_query_executor.hpp" +#include "module/irohad/ametsuchi/mock_storage.hpp" +#include "module/irohad/ametsuchi/mock_temporary_factory.hpp" +#include "module/irohad/ametsuchi/mock_tx_presence_cache.hpp" +#include "module/irohad/ametsuchi/mock_wsv_query.hpp" namespace iroha { namespace ametsuchi { - class MockWsvQuery : public WsvQuery { - public: - MOCK_METHOD1(getSignatories, - boost::optional< - std::vector>( - const std::string &account_id)); - MOCK_METHOD0( - getPeers, - boost::optional< - std::vector>>()); - }; class MockWsvCommand : public WsvCommand { public: @@ -108,35 +98,6 @@ namespace iroha { const std::string &)); }; - class MockBlockQuery : public BlockQuery { - public: - MOCK_METHOD2(getBlocks, - std::vector( - shared_model::interface::types::HeightType, uint32_t)); - MOCK_METHOD1(getBlocksFrom, - std::vector( - shared_model::interface::types::HeightType)); - MOCK_METHOD1(getTopBlocks, std::vector(uint32_t)); - MOCK_METHOD0(getTopBlock, expected::Result(void)); - MOCK_METHOD1(checkTxPresence, - boost::optional( - const shared_model::crypto::Hash &)); - MOCK_METHOD0(getTopBlockHeight, uint32_t(void)); - }; - - class MockTemporaryFactory : public TemporaryFactory { - public: - MOCK_METHOD0( - createTemporaryWsv, - expected::Result, std::string>(void)); - MOCK_METHOD1(prepareBlock_, void(std::unique_ptr &)); - - void prepareBlock(std::unique_ptr wsv) override { - // gmock workaround for non-copyable parameters - prepareBlock_(wsv); - } - }; - class MockTemporaryWsv : public TemporaryWsv { public: MOCK_METHOD1(apply, @@ -152,125 +113,6 @@ namespace iroha { MOCK_METHOD0(release, void(void)); }; - class MockMutableStorage : public MutableStorage { - public: - MOCK_METHOD2( - apply, - bool(rxcpp::observable< - std::shared_ptr>, - std::function< - bool(const shared_model::interface::Block &, - PeerQuery &, - const shared_model::interface::types::HashType &)>)); - MOCK_METHOD1(apply, bool(const shared_model::interface::Block &)); - MOCK_METHOD1(applyPrepared, bool(const shared_model::interface::Block &)); - }; - - /** - * Factory for generation mock mutable storages. - * This method provide technique, - * when required to return object wrapped in Result. 
- */ - expected::Result, std::string> - createMockMutableStorage() { - return expected::makeValue>( - std::make_unique()); - } - - class MockMutableFactory : public MutableFactory { - public: - MOCK_METHOD0( - createMutableStorage, - expected::Result, std::string>(void)); - - void commit(std::unique_ptr mutableStorage) override { - // gmock workaround for non-copyable parameters - commit_(mutableStorage); - } - - MOCK_METHOD1(commitPrepared, - bool(const shared_model::interface::Block &)); - MOCK_METHOD1(commit_, void(std::unique_ptr &)); - }; - - class MockPeerQuery : public PeerQuery { - public: - MockPeerQuery() = default; - - MOCK_METHOD0(getLedgerPeers, boost::optional>()); - }; - - class MockStorage : public Storage { - public: - MOCK_CONST_METHOD0(getWsvQuery, std::shared_ptr(void)); - MOCK_CONST_METHOD0(getBlockQuery, std::shared_ptr(void)); - MOCK_METHOD0( - createTemporaryWsv, - expected::Result, std::string>(void)); - MOCK_METHOD0( - createMutableStorage, - expected::Result, std::string>(void)); - MOCK_CONST_METHOD0(createPeerQuery, - boost::optional>()); - MOCK_CONST_METHOD0(createBlockQuery, - boost::optional>()); - MOCK_CONST_METHOD0( - createOsPersistentState, - boost::optional>()); - MOCK_CONST_METHOD2( - createQueryExecutor, - boost::optional>( - std::shared_ptr, - std::shared_ptr)); - MOCK_METHOD1(doCommit, void(MutableStorage *storage)); - MOCK_METHOD1(commitPrepared, - bool(const shared_model::interface::Block &)); - MOCK_METHOD1(insertBlock, bool(const shared_model::interface::Block &)); - MOCK_METHOD1(insertBlocks, - bool(const std::vector< - std::shared_ptr> &)); - MOCK_METHOD0(reset, void(void)); - MOCK_METHOD0(dropStorage, void(void)); - MOCK_METHOD0(freeConnections, void(void)); - MOCK_METHOD1(prepareBlock_, void(std::unique_ptr &)); - - void prepareBlock(std::unique_ptr wsv) override { - // gmock workaround for non-copyable parameters - prepareBlock_(wsv); - } - - rxcpp::observable> - on_commit() override { - return notifier.get_observable(); - } - void commit(std::unique_ptr storage) override { - doCommit(storage.get()); - } - rxcpp::subjects::subject> - notifier; - }; - - class MockKeyValueStorage : public KeyValueStorage { - public: - MOCK_METHOD2(add, bool(Identifier, const Bytes &)); - MOCK_CONST_METHOD1(get, boost::optional(Identifier)); - MOCK_CONST_METHOD0(directory, std::string(void)); - MOCK_CONST_METHOD0(last_id, Identifier(void)); - MOCK_METHOD0(dropAll, void(void)); - }; - - class MockPeerQueryFactory : public PeerQueryFactory { - public: - MOCK_CONST_METHOD0(createPeerQuery, - boost::optional>()); - }; - - class MockBlockQueryFactory : public BlockQueryFactory { - public: - MOCK_CONST_METHOD0(createBlockQuery, - boost::optional>()); - }; - class MockOsPersistentStateFactory : public OsPersistentStateFactory { public: MOCK_CONST_METHOD0( @@ -278,33 +120,6 @@ namespace iroha { boost::optional>()); }; - class MockQueryExecutor : public QueryExecutor { - public: - MOCK_METHOD1(validateAndExecute_, - shared_model::interface::QueryResponse *( - const shared_model::interface::Query &)); - QueryExecutorResult validateAndExecute( - const shared_model::interface::Query &q, - bool validate_signatories = true) override { - return QueryExecutorResult(validateAndExecute_(q)); - } - MOCK_METHOD2(validate, - bool(const shared_model::interface::BlocksQuery &, - const bool validate_signatories)); - }; - - class MockTxPresenceCache : public iroha::ametsuchi::TxPresenceCache { - public: - MOCK_CONST_METHOD1(check, - boost::optional( - const shared_model::crypto::Hash 
&hash)); - - MOCK_CONST_METHOD1( - check, - boost::optional( - const shared_model::interface::TransactionBatch &)); - }; - namespace tx_cache_status_responses { std::ostream &operator<<(std::ostream &os, const Committed &resp) { return os << resp.hash.toString(); diff --git a/test/module/irohad/ametsuchi/block_query_test.cpp b/test/module/irohad/ametsuchi/block_query_test.cpp index 89709d29b9..27fb8951d1 100644 --- a/test/module/irohad/ametsuchi/block_query_test.cpp +++ b/test/module/irohad/ametsuchi/block_query_test.cpp @@ -13,7 +13,7 @@ #include "converters/protobuf/json_proto_converter.hpp" #include "framework/result_fixture.hpp" #include "module/irohad/ametsuchi/ametsuchi_fixture.hpp" -#include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" +#include "module/irohad/ametsuchi/mock_key_value_storage.hpp" #include "module/shared_model/builders/protobuf/test_block_builder.hpp" #include "module/shared_model/builders/protobuf/test_transaction_builder.hpp" @@ -30,7 +30,7 @@ class BlockQueryTest : public AmetsuchiTest { ASSERT_TRUE(tmp); file = std::move(*tmp); mock_file = std::make_shared(); - sql = std::make_unique(soci::postgresql, pgopt_); + sql = std::make_unique(*soci::factory_postgresql(), pgopt_); index = std::make_shared(*sql); auto converter = diff --git a/test/module/irohad/ametsuchi/mock_block_query.hpp b/test/module/irohad/ametsuchi/mock_block_query.hpp new file mode 100644 index 0000000000..cf6d576f2e --- /dev/null +++ b/test/module/irohad/ametsuchi/mock_block_query.hpp @@ -0,0 +1,35 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_BLOCK_QUERY_HPP +#define IROHA_MOCK_BLOCK_QUERY_HPP + +#include "ametsuchi/block_query.hpp" + +#include + +namespace iroha { + namespace ametsuchi { + + class MockBlockQuery : public BlockQuery { + public: + MOCK_METHOD2(getBlocks, + std::vector( + shared_model::interface::types::HeightType, uint32_t)); + MOCK_METHOD1(getBlocksFrom, + std::vector( + shared_model::interface::types::HeightType)); + MOCK_METHOD1(getTopBlocks, std::vector(uint32_t)); + MOCK_METHOD0(getTopBlock, expected::Result(void)); + MOCK_METHOD1(checkTxPresence, + boost::optional( + const shared_model::crypto::Hash &)); + MOCK_METHOD0(getTopBlockHeight, uint32_t(void)); + }; + + } // namespace ametsuchi +} // namespace iroha + +#endif // IROHA_MOCK_BLOCK_QUERY_HPP diff --git a/test/module/irohad/ametsuchi/mock_block_query_factory.hpp b/test/module/irohad/ametsuchi/mock_block_query_factory.hpp new file mode 100644 index 0000000000..5d410a6509 --- /dev/null +++ b/test/module/irohad/ametsuchi/mock_block_query_factory.hpp @@ -0,0 +1,25 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_BLOCK_QUERY_FACTORY_HPP +#define IROHA_MOCK_BLOCK_QUERY_FACTORY_HPP + +#include "ametsuchi/block_query_factory.hpp" + +#include + +namespace iroha { + namespace ametsuchi { + + class MockBlockQueryFactory : public BlockQueryFactory { + public: + MOCK_CONST_METHOD0(createBlockQuery, + boost::optional>()); + }; + + } // namespace ametsuchi +} // namespace iroha + +#endif // IROHA_MOCK_BLOCK_QUERY_FACTORY_HPP diff --git a/test/module/irohad/ametsuchi/mock_key_value_storage.hpp b/test/module/irohad/ametsuchi/mock_key_value_storage.hpp new file mode 100644 index 0000000000..7d94cf6bca --- /dev/null +++ b/test/module/irohad/ametsuchi/mock_key_value_storage.hpp @@ -0,0 +1,28 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_KEY_VALUE_STORAGE_HPP +#define IROHA_MOCK_KEY_VALUE_STORAGE_HPP + +#include "ametsuchi/key_value_storage.hpp" + +#include + +namespace iroha { + namespace ametsuchi { + + class MockKeyValueStorage : public KeyValueStorage { + public: + MOCK_METHOD2(add, bool(Identifier, const Bytes &)); + MOCK_CONST_METHOD1(get, boost::optional(Identifier)); + MOCK_CONST_METHOD0(directory, std::string(void)); + MOCK_CONST_METHOD0(last_id, Identifier(void)); + MOCK_METHOD0(dropAll, void(void)); + }; + + } // namespace ametsuchi +} // namespace iroha + +#endif // IROHA_MOCK_KEY_VALUE_STORAGE_HPP diff --git a/test/module/irohad/ametsuchi/mock_mutable_factory.hpp b/test/module/irohad/ametsuchi/mock_mutable_factory.hpp new file mode 100644 index 0000000000..0bf0f4af76 --- /dev/null +++ b/test/module/irohad/ametsuchi/mock_mutable_factory.hpp @@ -0,0 +1,35 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_MUTABLE_FACTORY_HPP +#define IROHA_MOCK_MUTABLE_FACTORY_HPP + +#include "ametsuchi/mutable_factory.hpp" + +#include + +namespace iroha { + namespace ametsuchi { + + class MockMutableFactory : public MutableFactory { + public: + MOCK_METHOD0( + createMutableStorage, + expected::Result, std::string>(void)); + + void commit(std::unique_ptr mutableStorage) override { + // gmock workaround for non-copyable parameters + commit_(mutableStorage); + } + + MOCK_METHOD1(commitPrepared, + bool(const shared_model::interface::Block &)); + MOCK_METHOD1(commit_, void(std::unique_ptr &)); + }; + + } // namespace ametsuchi +} // namespace iroha + +#endif // IROHA_MOCK_MUTABLE_FACTORY_HPP diff --git a/test/module/irohad/ametsuchi/mock_mutable_storage.hpp b/test/module/irohad/ametsuchi/mock_mutable_storage.hpp new file mode 100644 index 0000000000..cd85e14191 --- /dev/null +++ b/test/module/irohad/ametsuchi/mock_mutable_storage.hpp @@ -0,0 +1,33 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_MUTABLE_STORAGE_HPP +#define IROHA_MOCK_MUTABLE_STORAGE_HPP + +#include "ametsuchi/mutable_storage.hpp" + +#include + +namespace iroha { + namespace ametsuchi { + + class MockMutableStorage : public MutableStorage { + public: + MOCK_METHOD2( + apply, + bool(rxcpp::observable< + std::shared_ptr>, + std::function< + bool(const shared_model::interface::Block &, + PeerQuery &, + const shared_model::interface::types::HashType &)>)); + MOCK_METHOD1(apply, bool(const shared_model::interface::Block &)); + MOCK_METHOD1(applyPrepared, bool(const shared_model::interface::Block &)); + }; + + } // namespace ametsuchi +} // namespace iroha + +#endif // IROHA_MOCK_MUTABLE_STORAGE_HPP diff --git a/test/module/irohad/ametsuchi/mock_peer_query.hpp b/test/module/irohad/ametsuchi/mock_peer_query.hpp new file mode 100644 index 0000000000..f0b455e7c8 --- /dev/null +++ b/test/module/irohad/ametsuchi/mock_peer_query.hpp @@ -0,0 +1,26 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_PEER_QUERY_HPP +#define IROHA_MOCK_PEER_QUERY_HPP + +#include "ametsuchi/peer_query.hpp" + +#include + +namespace iroha { + namespace ametsuchi { + + class MockPeerQuery : public PeerQuery { + public: + MockPeerQuery() = default; + + MOCK_METHOD0(getLedgerPeers, boost::optional>()); + }; + + } // namespace ametsuchi +} // namespace iroha + +#endif // IROHA_MOCK_PEER_QUERY_HPP diff --git a/test/module/irohad/ametsuchi/mock_peer_query_factory.hpp b/test/module/irohad/ametsuchi/mock_peer_query_factory.hpp new file mode 100644 index 0000000000..0a98ecd641 --- /dev/null +++ b/test/module/irohad/ametsuchi/mock_peer_query_factory.hpp @@ -0,0 +1,25 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_PEER_QUERY_FACTORY_HPP +#define IROHA_MOCK_PEER_QUERY_FACTORY_HPP + +#include "ametsuchi/peer_query_factory.hpp" + +#include + +namespace iroha { + namespace ametsuchi { + + class MockPeerQueryFactory : public PeerQueryFactory { + public: + MOCK_CONST_METHOD0(createPeerQuery, + boost::optional>()); + }; + + } // namespace ametsuchi +} // namespace iroha + +#endif // IROHA_MOCK_PEER_QUERY_FACTORY_HPP diff --git a/test/module/irohad/ametsuchi/mock_query_executor.hpp b/test/module/irohad/ametsuchi/mock_query_executor.hpp new file mode 100644 index 0000000000..133cb61c16 --- /dev/null +++ b/test/module/irohad/ametsuchi/mock_query_executor.hpp @@ -0,0 +1,34 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_QUERY_EXECUTOR_HPP +#define IROHA_MOCK_QUERY_EXECUTOR_HPP + +#include "ametsuchi/query_executor.hpp" + +#include + +namespace iroha { + namespace ametsuchi { + + class MockQueryExecutor : public QueryExecutor { + public: + MOCK_METHOD1(validateAndExecute_, + shared_model::interface::QueryResponse *( + const shared_model::interface::Query &)); + QueryExecutorResult validateAndExecute( + const shared_model::interface::Query &q, + bool validate_signatories = true) override { + return QueryExecutorResult(validateAndExecute_(q)); + } + MOCK_METHOD2(validate, + bool(const shared_model::interface::BlocksQuery &, + const bool validate_signatories)); + }; + + } // namespace ametsuchi +} // namespace iroha + +#endif // IROHA_MOCK_QUERY_EXECUTOR_HPP diff --git a/test/module/irohad/ametsuchi/mock_storage.hpp b/test/module/irohad/ametsuchi/mock_storage.hpp new file mode 100644 index 0000000000..5cb63bd31a --- /dev/null +++ b/test/module/irohad/ametsuchi/mock_storage.hpp @@ -0,0 +1,71 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_STORAGE_HPP +#define IROHA_MOCK_STORAGE_HPP + +#include "ametsuchi/storage.hpp" + +#include +#include "ametsuchi/mutable_storage.hpp" +#include "ametsuchi/temporary_wsv.hpp" + +namespace iroha { + namespace ametsuchi { + + class MockStorage : public Storage { + public: + MOCK_CONST_METHOD0(getWsvQuery, std::shared_ptr(void)); + MOCK_CONST_METHOD0(getBlockQuery, std::shared_ptr(void)); + MOCK_METHOD0( + createTemporaryWsv, + expected::Result, std::string>(void)); + MOCK_METHOD0( + createMutableStorage, + expected::Result, std::string>(void)); + MOCK_CONST_METHOD0(createPeerQuery, + boost::optional>()); + MOCK_CONST_METHOD0(createBlockQuery, + boost::optional>()); + MOCK_CONST_METHOD0( + createOsPersistentState, + boost::optional>()); + MOCK_CONST_METHOD2( + createQueryExecutor, + boost::optional>( + std::shared_ptr, + std::shared_ptr)); + MOCK_METHOD1(doCommit, void(MutableStorage *storage)); + MOCK_METHOD1(commitPrepared, + bool(const shared_model::interface::Block &)); + MOCK_METHOD1(insertBlock, bool(const shared_model::interface::Block &)); + MOCK_METHOD1(insertBlocks, + bool(const std::vector< + std::shared_ptr> &)); + MOCK_METHOD0(reset, void(void)); + MOCK_METHOD0(dropStorage, void(void)); + MOCK_METHOD0(freeConnections, void(void)); + MOCK_METHOD1(prepareBlock_, void(std::unique_ptr &)); + + void prepareBlock(std::unique_ptr wsv) override { + // gmock workaround for non-copyable parameters + prepareBlock_(wsv); + } + + rxcpp::observable> + on_commit() override { + return notifier.get_observable(); + } + void commit(std::unique_ptr storage) override { + doCommit(storage.get()); + } + rxcpp::subjects::subject> + notifier; + }; + + } // namespace ametsuchi +} // namespace iroha + +#endif // IROHA_MOCK_STORAGE_HPP diff --git a/test/module/irohad/ametsuchi/mock_temporary_factory.hpp b/test/module/irohad/ametsuchi/mock_temporary_factory.hpp new file mode 100644 index 0000000000..04aafa56fa --- /dev/null +++ b/test/module/irohad/ametsuchi/mock_temporary_factory.hpp @@ -0,0 +1,32 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_TEMPORARY_FACTORY_HPP +#define IROHA_MOCK_TEMPORARY_FACTORY_HPP + +#include "ametsuchi/temporary_factory.hpp" + +#include + +namespace iroha { + namespace ametsuchi { + + class MockTemporaryFactory : public TemporaryFactory { + public: + MOCK_METHOD0( + createTemporaryWsv, + expected::Result, std::string>(void)); + MOCK_METHOD1(prepareBlock_, void(std::unique_ptr &)); + + void prepareBlock(std::unique_ptr wsv) override { + // gmock workaround for non-copyable parameters + prepareBlock_(wsv); + } + }; + + } // namespace ametsuchi +} // namespace iroha + +#endif // IROHA_MOCK_TEMPORARY_FACTORY_HPP diff --git a/test/module/irohad/ametsuchi/mock_tx_presence_cache.hpp b/test/module/irohad/ametsuchi/mock_tx_presence_cache.hpp new file mode 100644 index 0000000000..1d64918f9e --- /dev/null +++ b/test/module/irohad/ametsuchi/mock_tx_presence_cache.hpp @@ -0,0 +1,31 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_TX_PRESENCE_CACHE_HPP +#define IROHA_MOCK_TX_PRESENCE_CACHE_HPP + +#include "ametsuchi/tx_presence_cache.hpp" + +#include + +namespace iroha { + namespace ametsuchi { + + class MockTxPresenceCache : public TxPresenceCache { + public: + MOCK_CONST_METHOD1(check, + boost::optional( + const shared_model::crypto::Hash &hash)); + + MOCK_CONST_METHOD1( + check, + boost::optional( + const shared_model::interface::TransactionBatch &)); + }; + + } // namespace ametsuchi +} // namespace iroha + +#endif // IROHA_MOCK_TX_PRESENCE_CACHE_HPP diff --git a/test/module/irohad/ametsuchi/mock_wsv_query.hpp b/test/module/irohad/ametsuchi/mock_wsv_query.hpp new file mode 100644 index 0000000000..fe80941326 --- /dev/null +++ b/test/module/irohad/ametsuchi/mock_wsv_query.hpp @@ -0,0 +1,31 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_WSV_QUERY_HPP +#define IROHA_MOCK_WSV_QUERY_HPP + +#include "ametsuchi/wsv_query.hpp" + +#include + +namespace iroha { + namespace ametsuchi { + + class MockWsvQuery : public WsvQuery { + public: + MOCK_METHOD1(getSignatories, + boost::optional< + std::vector>( + const std::string &account_id)); + MOCK_METHOD0( + getPeers, + boost::optional< + std::vector>>()); + }; + + } // namespace ametsuchi +} // namespace iroha + +#endif // IROHA_MOCK_WSV_QUERY_HPP diff --git a/test/module/irohad/ametsuchi/postgres_executor_test.cpp b/test/module/irohad/ametsuchi/postgres_executor_test.cpp index 41914faaec..2dcd4c3428 100644 --- a/test/module/irohad/ametsuchi/postgres_executor_test.cpp +++ b/test/module/irohad/ametsuchi/postgres_executor_test.cpp @@ -38,7 +38,8 @@ namespace iroha { void SetUp() override { AmetsuchiTest::SetUp(); - sql = std::make_unique(soci::postgresql, pgopt_); + sql = std::make_unique(*soci::factory_postgresql(), + pgopt_); auto factory = std::make_shared(soci::postgresql, pgopt_); + sql = std::make_unique(*soci::factory_postgresql(), + pgopt_); auto factory = std::make_shared &error) { FAIL() << error.error; }); - soci::session sql(soci::postgresql, pg_opt_without_dbname_); + soci::session sql(*soci::factory_postgresql(), pg_opt_without_dbname_); int size; sql << "SELECT COUNT(datname) FROM pg_catalog.pg_database WHERE datname = " ":dbname", diff --git a/test/module/irohad/ametsuchi/tx_presence_cache_test.cpp b/test/module/irohad/ametsuchi/tx_presence_cache_test.cpp index 4ee9f82206..388f667030 100644 --- a/test/module/irohad/ametsuchi/tx_presence_cache_test.cpp +++ b/test/module/irohad/ametsuchi/tx_presence_cache_test.cpp @@ -10,7 +10,8 @@ #include "interfaces/common_objects/transaction_sequence_common.hpp" #include "interfaces/iroha_internal/transaction_batch_factory_impl.hpp" #include "interfaces/iroha_internal/transaction_batch_impl.hpp" -#include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" +#include "module/irohad/ametsuchi/mock_block_query.hpp" +#include "module/irohad/ametsuchi/mock_storage.hpp" #include "module/shared_model/interface/mock_transaction_batch_factory.hpp" #include "module/shared_model/interface_mocks.hpp" diff --git a/test/module/irohad/ametsuchi/wsv_query_command_test.cpp b/test/module/irohad/ametsuchi/wsv_query_command_test.cpp index 6ce40e2c56..936c95ae53 100644 --- a/test/module/irohad/ametsuchi/wsv_query_command_test.cpp +++ b/test/module/irohad/ametsuchi/wsv_query_command_test.cpp @@ -20,7 +20,8 @@ namespace iroha { public: void SetUp() override { AmetsuchiTest::SetUp(); - sql = 
std::make_unique(soci::postgresql, pgopt_); + sql = std::make_unique(*soci::factory_postgresql(), + pgopt_); command = std::make_unique(*sql); query = std::make_unique(*sql, factory); diff --git a/test/module/irohad/consensus/yac/network_test.cpp b/test/module/irohad/consensus/yac/network_test.cpp index 751928e244..772dc34a34 100644 --- a/test/module/irohad/consensus/yac/network_test.cpp +++ b/test/module/irohad/consensus/yac/network_test.cpp @@ -19,8 +19,8 @@ namespace iroha { namespace yac { class YacNetworkTest : public ::testing::Test { public: - static constexpr auto default_ip = "0.0.0.0"; - static constexpr auto default_address = "0.0.0.0:0"; + static constexpr auto default_ip = "127.0.0.1"; + static constexpr auto default_address = "127.0.0.1:0"; void SetUp() override { notifications = std::make_shared(); async_call = std::make_shared< @@ -85,7 +85,8 @@ namespace iroha { // wait for response reader thread std::unique_lock lk(mtx); - cv.wait(lk, [&] { return processed; }); + ASSERT_TRUE(cv.wait_for( + lk, std::chrono::seconds(5), [&] { return processed; })); ASSERT_EQ(1, state.size()); ASSERT_EQ(message, state.front()); diff --git a/test/module/irohad/consensus/yac/peer_orderer_test.cpp b/test/module/irohad/consensus/yac/peer_orderer_test.cpp index ac0801379e..775f7a722e 100644 --- a/test/module/irohad/consensus/yac/peer_orderer_test.cpp +++ b/test/module/irohad/consensus/yac/peer_orderer_test.cpp @@ -13,7 +13,8 @@ #include #include "consensus/yac/impl/peer_orderer_impl.hpp" #include "consensus/yac/storage/yac_proposal_storage.hpp" -#include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" +#include "module/irohad/ametsuchi/mock_peer_query.hpp" +#include "module/irohad/ametsuchi/mock_peer_query_factory.hpp" #include "module/irohad/consensus/yac/yac_mocks.hpp" using namespace boost::adaptors; diff --git a/test/module/irohad/network/block_loader_test.cpp b/test/module/irohad/network/block_loader_test.cpp index 54f9379824..2c667c4dc8 100644 --- a/test/module/irohad/network/block_loader_test.cpp +++ b/test/module/irohad/network/block_loader_test.cpp @@ -14,7 +14,10 @@ #include "cryptography/hash.hpp" #include "datetime/time.hpp" #include "framework/test_subscriber.hpp" -#include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" +#include "module/irohad/ametsuchi/mock_block_query.hpp" +#include "module/irohad/ametsuchi/mock_block_query_factory.hpp" +#include "module/irohad/ametsuchi/mock_peer_query.hpp" +#include "module/irohad/ametsuchi/mock_peer_query_factory.hpp" #include "module/shared_model/builders/protobuf/test_block_builder.hpp" #include "module/shared_model/builders/protobuf/test_transaction_builder.hpp" #include "module/shared_model/interface_mocks.hpp" diff --git a/test/module/irohad/ordering/mock_on_demand_os_notification.hpp b/test/module/irohad/ordering/mock_on_demand_os_notification.hpp new file mode 100644 index 0000000000..5c40836831 --- /dev/null +++ b/test/module/irohad/ordering/mock_on_demand_os_notification.hpp @@ -0,0 +1,28 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_ON_DEMAND_OS_NOTIFICATION_HPP +#define IROHA_MOCK_ON_DEMAND_OS_NOTIFICATION_HPP + +#include "ordering/on_demand_os_transport.hpp" + +#include + +namespace iroha { + namespace ordering { + namespace transport { + + struct MockOdOsNotification : public OdOsNotification { + MOCK_METHOD2(onBatches, void(consensus::Round, CollectionType)); + + MOCK_METHOD1(onRequestProposal, + boost::optional(consensus::Round)); + }; + + } // namespace transport + } // namespace ordering +} // namespace iroha + +#endif // IROHA_MOCK_ON_DEMAND_OS_NOTIFICATION_HPP diff --git a/test/module/irohad/ordering/on_demand_connection_manager_test.cpp b/test/module/irohad/ordering/on_demand_connection_manager_test.cpp index e3205fc620..18f03f7f32 100644 --- a/test/module/irohad/ordering/on_demand_connection_manager_test.cpp +++ b/test/module/irohad/ordering/on_demand_connection_manager_test.cpp @@ -101,7 +101,7 @@ TEST_F(OnDemandConnectionManagerTest, onBatches) { * AND return data is forwarded */ TEST_F(OnDemandConnectionManagerTest, onRequestProposal) { - consensus::Round round; + consensus::Round round{}; boost::optional oproposal = OnDemandConnectionManager::ProposalType{}; auto proposal = oproposal.value().get(); @@ -123,7 +123,7 @@ TEST_F(OnDemandConnectionManagerTest, onRequestProposal) { * AND return data is forwarded */ TEST_F(OnDemandConnectionManagerTest, onRequestProposalNone) { - consensus::Round round; + consensus::Round round{}; boost::optional oproposal; EXPECT_CALL(*connections[OnDemandConnectionManager::kIssuer], onRequestProposal(round)) diff --git a/test/module/irohad/ordering/on_demand_ordering_gate_test.cpp b/test/module/irohad/ordering/on_demand_ordering_gate_test.cpp index 5332ee2b7e..20a30e50c9 100644 --- a/test/module/irohad/ordering/on_demand_ordering_gate_test.cpp +++ b/test/module/irohad/ordering/on_demand_ordering_gate_test.cpp @@ -9,7 +9,8 @@ #include #include "framework/test_subscriber.hpp" #include "interfaces/iroha_internal/transaction_batch_impl.hpp" -#include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" +#include "module/irohad/ametsuchi/mock_tx_presence_cache.hpp" +#include "module/irohad/ordering/mock_on_demand_os_notification.hpp" #include "module/irohad/ordering/ordering_mocks.hpp" #include "module/shared_model/interface_mocks.hpp" #include "ordering/impl/on_demand_common.hpp" diff --git a/test/module/irohad/ordering/on_demand_os_server_grpc_test.cpp b/test/module/irohad/ordering/on_demand_os_server_grpc_test.cpp index ead5870ce9..f72f9c5895 100644 --- a/test/module/irohad/ordering/on_demand_os_server_grpc_test.cpp +++ b/test/module/irohad/ordering/on_demand_os_server_grpc_test.cpp @@ -11,7 +11,7 @@ #include "backend/protobuf/transaction.hpp" #include "interfaces/iroha_internal/transaction_batch_impl.hpp" #include "interfaces/iroha_internal/transaction_batch_parser_impl.hpp" -#include "module/irohad/ordering/ordering_mocks.hpp" +#include "module/irohad/ordering/mock_on_demand_os_notification.hpp" #include "module/shared_model/interface/mock_transaction_batch_factory.hpp" #include "module/shared_model/validators/validators.hpp" diff --git a/test/module/irohad/ordering/ordering_mocks.hpp b/test/module/irohad/ordering/ordering_mocks.hpp index 0f2100a5ad..9046ee8996 100644 --- a/test/module/irohad/ordering/ordering_mocks.hpp +++ b/test/module/irohad/ordering/ordering_mocks.hpp @@ -8,6 +8,7 @@ #include +#include "module/irohad/ordering/mock_on_demand_os_notification.hpp" #include 
"ordering/impl/ordering_gate_cache/ordering_gate_cache.hpp" #include "ordering/on_demand_ordering_service.hpp" #include "ordering/on_demand_os_transport.hpp" @@ -16,13 +17,6 @@ namespace iroha { namespace ordering { namespace transport { - struct MockOdOsNotification : public OdOsNotification { - MOCK_METHOD2(onBatches, void(consensus::Round, CollectionType)); - - MOCK_METHOD1(onRequestProposal, - boost::optional(consensus::Round)); - }; - struct MockOdOsNotificationFactory : public OdOsNotificationFactory { MOCK_METHOD1(create, std::unique_ptr( diff --git a/test/module/irohad/ordering/ordering_service_test.cpp b/test/module/irohad/ordering/ordering_service_test.cpp index b32c8d63e5..a77783eb75 100644 --- a/test/module/irohad/ordering/ordering_service_test.cpp +++ b/test/module/irohad/ordering/ordering_service_test.cpp @@ -98,7 +98,7 @@ class OrderingServiceTest : public ::testing::Test { std::shared_ptr persistent_state_factory; std::condition_variable cv; std::mutex m; - std::string address{"0.0.0.0:50051"}; + std::string address{"127.0.0.1:50051"}; shared_model::interface::types::PubkeyType pk{std::string(32, '0')}; std::shared_ptr peer; std::shared_ptr wsv; diff --git a/test/module/irohad/simulator/simulator_test.cpp b/test/module/irohad/simulator/simulator_test.cpp index 570b45da9d..93f7418e6e 100644 --- a/test/module/irohad/simulator/simulator_test.cpp +++ b/test/module/irohad/simulator/simulator_test.cpp @@ -14,9 +14,11 @@ #include "builders/protobuf/transaction.hpp" #include "datetime/time.hpp" #include "framework/test_subscriber.hpp" -#include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" +#include "module/irohad/ametsuchi/mock_block_query.hpp" +#include "module/irohad/ametsuchi/mock_block_query_factory.hpp" +#include "module/irohad/ametsuchi/mock_temporary_factory.hpp" #include "module/irohad/network/network_mocks.hpp" -#include "module/irohad/validation/validation_mocks.hpp" +#include "module/irohad/validation/mock_stateful_validator.hpp" #include "module/shared_model/builders/protobuf/proposal.hpp" #include "module/shared_model/builders/protobuf/test_block_builder.hpp" #include "module/shared_model/builders/protobuf/test_proposal_builder.hpp" diff --git a/test/module/irohad/synchronizer/synchronizer_test.cpp b/test/module/irohad/synchronizer/synchronizer_test.cpp index c7df066424..e646ceeee9 100644 --- a/test/module/irohad/synchronizer/synchronizer_test.cpp +++ b/test/module/irohad/synchronizer/synchronizer_test.cpp @@ -9,7 +9,10 @@ #include #include "backend/protobuf/block.hpp" #include "framework/test_subscriber.hpp" -#include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" +#include "module/irohad/ametsuchi/mock_block_query.hpp" +#include "module/irohad/ametsuchi/mock_block_query_factory.hpp" +#include "module/irohad/ametsuchi/mock_mutable_factory.hpp" +#include "module/irohad/ametsuchi/mock_mutable_storage.hpp" #include "module/irohad/network/network_mocks.hpp" #include "module/irohad/validation/validation_mocks.hpp" #include "module/shared_model/builders/protobuf/block.hpp" @@ -28,6 +31,17 @@ using ::testing::ByMove; using ::testing::DefaultValue; using ::testing::Return; +/** + * Factory for mock mutable storage generation. + * This method provides technique, + * when required to return object wrapped in Result. 
+ */ +expected::Result, std::string> +createMockMutableStorage() { + return expected::makeValue>( + std::make_unique()); +} + class SynchronizerTest : public ::testing::Test { public: void SetUp() override { diff --git a/test/module/irohad/torii/processor/mock_query_processor.hpp b/test/module/irohad/torii/processor/mock_query_processor.hpp new file mode 100644 index 0000000000..c397ceadfa --- /dev/null +++ b/test/module/irohad/torii/processor/mock_query_processor.hpp @@ -0,0 +1,31 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_QUERY_PROCESSOR_HPP +#define IROHA_MOCK_QUERY_PROCESSOR_HPP + +#include "torii/processor/query_processor.hpp" + +#include + +namespace iroha { + namespace torii { + + class MockQueryProcessor : public QueryProcessor { + public: + MOCK_METHOD1(queryHandle, + std::unique_ptr( + const shared_model::interface::Query &)); + MOCK_METHOD1( + blocksQueryHandle, + rxcpp::observable< + std::shared_ptr>( + const shared_model::interface::BlocksQuery &)); + }; + + } // namespace torii +} // namespace iroha + +#endif // IROHA_MOCK_QUERY_PROCESSOR_HPP diff --git a/test/module/irohad/torii/processor/query_processor_test.cpp b/test/module/irohad/torii/processor/query_processor_test.cpp index 4b022d875a..fdb1d87324 100644 --- a/test/module/irohad/torii/processor/query_processor_test.cpp +++ b/test/module/irohad/torii/processor/query_processor_test.cpp @@ -11,7 +11,10 @@ #include "cryptography/keypair.hpp" #include "framework/test_subscriber.hpp" #include "interfaces/query_responses/block_query_response.hpp" -#include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" +#include "module/irohad/ametsuchi/mock_block_query.hpp" +#include "module/irohad/ametsuchi/mock_query_executor.hpp" +#include "module/irohad/ametsuchi/mock_storage.hpp" +#include "module/irohad/ametsuchi/mock_wsv_query.hpp" #include "module/irohad/validation/validation_mocks.hpp" #include "module/shared_model/builders/protobuf/test_block_builder.hpp" #include "module/shared_model/builders/protobuf/test_query_builder.hpp" diff --git a/test/module/irohad/torii/query_service_test.cpp b/test/module/irohad/torii/query_service_test.cpp index 284488352f..cf9a90b0a3 100644 --- a/test/module/irohad/torii/query_service_test.cpp +++ b/test/module/irohad/torii/query_service_test.cpp @@ -8,7 +8,7 @@ #include "backend/protobuf/proto_transport_factory.hpp" #include "backend/protobuf/query_responses/proto_query_response.hpp" #include "builders/protobuf/queries.hpp" -#include "module/irohad/torii/torii_mocks.hpp" +#include "module/irohad/torii/processor/mock_query_processor.hpp" #include "utils/query_error_response_visitor.hpp" #include "validators/protobuf/proto_query_validator.hpp" diff --git a/test/module/irohad/torii/torii_mocks.hpp b/test/module/irohad/torii/torii_mocks.hpp index fa2da2ee33..0abcc050e3 100644 --- a/test/module/irohad/torii/torii_mocks.hpp +++ b/test/module/irohad/torii/torii_mocks.hpp @@ -20,18 +20,6 @@ namespace iroha { namespace torii { - class MockQueryProcessor : public QueryProcessor { - public: - MOCK_METHOD1(queryHandle, - std::unique_ptr( - const shared_model::interface::Query &)); - MOCK_METHOD1( - blocksQueryHandle, - rxcpp::observable< - std::shared_ptr>( - const shared_model::interface::BlocksQuery &)); - }; - class MockStatusBus : public StatusBus { public: MOCK_METHOD1(publish, void(StatusBus::Objects)); diff --git a/test/module/irohad/torii/torii_queries_test.cpp b/test/module/irohad/torii/torii_queries_test.cpp index 
7419b212a8..83135fefa8 100644 --- a/test/module/irohad/torii/torii_queries_test.cpp +++ b/test/module/irohad/torii/torii_queries_test.cpp @@ -5,7 +5,10 @@ #include #include "crypto/keypair.hpp" -#include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" +#include "module/irohad/ametsuchi/mock_block_query.hpp" +#include "module/irohad/ametsuchi/mock_query_executor.hpp" +#include "module/irohad/ametsuchi/mock_storage.hpp" +#include "module/irohad/ametsuchi/mock_wsv_query.hpp" #include "module/irohad/network/network_mocks.hpp" #include "module/irohad/pending_txs_storage/pending_txs_storage_mock.hpp" #include "module/irohad/torii/torii_mocks.hpp" diff --git a/test/module/irohad/torii/torii_service_query_test.cpp b/test/module/irohad/torii/torii_service_query_test.cpp index 59e80817d6..893f00d9ac 100644 --- a/test/module/irohad/torii/torii_service_query_test.cpp +++ b/test/module/irohad/torii/torii_service_query_test.cpp @@ -12,7 +12,7 @@ #include "builders/default_builders.hpp" #include "builders/protobuf/queries.hpp" #include "main/server_runner.hpp" -#include "module/irohad/torii/torii_mocks.hpp" +#include "module/irohad/torii/processor/mock_query_processor.hpp" #include "module/shared_model/builders/protobuf/test_query_builder.hpp" #include "torii/query_client.hpp" #include "torii/query_service.hpp" diff --git a/test/module/irohad/torii/torii_transport_command_test.cpp b/test/module/irohad/torii/torii_transport_command_test.cpp index 77782a4a86..c69c697a76 100644 --- a/test/module/irohad/torii/torii_transport_command_test.cpp +++ b/test/module/irohad/torii/torii_transport_command_test.cpp @@ -19,7 +19,6 @@ #include "interfaces/iroha_internal/transaction_batch.hpp" #include "interfaces/iroha_internal/transaction_batch_factory_impl.hpp" #include "interfaces/iroha_internal/transaction_batch_parser_impl.hpp" -#include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" #include "module/irohad/network/network_mocks.hpp" #include "module/irohad/torii/torii_mocks.hpp" #include "module/shared_model/interface/mock_transaction_batch_factory.hpp" @@ -35,7 +34,6 @@ using ::testing::Property; using ::testing::Return; using ::testing::StrEq; -using namespace iroha::ametsuchi; using namespace iroha::torii; using namespace std::chrono_literals; diff --git a/test/module/irohad/validation/chain_validation_test.cpp b/test/module/irohad/validation/chain_validation_test.cpp index 07b549f7ef..98998e2915 100644 --- a/test/module/irohad/validation/chain_validation_test.cpp +++ b/test/module/irohad/validation/chain_validation_test.cpp @@ -6,7 +6,8 @@ #include "validation/impl/chain_validator_impl.hpp" #include -#include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" +#include "module/irohad/ametsuchi/mock_mutable_storage.hpp" +#include "module/irohad/ametsuchi/mock_peer_query.hpp" #include "module/irohad/consensus/yac/yac_mocks.hpp" #include "module/shared_model/interface_mocks.hpp" diff --git a/test/module/irohad/validation/mock_stateful_validator.hpp b/test/module/irohad/validation/mock_stateful_validator.hpp new file mode 100644 index 0000000000..8af73fb5d5 --- /dev/null +++ b/test/module/irohad/validation/mock_stateful_validator.hpp @@ -0,0 +1,27 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_STATEFUL_VALIDATOR_HPP +#define IROHA_MOCK_STATEFUL_VALIDATOR_HPP + +#include "validation/stateful_validator.hpp" + +#include + +namespace iroha { + namespace validation { + + class MockStatefulValidator : public StatefulValidator { + public: + MOCK_METHOD2(validate, + std::unique_ptr( + const shared_model::interface::Proposal &, + ametsuchi::TemporaryWsv &)); + }; + + } // namespace validation +} // namespace iroha + +#endif // IROHA_MOCK_STATEFUL_VALIDATOR_HPP diff --git a/test/module/irohad/validation/validation_mocks.hpp b/test/module/irohad/validation/validation_mocks.hpp index 160b12f0af..a9196ae11d 100644 --- a/test/module/irohad/validation/validation_mocks.hpp +++ b/test/module/irohad/validation/validation_mocks.hpp @@ -10,18 +10,11 @@ #include "interfaces/common_objects/types.hpp" #include "interfaces/iroha_internal/proposal.hpp" +#include "module/irohad/validation/mock_stateful_validator.hpp" #include "validation/chain_validator.hpp" -#include "validation/stateful_validator.hpp" namespace iroha { namespace validation { - class MockStatefulValidator : public validation::StatefulValidator { - public: - MOCK_METHOD2(validate, - std::unique_ptr( - const shared_model::interface::Proposal &, - ametsuchi::TemporaryWsv &)); - }; class MockChainValidator : public ChainValidator { public: diff --git a/test/module/libs/cache/single_pointer_cache_test.cpp b/test/module/libs/cache/single_pointer_cache_test.cpp index 0ad16f6fbc..f6d20ae936 100644 --- a/test/module/libs/cache/single_pointer_cache_test.cpp +++ b/test/module/libs/cache/single_pointer_cache_test.cpp @@ -21,6 +21,8 @@ class SinglePointerCacheTest : public ::testing::Test { SinglePointerIntCache int_cache; const int default_int_value = 5; + const int run_times{10}; + const std::chrono::milliseconds sleep_interval{100}; }; /** @@ -64,10 +66,7 @@ TEST_F(SinglePointerCacheTest, Release) { * try to give removed value to the second thread */ TEST_F(SinglePointerCacheTest, MultithreadedCache) { - constexpr std::chrono::milliseconds sleep_interval{100}; - constexpr int run_times{10}; - - auto read = [this, &sleep_interval] { + auto read = [this] { // if cache is not empty, read the value; otherwise do nothing for (auto i = 0; i < run_times; ++i) { auto value_ptr = int_cache.get(); @@ -77,21 +76,21 @@ TEST_F(SinglePointerCacheTest, MultithreadedCache) { std::this_thread::sleep_for(sleep_interval); } }; - auto write_one = [this, &sleep_interval] { + auto write_one = [this] { // just write to cache for (auto i = 0; i < run_times; i++) { std::this_thread::sleep_for(sleep_interval); int_cache.insert(std::make_shared(i)); } }; - auto write_two = [this, &sleep_interval] { + auto write_two = [this] { // just write to cache for (auto i = run_times; i > 0; --i) { std::this_thread::sleep_for(sleep_interval); int_cache.insert(std::make_shared(i)); } }; - auto release = [this, &sleep_interval] { + auto release = [this] { // release the cache for (auto i = 0; i < run_times; ++i) { int_cache.release(); diff --git a/test/system/irohad_test.cpp b/test/system/irohad_test.cpp index ca37e0a248..d7dee9c091 100644 --- a/test/system/irohad_test.cpp +++ b/test/system/irohad_test.cpp @@ -206,7 +206,7 @@ DROP TABLE IF EXISTS index_by_creator_height; DROP TABLE IF EXISTS position_by_account_asset; )"; - soci::session sql(soci::postgresql, pgopts_); + soci::session sql(*soci::factory_postgresql(), pgopts_); sql << drop; } From d95a34c1d2a0984b296968d8ada7033ad46679f9 Mon Sep 17 00:00:00 2001 From: 
Igor Egorov Date: Tue, 29 Jan 2019 15:56:08 +0300 Subject: [PATCH 21/41] Python sha3 dependency to docker (#2047) Python sha3 dependency to docker Signed-off-by: Igor Egorov --- docker/dependencies/Dockerfile | 4 ++-- docker/develop/Dockerfile | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/dependencies/Dockerfile b/docker/dependencies/Dockerfile index cabe3f8c3b..8c9cc2f7e9 100644 --- a/docker/dependencies/Dockerfile +++ b/docker/dependencies/Dockerfile @@ -277,8 +277,8 @@ RUN set -e; \ # python bindings dependencies RUN set -e; \ - pip install grpcio_tools; \ - pip3 install grpcio_tools + pip install grpcio_tools pysha3; \ + pip3 install grpcio_tools pysha3; # install lcov RUN set -e; \ diff --git a/docker/develop/Dockerfile b/docker/develop/Dockerfile index e9f84225ce..f4d100b2d8 100644 --- a/docker/develop/Dockerfile +++ b/docker/develop/Dockerfile @@ -272,8 +272,8 @@ RUN set -e; \ # python bindings dependencies RUN set -e; \ - pip install grpcio_tools; \ - pip3 install grpcio_tools + pip install grpcio_tools pysha3; \ + pip3 install grpcio_tools pysha3; # install lcov RUN set -e; \ From b69759cb337525fef5d56514ffde8f7606994f0e Mon Sep 17 00:00:00 2001 From: Nikita Alekseev Date: Wed, 30 Jan 2019 09:58:55 +0300 Subject: [PATCH 22/41] Retrieve Ledger state from storage on commit in synchronizer (#2041) * make getVerifiedProposal return block and emit event in subscription * remove round from proposal functions * add LedgerEvent object * add LedgerState return on Commit * return ledger state from commit prepared * pass ledger state via synchronize * Commit from mutable storage returns boost::none in case of failure * refactor peer retrieval in storage. Signed-off-by: Nikita Alekseev --- irohad/ametsuchi/impl/storage_impl.cpp | 36 ++++++-- irohad/ametsuchi/impl/storage_impl.hpp | 6 +- irohad/ametsuchi/ledger_state.hpp | 25 ++++++ irohad/ametsuchi/mutable_factory.hpp | 17 ++-- .../synchronizer/impl/synchronizer_impl.cpp | 52 ++++++++--- .../synchronizer/impl/synchronizer_impl.hpp | 3 +- irohad/synchronizer/synchronizer_common.hpp | 2 + .../irohad/ametsuchi/mock_mutable_factory.hpp | 12 ++- test/module/irohad/ametsuchi/mock_storage.hpp | 12 ++- .../irohad/ordering/ordering_gate_test.cpp | 6 +- .../irohad/synchronizer/synchronizer_test.cpp | 89 +++++++++++++++++-- .../processor/transaction_processor_test.cpp | 3 + .../irohad_test_data/config.sample.copy | 11 --- 13 files changed, 218 insertions(+), 56 deletions(-) create mode 100644 irohad/ametsuchi/ledger_state.hpp delete mode 100644 test/system/irohad_test_data/config.sample.copy diff --git a/irohad/ametsuchi/impl/storage_impl.cpp b/irohad/ametsuchi/impl/storage_impl.cpp index 65d3930b89..77048a6fe1 100644 --- a/irohad/ametsuchi/impl/storage_impl.cpp +++ b/irohad/ametsuchi/impl/storage_impl.cpp @@ -411,7 +411,8 @@ namespace iroha { return storage; } - void StorageImpl::commit(std::unique_ptr mutableStorage) { + boost::optional> StorageImpl::commit( + std::unique_ptr mutableStorage) { auto storage_ptr = std::move(mutableStorage); // get ownership of storage auto storage = static_cast(storage_ptr.get()); for (const auto &block : storage->block_store_) { @@ -420,22 +421,30 @@ namespace iroha { try { *(storage->sql_) << "COMMIT"; storage->committed = true; + return createPeerQuery() | + [](const auto &peer_query) { return peer_query->getLedgerPeers(); } + | [](auto &&peers) { + return boost::optional>( + std::make_unique( + std::make_shared(std::move(peers)))); + }; } catch (std::exception &e) { storage->committed 
= false; log_->warn("Mutable storage is not committed. Reason: {}", e.what()); + return boost::none; } } - bool StorageImpl::commitPrepared( + boost::optional> StorageImpl::commitPrepared( const shared_model::interface::Block &block) { if (not prepared_blocks_enabled_) { log_->warn("prepared blocks are not enabled"); - return false; + return boost::none; } if (not block_is_prepared) { log_->info("there are no prepared blocks"); - return false; + return boost::none; } log_->info("applying prepared block"); @@ -443,7 +452,7 @@ namespace iroha { std::shared_lock lock(drop_mutex); if (not connection_) { log_->info("connection to database is not initialised"); - return false; + return boost::none; } soci::session sql(*connection_); sql << "COMMIT PREPARED '" + prepared_block_name_ + "';"; @@ -454,10 +463,21 @@ namespace iroha { log_->warn("failed to apply prepared block {}: {}", block.hash().hex(), e.what()); - return false; + return boost::none; } - - return storeBlock(block); + return createPeerQuery() | + [](const auto &peer_query) { + return peer_query->getLedgerPeers(); + } + | [this, &block](auto &&peers) + -> boost::optional> { + if (this->storeBlock(block)) { + return boost::optional>( + std::make_unique( + std::make_shared(std::move(peers)))); + } + return boost::none; + }; } std::shared_ptr StorageImpl::getWsvQuery() const { diff --git a/irohad/ametsuchi/impl/storage_impl.hpp b/irohad/ametsuchi/impl/storage_impl.hpp index 0ddef6e888..a64655a819 100644 --- a/irohad/ametsuchi/impl/storage_impl.hpp +++ b/irohad/ametsuchi/impl/storage_impl.hpp @@ -100,9 +100,11 @@ namespace iroha { void freeConnections() override; - void commit(std::unique_ptr mutableStorage) override; + boost::optional> commit( + std::unique_ptr mutableStorage) override; - bool commitPrepared(const shared_model::interface::Block &block) override; + boost::optional> commitPrepared( + const shared_model::interface::Block &block) override; std::shared_ptr getWsvQuery() const override; diff --git a/irohad/ametsuchi/ledger_state.hpp b/irohad/ametsuchi/ledger_state.hpp new file mode 100644 index 0000000000..6f4a4645ca --- /dev/null +++ b/irohad/ametsuchi/ledger_state.hpp @@ -0,0 +1,25 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_LEDGER_STATE_HPP +#define IROHA_LEDGER_STATE_HPP + +#include + +#include "interfaces/common_objects/peer.hpp" + +namespace iroha { + using PeerList = std::vector>; + + struct LedgerState { + std::shared_ptr ledger_peers; + + LedgerState(std::shared_ptr peers) + : ledger_peers(std::move(peers)) {} + LedgerState() = default; + }; +} // namespace iroha + +#endif // IROHA_LEDGER_STATE_HPP diff --git a/irohad/ametsuchi/mutable_factory.hpp b/irohad/ametsuchi/mutable_factory.hpp index c53a27f64a..ece5a66ee7 100644 --- a/irohad/ametsuchi/mutable_factory.hpp +++ b/irohad/ametsuchi/mutable_factory.hpp @@ -7,13 +7,16 @@ #define IROHA_MUTABLE_FACTORY_HPP #include + +#include #include "common/result.hpp" +#include "ametsuchi/ledger_state.hpp" namespace shared_model { namespace interface { class Block; } -} +} // namespace shared_model namespace iroha { namespace ametsuchi { @@ -35,16 +38,18 @@ namespace iroha { * This transforms Ametsuchi to the new state consistent with * MutableStorage. 
* @param mutableStorage + * @return new state of the ledger, boost::none if commit failed */ - virtual void commit(std::unique_ptr mutableStorage) = 0; + virtual boost::optional> commit( + std::unique_ptr mutableStorage) = 0; /** * Try to apply prepared block to Ametsuchi. - * @return true if commit is succesful, false if prepared block failed - * to apply. WSV is not changed if it returns false. - * + * @return state of the ledger if commit is succesful, boost::none if + * prepared block failed to apply. WSV is not changed in this case. */ - virtual bool commitPrepared(const shared_model::interface::Block& block) = 0; + virtual boost::optional> commitPrepared( + const shared_model::interface::Block &block) = 0; virtual ~MutableFactory() = default; }; diff --git a/irohad/synchronizer/impl/synchronizer_impl.cpp b/irohad/synchronizer/impl/synchronizer_impl.cpp index 3e095cf1bc..6af915e2dc 100644 --- a/irohad/synchronizer/impl/synchronizer_impl.cpp +++ b/irohad/synchronizer/impl/synchronizer_impl.cpp @@ -42,6 +42,8 @@ namespace iroha { this->processDifferent(msg); }, [this](const consensus::ProposalReject &msg) { + // TODO: nickaleks IR-147 18.01.19 add peers + // list from GateObject when it has one notifier_.get_subscriber().on_next(SynchronizationEvent{ rxcpp::observable<>::empty< std::shared_ptr>(), @@ -49,6 +51,8 @@ namespace iroha { msg.round}); }, [this](const consensus::BlockReject &msg) { + // TODO: nickaleks IR-147 18.01.19 add peers + // list from GateObject when it has one notifier_.get_subscriber().on_next(SynchronizationEvent{ rxcpp::observable<>::empty< std::shared_ptr>(), @@ -56,6 +60,8 @@ namespace iroha { msg.round}); }, [this](const consensus::AgreementOnNone &msg) { + // TODO: nickaleks IR-147 18.01.19 add peers + // list from GateObject when it has one notifier_.get_subscriber().on_next(SynchronizationEvent{ rxcpp::observable<>::empty< std::shared_ptr>(), @@ -64,7 +70,8 @@ namespace iroha { }); } - SynchronizationEvent SynchronizerImpl::downloadMissingBlocks( + boost::optional + SynchronizerImpl::downloadMissingBlocks( const consensus::VoteOther &msg, std::unique_ptr storage, const shared_model::interface::types::HeightType height) { @@ -92,9 +99,16 @@ namespace iroha { if (blocks.back()->height() >= expected_height and validator_->validateAndApply(chain, *storage)) { - mutable_factory_->commit(std::move(storage)); - - return {chain, SynchronizationOutcomeType::kCommit, msg.round}; + auto ledger_state = mutable_factory_->commit(std::move(storage)); + + if (ledger_state) { + return SynchronizationEvent{chain, + SynchronizationOutcomeType::kCommit, + msg.round, + std::move(*ledger_state)}; + } else { + return boost::none; + } } } } @@ -117,7 +131,14 @@ namespace iroha { void SynchronizerImpl::processNext(const consensus::PairValid &msg) { log_->info("at handleNext"); - if (not mutable_factory_->commitPrepared(*msg.block)) { + auto ledger_state = mutable_factory_->commitPrepared(*msg.block); + if (ledger_state) { + notifier_.get_subscriber().on_next( + SynchronizationEvent{rxcpp::observable<>::just(msg.block), + SynchronizationOutcomeType::kCommit, + msg.round, + std::move(*ledger_state)}); + } else { auto opt_storage = getStorage(); if (opt_storage == boost::none) { return; @@ -125,15 +146,20 @@ namespace iroha { std::unique_ptr storage = std::move(opt_storage.value()); if (storage->apply(*msg.block)) { - mutable_factory_->commit(std::move(storage)); + ledger_state = mutable_factory_->commit(std::move(storage)); + if (ledger_state) { + notifier_.get_subscriber().on_next( + 
SynchronizationEvent{rxcpp::observable<>::just(msg.block), + SynchronizationOutcomeType::kCommit, + msg.round, + std::move(*ledger_state)}); + } else { + log_->error("failed to commit mutable storage"); + } } else { log_->warn("Block was not committed due to fail in mutable storage"); } } - notifier_.get_subscriber().on_next( - SynchronizationEvent{rxcpp::observable<>::just(msg.block), - SynchronizationOutcomeType::kCommit, - msg.round}); } void SynchronizerImpl::processDifferent(const consensus::VoteOther &msg) { @@ -161,9 +187,11 @@ namespace iroha { } std::unique_ptr storage = std::move(opt_storage.value()); - SynchronizationEvent result = + auto result = downloadMissingBlocks(msg, std::move(storage), top_block_height); - notifier_.get_subscriber().on_next(result); + if (result) { + notifier_.get_subscriber().on_next(*result); + } } rxcpp::observable diff --git a/irohad/synchronizer/impl/synchronizer_impl.hpp b/irohad/synchronizer/impl/synchronizer_impl.hpp index a34bc8efb0..aaa816cd1f 100644 --- a/irohad/synchronizer/impl/synchronizer_impl.hpp +++ b/irohad/synchronizer/impl/synchronizer_impl.hpp @@ -9,6 +9,7 @@ #include "synchronizer/synchronizer.hpp" #include "ametsuchi/mutable_factory.hpp" +#include "ametsuchi/peer_query_factory.hpp" #include "logger/logger.hpp" #include "network/block_loader.hpp" #include "network/consensus_gate.hpp" @@ -47,7 +48,7 @@ namespace iroha { * @param height - the top block height of a peer that needs to be * synchronized */ - SynchronizationEvent downloadMissingBlocks( + boost::optional downloadMissingBlocks( const consensus::VoteOther &msg, std::unique_ptr storage, const shared_model::interface::types::HeightType height); diff --git a/irohad/synchronizer/synchronizer_common.hpp b/irohad/synchronizer/synchronizer_common.hpp index de339b2d99..58fe211458 100644 --- a/irohad/synchronizer/synchronizer_common.hpp +++ b/irohad/synchronizer/synchronizer_common.hpp @@ -10,6 +10,7 @@ #include +#include "ametsuchi/ledger_state.hpp" #include "consensus/round.hpp" #include "interfaces/iroha_internal/block.hpp" @@ -40,6 +41,7 @@ namespace iroha { Chain synced_blocks; SynchronizationOutcomeType sync_outcome; consensus::Round round; + std::shared_ptr ledger_state; }; } // namespace synchronizer diff --git a/test/module/irohad/ametsuchi/mock_mutable_factory.hpp b/test/module/irohad/ametsuchi/mock_mutable_factory.hpp index 0bf0f4af76..5f2f2325a2 100644 --- a/test/module/irohad/ametsuchi/mock_mutable_factory.hpp +++ b/test/module/irohad/ametsuchi/mock_mutable_factory.hpp @@ -19,14 +19,18 @@ namespace iroha { createMutableStorage, expected::Result, std::string>(void)); - void commit(std::unique_ptr mutableStorage) override { + boost::optional> commit( + std::unique_ptr mutableStorage) override { // gmock workaround for non-copyable parameters - commit_(mutableStorage); + return commit_(mutableStorage); } MOCK_METHOD1(commitPrepared, - bool(const shared_model::interface::Block &)); - MOCK_METHOD1(commit_, void(std::unique_ptr &)); + boost::optional>( + const shared_model::interface::Block &)); + MOCK_METHOD1(commit_, + boost::optional>( + std::unique_ptr &)); }; } // namespace ametsuchi diff --git a/test/module/irohad/ametsuchi/mock_storage.hpp b/test/module/irohad/ametsuchi/mock_storage.hpp index 5cb63bd31a..1113787d26 100644 --- a/test/module/irohad/ametsuchi/mock_storage.hpp +++ b/test/module/irohad/ametsuchi/mock_storage.hpp @@ -37,9 +37,12 @@ namespace iroha { boost::optional>( std::shared_ptr, std::shared_ptr)); - MOCK_METHOD1(doCommit, void(MutableStorage *storage)); 
+ MOCK_METHOD1(doCommit, + boost::optional>( + MutableStorage *storage)); MOCK_METHOD1(commitPrepared, - bool(const shared_model::interface::Block &)); + boost::optional>( + const shared_model::interface::Block &)); MOCK_METHOD1(insertBlock, bool(const shared_model::interface::Block &)); MOCK_METHOD1(insertBlocks, bool(const std::vector< @@ -58,8 +61,9 @@ namespace iroha { on_commit() override { return notifier.get_observable(); } - void commit(std::unique_ptr storage) override { - doCommit(storage.get()); + boost::optional> commit( + std::unique_ptr storage) override { + return doCommit(storage.get()); } rxcpp::subjects::subject> notifier; diff --git a/test/module/irohad/ordering/ordering_gate_test.cpp b/test/module/irohad/ordering/ordering_gate_test.cpp index 4619115d60..db3f51f53c 100644 --- a/test/module/irohad/ordering/ordering_gate_test.cpp +++ b/test/module/irohad/ordering/ordering_gate_test.cpp @@ -153,7 +153,8 @@ class QueueBehaviorTest : public ::testing::Test { std::make_shared( TestBlockBuilder().height(height).build()))), SynchronizationOutcomeType::kCommit, - {height, 1}}); + {height, 1}, + {}}); } void pushProposal(HeightType height) { @@ -213,7 +214,8 @@ TEST_F(QueueBehaviorTest, SendManyProposals) { commit_subject.get_subscriber().on_next( SynchronizationEvent{rxcpp::observable<>::just(block), SynchronizationOutcomeType::kCommit, - {block->height(), 1}}); + {block->height(), 1}, + {}}); ASSERT_TRUE(wrapper_after.validate()); } diff --git a/test/module/irohad/synchronizer/synchronizer_test.cpp b/test/module/irohad/synchronizer/synchronizer_test.cpp index e646ceeee9..7cd76567ee 100644 --- a/test/module/irohad/synchronizer/synchronizer_test.cpp +++ b/test/module/irohad/synchronizer/synchronizer_test.cpp @@ -17,6 +17,7 @@ #include "module/irohad/validation/validation_mocks.hpp" #include "module/shared_model/builders/protobuf/block.hpp" #include "module/shared_model/builders/protobuf/test_block_builder.hpp" +#include "module/shared_model/interface_mocks.hpp" #include "validation/chain_validator.hpp" using namespace iroha; @@ -75,6 +76,9 @@ class SynchronizerTest : public ::testing::Test { mutable_factory, block_query_factory, block_loader); + + peer = makePeer("127.0.0.1", shared_model::crypto::PublicKey("111")); + ledger_peers = std::make_shared(PeerList{peer}); } std::shared_ptr makeCommit( @@ -102,6 +106,8 @@ class SynchronizerTest : public ::testing::Test { std::shared_ptr commit_message; shared_model::interface::types::PublicKeyCollectionType public_keys; shared_model::interface::types::HashType hash; + std::shared_ptr peer; + std::shared_ptr ledger_peers; rxcpp::subjects::subject gate_outcome; @@ -114,6 +120,8 @@ class SynchronizerTest : public ::testing::Test { * @then Successful commit */ TEST_F(SynchronizerTest, ValidWhenSingleCommitSynchronized) { + EXPECT_CALL(*mutable_factory, commitPrepared(_)) + .WillOnce(Return(ByMove(boost::none))); EXPECT_CALL(*mutable_factory, createMutableStorage()) .WillOnce(::testing::Invoke( []() -> expected::Result, @@ -123,13 +131,15 @@ TEST_F(SynchronizerTest, ValidWhenSingleCommitSynchronized) { return expected::Value>{ std::move(mutable_storage)}; })); - EXPECT_CALL(*mutable_factory, commit_(_)).Times(1); + EXPECT_CALL(*mutable_factory, commit_(_)) + .WillOnce(Return(ByMove(std::make_unique(ledger_peers)))); EXPECT_CALL(*chain_validator, validateAndApply(_, _)).Times(0); EXPECT_CALL(*block_loader, retrieveBlocks(_, _)).Times(0); auto wrapper = make_test_subscriber(synchronizer->on_commit_chain(), 1); wrapper.subscribe([this](auto 
commit_event) { + EXPECT_EQ(*this->ledger_peers, *commit_event.ledger_state->ledger_peers); auto block_wrapper = make_test_subscriber(commit_event.synced_blocks, 1); block_wrapper.subscribe([this](auto block) { @@ -181,7 +191,8 @@ TEST_F(SynchronizerTest, ValidWhenValidChain) { EXPECT_CALL(*mutable_factory, createMutableStorage()).Times(1); - EXPECT_CALL(*mutable_factory, commit_(_)).Times(1); + EXPECT_CALL(*mutable_factory, commit_(_)) + .WillOnce(Return(ByMove(std::make_unique(ledger_peers)))); EXPECT_CALL(*chain_validator, validateAndApply(_, _)).WillOnce(Return(true)); EXPECT_CALL(*block_loader, retrieveBlocks(_, _)) .WillOnce(Return(rxcpp::observable<>::just(commit_message))); @@ -189,6 +200,7 @@ TEST_F(SynchronizerTest, ValidWhenValidChain) { auto wrapper = make_test_subscriber(synchronizer->on_commit_chain(), 1); wrapper.subscribe([this](auto commit_event) { + EXPECT_EQ(*this->ledger_peers, *commit_event.ledger_state->ledger_peers); auto block_wrapper = make_test_subscriber(commit_event.synced_blocks, 1); block_wrapper.subscribe([this](auto block) { @@ -214,7 +226,9 @@ TEST_F(SynchronizerTest, ExactlyThreeRetrievals) { DefaultValue, std::string>>:: SetFactory(&createMockMutableStorage); EXPECT_CALL(*mutable_factory, createMutableStorage()).Times(1); - EXPECT_CALL(*mutable_factory, commit_(_)).Times(1); + EXPECT_CALL(*mutable_factory, commit_(_)) + .WillOnce(Return(ByMove(boost::optional>( + std::make_unique(ledger_peers))))); EXPECT_CALL(*chain_validator, validateAndApply(_, _)) .WillOnce(Return(false)) .WillOnce(testing::Invoke([](auto chain, auto &) { @@ -247,7 +261,9 @@ TEST_F(SynchronizerTest, RetrieveBlockTwoFailures) { DefaultValue, std::string>>:: SetFactory(&createMockMutableStorage); EXPECT_CALL(*mutable_factory, createMutableStorage()).Times(1); - EXPECT_CALL(*mutable_factory, commit_(_)).Times(1); + EXPECT_CALL(*mutable_factory, commit_(_)) + .WillOnce(Return(ByMove(boost::optional>( + std::make_unique(ledger_peers))))); EXPECT_CALL(*block_loader, retrieveBlocks(_, _)) .WillRepeatedly(Return(rxcpp::observable<>::just(commit_message))); @@ -349,13 +365,16 @@ TEST_F(SynchronizerTest, NoneOutcome) { * @then commitPrepared is called @and commit is not called */ TEST_F(SynchronizerTest, VotedForBlockCommitPrepared) { - EXPECT_CALL(*mutable_factory, commitPrepared(_)).WillOnce(Return(true)); + EXPECT_CALL(*mutable_factory, commitPrepared(_)) + .WillOnce(Return(ByMove(boost::optional>( + std::make_unique(ledger_peers))))); EXPECT_CALL(*mutable_factory, commit_(_)).Times(0); auto wrapper = make_test_subscriber(synchronizer->on_commit_chain(), 1); wrapper.subscribe([this](auto commit_event) { + EXPECT_EQ(*this->ledger_peers, *commit_event.ledger_state->ledger_peers); auto block_wrapper = make_test_subscriber(commit_event.synced_blocks, 1); block_wrapper.subscribe([this](auto block) { @@ -420,7 +439,8 @@ TEST_F(SynchronizerTest, VotedForThisCommitPreparedFailure) { auto storage_value = expected::makeValue>(std::move(ustorage)); - EXPECT_CALL(*mutable_factory, commitPrepared(_)).WillOnce(Return(false)); + EXPECT_CALL(*mutable_factory, commitPrepared(_)) + .WillOnce(Return(ByMove(boost::none))); EXPECT_CALL(*mutable_factory, createMutableStorage()) .WillOnce(Return(ByMove(std::move(storage_value)))); @@ -445,3 +465,60 @@ TEST_F(SynchronizerTest, VotedForThisCommitPreparedFailure) { gate_outcome.get_subscriber().on_next( consensus::PairValid{commit_message, consensus::Round{kHeight, 1}}); } + +/** + * @given A commit from consensus and initialized components + * @when a valid block 
that can be applied and commit fails + * @then no commit event is emitted + */ +TEST_F(SynchronizerTest, CommitFailureVoteSameBlock) { + EXPECT_CALL(*mutable_factory, commitPrepared(_)) + .WillOnce(Return(ByMove(boost::none))); + EXPECT_CALL(*mutable_factory, createMutableStorage()) + .WillOnce(::testing::Invoke( + []() -> expected::Result, + std::string> { + auto mutable_storage = std::make_unique(); + EXPECT_CALL(*mutable_storage, apply(_)).WillOnce(Return(true)); + return expected::Value>{ + std::move(mutable_storage)}; + })); + EXPECT_CALL(*mutable_factory, commit_(_)) + .WillOnce(Return(ByMove(boost::none))); + EXPECT_CALL(*chain_validator, validateAndApply(_, _)).Times(0); + EXPECT_CALL(*block_loader, retrieveBlocks(_, _)).Times(0); + + auto wrapper = + make_test_subscriber(synchronizer->on_commit_chain(), 0); + + gate_outcome.get_subscriber().on_next( + consensus::PairValid{commit_message, consensus::Round{kHeight, 1}}); + + ASSERT_TRUE(wrapper.validate()); +} + +/** + * @given A commit from consensus and initialized components + * @when gate have voted for other block and commit fails + * @then no commit event is emitted + */ +TEST_F(SynchronizerTest, CommitFailureVoteOther) { + DefaultValue, std::string>>:: + SetFactory(&createMockMutableStorage); + + EXPECT_CALL(*mutable_factory, createMutableStorage()).Times(1); + + EXPECT_CALL(*mutable_factory, commit_(_)) + .WillOnce(Return(ByMove(boost::none))); + EXPECT_CALL(*chain_validator, validateAndApply(_, _)).WillOnce(Return(true)); + EXPECT_CALL(*block_loader, retrieveBlocks(_, _)) + .WillOnce(Return(rxcpp::observable<>::just(commit_message))); + + auto wrapper = + make_test_subscriber(synchronizer->on_commit_chain(), 0); + + gate_outcome.get_subscriber().on_next( + consensus::VoteOther{public_keys, hash, consensus::Round{kHeight, 1}}); + + ASSERT_TRUE(wrapper.validate()); +} diff --git a/test/module/irohad/torii/processor/transaction_processor_test.cpp b/test/module/irohad/torii/processor/transaction_processor_test.cpp index 5293132ddb..049497b810 100644 --- a/test/module/irohad/torii/processor/transaction_processor_test.cpp +++ b/test/module/irohad/torii/processor/transaction_processor_test.cpp @@ -277,6 +277,7 @@ TEST_F(TransactionProcessorTest, TransactionProcessorBlockCreatedTest) { commit_notifier.get_subscriber().on_next( SynchronizationEvent{blocks_notifier.get_observable(), SynchronizationOutcomeType::kCommit, + {}, {}}); blocks_notifier.get_subscriber().on_next( @@ -334,6 +335,7 @@ TEST_F(TransactionProcessorTest, TransactionProcessorOnCommitTest) { rxcpp::observable<>::just( std::shared_ptr(clone(block))), SynchronizationOutcomeType::kCommit, + {}, {}}; commit_notifier.get_subscriber().on_next(commit_event); @@ -407,6 +409,7 @@ TEST_F(TransactionProcessorTest, TransactionProcessorInvalidTxsTest) { rxcpp::observable<>::just( std::shared_ptr(clone(block))), SynchronizationOutcomeType::kCommit, + {}, {}}; commit_notifier.get_subscriber().on_next(commit_event); diff --git a/test/system/irohad_test_data/config.sample.copy b/test/system/irohad_test_data/config.sample.copy deleted file mode 100644 index 7b7a2ddd5b..0000000000 --- a/test/system/irohad_test_data/config.sample.copy +++ /dev/null @@ -1,11 +0,0 @@ -{ - "block_store_path": "/var/folders/w0/pxq3kms13hl5nvy7d50_hjr40000gn/T/abe6-cbd3-b6b1-1ff3", - "torii_port": 50051, - "internal_port": 10001, - "pg_opt": "host=localhost port=5432 user=postgres password=mysecretpassword", - "max_proposal_size": 10, - "proposal_delay": 5000, - "vote_delay": 5000, - "mst_enable": false, - 
"mst_expiration_time" : 1440 -} From ffd626ed790114dbc5204c1fb75a20fed1c1901b Mon Sep 17 00:00:00 2001 From: Andrei Lebedev Date: Wed, 30 Jan 2019 15:13:54 +0300 Subject: [PATCH 23/41] Remove single peer ordering service (#2064) Signed-off-by: Andrei Lebedev --- irohad/ametsuchi/CMakeLists.txt | 1 - ...gres_ordering_service_persistent_state.cpp | 100 ------ ...gres_ordering_service_persistent_state.hpp | 85 ----- irohad/ametsuchi/impl/storage_impl.cpp | 15 - irohad/ametsuchi/impl/storage_impl.hpp | 3 - .../ordering_service_persistent_state.hpp | 41 --- .../ametsuchi/os_persistent_state_factory.hpp | 27 -- irohad/ametsuchi/storage.hpp | 2 - irohad/main/impl/ordering_init.cpp | 98 ------ irohad/main/impl/ordering_init.hpp | 100 ------ irohad/ordering/CMakeLists.txt | 19 - irohad/ordering/impl/ordering_gate_impl.cpp | 131 ------- irohad/ordering/impl/ordering_gate_impl.hpp | 115 ------ .../impl/ordering_gate_transport_grpc.cpp | 71 ---- .../impl/ordering_gate_transport_grpc.hpp | 55 --- .../impl/ordering_service_transport_grpc.cpp | 88 ----- .../impl/ordering_service_transport_grpc.hpp | 54 --- .../impl/single_peer_ordering_service.cpp | 152 -------- .../impl/single_peer_ordering_service.hpp | 144 -------- .../irohad/ametsuchi/ametsuchi_mocks.hpp | 8 - .../irohad/ametsuchi/ametsuchi_test.cpp | 92 +---- test/module/irohad/ametsuchi/mock_storage.hpp | 3 - test/module/irohad/ordering/CMakeLists.txt | 17 - ...mock_ordering_service_persistent_state.hpp | 31 -- .../irohad/ordering/ordering_gate_test.cpp | 331 ------------------ .../irohad/ordering/ordering_service_test.cpp | 296 ---------------- 26 files changed, 4 insertions(+), 2075 deletions(-) delete mode 100644 irohad/ametsuchi/impl/postgres_ordering_service_persistent_state.cpp delete mode 100644 irohad/ametsuchi/impl/postgres_ordering_service_persistent_state.hpp delete mode 100644 irohad/ametsuchi/ordering_service_persistent_state.hpp delete mode 100644 irohad/ametsuchi/os_persistent_state_factory.hpp delete mode 100644 irohad/main/impl/ordering_init.cpp delete mode 100644 irohad/main/impl/ordering_init.hpp delete mode 100644 irohad/ordering/impl/ordering_gate_impl.cpp delete mode 100644 irohad/ordering/impl/ordering_gate_impl.hpp delete mode 100644 irohad/ordering/impl/ordering_gate_transport_grpc.cpp delete mode 100644 irohad/ordering/impl/ordering_gate_transport_grpc.hpp delete mode 100644 irohad/ordering/impl/ordering_service_transport_grpc.cpp delete mode 100644 irohad/ordering/impl/ordering_service_transport_grpc.hpp delete mode 100644 irohad/ordering/impl/single_peer_ordering_service.cpp delete mode 100644 irohad/ordering/impl/single_peer_ordering_service.hpp delete mode 100644 test/module/irohad/ordering/mock_ordering_service_persistent_state.hpp delete mode 100644 test/module/irohad/ordering/ordering_gate_test.cpp delete mode 100644 test/module/irohad/ordering/ordering_service_test.cpp diff --git a/irohad/ametsuchi/CMakeLists.txt b/irohad/ametsuchi/CMakeLists.txt index 87541fcb3a..46ed727805 100644 --- a/irohad/ametsuchi/CMakeLists.txt +++ b/irohad/ametsuchi/CMakeLists.txt @@ -14,7 +14,6 @@ add_library(ametsuchi impl/postgres_block_query.cpp impl/postgres_command_executor.cpp impl/postgres_block_index.cpp - impl/postgres_ordering_service_persistent_state.cpp impl/wsv_restorer_impl.cpp impl/postgres_options.cpp impl/postgres_query_executor.cpp diff --git a/irohad/ametsuchi/impl/postgres_ordering_service_persistent_state.cpp b/irohad/ametsuchi/impl/postgres_ordering_service_persistent_state.cpp deleted file mode 100644 index 
e9501e25d9..0000000000 --- a/irohad/ametsuchi/impl/postgres_ordering_service_persistent_state.cpp +++ /dev/null @@ -1,100 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#include "ametsuchi/impl/postgres_ordering_service_persistent_state.hpp" - -#include -#include -#include - -namespace iroha { - namespace ametsuchi { - - bool PostgresOrderingServicePersistentState::execute_(std::string query) { - try { - *sql_ << query; - } catch (std::exception &e) { - log_->error("Failed to execute query: {}. Reason: {}", query, e.what()); - return false; - } - return true; - } - - expected::Result, - std::string> - PostgresOrderingServicePersistentState::create( - const std::string &postgres_options) { - std::unique_ptr sql; - try { - sql = - std::make_unique(*soci::factory_postgresql(), postgres_options); - - } catch (std::exception &e) { - return expected::makeError( - (boost::format("Connection to PostgreSQL broken: %s") % e.what()) - .str()); - } - expected::Result, - std::string> - storage; - storage = expected::makeValue( - std::make_shared( - std::move(sql))); - return storage; - } - - PostgresOrderingServicePersistentState:: - PostgresOrderingServicePersistentState( - std::unique_ptr sql, logger::Logger log) - : sql_(std::move(sql)), log_(std::move(log)) {} - - bool PostgresOrderingServicePersistentState::initStorage() { - return execute_( - "CREATE TABLE IF NOT EXISTS ordering_service_state " - "(proposal_height bigserial)") - && execute_("INSERT INTO ordering_service_state VALUES (2)"); - } - - bool PostgresOrderingServicePersistentState::dropStorgage() { - log_->info("Drop storage"); - return execute_("DROP TABLE IF EXISTS ordering_service_state"); - } - - bool PostgresOrderingServicePersistentState::saveProposalHeight( - size_t height) { - log_->info("Save proposal_height in ordering_service_state " - + std::to_string(height)); - return execute_("DELETE FROM ordering_service_state") - && execute_("INSERT INTO ordering_service_state VALUES (" - + std::to_string(height) + ")"); - } - - boost::optional - PostgresOrderingServicePersistentState::loadProposalHeight() const { - boost::optional height; - std::string query = "SELECT * FROM ordering_service_state LIMIT 1"; - try { - *sql_ << query, soci::into(height); - } catch (std::exception &e) { - log_->error("Failed to execute query: " + query - + ". Reason: " + e.what()); - return boost::none; - } - - if (not height) { - log_->error( - "There is no proposal_height in ordering_service_state. " - "Use default value 2."); - height = 2; - } - return height; - } - - bool PostgresOrderingServicePersistentState::resetState() { - return dropStorgage() & initStorage(); - } - - } // namespace ametsuchi -} // namespace iroha diff --git a/irohad/ametsuchi/impl/postgres_ordering_service_persistent_state.hpp b/irohad/ametsuchi/impl/postgres_ordering_service_persistent_state.hpp deleted file mode 100644 index 3231f90778..0000000000 --- a/irohad/ametsuchi/impl/postgres_ordering_service_persistent_state.hpp +++ /dev/null @@ -1,85 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_POSTGRES_ORDERING_SERVICE_PERSISTENT_STATE_HPP -#define IROHA_POSTGRES_ORDERING_SERVICE_PERSISTENT_STATE_HPP - -#include "ametsuchi/ordering_service_persistent_state.hpp" - -#include -#include "common/result.hpp" -#include "logger/logger.hpp" - -namespace iroha { - namespace ametsuchi { - - /** - * Class implements OrderingServicePersistentState for persistent storage of - * Ordering Service with PostgreSQL. - */ - class PostgresOrderingServicePersistentState - : public OrderingServicePersistentState { - public: - /** - * Create the instance of PostgresOrderingServicePersistentState - * @param postgres_options postgres connection string - * @return new instace of PostgresOrderingServicePersistentState - */ - static expected::Result< - std::shared_ptr, - std::string> - create(const std::string &postgres_options); - - /** - * @param sql - pointer to soci session - * @param log to print progress - */ - PostgresOrderingServicePersistentState( - std::unique_ptr sql, - logger::Logger log = - logger::log("PostgresOrderingServicePersistentState")); - - /** - * Initialize storage. - * Create tables and fill with initial value. - */ - bool initStorage(); - - /** - * Drop storage tables. - */ - bool dropStorgage(); - - /** - * Save proposal height that it can be restored - * after launch - * @param height is height of last proposal - * @return if height has been saved - */ - bool saveProposalHeight(size_t height) override; - - /** - * Load proposal height - * @return proposal height if it was saved, otherwise boost::none - */ - boost::optional loadProposalHeight() const override; - - /** - * Reset storage state to default - * @return whether state was reset successfully - */ - bool resetState() override; - - private: - std::unique_ptr sql_; - - logger::Logger log_; - - bool execute_(std::string query); - }; - } // namespace ametsuchi -} // namespace iroha - -#endif // IROHA_POSTGRES_ORDERING_SERVICE_PERSISTENT_STATE_HPP diff --git a/irohad/ametsuchi/impl/storage_impl.cpp b/irohad/ametsuchi/impl/storage_impl.cpp index 77048a6fe1..1460cddb91 100644 --- a/irohad/ametsuchi/impl/storage_impl.cpp +++ b/irohad/ametsuchi/impl/storage_impl.cpp @@ -20,7 +20,6 @@ #include "common/bind.hpp" #include "common/byteutils.hpp" #include "converters/protobuf/json_proto_converter.hpp" -#include "postgres_ordering_service_persistent_state.hpp" namespace { void prepareStatements(soci::connection_pool &connections, size_t pool_size) { @@ -158,20 +157,6 @@ namespace iroha { return boost::make_optional(block_query); } - boost::optional> - StorageImpl::createOsPersistentState() const { - log_->info("create ordering service persistent state"); - std::shared_lock lock(drop_mutex); - if (not connection_) { - log_->info("connection to database is not initialised"); - return boost::none; - } - return boost::make_optional< - std::shared_ptr>( - std::make_shared( - std::make_unique(*connection_))); - } - boost::optional> StorageImpl::createQueryExecutor( std::shared_ptr pending_txs_storage, diff --git a/irohad/ametsuchi/impl/storage_impl.hpp b/irohad/ametsuchi/impl/storage_impl.hpp index a64655a819..de7c0f927d 100644 --- a/irohad/ametsuchi/impl/storage_impl.hpp +++ b/irohad/ametsuchi/impl/storage_impl.hpp @@ -70,9 +70,6 @@ namespace iroha { boost::optional> createBlockQuery() const override; - boost::optional> - createOsPersistentState() const override; - boost::optional> createQueryExecutor( std::shared_ptr pending_txs_storage, std::shared_ptr diff --git 
a/irohad/ametsuchi/ordering_service_persistent_state.hpp b/irohad/ametsuchi/ordering_service_persistent_state.hpp deleted file mode 100644 index 87eb4c21f1..0000000000 --- a/irohad/ametsuchi/ordering_service_persistent_state.hpp +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_ORDERING_SERVICE_PERSISTENT_STATE_HPP -#define IROHA_ORDERING_SERVICE_PERSISTENT_STATE_HPP - -#include - -namespace iroha { - namespace ametsuchi { - - /** - * Interface for Ordering Service persistence to store proposal's height in - * a persistent way - */ - class OrderingServicePersistentState { - public: - /** - * Save proposal height that it can be restored - * after launch - */ - virtual bool saveProposalHeight(size_t height) = 0; - - /** - * Load proposal height - */ - virtual boost::optional loadProposalHeight() const = 0; - - /** - * Reset storage to default state - */ - virtual bool resetState() = 0; - - virtual ~OrderingServicePersistentState() = default; - }; - } // namespace ametsuchi -} // namespace iroha - -#endif // IROHA_ORDERING_SERVICE_PERSISTENT_STATE_HPP diff --git a/irohad/ametsuchi/os_persistent_state_factory.hpp b/irohad/ametsuchi/os_persistent_state_factory.hpp deleted file mode 100644 index 78bf20c265..0000000000 --- a/irohad/ametsuchi/os_persistent_state_factory.hpp +++ /dev/null @@ -1,27 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_OS_PERSISTENT_STATE_FACTORY_HPP -#define IROHA_OS_PERSISTENT_STATE_FACTORY_HPP - -#include - -#include "ametsuchi/ordering_service_persistent_state.hpp" - -namespace iroha { - namespace ametsuchi { - class OsPersistentStateFactory { - public: - /** - * @return ordering service persistent state - */ - virtual boost::optional> - createOsPersistentState() const = 0; - - virtual ~OsPersistentStateFactory() = default; - }; - } // namespace ametsuchi -} // namespace iroha -#endif // IROHA_OS_PERSISTENT_STATE_FACTORY_HPP diff --git a/irohad/ametsuchi/storage.hpp b/irohad/ametsuchi/storage.hpp index 96522e29a9..b4f707c1b7 100644 --- a/irohad/ametsuchi/storage.hpp +++ b/irohad/ametsuchi/storage.hpp @@ -11,7 +11,6 @@ #include #include "ametsuchi/block_query_factory.hpp" #include "ametsuchi/mutable_factory.hpp" -#include "ametsuchi/os_persistent_state_factory.hpp" #include "ametsuchi/peer_query_factory.hpp" #include "ametsuchi/query_executor_factory.hpp" #include "ametsuchi/temporary_factory.hpp" @@ -38,7 +37,6 @@ namespace iroha { public MutableFactory, public PeerQueryFactory, public BlockQueryFactory, - public OsPersistentStateFactory, public QueryExecutorFactory { public: virtual std::shared_ptr getWsvQuery() const = 0; diff --git a/irohad/main/impl/ordering_init.cpp b/irohad/main/impl/ordering_init.cpp deleted file mode 100644 index 2bc4667292..0000000000 --- a/irohad/main/impl/ordering_init.cpp +++ /dev/null @@ -1,98 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include "main/impl/ordering_init.hpp" -#include "ametsuchi/os_persistent_state_factory.hpp" -#include "common/bind.hpp" -#include "interfaces/common_objects/peer.hpp" -#include "interfaces/common_objects/types.hpp" -#include "interfaces/iroha_internal/block.hpp" - -namespace iroha { - namespace network { - auto OrderingInit::createGate( - std::shared_ptr transport, - std::shared_ptr block_query_factory) { - return block_query_factory->createBlockQuery() | - [this, &transport](const auto &block_query) { - return block_query->getTopBlock().match( - [this, &transport]( - expected::Value< - std::shared_ptr> &block) - -> std::shared_ptr { - const auto &height = block.value->height(); - auto gate = std::make_shared( - transport, height); - log_->info("Creating Ordering Gate with initial height {}", - height); - transport->subscribe(gate); - return gate; - }, - [](expected::Error &error) - -> std::shared_ptr { - // TODO 12.06.18 Akvinikym: handle the exception IR-1415 - throw std::runtime_error("Ordering Gate creation failed! " - + error.error); - }); - }; - } - - auto OrderingInit::createService( - std::shared_ptr peer_query_factory, - size_t max_size, - std::chrono::milliseconds delay_milliseconds, - std::shared_ptr transport, - std::shared_ptr persistent_state) { - auto factory = std::make_unique>(); - return std::make_shared( - peer_query_factory, - max_size, - rxcpp::observable<>::interval(delay_milliseconds, - rxcpp::observe_on_new_thread()), - transport, - persistent_state, - std::move(factory)); - } - - std::shared_ptr OrderingInit::initOrderingGate( - std::shared_ptr peer_query_factory, - size_t max_size, - std::chrono::milliseconds delay_milliseconds, - std::shared_ptr persistent_state, - std::shared_ptr block_query_factory, - std::shared_ptr - transaction_batch_factory, - std::shared_ptr> - async_call) { - auto query = peer_query_factory->createPeerQuery(); - if (not query or not query.get()) { - log_->error("Cannot get the peer query"); - } - auto ledger_peers = query.get()->getLedgerPeers(); - if (not ledger_peers or ledger_peers.value().empty()) { - log_->error( - "Ledger don't have peers. Do you set correct genesis block?"); - } - auto network_address = ledger_peers->front()->address(); - log_->info("Ordering gate is at {}", network_address); - ordering_gate_transport = - std::make_shared( - network_address, async_call); - - ordering_service_transport = - std::make_shared( - std::move(transaction_batch_factory), std::move(async_call)); - ordering_service = createService(peer_query_factory, - max_size, - delay_milliseconds, - ordering_service_transport, - persistent_state); - ordering_service_transport->subscribe(ordering_service); - ordering_gate = createGate(ordering_gate_transport, block_query_factory); - return ordering_gate; - } - } // namespace network -} // namespace iroha diff --git a/irohad/main/impl/ordering_init.hpp b/irohad/main/impl/ordering_init.hpp deleted file mode 100644 index 1c980deeee..0000000000 --- a/irohad/main/impl/ordering_init.hpp +++ /dev/null @@ -1,100 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_ORDERING_INIT_HPP -#define IROHA_ORDERING_INIT_HPP - -#include "ametsuchi/block_query_factory.hpp" -#include "ametsuchi/os_persistent_state_factory.hpp" -#include "ametsuchi/peer_query_factory.hpp" -#include "logger/logger.hpp" -#include "ordering/impl/ordering_gate_impl.hpp" -#include "ordering/impl/ordering_gate_transport_grpc.hpp" -#include "ordering/impl/ordering_service_transport_grpc.hpp" -#include "ordering/impl/single_peer_ordering_service.hpp" - -namespace iroha { - - namespace ametsuchi { - class OrderingServicePersistentState; - } - - namespace network { - - /** - * Class aimed to effective initialization of OrderingGate component - */ - class OrderingInit { - private: - /** - * Init effective realisation of ordering gate (client of ordering - * service) - * @param transport - object which will be notified - * about incoming proposals and send transactions - * @param block_query_factory - block store factory to get last block - * height - * @return ordering gate - */ - auto createGate( - std::shared_ptr transport, - std::shared_ptr block_query_factory); - - /** - * Init ordering service - * @param peer_query_factory - factory to get peer list - * @param max_size - limitation of proposal size - * @param delay_milliseconds - delay before emitting proposal - * @param transport - ordering service transport - * @param persistent_state - factory to access persistent state - * @return ordering service - */ - auto createService( - std::shared_ptr peer_query_factory, - size_t max_size, - std::chrono::milliseconds delay_milliseconds, - std::shared_ptr transport, - std::shared_ptr - persistent_state); - - public: - /** - * Initialization of ordering gate(client) and ordering service (service) - * @param peer_query_factory - factory to get peer list - * @param max_size - limitation of proposal size - * @param delay_milliseconds - delay before emitting proposal - * @param persistent_state - factory to access persistent state - * @param block_query_factory - block store factory to get last block - * height - * @param transaction_batch_factory - factory to create transaction - * batches - * @param async_call - async grpc client that is passed to transport - * components - * @return efficient implementation of OrderingGate - */ - std::shared_ptr initOrderingGate( - std::shared_ptr peer_query_factory, - size_t max_size, - std::chrono::milliseconds delay_milliseconds, - std::shared_ptr persistent_state, - std::shared_ptr block_query_factory, - std::shared_ptr - transaction_batch_factory, - std::shared_ptr> - async_call); - - std::shared_ptr ordering_service; - std::shared_ptr ordering_gate; - std::shared_ptr - ordering_gate_transport; - std::shared_ptr - ordering_service_transport; - - protected: - logger::Logger log_ = logger::log("OrderingInit"); - }; - } // namespace network -} // namespace iroha - -#endif // IROHA_ORDERING_INIT_HPP diff --git a/irohad/ordering/CMakeLists.txt b/irohad/ordering/CMakeLists.txt index 80c3325e5e..9e1e6fcc3b 100644 --- a/irohad/ordering/CMakeLists.txt +++ b/irohad/ordering/CMakeLists.txt @@ -1,25 +1,6 @@ # Copyright Soramitsu Co., Ltd. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -add_library(ordering_service - impl/ordering_gate_impl.cpp - impl/single_peer_ordering_service.cpp - impl/ordering_gate_transport_grpc.cpp - impl/ordering_service_transport_grpc.cpp - ) - - -target_link_libraries(ordering_service - rxcpp - common - tbb - shared_model_interfaces - shared_model_proto_backend - ordering_grpc - logger - shared_model_interfaces_factories - ) - add_library(on_demand_common impl/on_demand_common.cpp ) diff --git a/irohad/ordering/impl/ordering_gate_impl.cpp b/irohad/ordering/impl/ordering_gate_impl.cpp deleted file mode 100644 index cccd7e67b8..0000000000 --- a/irohad/ordering/impl/ordering_gate_impl.cpp +++ /dev/null @@ -1,131 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#include "ordering/impl/ordering_gate_impl.hpp" - -#include -#include - -#include "interfaces/iroha_internal/block.hpp" -#include "interfaces/iroha_internal/proposal.hpp" -#include "interfaces/iroha_internal/transaction_batch.hpp" -#include "ordering/impl/on_demand_common.hpp" - -namespace iroha { - namespace ordering { - - bool ProposalComparator::operator()( - const std::shared_ptr &lhs, - const std::shared_ptr &rhs) const { - return lhs->height() > rhs->height(); - } - - OrderingGateImpl::OrderingGateImpl( - std::shared_ptr transport, - shared_model::interface::types::HeightType initial_height, - bool run_async, - logger::Logger log) - : transport_(std::move(transport)), - last_block_height_(initial_height), - log_(std::move(log)), - run_async_(run_async) {} - - void OrderingGateImpl::propagateBatch( - std::shared_ptr batch) { - if (batch->transactions().empty()) { - log_->warn("trying to propagate empty batch"); - return; - } - log_->info("propagate batch, account_id: {}", - batch->transactions().front()->creatorAccountId()); - - transport_->propagateBatch(batch); - } - - rxcpp::observable OrderingGateImpl::onProposal() { - return proposals_.get_observable(); - } - - void OrderingGateImpl::setPcs( - const iroha::network::PeerCommunicationService &pcs) { - log_->info("setPcs"); - - /// observable which contains heights of the top committed blocks - auto top_block_height = - pcs.on_commit() - .transform( - [this](const synchronizer::SynchronizationEvent &sync_event) { - sync_event.synced_blocks.subscribe( - // take height of next block - [this](std::shared_ptr - block_ptr) { - last_block_height_ = block_ptr->height(); - }); - return last_block_height_; - }) - .start_with(last_block_height_); - - /// merge_strategy - observable with another source of block heights - auto subscribe = [&](auto merge_strategy) { - pcs_subscriber_ = merge_strategy(net_proposals_.get_observable()) - .subscribe([this](const auto &t) { - // t is zip of two observables, there is - // intentionally ignored first value (with stub - // values) because it is required only for - // synchronization - this->tryNextRound(std::get<1>(t)); - }); - }; - - if (run_async_) { - subscribe([&top_block_height](auto observable) { - return observable.combine_latest(rxcpp::synchronize_new_thread(), - top_block_height); - }); - } else { - subscribe([&top_block_height](auto observable) { - return observable.combine_latest(top_block_height); - }); - } - } - - void OrderingGateImpl::onProposal( - std::shared_ptr proposal) { - log_->info("Received new proposal, height: {}", proposal->height()); - proposal_queue_.push(std::move(proposal)); - std::lock_guard lock(proposal_mutex_); - // intentionally pass stub value - 
net_proposals_.get_subscriber().on_next(0); - } - - void OrderingGateImpl::tryNextRound( - shared_model::interface::types::HeightType last_block_height) { - log_->debug("TryNextRound"); - std::shared_ptr next_proposal; - while (proposal_queue_.try_pop(next_proposal)) { - // check for old proposal - if (next_proposal->height() < last_block_height + 1) { - log_->debug("Old proposal, discarding"); - continue; - } - // check for new proposal - if (next_proposal->height() > last_block_height + 1) { - log_->debug("Proposal newer than last block, keeping in queue"); - proposal_queue_.push(next_proposal); - break; - } - log_->info("Pass the proposal to pipeline height {}", - next_proposal->height()); - proposals_.get_subscriber().on_next(network::OrderingEvent{ - next_proposal, {next_proposal->height(), kFirstRejectRound}}); - } - } - - OrderingGateImpl::~OrderingGateImpl() { - pcs_subscriber_.unsubscribe(); - } - - } // namespace ordering -} // namespace iroha diff --git a/irohad/ordering/impl/ordering_gate_impl.hpp b/irohad/ordering/impl/ordering_gate_impl.hpp deleted file mode 100644 index 08b3a57e71..0000000000 --- a/irohad/ordering/impl/ordering_gate_impl.hpp +++ /dev/null @@ -1,115 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_ORDERING_GATE_IMPL_HPP -#define IROHA_ORDERING_GATE_IMPL_HPP - -#include "network/ordering_gate.hpp" - -#include - -#include - -#include "interfaces/common_objects/types.hpp" -#include "logger/logger.hpp" -#include "network/impl/async_grpc_client.hpp" -#include "network/ordering_gate_transport.hpp" - -namespace shared_model { - namespace interface { - class Proposal; - } // namespace interface -} // namespace shared_model - -namespace iroha { - namespace ordering { - - /** - * Compare proposals by height - */ - struct ProposalComparator { - bool operator()( - const std::shared_ptr &lhs, - const std::shared_ptr &rhs) const; - }; - - /** - * OrderingGate implementation with gRPC asynchronous client - * Interacts with given OrderingService - * by propagating transactions and receiving proposals - * @param server_address OrderingService address - */ - class OrderingGateImpl : public network::OrderingGate, - public network::OrderingGateNotification { - public: - /** - * @param transport - network communication layer - * @param initial_height - height of the last block stored on this peer - * @param run_async - whether proposals should be handled - * asynchronously (on separate thread). Default is true. - * @param log to print progress - */ - OrderingGateImpl( - std::shared_ptr transport, - shared_model::interface::types::HeightType initial_height, - bool run_async = true, - logger::Logger log = logger::log("OrderingGate")); - - void propagateBatch( - std::shared_ptr batch) - override; - - rxcpp::observable onProposal() override; - - void setPcs(const iroha::network::PeerCommunicationService &pcs) override; - - void onProposal( - std::shared_ptr proposal) override; - - ~OrderingGateImpl() override; - - private: - /** - * Try to push proposal for next consensus round - * @param - last_block_height - what is the last block stored on this - * peer, or for which commit was received. If block is newer than - * currently stored proposals, proposals are discarded. 
If it is older, - * newer proposals are propagated in order - */ - void tryNextRound( - shared_model::interface::types::HeightType last_block_height); - - rxcpp::subjects::subject proposals_; - - /** - * Notification subject which is used only for notification purposes - * without semantic for emitted values - */ - rxcpp::subjects::subject - net_proposals_; - std::shared_ptr transport_; - - std::mutex proposal_mutex_; - - /// queue with all proposals received from ordering service - tbb::concurrent_priority_queue< - std::shared_ptr, - ProposalComparator> - proposal_queue_; - - /// last commited block height - shared_model::interface::types::HeightType last_block_height_; - - /// subscription of pcs::on_commit - rxcpp::composite_subscription pcs_subscriber_; - - logger::Logger log_; - - bool run_async_; - }; - } // namespace ordering -} // namespace iroha - -#endif // IROHA_ORDERING_GATE_IMPL_HPP diff --git a/irohad/ordering/impl/ordering_gate_transport_grpc.cpp b/irohad/ordering/impl/ordering_gate_transport_grpc.cpp deleted file mode 100644 index a340a59943..0000000000 --- a/irohad/ordering/impl/ordering_gate_transport_grpc.cpp +++ /dev/null @@ -1,71 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#include "ordering_gate_transport_grpc.hpp" - -#include "backend/protobuf/transaction.hpp" -#include "endpoint.pb.h" -#include "interfaces/common_objects/types.hpp" -#include "network/impl/grpc_channel_builder.hpp" - -using namespace iroha; -using namespace iroha::ordering; - -grpc::Status OrderingGateTransportGrpc::onProposal( - ::grpc::ServerContext *context, - const iroha::protocol::Proposal *request, - ::google::protobuf::Empty *response) { - async_call_->log_->info("receive proposal"); - - auto proposal_res = factory_->createProposal(*request); - proposal_res.match( - [this](iroha::expected::Value< - std::unique_ptr> &v) { - async_call_->log_->info("transactions in proposal: {}", - v.value->transactions().size()); - - if (not subscriber_.expired()) { - subscriber_.lock()->onProposal(std::move(v.value)); - } else { - async_call_->log_->error("(onProposal) No subscriber"); - } - }, - [this](const iroha::expected::Error &e) { - async_call_->log_->error("Received invalid proposal: {}", e.error); - }); - - return grpc::Status::OK; -} - -OrderingGateTransportGrpc::OrderingGateTransportGrpc( - const std::string &server_address, - std::shared_ptr> - async_call) - : client_(network::createClient( - server_address)), - async_call_(std::move(async_call)), - factory_(std::make_unique>()) {} - -void OrderingGateTransportGrpc::propagateBatch( - std::shared_ptr batch) { - async_call_->log_->info("Propagate transaction batch (on transport)"); - - iroha::protocol::TxList batch_transport; - for (const auto tx : batch->transactions()) { - new (batch_transport.add_transactions()) iroha::protocol::Transaction( - std::static_pointer_cast(tx) - ->getTransport()); - } - async_call_->Call([&](auto context, auto cq) { - return client_->AsynconBatch(context, batch_transport, cq); - }); -} - -void OrderingGateTransportGrpc::subscribe( - std::shared_ptr subscriber) { - async_call_->log_->info("Subscribe"); - subscriber_ = subscriber; -} diff --git a/irohad/ordering/impl/ordering_gate_transport_grpc.hpp b/irohad/ordering/impl/ordering_gate_transport_grpc.hpp deleted file mode 100644 index 70d7d9c271..0000000000 --- a/irohad/ordering/impl/ordering_gate_transport_grpc.hpp +++ /dev/null @@ -1,55 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. 
All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ -#ifndef IROHA_ORDERING_GATE_TRANSPORT_GRPC_H -#define IROHA_ORDERING_GATE_TRANSPORT_GRPC_H - -#include - -#include "backend/protobuf/proto_proposal_factory.hpp" -#include "backend/protobuf/transaction.hpp" -#include "interfaces/iroha_internal/transaction_batch.hpp" -#include "logger/logger.hpp" -#include "network/impl/async_grpc_client.hpp" -#include "network/ordering_gate_transport.hpp" -#include "ordering.grpc.pb.h" -#include "validators/default_validator.hpp" - -namespace iroha { - namespace ordering { - class OrderingGateTransportGrpc - : public iroha::network::OrderingGateTransport, - public proto::OrderingGateTransportGrpc::Service { - public: - OrderingGateTransportGrpc( - const std::string &server_address, - std::shared_ptr> - async_call); - - grpc::Status onProposal(::grpc::ServerContext *context, - const protocol::Proposal *request, - ::google::protobuf::Empty *response) override; - - void propagateBatch( - std::shared_ptr batch) - override; - - void subscribe(std::shared_ptr - subscriber) override; - - private: - std::weak_ptr subscriber_; - std::unique_ptr - client_; - std::shared_ptr> - async_call_; - std::unique_ptr> - factory_; - }; - - } // namespace ordering -} // namespace iroha - -#endif // IROHA_ORDERING_GATE_TRANSPORT_GRPC_H diff --git a/irohad/ordering/impl/ordering_service_transport_grpc.cpp b/irohad/ordering/impl/ordering_service_transport_grpc.cpp deleted file mode 100644 index e81411b0ac..0000000000 --- a/irohad/ordering/impl/ordering_service_transport_grpc.cpp +++ /dev/null @@ -1,88 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#include "ordering/impl/ordering_service_transport_grpc.hpp" - -#include "backend/protobuf/proposal.hpp" -#include "backend/protobuf/transaction.hpp" -#include "interfaces/common_objects/transaction_sequence_common.hpp" -#include "network/impl/grpc_channel_builder.hpp" - -using namespace iroha; -using namespace iroha::ordering; - -void OrderingServiceTransportGrpc::subscribe( - std::shared_ptr subscriber) { - subscriber_ = subscriber; -} - -grpc::Status OrderingServiceTransportGrpc::onBatch( - ::grpc::ServerContext *context, - const protocol::TxList *request, - ::google::protobuf::Empty *response) { - async_call_->log_->info("OrderingServiceTransportGrpc::onBatch"); - if (subscriber_.expired()) { - async_call_->log_->error("No subscriber"); - } else { - auto txs = - std::vector>( - request->transactions_size()); - std::transform( - std::begin(request->transactions()), - std::end(request->transactions()), - std::begin(txs), - [](const auto &tx) { - return std::make_shared(tx); - }); - - // TODO [IR-1730] Akvinikym 04.10.18: use transaction factory to stateless - // validate transactions before wrapping them into batches - auto batch_result = batch_factory_->createTransactionBatch(txs); - batch_result.match( - [this](iroha::expected::Value> &batch) { - subscriber_.lock()->onBatch(std::move(batch.value)); - }, - [this](const iroha::expected::Error &error) { - async_call_->log_->error( - "Could not create batch from received transaction list: {}", - error.error); - }); - } - return ::grpc::Status::OK; -} - -void OrderingServiceTransportGrpc::publishProposal( - std::unique_ptr proposal, - const std::vector &peers) { - async_call_->log_->info("OrderingServiceTransportGrpc::publishProposal"); - std::unordered_map< - std::string, - std::unique_ptr> - peers_map; - for (const auto &peer : peers) { - peers_map[peer] 
= - network::createClient(peer); - } - - for (const auto &peer : peers_map) { - auto proto = static_cast(proposal.get()); - async_call_->log_->debug("Publishing proposal: '{}'", - proto->getTransport().DebugString()); - - auto transport = proto->getTransport(); - async_call_->Call([&](auto context, auto cq) { - return peer.second->AsynconProposal(context, transport, cq); - }); - } -} - -OrderingServiceTransportGrpc::OrderingServiceTransportGrpc( - std::shared_ptr - transaction_batch_factory, - std::shared_ptr> - async_call) - : async_call_(std::move(async_call)), - batch_factory_(std::move(transaction_batch_factory)) {} diff --git a/irohad/ordering/impl/ordering_service_transport_grpc.hpp b/irohad/ordering/impl/ordering_service_transport_grpc.hpp deleted file mode 100644 index 9692992472..0000000000 --- a/irohad/ordering/impl/ordering_service_transport_grpc.hpp +++ /dev/null @@ -1,54 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ -#ifndef IROHA_ORDERING_SERVICE_TRANSPORT_GRPC_HPP -#define IROHA_ORDERING_SERVICE_TRANSPORT_GRPC_HPP - -#include - -#include "interfaces/iroha_internal/transaction_batch_factory.hpp" -#include "logger/logger.hpp" -#include "network/impl/async_grpc_client.hpp" -#include "network/ordering_service_transport.hpp" -#include "ordering.grpc.pb.h" -#include "transaction.pb.h" - -namespace iroha { - namespace ordering { - - class OrderingServiceTransportGrpc - : public iroha::network::OrderingServiceTransport, - public proto::OrderingServiceTransportGrpc::Service { - public: - OrderingServiceTransportGrpc( - std::shared_ptr - transaction_batch_factory, - std::shared_ptr> - async_call); - void subscribe( - std::shared_ptr - subscriber) override; - - void publishProposal( - std::unique_ptr proposal, - const std::vector &peers) override; - - grpc::Status onBatch(::grpc::ServerContext *context, - const protocol::TxList *request, - ::google::protobuf::Empty *response) override; - - ~OrderingServiceTransportGrpc() = default; - - private: - std::weak_ptr subscriber_; - std::shared_ptr> - async_call_; - std::shared_ptr - batch_factory_; - }; - - } // namespace ordering -} // namespace iroha - -#endif // IROHA_ORDERING_SERVICE_TRANSPORT_GRPC_HPP diff --git a/irohad/ordering/impl/single_peer_ordering_service.cpp b/irohad/ordering/impl/single_peer_ordering_service.cpp deleted file mode 100644 index 0ba99dad80..0000000000 --- a/irohad/ordering/impl/single_peer_ordering_service.cpp +++ /dev/null @@ -1,152 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include "ordering/impl/single_peer_ordering_service.hpp" - -#include -#include - -#include - -#include "ametsuchi/ordering_service_persistent_state.hpp" -#include "common/bind.hpp" -#include "datetime/time.hpp" -#include "interfaces/common_objects/peer.hpp" -#include "interfaces/iroha_internal/transaction_batch_impl.hpp" -#include "network/ordering_service_transport.hpp" - -namespace iroha { - namespace ordering { - SinglePeerOrderingService::SinglePeerOrderingService( - std::shared_ptr peer_query_factory, - size_t max_size, - rxcpp::observable proposal_timeout, - std::shared_ptr transport, - std::shared_ptr persistent_state, - std::unique_ptr factory, - bool is_async, - logger::Logger log) - : peer_query_factory_(peer_query_factory), - max_size_(max_size), - current_size_(0), - transport_(transport), - persistent_state_(persistent_state), - factory_(std::move(factory)), - proposal_height_(persistent_state_->createOsPersistentState() | - [](const auto &state) { - return state->loadProposalHeight().value(); - }), - log_(std::move(log)) { - // restore state of ordering service from persistent storage - rxcpp::observable timer = - proposal_timeout.map([](auto) { return ProposalEvent::kTimerEvent; }); - - auto subscribe = [&](auto merge_strategy) { - handle_ = merge_strategy(rxcpp::observable<>::from( - timer, transactions_.get_observable())) - .subscribe([this](auto &&v) { - auto check_queue = [&] { - switch (v) { - case ProposalEvent::kTimerEvent: - return not queue_.empty(); - case ProposalEvent::kBatchEvent: - return current_size_.load() >= max_size_; - default: - BOOST_ASSERT_MSG(false, "Unknown value"); - } - }; - if (check_queue()) { - this->generateProposal(); - } - }); - }; - - if (is_async) { - subscribe([](auto observable) { - return observable.merge(rxcpp::synchronize_new_thread()); - }); - } else { - subscribe([](auto observable) { return observable.merge(); }); - } - } - - void SinglePeerOrderingService::onBatch( - std::unique_ptr batch) { - std::shared_lock batch_prop_lock( - batch_prop_mutex_); - - current_size_.fetch_add(batch->transactions().size()); - queue_.push(std::move(batch)); - log_->info("Queue size is {}", current_size_.load()); - - batch_prop_lock.unlock(); - - std::lock_guard event_lock(event_mutex_); - transactions_.get_subscriber().on_next(ProposalEvent::kBatchEvent); - } - - void SinglePeerOrderingService::generateProposal() { - std::lock_guard lock(batch_prop_mutex_); - log_->info("Start proposal generation"); - std::vector> txs; - for (std::unique_ptr batch; - txs.size() < max_size_ and queue_.try_pop(batch);) { - auto batch_size = batch->transactions().size(); - // TODO 29.08.2018 andrei IR-1667 Timestamp validation during proposal - // generation - txs.insert(std::end(txs), - std::make_move_iterator(std::begin(batch->transactions())), - std::make_move_iterator(std::end(batch->transactions()))); - current_size_ -= batch_size; - } - - auto tx_range = txs | boost::adaptors::indirected; - auto proposal = factory_->createProposal( - proposal_height_, iroha::time::now(), tx_range); - - proposal.match( - [this](expected::Value< - std::unique_ptr> &v) { - // Save proposal height to the persistent storage. - // In case of restart it reloads state. 
- if (persistent_state_->createOsPersistentState() | - [this](const auto &state) { - return state->saveProposalHeight(proposal_height_ + 1); - }) { - publishProposal(std::move(v.value)); - proposal_height_++; - } else { - // TODO(@l4l) 23/03/18: publish proposal independent of psql - // status IR-1162 - log_->warn( - "Proposal height cannot be saved. Skipping proposal publish"); - } - }, - [this](expected::Error &e) { - log_->warn("Failed to initialize proposal: {}", e.error); - }); - } - - void SinglePeerOrderingService::publishProposal( - std::unique_ptr proposal) { - auto peers = peer_query_factory_->createPeerQuery() | - [](const auto &query) { return query->getLedgerPeers(); }; - if (peers) { - std::vector addresses; - std::transform(peers->begin(), - peers->end(), - std::back_inserter(addresses), - [](auto &p) { return p->address(); }); - transport_->publishProposal(std::move(proposal), addresses); - } else { - log_->error("Cannot get the peer list"); - } - } - - SinglePeerOrderingService::~SinglePeerOrderingService() { - handle_.unsubscribe(); - } - } // namespace ordering -} // namespace iroha diff --git a/irohad/ordering/impl/single_peer_ordering_service.hpp b/irohad/ordering/impl/single_peer_ordering_service.hpp deleted file mode 100644 index eb0736c2ed..0000000000 --- a/irohad/ordering/impl/single_peer_ordering_service.hpp +++ /dev/null @@ -1,144 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_ORDERING_SERVICE_IMPL_HPP -#define IROHA_ORDERING_SERVICE_IMPL_HPP - -#include -#include - -#include -#include - -#include "ametsuchi/os_persistent_state_factory.hpp" -#include "ametsuchi/peer_query_factory.hpp" -#include "interfaces/iroha_internal/proposal_factory.hpp" -#include "logger/logger.hpp" -#include "network/ordering_service.hpp" -#include "ordering.grpc.pb.h" - -namespace iroha { - - namespace ametsuchi { - class OrderingServicePersistentState; - class PeerQuery; - } // namespace ametsuchi - - namespace ordering { - - /** - * OrderingService implementation with gRPC synchronous server - * Allows receiving transactions concurrently from multiple peers by using - * concurrent queue - * Sends proposal by given timer interval and proposal size - */ - class SinglePeerOrderingService : public network::OrderingService { - public: - using TimeoutType = long; - /** - * Constructor - * @param peer_query_factory interface for fetching peers from world state - * view - * @param max_size maximum size of proposal - * @param proposal_timeout observable timeout for proposal creation - * @param transport receive transactions and publish proposals - * @param persistent_state factory to storage for auxiliary information - * @param factory is used to generate proposals - * @param is_async whether proposals are generated in a separate thread - * @param log to print progress - */ - SinglePeerOrderingService( - std::shared_ptr peer_query_factory, - size_t max_size, - rxcpp::observable proposal_timeout, - std::shared_ptr transport, - std::shared_ptr persistent_state, - std::unique_ptr factory, - bool is_async = true, - logger::Logger log = logger::log("OrderingServiceImpl")); - - /** - * Process transaction(s) received from network - * Enqueues transactions and publishes corresponding event - * @param batch, in which transactions are packed - */ - void onBatch(std::unique_ptr - batch) override; - - ~SinglePeerOrderingService() override; - - protected: - /** - * Transform model proposal to transport object and send to 
peers - * @param proposal - object for propagation - */ - void publishProposal( - std::unique_ptr proposal) override; - - private: - /** - * Events for queue check strategy - */ - enum class ProposalEvent { kBatchEvent, kTimerEvent }; - - /** - * Collect transactions from queue - * Passes the generated proposal to publishProposal - */ - void generateProposal(); - - std::shared_ptr peer_query_factory_; - - tbb::concurrent_queue< - std::unique_ptr> - queue_; - - /** - * max number of txs in proposal - */ - const size_t max_size_; - - /** - * current number of transactions in a queue - */ - std::atomic_ulong current_size_; - - std::shared_ptr transport_; - - /** - * Factory to persistent storage for proposal counter. - * In case of relaunch, ordering server will enumerate proposals - * consecutively. - */ - std::shared_ptr persistent_state_; - - /// Observable for transaction events from the network - rxcpp::subjects::subject transactions_; - - /// Internal event observable handle - rxcpp::composite_subscription handle_; - - /** - * Variables for concurrency - */ - /// mutex for both batch and proposal generation - std::shared_timed_mutex batch_prop_mutex_; - /// mutex for events activating - std::mutex event_mutex_; - - std::unique_ptr factory_; - - /** - * Proposal counter of expected proposal. Should be number of blocks in - * the ledger + 1. - */ - size_t proposal_height_; - - logger::Logger log_; - }; - } // namespace ordering -} // namespace iroha - -#endif // IROHA_ORDERING_SERVICE_IMPL_HPP diff --git a/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp b/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp index a7c04dedf4..45bbc504ab 100644 --- a/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp +++ b/test/module/irohad/ametsuchi/ametsuchi_mocks.hpp @@ -8,7 +8,6 @@ #include #include -#include "ametsuchi/os_persistent_state_factory.hpp" #include "ametsuchi/temporary_wsv.hpp" #include "ametsuchi/wsv_command.hpp" #include "common/result.hpp" @@ -113,13 +112,6 @@ namespace iroha { MOCK_METHOD0(release, void(void)); }; - class MockOsPersistentStateFactory : public OsPersistentStateFactory { - public: - MOCK_CONST_METHOD0( - createOsPersistentState, - boost::optional>()); - }; - namespace tx_cache_status_responses { std::ostream &operator<<(std::ostream &os, const Committed &resp) { return os << resp.hash.toString(); diff --git a/test/module/irohad/ametsuchi/ametsuchi_test.cpp b/test/module/irohad/ametsuchi/ametsuchi_test.cpp index 3378ee9ae6..c834ede157 100644 --- a/test/module/irohad/ametsuchi/ametsuchi_test.cpp +++ b/test/module/irohad/ametsuchi/ametsuchi_test.cpp @@ -6,7 +6,6 @@ #include #include "ametsuchi/impl/postgres_block_query.hpp" -#include "ametsuchi/impl/postgres_ordering_service_persistent_state.hpp" #include "ametsuchi/impl/postgres_wsv_query.hpp" #include "ametsuchi/impl/wsv_restorer_impl.hpp" #include "ametsuchi/mutable_storage.hpp" @@ -449,85 +448,6 @@ TEST_F(AmetsuchiTest, TestingStorageWhenCommitBlock) { wrapper.unsubscribe(); } -/** - * @given initialized storage for ordering service - * @when save proposal height - * @then load proposal height and ensure it is correct - */ -TEST_F(AmetsuchiTest, OrderingServicePersistentStorageTest) { - auto os_opt = storage->createOsPersistentState(); - ASSERT_TRUE(os_opt); - auto ordering_state = os_opt.get(); - ASSERT_TRUE(ordering_state); - - ordering_state->resetState(); - ASSERT_EQ(2, ordering_state->loadProposalHeight().value()); - ASSERT_TRUE(ordering_state->saveProposalHeight(11)); - ASSERT_EQ(11, 
ordering_state->loadProposalHeight().value()); - ASSERT_TRUE(ordering_state->saveProposalHeight(33)); - ASSERT_EQ(33, ordering_state->loadProposalHeight().value()); - ordering_state->resetState(); - ASSERT_EQ(2, ordering_state->loadProposalHeight().value()); -} - -/** - * @given initialized storage for ordering service - * @when save proposal height - * @then load proposal height and ensure it is correct - */ -TEST_F(AmetsuchiTest, OrderingServicePersistentStorageRestartTest) { - auto os_opt = storage->createOsPersistentState(); - ASSERT_TRUE(os_opt); - auto ordering_state = os_opt.get(); - ASSERT_TRUE(ordering_state); - - ordering_state->resetState(); - ASSERT_EQ(2, ordering_state->loadProposalHeight().value()); - ASSERT_TRUE(ordering_state->saveProposalHeight(11)); - ASSERT_EQ(11, ordering_state->loadProposalHeight().value()); - - // restart Ordering Service Storage - ordering_state.reset(); - os_opt = storage->createOsPersistentState(); - ASSERT_TRUE(os_opt); - ordering_state = os_opt.get(); - ASSERT_TRUE(ordering_state); - ASSERT_TRUE(ordering_state); - ASSERT_EQ(11, ordering_state->loadProposalHeight().value()); -} - -/** - * @given 2 different initialized storages for ordering service - * @when save proposal height to the first one - * @then the state is consistent - */ -TEST_F(AmetsuchiTest, - OrderingServicePersistentStorageDifferentConnectionsTest) { - auto os_opt1 = storage->createOsPersistentState(); - ASSERT_TRUE(os_opt1); - auto ordering_state_1 = os_opt1.get(); - ASSERT_TRUE(ordering_state_1); - - auto os_opt2 = storage->createOsPersistentState(); - ASSERT_TRUE(os_opt2); - auto ordering_state_2 = os_opt2.get(); - ASSERT_TRUE(ordering_state_2); - - ordering_state_2->resetState(); - ASSERT_EQ(2, ordering_state_1->loadProposalHeight().value()); - ASSERT_EQ(2, ordering_state_2->loadProposalHeight().value()); - ASSERT_TRUE(ordering_state_1->saveProposalHeight(11)); - ASSERT_EQ(11, ordering_state_1->loadProposalHeight().value()); - ASSERT_EQ(11, ordering_state_2->loadProposalHeight().value()); - - ordering_state_2->resetState(); - ASSERT_EQ(2, ordering_state_1->loadProposalHeight().value()); - ASSERT_EQ(2, ordering_state_2->loadProposalHeight().value()); - ASSERT_TRUE(ordering_state_2->saveProposalHeight(42)); - ASSERT_EQ(42, ordering_state_1->loadProposalHeight().value()); - ASSERT_EQ(42, ordering_state_2->loadProposalHeight().value()); -} - /** * @given spoiled WSV * @when WSV is restored @@ -672,8 +592,7 @@ TEST_F(PreparedBlockTest, PrepareBlockNoStateChanged) { storage->prepareBlock(std::move(temp_wsv)); // balance remains unchanged - validateAccountAsset( - sql_query, "admin@test", "coin#test", base_balance); + validateAccountAsset(sql_query, "admin@test", "coin#test", base_balance); } /** @@ -696,8 +615,7 @@ TEST_F(PreparedBlockTest, CommitPreparedStateChanged) { shared_model::interface::Amount resultingAmount("10.00"); - validateAccountAsset( - sql_query, "admin@test", "coin#test", resultingAmount); + validateAccountAsset(sql_query, "admin@test", "coin#test", resultingAmount); } /** @@ -719,8 +637,7 @@ TEST_F(PreparedBlockTest, PrepareBlockCommitDifferentBlock) { apply(storage, block); shared_model::interface::Amount resultingBalance{"15.00"}; - validateAccountAsset( - sql_query, "admin@test", "coin#test", resultingBalance); + validateAccountAsset(sql_query, "admin@test", "coin#test", resultingBalance); } /** @@ -748,6 +665,5 @@ TEST_F(PreparedBlockTest, CommitPreparedFailsAfterCommit) { ASSERT_FALSE(commited); shared_model::interface::Amount resultingBalance{"15.00"}; - 
validateAccountAsset( - sql_query, "admin@test", "coin#test", resultingBalance); + validateAccountAsset(sql_query, "admin@test", "coin#test", resultingBalance); } diff --git a/test/module/irohad/ametsuchi/mock_storage.hpp b/test/module/irohad/ametsuchi/mock_storage.hpp index 1113787d26..92db4670c8 100644 --- a/test/module/irohad/ametsuchi/mock_storage.hpp +++ b/test/module/irohad/ametsuchi/mock_storage.hpp @@ -29,9 +29,6 @@ namespace iroha { boost::optional>()); MOCK_CONST_METHOD0(createBlockQuery, boost::optional>()); - MOCK_CONST_METHOD0( - createOsPersistentState, - boost::optional>()); MOCK_CONST_METHOD2( createQueryExecutor, boost::optional>( diff --git a/test/module/irohad/ordering/CMakeLists.txt b/test/module/irohad/ordering/CMakeLists.txt index 678aad4916..f29e06dac8 100644 --- a/test/module/irohad/ordering/CMakeLists.txt +++ b/test/module/irohad/ordering/CMakeLists.txt @@ -1,23 +1,6 @@ # Copyright Soramitsu Co., Ltd. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -addtest(ordering_service_test ordering_service_test.cpp) -target_link_libraries(ordering_service_test - ordering_service - shared_model_stateless_validation - shared_model_proto_backend - ) - -addtest(ordering_gate_test ordering_gate_test.cpp) -target_link_libraries(ordering_gate_test - ordering_service - shared_model_cryptography_model - shared_model_stateless_validation - consensus_round - ordering_gate_common - on_demand_common - ) - addtest(on_demand_os_test on_demand_os_test.cpp) target_link_libraries(on_demand_os_test on_demand_ordering_service diff --git a/test/module/irohad/ordering/mock_ordering_service_persistent_state.hpp b/test/module/irohad/ordering/mock_ordering_service_persistent_state.hpp deleted file mode 100644 index bb95434b87..0000000000 --- a/test/module/irohad/ordering/mock_ordering_service_persistent_state.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ -#ifndef IROHA_MOCK_ORDERING_SERVICE_PERSISTENT_STATE_HPP -#define IROHA_MOCK_ORDERING_SERVICE_PERSISTENT_STATE_HPP - -#include - -#include "ametsuchi/ordering_service_persistent_state.hpp" - -class MockOrderingServicePersistentState - : public iroha::ametsuchi::OrderingServicePersistentState { - public: - /** - * Save proposal height - */ - MOCK_METHOD1(saveProposalHeight, bool(size_t height)); - - /** - * Load proposal height - */ - MOCK_CONST_METHOD0(loadProposalHeight, boost::optional()); - - /** - * Reset state - */ - MOCK_METHOD0(resetState, bool()); -}; - -#endif // IROHA_MOCK_ORDERING_SERVICE_PERSISTENT_STATE_HPP diff --git a/test/module/irohad/ordering/ordering_gate_test.cpp b/test/module/irohad/ordering/ordering_gate_test.cpp deleted file mode 100644 index db3f51f53c..0000000000 --- a/test/module/irohad/ordering/ordering_gate_test.cpp +++ /dev/null @@ -1,331 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include -#include - -#include "framework/test_subscriber.hpp" - -#include "builders/protobuf/transaction.hpp" -#include "module/irohad/network/network_mocks.hpp" -#include "module/shared_model/builders/protobuf/proposal.hpp" -#include "module/shared_model/builders/protobuf/test_block_builder.hpp" -#include "module/shared_model/builders/protobuf/test_proposal_builder.hpp" -#include "module/shared_model/builders/protobuf/test_transaction_builder.hpp" -#include "ordering/impl/ordering_gate_impl.hpp" -#include "ordering/impl/ordering_gate_transport_grpc.hpp" - -using namespace iroha; -using namespace iroha::ordering; -using namespace iroha::network; -using namespace framework::test_subscriber; -using namespace std::chrono_literals; -using namespace iroha::synchronizer; - -using ::testing::_; -using ::testing::InvokeWithoutArgs; -using ::testing::Return; - -using shared_model::interface::types::HeightType; - -class MockOrderingGateTransportGrpcService - : public proto::OrderingServiceTransportGrpc::Service {}; - -class MockOrderingGateTransport : public OrderingGateTransport { - MOCK_METHOD1(subscribe, void(std::shared_ptr)); - MOCK_METHOD1( - propagateBatch, - void(std::shared_ptr)); -}; - -class OrderingGateTest : public ::testing::Test { - public: - OrderingGateTest() - : fake_service{std::make_shared()} { - } - - void SetUp() override { - grpc::ServerBuilder builder; - int port = 0; - builder.AddListeningPort( - "0.0.0.0:0", grpc::InsecureServerCredentials(), &port); - - builder.RegisterService(fake_service.get()); - - server = builder.BuildAndStart(); - auto address = "0.0.0.0:" + std::to_string(port); - // Initialize components after port has been bind - async_call_ = - std::make_shared>(); - transport = - std::make_shared(address, async_call_); - gate_impl = std::make_shared(transport, 1, false); - transport->subscribe(gate_impl); - - ASSERT_NE(port, 0); - ASSERT_TRUE(server); - } - - void TearDown() override { - server->Shutdown(); - } - - std::unique_ptr server; - - std::shared_ptr transport; - std::shared_ptr gate_impl; - std::shared_ptr fake_service; - std::condition_variable cv; - std::mutex m; - std::shared_ptr> - async_call_; -}; - -/** - * @given Initialized OrderingGate - * @when Emulation of receiving proposal from the network - * @then Round starts <==> proposal is emitted to subscribers - */ -TEST_F(OrderingGateTest, ProposalReceivedByGateWhenSent) { - auto wrapper = make_test_subscriber(gate_impl->onProposal(), 1); - wrapper.subscribe(); - - auto pcs = std::make_shared(); - rxcpp::subjects::subject commit_subject; - EXPECT_CALL(*pcs, on_commit()) - .WillOnce(Return(commit_subject.get_observable())); - gate_impl->setPcs(*pcs); - - grpc::ServerContext context; - - std::vector txs; - txs.push_back(shared_model::proto::TransactionBuilder() - .createdTime(iroha::time::now()) - .creatorAccountId("admin@ru") - .addAssetQuantity("coin#coin", "1.0") - .quorum(1) - .build() - .signAndAddSignature( - shared_model::crypto::DefaultCryptoAlgorithmType:: - generateKeypair()) - .finish()); - iroha::protocol::Proposal proposal = shared_model::proto::ProposalBuilder() - .height(2) - .createdTime(iroha::time::now()) - .transactions(txs) - .build() - .getTransport(); - - google::protobuf::Empty response; - - transport->onProposal(&context, &proposal, &response); - - ASSERT_TRUE(wrapper.validate()); -} - -class QueueBehaviorTest : public ::testing::Test { - public: - QueueBehaviorTest() : ordering_gate(transport, 1, false){}; - - void SetUp() 
override { - transport = std::make_shared(); - pcs = std::make_shared(); - EXPECT_CALL(*pcs, on_commit()) - .WillOnce(Return(commit_subject.get_observable())); - - ordering_gate.setPcs(*pcs); - ordering_gate.onProposal().subscribe( - [&](auto val) { messages.push_back(val); }); - } - - std::shared_ptr transport; - std::shared_ptr pcs; - rxcpp::subjects::subject commit_subject; - OrderingGateImpl ordering_gate; - std::vector messages; - - void pushCommit(HeightType height) { - commit_subject.get_subscriber().on_next(SynchronizationEvent{ - rxcpp::observable<>::just( - std::static_pointer_cast( - std::make_shared( - TestBlockBuilder().height(height).build()))), - SynchronizationOutcomeType::kCommit, - {height, 1}, - {}}); - } - - void pushProposal(HeightType height) { - ordering_gate.onProposal(std::make_shared( - TestProposalBuilder().height(height).build())); - }; -}; - -/** - * @given Initialized OrderingGate - * AND MockPeerCommunicationService - * @when Send two proposals - * AND one commit in node - * @then Check that send round appears after commit - */ -TEST_F(QueueBehaviorTest, SendManyProposals) { - auto wrapper_before = - make_test_subscriber(ordering_gate.onProposal(), 1); - wrapper_before.subscribe(); - auto wrapper_after = - make_test_subscriber(ordering_gate.onProposal(), 2); - wrapper_after.subscribe(); - - std::vector txs; - txs.push_back(shared_model::proto::TransactionBuilder() - .createdTime(iroha::time::now()) - .creatorAccountId("admin@ru") - .addAssetQuantity("coin#coin", "1.0") - .quorum(1) - .build() - .signAndAddSignature( - shared_model::crypto::DefaultCryptoAlgorithmType:: - generateKeypair()) - .finish()); - auto proposal1 = std::make_shared( - shared_model::proto::ProposalBuilder() - .height(2) - .createdTime(iroha::time::now()) - .transactions(txs) - .build()); - auto proposal2 = std::make_shared( - shared_model::proto::ProposalBuilder() - .height(3) - .createdTime(iroha::time::now()) - .transactions(txs) - .build()); - - ordering_gate.onProposal(proposal1); - ordering_gate.onProposal(proposal2); - - ASSERT_TRUE(wrapper_before.validate()); - - std::shared_ptr block = - std::make_shared( - TestBlockBuilder().height(2).build()); - - commit_subject.get_subscriber().on_next( - SynchronizationEvent{rxcpp::observable<>::just(block), - SynchronizationOutcomeType::kCommit, - {block->height(), 1}, - {}}); - - ASSERT_TRUE(wrapper_after.validate()); -} - -/** - * @given Initialized OrderingGate - * AND MockPeerCommunicationService - * @when Receive proposals in random order - * @then onProposal output is ordered - */ -TEST_F(QueueBehaviorTest, ReceiveUnordered) { - // this will set unlock_next_ to false, so proposals 3 and 4 are enqueued - pushProposal(2); - - pushProposal(4); - pushProposal(3); - - pushCommit(2); - pushCommit(3); - - ASSERT_EQ(3, messages.size()); - ASSERT_EQ(2, getProposalUnsafe(messages.at(0))->height()); - ASSERT_EQ(3, getProposalUnsafe(messages.at(1))->height()); - ASSERT_EQ(4, getProposalUnsafe(messages.at(2))->height()); -} - -/** - * @given Initialized OrderingGate - * AND MockPeerCommunicationService - * @when Receive commits which are newer than existing proposals - * @then onProposal is not invoked on proposals - * which are older than last committed block - */ -TEST_F(QueueBehaviorTest, DiscardOldProposals) { - pushProposal(2); - pushProposal(3); - - pushProposal(4); - pushProposal(5); - pushCommit(4); - - // proposals 2 and 3 must not be forwarded down the pipeline. 
- EXPECT_EQ(2, messages.size()); - ASSERT_EQ(2, getProposalUnsafe(messages.at(0))->height()); - ASSERT_EQ(5, getProposalUnsafe(messages.at(1))->height()); -} - -/** - * @given Initialized OrderingGate - * AND MockPeerCommunicationService - * @when Proposals are newer than received commits - * @then newer proposals are kept in queue - */ -TEST_F(QueueBehaviorTest, KeepNewerProposals) { - pushProposal(2); - pushProposal(3); - pushProposal(4); - - pushCommit(2); - - // proposal 3 must be forwarded down the pipeline, 4 kept in queue. - EXPECT_EQ(2, messages.size()); - EXPECT_EQ(2, getProposalUnsafe(messages.at(0))->height()); - EXPECT_EQ(3, getProposalUnsafe(messages.at(1))->height()); - - pushCommit(3); - // Now proposal 4 is forwarded to the pipeline - EXPECT_EQ(3, messages.size()); - EXPECT_EQ(4, getProposalUnsafe(messages.at(2))->height()); -} - -/** - * @given Initialized OrderingGate - * AND MockPeerCommunicationService - * @when commit is received before any proposals - * @then old proposals are discarded and new is propagated - */ -TEST_F(QueueBehaviorTest, CommitBeforeProposal) { - pushCommit(4); - - // Old proposals should be discarded - pushProposal(2); - pushProposal(3); - pushProposal(4); - - EXPECT_EQ(0, messages.size()); - - // should be propagated - pushProposal(5); - - // should not be propagated - pushProposal(6); - - EXPECT_EQ(1, messages.size()); - EXPECT_EQ(5, getProposalUnsafe(messages.at(0))->height()); -} - -/** - * @given Initialized OrderingGate - * AND MockPeerCommunicationService - * @when commit is received which newer than all proposals - * @then all proposals are discarded and none are propagated - */ -TEST_F(QueueBehaviorTest, CommitNewerThanAllProposals) { - pushProposal(2); - // Old proposals should be discarded - pushProposal(3); - pushProposal(4); - - pushCommit(4); - EXPECT_EQ(1, messages.size()); - EXPECT_EQ(2, getProposalUnsafe(messages.at(0))->height()); -} diff --git a/test/module/irohad/ordering/ordering_service_test.cpp b/test/module/irohad/ordering/ordering_service_test.cpp deleted file mode 100644 index a77783eb75..0000000000 --- a/test/module/irohad/ordering/ordering_service_test.cpp +++ /dev/null @@ -1,296 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
- * SPDX-License-Identifier: Apache-2.0 - */ - -#include - -#include "backend/protobuf/common_objects/peer.hpp" -#include "backend/protobuf/proto_proposal_factory.hpp" -#include "framework/batch_helper.hpp" -#include "interfaces/iroha_internal/transaction_batch.hpp" -#include "logger/logger.hpp" -#include "module/irohad/ametsuchi/ametsuchi_mocks.hpp" -#include "module/irohad/network/network_mocks.hpp" -#include "module/irohad/ordering/mock_ordering_service_persistent_state.hpp" -#include "module/shared_model/builders/protobuf/test_proposal_builder.hpp" -#include "module/shared_model/interface_mocks.hpp" -#include "ordering/impl/ordering_service_transport_grpc.hpp" -#include "ordering/impl/single_peer_ordering_service.hpp" - -using namespace iroha; -using namespace iroha::ordering; -using namespace iroha::network; -using namespace iroha::ametsuchi; -using namespace std::chrono_literals; - -using ::testing::_; -using ::testing::AtLeast; -using ::testing::DoAll; -using ::testing::Invoke; -using ::testing::InvokeWithoutArgs; -using ::testing::Return; - -class MockOrderingServiceTransport : public network::OrderingServiceTransport { - public: - void subscribe(std::shared_ptr - subscriber) override { - subscriber_ = subscriber; - } - - void publishProposal( - std::unique_ptr proposal, - const std::vector &peers) override { - return publishProposalProxy(proposal.get(), peers); - } - - MOCK_METHOD2(publishProposalProxy, - void(shared_model::interface::Proposal *proposal, - const std::vector &peers)); - - std::weak_ptr subscriber_; -}; - -class OrderingServiceTest : public ::testing::Test { - public: - OrderingServiceTest() { - peer = std::make_unique(); - EXPECT_CALL(*peer, address()).WillRepeatedly(testing::ReturnRef(address)); - EXPECT_CALL(*peer, pubkey()).WillRepeatedly(testing::ReturnRef(pk)); - } - - void SetUp() override { - wsv = std::make_shared(); - pqfactory = std::make_shared(); - fake_transport = std::make_shared(); - fake_persistent_state = - std::make_shared(); - persistent_state_factory = std::make_shared(); - factory = std::make_unique>(); - - EXPECT_CALL(*pqfactory, createPeerQuery()) - .WillRepeatedly( - Return(boost::make_optional(std::shared_ptr(wsv)))); - EXPECT_CALL(*persistent_state_factory, createOsPersistentState()) - .WillRepeatedly(Return(boost::make_optional( - std::shared_ptr( - fake_persistent_state)))); - } - - auto initOs(size_t max_proposal) { - return std::make_shared( - pqfactory, - max_proposal, - proposal_timeout.get_observable(), - fake_transport, - persistent_state_factory, - std::move(factory), - false); - } - - void makeProposalTimeout() { - proposal_timeout.get_subscriber().on_next(0); - } - - std::shared_ptr fake_transport; - std::shared_ptr fake_persistent_state; - std::shared_ptr persistent_state_factory; - std::condition_variable cv; - std::mutex m; - std::string address{"127.0.0.1:50051"}; - shared_model::interface::types::PubkeyType pk{std::string(32, '0')}; - std::shared_ptr peer; - std::shared_ptr wsv; - std::shared_ptr pqfactory; - std::unique_ptr factory; - rxcpp::subjects::subject - proposal_timeout; -}; - -/** - * @given OrderingService and MockOrderingServiceTransport - * @when publishProposal is called at transport - * @then publishProposalProxy is called - */ -TEST_F(OrderingServiceTest, SimpleTest) { - const size_t max_proposal = 5; - - EXPECT_CALL(*fake_persistent_state, loadProposalHeight()) - .Times(1) - .WillOnce(Return(boost::optional(2))); - EXPECT_CALL(*fake_transport, publishProposalProxy(_, _)).Times(1); - - auto ordering_service 
= initOs(max_proposal); - fake_transport->subscribe(ordering_service); - - fake_transport->publishProposal( - std::make_unique( - TestProposalBuilder() - .height(1) - .createdTime(iroha::time::now()) - .build()), - {}); -} - -/** - * @given OrderingService with max_proposal==5 and only self peer - * and MockOrderingServiceTransport - * and MockOrderingServicePersistentState - * @when OrderingService::onTransaction called 10 times - * @then publishProposalProxy called twice - * and proposal height was loaded once and saved twice - */ -TEST_F(OrderingServiceTest, ValidWhenProposalSizeStrategy) { - const size_t max_proposal = 5; - const size_t tx_num = 10; - - EXPECT_CALL(*fake_persistent_state, saveProposalHeight(_)) - .Times(2) - .WillRepeatedly(Return(true)); - EXPECT_CALL(*fake_persistent_state, loadProposalHeight()) - .Times(1) - .WillOnce(Return(boost::optional(2))); - EXPECT_CALL(*fake_transport, publishProposalProxy(_, _)) - .Times(tx_num / max_proposal); - EXPECT_CALL(*wsv, getLedgerPeers()) - .WillRepeatedly(Return( - std::vector>{peer})); - - auto ordering_service = initOs(max_proposal); - fake_transport->subscribe(ordering_service); - - for (size_t i = 0; i < tx_num; ++i) { - ordering_service->onBatch(framework::batch::createValidBatch(1)); - } -} - -/** - * @given OrderingService with big enough max_proposal and only self peer - * and MockOrderingServiceTransport - * and MockOrderingServicePersistentState - * @when OrderingService::onTransaction called 8 times - * and after triggered timeout - * and then repeat with 2 onTransaction calls - * @then publishProposalProxy called twice - * and proposal height was loaded once and saved twice - */ -TEST_F(OrderingServiceTest, ValidWhenTimerStrategy) { - const size_t max_proposal = 100; - - EXPECT_CALL(*fake_persistent_state, saveProposalHeight(_)) - .Times(2) - .WillRepeatedly(Return(true)); - EXPECT_CALL(*fake_persistent_state, loadProposalHeight()) - .Times(1) - .WillOnce(Return(boost::optional(2))); - EXPECT_CALL(*wsv, getLedgerPeers()) - .WillRepeatedly(Return( - std::vector>{peer})); - EXPECT_CALL(*fake_transport, publishProposalProxy(_, _)).Times(2); - - auto ordering_service = initOs(max_proposal); - fake_transport->subscribe(ordering_service); - - for (size_t i = 0; i < 8; ++i) { - ordering_service->onBatch(framework::batch::createValidBatch(1)); - } - makeProposalTimeout(); - - ordering_service->onBatch(framework::batch::createValidBatch(1)); - ordering_service->onBatch(framework::batch::createValidBatch(1)); - - makeProposalTimeout(); -} - -/** - * @given Ordering service and the persistent state that cannot save - * proposals - * @when onTransaction is called - * @then no published proposal - */ -TEST_F(OrderingServiceTest, BrokenPersistentState) { - const size_t max_proposal = 1; - - EXPECT_CALL(*fake_persistent_state, loadProposalHeight()) - .Times(1) - .WillOnce(Return(boost::optional(1))); - EXPECT_CALL(*fake_persistent_state, saveProposalHeight(2)) - .Times(1) - .WillRepeatedly(Return(false)); - - auto ordering_service = initOs(max_proposal); - ordering_service->onBatch(framework::batch::createValidBatch(1)); - - makeProposalTimeout(); -} - -/** - * @given Ordering service up and running - * @when Send 1000 transactions from each of 2 threads - * @then Ordering service should not crash - */ -TEST_F(OrderingServiceTest, ConcurrentGenerateProposal) { - const auto max_proposal = 1; - EXPECT_CALL(*fake_persistent_state, loadProposalHeight()) - .Times(1) - .WillOnce(Return(boost::optional(1))); - 
EXPECT_CALL(*fake_persistent_state, saveProposalHeight(_)) - .WillRepeatedly(Return(false)); - - auto ordering_service = initOs(max_proposal); - - auto on_tx = [&]() { - for (int i = 0; i < 1000; ++i) { - ordering_service->onBatch(framework::batch::createValidBatch(1)); - } - }; - - const auto num_threads = 2; - - std::vector threads; - for (int i = 0; i < num_threads; ++i) { - threads.emplace_back(std::thread(on_tx)); - } - - for (int i = 0; i < num_threads; ++i) { - threads.at(i).join(); - } - makeProposalTimeout(); -} - -/** - * Check that batches are processed by ordering service - * @given ordering service up and running - * @when feeding the ordering service two batches, such that number of - * transactions in both summed is greater than maximum number of transactions - * inside proposal - * @then proposal will still contain number of transactions, equal to sum of the - * batches - */ -TEST_F(OrderingServiceTest, BatchesProceed) { - const auto max_proposal = 12; - const auto first_batch_size = 10; - const auto second_batch_size = 5; - - auto batch_one = framework::batch::createValidBatch(first_batch_size); - auto batch_two = framework::batch::createValidBatch(second_batch_size); - - EXPECT_CALL(*fake_persistent_state, saveProposalHeight(_)) - .Times(1) - .WillRepeatedly(Return(true)); - EXPECT_CALL(*fake_persistent_state, loadProposalHeight()) - .Times(1) - .WillOnce(Return( - boost::optional(first_batch_size + second_batch_size))); - EXPECT_CALL(*fake_transport, publishProposalProxy(_, _)).Times(1); - EXPECT_CALL(*wsv, getLedgerPeers()) - .WillRepeatedly(Return( - std::vector>{peer})); - - auto ordering_service = initOs(max_proposal); - fake_transport->subscribe(ordering_service); - - ordering_service->onBatch(std::move(batch_one)); - ordering_service->onBatch(std::move(batch_two)); -} From 9da48b2026118313cb5292b23d9ee254e4bf6695 Mon Sep 17 00:00:00 2001 From: Robert Sayakhov Date: Wed, 30 Jan 2019 21:22:21 +0300 Subject: [PATCH 24/41] IR-156: Remove node.js library (#2025) * feat: remove js library and documentation Signed-off-by: Robert Sayakhov * chore: added documentation for new library Signed-off-by: Robert Sayakhov * chore: added example of private key Signed-off-by: Robert Sayakhov * feat: remove node.js build param Signed-off-by: Robert Sayakhov * fix: missing comma Signed-off-by: Robert Sayakhov * feat: remove all information about SWIG_NODE Signed-off-by: Robert Sayakhov * feat: remove cmake of nodejs Signed-off-by: Robert Sayakhov --- CMakeLists.txt | 3 - cmake/Modules/Findnodejs.cmake | 96 -------- docs/source/api/queries.rst | 2 +- docs/source/guides/libraries.rst | 2 +- docs/source/guides/libraries/javascript.rst | 138 +++++++++++ docs/source/guides/libraries/nodejs.rst | 100 -------- example/node/.gitignore | 61 ----- example/node/README.md | 30 --- example/node/index.js | 181 --------------- example/node/package.json | 20 -- shared_model/bindings/CMakeLists.txt | 32 +-- shared_model/packages/javascript/.gitignore | 67 ------ shared_model/packages/javascript/LICENSE | 201 ---------------- shared_model/packages/javascript/README.md | 72 ------ shared_model/packages/javascript/binding.gyp | 135 ----------- .../javascript/example/admin@test.priv | 1 - .../javascript/example/admin@test.pub | 1 - .../packages/javascript/example/index.js | 183 --------------- shared_model/packages/javascript/index.js | 6 - shared_model/packages/javascript/package.json | 74 ------ .../packages/javascript/pb/.npmignore | 2 - .../packages/javascript/scripts/clean.sh | 5 - 
.../javascript/scripts/generate-protobuf.sh | 20 -- .../scripts/install-dependencies.sh | 28 --- .../packages/javascript/tests/crypto.js | 32 --- .../packages/javascript/tests/queryBuilder.js | 106 --------- .../packages/javascript/tests/txbuilder.js | 215 ------------------ .../shared_model/bindings/CMakeLists.txt | 7 - 28 files changed, 141 insertions(+), 1679 deletions(-) delete mode 100644 cmake/Modules/Findnodejs.cmake create mode 100644 docs/source/guides/libraries/javascript.rst delete mode 100644 docs/source/guides/libraries/nodejs.rst delete mode 100644 example/node/.gitignore delete mode 100644 example/node/README.md delete mode 100644 example/node/index.js delete mode 100644 example/node/package.json delete mode 100644 shared_model/packages/javascript/.gitignore delete mode 100644 shared_model/packages/javascript/LICENSE delete mode 100644 shared_model/packages/javascript/README.md delete mode 100644 shared_model/packages/javascript/binding.gyp delete mode 100644 shared_model/packages/javascript/example/admin@test.priv delete mode 100644 shared_model/packages/javascript/example/admin@test.pub delete mode 100644 shared_model/packages/javascript/example/index.js delete mode 100644 shared_model/packages/javascript/index.js delete mode 100644 shared_model/packages/javascript/package.json delete mode 100644 shared_model/packages/javascript/pb/.npmignore delete mode 100755 shared_model/packages/javascript/scripts/clean.sh delete mode 100755 shared_model/packages/javascript/scripts/generate-protobuf.sh delete mode 100755 shared_model/packages/javascript/scripts/install-dependencies.sh delete mode 100644 shared_model/packages/javascript/tests/crypto.js delete mode 100644 shared_model/packages/javascript/tests/queryBuilder.js delete mode 100644 shared_model/packages/javascript/tests/txbuilder.js diff --git a/CMakeLists.txt b/CMakeLists.txt index 7e8db37479..aa7318e6aa 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -82,7 +82,6 @@ option(SWIG_PYTHON "Generate Swig Python bindings" OFF) option(SWIG_JAVA "Generate Swig Java bindings" OFF) option(SUPPORT_PYTHON2 "ON if Python2, OFF if python3" OFF) option(SWIG_CSHARP "Generate Swig C# bindings" OFF) -option(SWIG_NODE "Generate Swig NodeJS" OFF) option(USE_LIBIROHA "Use external model library" OFF) @@ -104,7 +103,6 @@ if(PACKAGE_TGZ OR PACKAGE_ZIP OR PACKAGE_RPM OR PACKAGE_DEB) set(FUZZING OFF) set(SWIG_PYTHON OFF) set(SWIG_JAVA OFF) - set(SWIG_NODE OFF) endif() message(STATUS "-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}") @@ -121,7 +119,6 @@ message(STATUS "-DSWIG_PYTHON=${SWIG_PYTHON}") message(STATUS "-DSWIG_JAVA=${SWIG_JAVA}") message(STATUS "-DSUPPORT_PYTHON2=${SUPPORT_PYTHON2}") message(STATUS "-DSWIG_CSHARP=${SWIG_CSHARP}") -message(STATUS "-DSWIG_NODE=${SWIG_NODE}") set(IROHA_SCHEMA_DIR "${CMAKE_CURRENT_SOURCE_DIR}/schema") set(SM_SCHEMA_DIR "${PROJECT_SOURCE_DIR}/shared_model/schema") diff --git a/cmake/Modules/Findnodejs.cmake b/cmake/Modules/Findnodejs.cmake deleted file mode 100644 index eb2a0a4a59..0000000000 --- a/cmake/Modules/Findnodejs.cmake +++ /dev/null @@ -1,96 +0,0 @@ - # Macro to add directory to NODEJS_INCLUDE_DIRS if it exists and is not /usr/include - macro(add_include_dir dir) - if (IS_DIRECTORY ${dir} AND NOT ${dir} STREQUAL "/usr/include") - set(NODEJS_INCLUDE_DIRS ${NODEJS_INCLUDE_DIRS} ${dir}) - endif() -endmacro() - - -find_program (NODEJS_EXECUTABLE NAMES node nodejs - HINTS - $ENV{NODE_DIR} - PATH_SUFFIXES bin - DOC "Node.js interpreter" -) - -include (FindPackageHandleStandardArgs) - -# If compat-libuv package 
exists, it must be at start of include path -find_path (UV_ROOT_DIR "uv.h" PATHS /usr/include/compat-libuv010 NO_DEFAULT_PATH) -if (UV_ROOT_DIR) - # set (NODEJS_INCLUDE_DIRS ${UV_ROOT_DIR}) - add_include_dir(${UV_ROOT_DIR}) -endif() - -# Now look for node. Flag an error if not found -find_path (NODE_ROOT_DIR "include/node/node.h" "include/src/node.h" "src/node.h" - PATHS /usr/include/nodejs /usr/local/include/nodejs /usr/local/include) -if (NODE_ROOT_DIR) - add_include_dir(${NODE_ROOT_DIR}/include/src) - add_include_dir(${NODE_ROOT_DIR}/src) - add_include_dir(${NODE_ROOT_DIR}/include/node) - add_include_dir(${NODE_ROOT_DIR}/include/deps/v8/include) - add_include_dir(${NODE_ROOT_DIR}/deps/v8/include) - add_include_dir(${NODE_ROOT_DIR}/include/deps/uv/include) - add_include_dir(${NODE_ROOT_DIR}/deps/uv/include) -else() - unset(NODEJS_INCLUDE_DIRS) - message(FATAL_ERROR " - node.h not found") -endif() - -# Check that v8.h is in NODEJS_INCLUDE_DIRS -find_path (V8_ROOT_DIR "v8.h" PATHS ${NODEJS_INCLUDE_DIRS}) -if (NOT V8_ROOT_DIR) - unset(NODEJS_INCLUDE_DIRS) - message(FATAL_ERROR " - v8.h not found") -endif() - -# Check that uv.h is in NODEJS_INCLUDE_DIRS -find_path (UV_ROOT_DIR "uv.h" PATHS ${NODEJS_INCLUDE_DIRS}) -if (NOT UV_ROOT_DIR) - unset(NODEJS_INCLUDE_DIRS) - message(FATAL_ERROR " - uv.h not found") -endif() - -find_package_handle_standard_args (Nodejs DEFAULT_MSG - NODEJS_EXECUTABLE - NODEJS_INCLUDE_DIRS -) - -if (NODEJS_EXECUTABLE) - execute_process(COMMAND ${NODEJS_EXECUTABLE} --version - OUTPUT_VARIABLE _VERSION - RESULT_VARIABLE _NODE_VERSION_RESULT) - execute_process(COMMAND ${NODEJS_EXECUTABLE} -e "console.log(process.versions.v8)" - OUTPUT_VARIABLE _V8_VERSION - RESULT_VARIABLE _V8_RESULT) - if (NOT _NODE_VERSION_RESULT AND NOT _V8_RESULT) - string (REPLACE "v" "" NODE_VERSION_STRING "${_VERSION}") - string (REPLACE "." ";" _VERSION_LIST "${NODE_VERSION_STRING}") - list (GET _VERSION_LIST 0 NODE_VERSION_MAJOR) - list (GET _VERSION_LIST 1 NODE_VERSION_MINOR) - list (GET _VERSION_LIST 2 NODE_VERSION_PATCH) - set (V8_VERSION_STRING ${_V8_VERSION}) - string (REPLACE "." 
";" _V8_VERSION_LIST "${_V8_VERSION}") - list (GET _V8_VERSION_LIST 0 V8_VERSION_MAJOR) - list (GET _V8_VERSION_LIST 1 V8_VERSION_MINOR) - list (GET _V8_VERSION_LIST 2 V8_VERSION_PATCH) - # we end up with a nasty newline so strip everything that isn't a number - string (REGEX MATCH "^[0-9]*" V8_VERSION_PATCH ${V8_VERSION_PATCH}) - else () - set (NODE_VERSION_STRING "0.10.30") - set (NODE_VERSION_MAJOR "0") - set (NODE_VERSION_MINOR "10") - set (NODE_VERSION_PATCH "30") - set (V8_VERSION_MAJOR "3") - set (V8_VERSION_MINOR"14") - set (V8_VERSION_PATCH "5") - set (V8_VERSION_STRING "3.28.72") - message (STATUS "defaulted to node 0.10.30") - endif () - string (REGEX REPLACE "\n" "" NODE_VERSION_STRING ${NODE_VERSION_STRING}) - string (REGEX REPLACE "\n" "" V8_VERSION_STRING ${V8_VERSION_STRING}) - message (STATUS "INFO - Node version is " ${NODE_VERSION_STRING}) - message (STATUS "INFO - Node using v8 " ${V8_VERSION_STRING}) - mark_as_advanced (NODEJS_EXECUTABLE) -endif () diff --git a/docs/source/api/queries.rst b/docs/source/api/queries.rst index 20f38614b8..d7bc660e94 100644 --- a/docs/source/api/queries.rst +++ b/docs/source/api/queries.rst @@ -664,7 +664,7 @@ Possible Stateful Validation Errors "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" Response Structure -^^^^^^^^^^^^^^^^^^ +------------------ .. csv-table:: :header: "Field", "Description", "Constraint", "Example" diff --git a/docs/source/guides/libraries.rst b/docs/source/guides/libraries.rst index 65860a742b..7776eff742 100644 --- a/docs/source/guides/libraries.rst +++ b/docs/source/guides/libraries.rst @@ -7,7 +7,7 @@ Client Libraries libraries/android.rst libraries/java.rst - libraries/nodejs.rst + libraries/javascript.rst libraries/python.rst libraries/swift_ios.rst diff --git a/docs/source/guides/libraries/javascript.rst b/docs/source/guides/libraries/javascript.rst new file mode 100644 index 0000000000..9ae80400f0 --- /dev/null +++ b/docs/source/guides/libraries/javascript.rst @@ -0,0 +1,138 @@ +Javascript library +----------------- + +.. image:: https://img.shields.io/npm/v/iroha-helpers.svg + +This library provides functions which will help you to interact with Hyperledger Iroha from your JS program. + +Installation +^^^^^^^^^^^^ +Via npm + +.. code:: sh + + $ npm i iroha-helpers + +Via yarn + +.. code:: sh + + $ yarn add iroha-helpers + +Commands +^^^^^^^^ +For usage of any command you need to provide ``commandOptions`` as a first argument. + +.. code-block:: javascript + + const commandOptions = { + privateKeys: ['f101537e319568c765b2cc89698325604991dca57b9716b58016b253506cab70'], // Array of private keys in hex format + creatorAccountId: '', // Account id, ex. admin@test + quorum: 1, + commandService: null + } + +As second argument you need to provide object that contains properties for required command. + +.. code-block:: javascript + + // Example usage of setAccountDetail + + const commandService = new CommandService_v1Client( + '127.0.0.1:50051', + grpc.credentials.createInsecure() + ) + + const adminPriv = 'f101537e319568c765b2cc89698325604991dca57b9716b58016b253506cab70' + + commands.setAccountDetail({ + privateKeys: [adminPriv], + creatorAccountId: 'admin@test', + quorum: 1, + commandService + }, { + accountId: 'admin@test', + key: 'jason', + value: 'statham' + }) + +Queries +^^^^^^^ +For usage of any query you need to provide ``queryOptions`` as a first argument. + +.. 
code-block:: javascript + + const queryOptions = { + privateKey: 'f101537e319568c765b2cc89698325604991dca57b9716b58016b253506cab70', // Private key in hex format + creatorAccountId: '', // Account id, ex. admin@test + queryService: null + } + +As second argument you need to provide object that contains properties for required query. + +.. code-block:: javascript + + // Example usage of getAccountDetail + + const queryService = new QueryService_v1Client( + '127.0.0.1:50051', + grpc.credentials.createInsecure() + ) + + const adminPriv = 'f101537e319568c765b2cc89698325604991dca57b9716b58016b253506cab70' + + queries.getAccountDetail({ + privateKey: adminPriv, + creatorAccountId: 'admin@test', + queryService + }, { + accountId: 'admin@test' + }) + +Example code +^^^^^^^^^^^^ + +.. code-block:: javascript + + import grpc from 'grpc' + import { + QueryService_v1Client, + CommandService_v1Client + } from '../iroha-helpers/lib/proto/endpoint_grpc_pb' + import { commands, queries } from 'iroha-helpers' + + const IROHA_ADDRESS = 'localhost:50051' + const adminPriv = + 'f101537e319568c765b2cc89698325604991dca57b9716b58016b253506cab70' + + const commandService = new CommandService_v1Client( + IROHA_ADDRESS, + grpc.credentials.createInsecure() + ) + + const queryService = new QueryService_v1Client( + IROHA_ADDRESS, + grpc.credentials.createInsecure() + ) + + Promise.all([ + commands.setAccountDetail({ + privateKeys: [adminPriv], + creatorAccountId: 'admin@test', + quorum: 1, + commandService + }, { + accountId: 'admin@test', + key: 'jason', + value: 'statham' + }), + queries.getAccountDetail({ + privateKey: adminPriv, + creatorAccountId: 'admin@test', + queryService + }, { + accountId: 'admin@test' + }) + ]) + .then(a => console.log(a)) + .catch(e => console.error(e)) \ No newline at end of file diff --git a/docs/source/guides/libraries/nodejs.rst b/docs/source/guides/libraries/nodejs.rst deleted file mode 100644 index 90451d83e7..0000000000 --- a/docs/source/guides/libraries/nodejs.rst +++ /dev/null @@ -1,100 +0,0 @@ -Node.js Library ---------------- -.. warning:: Please note that Node.js library is under heavy testing now and - problems `might` occur. Don't hesitate to report them to us. - -There are two main ways of obtaining the Node.js library. If you are a happy -macOS or Linux user, you can install it `through NPM <#installing-through-npm>`_. -If your system is not yet supported or you want to try the latest version, you -can `build this library manually <#building-manually>`_. - -Prerequisites -^^^^^^^^^^^^^ - -- Node.js (>=7) (you can try using lower versions though). - -Installing Through NPM -^^^^^^^^^^^^^^^^^^^^^^ -If you are a happy macOS or Linux user, you can install the library from `NPM -repository `_ using NPM - -.. code-block:: shell - - npm install iroha-lib - -Now you can import it in your project - -.. code-block:: javascript - - const iroha = require('iroha-lib') - -Building Manually -^^^^^^^^^^^^^^^^^ -You need this section if you want to build iroha-lib manually for example if -your architecture/OS is not supported yet. - -Prerequisites -""""""""""""" -1. CMake (>=3.8.2) -2. Protobuf (>=3.5.1) -3. Boost (>=1.65.1) - -macOS users can install dependencies with following commands: - -.. code-block:: shell - - brew install node cmake # Common dependencies - brew install autoconf automake ccache # SWIG dependencies - brew install protobuf boost # Iroha dependencies - -.. 
warning:: If you have SWIG already installed, you **MUST** install patched - 3.0.12 version instead using - `this patch `_. - The current version of SWIG doesn't support Node.js versions higher than 6. - Also you can just delete the global installed SWIG and iroha will pull and - build it automatically. - -Build Process -""""""""""""" -Clone Iroha repository - -.. code-block:: shell - - git clone -b develop --depth=1 https://github.com/hyperledger/iroha - -Go to the NPM package directory and start the build process - -.. code-block:: shell - - cd iroha/shared_model/packages/javascript - npm install --build-from-source=iroha-lib - -That's all. You can use the library now. - -Example code -^^^^^^^^^^^^ -Explore ``example/node/index.js`` file to get an idea of how to -work with a library. - -Prerequisites -""""""""""""" -To run this example, you need an Iroha node up and running. Please check out -:ref:`getting-started` if you want to learn how to start it. - -Running the Example -""""""""""""""""""" -To start the example, you need to install all its dependencies -(basically ``iroha-lib``) - -.. code-block:: shell - - npm install - -.. note:: if you have built the library manually, please change import paths - to ``path-to-iroha/shared_model/packages/javascript`` in index.js - -Now, to run this example please write: - -.. code-block:: shell - - node index.js diff --git a/example/node/.gitignore b/example/node/.gitignore deleted file mode 100644 index 9607012b57..0000000000 --- a/example/node/.gitignore +++ /dev/null @@ -1,61 +0,0 @@ -# Logs -logs -*.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* - -# Runtime data -pids -*.pid -*.seed -*.pid.lock - -# Directory for instrumented libs generated by jscoverage/JSCover -lib-cov - -# Coverage directory used by tools like istanbul -coverage - -# nyc test coverage -.nyc_output - -# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) -.grunt - -# Bower dependency directory (https://bower.io/) -bower_components - -# node-waf configuration -.lock-wscript - -# Compiled binary addons (http://nodejs.org/api/addons.html) -build/Release - -# Dependency directories -node_modules/ -jspm_packages/ - -# Typescript v1 declaration files -typings/ - -# Optional npm cache directory -.npm - -# Optional eslint cache -.eslintcache - -# Optional REPL history -.node_repl_history - -# Output of 'npm pack' -*.tgz - -# Yarn Integrity file -.yarn-integrity - -# dotenv environment variables file -.env - -# NPM lockfile -package-lock.json diff --git a/example/node/README.md b/example/node/README.md deleted file mode 100644 index ee80968376..0000000000 --- a/example/node/README.md +++ /dev/null @@ -1,30 +0,0 @@ -# NodeJS client library example - -## Prerequisites - -1. Make sure you have running iroha on your machine. You can follow [this](https://hyperledger.github.io/iroha-api/#run-the-daemon-irohad) guide to launch iroha daemon. Please use keys for iroha from *iroha/example* folder, since this example uses keys from there. - -2. If you are a lucky owner of a processor with the x64 architecture, you can install **iroha-lib** from the NPM repository with a simple command: - -```sh -npm install iroha-lib -``` - -In other cases, you need to download the complete Iroha repository (in which you are now), go to the *shared_model/packages/javascript* folder and build the package on your system manually using the instructions from **README.md**. 
-In such case, you need to change the import paths in this example to *shared_model/packages/javascript*. - -## Launch example - -Script `index.js` does the following: -1. Assemble transaction from several commands using tx builder -2. Sign it using keys from iroha/example folder -3. Send it to iroha -4. Wait 5 secs and check transaction's status using its hash -5. Assemble query using query builder -6. Send query to iroha -7. Read query response - -Launch it: -```sh -node index.js -``` diff --git a/example/node/index.js b/example/node/index.js deleted file mode 100644 index 988b414f27..0000000000 --- a/example/node/index.js +++ /dev/null @@ -1,181 +0,0 @@ -/** - * Copyright 2018 HUAWEI. All Rights Reserved. - * - * SPDX-License-Identifier: Apache-2.0 - * - */ - -'use strict' - -function blob2array (blob) { - var bytearray = new Uint8Array(blob.size()) - for (let i = 0; i < blob.size(); ++i) { - bytearray[i] = blob.get(i) - } - return bytearray -} - -var iroha = require('iroha-lib') -var txBuilder = new iroha.ModelTransactionBuilder() -var queryBuilder = new iroha.ModelQueryBuilder() -var crypto = new iroha.ModelCrypto() -var fs = require('fs') -var adminPriv = fs.readFileSync('../admin@test.priv').toString() -var adminPub = fs.readFileSync('../admin@test.pub').toString() - -var keys = crypto.convertFromExisting(adminPub, adminPriv) - -var currentTime = Date.now() -var startQueryCounter = 1 -var creator = 'admin@test' - -// build transaction -var tx = txBuilder - .creatorAccountId(creator) - .createdTime(currentTime) - .createDomain('ru', 'user') - .createAsset('dollar', 'ru', 2) - .build() - -// sign transaction and get its binary representation (Blob) -var txblob = new iroha.ModelProtoTransaction(tx).signAndAddSignature(keys).finish().blob() -var txArray = blob2array(txblob) -// create proto object and send to iroha -var blockTransaction = require('iroha-lib/pb/block_pb.js').Transaction // block_pb2.Transaction() -var protoTx = blockTransaction.deserializeBinary(txArray) -console.log(protoTx.getPayload().getCreatorAccountId()) - -var grpc = require('grpc') -var endpointGrpc = require('iroha-lib/pb/endpoint_grpc_pb.js') -var client = new endpointGrpc.CommandServiceClient( - 'localhost:50051', - grpc.credentials.createInsecure() -) -var txHashBlob = tx.hash().blob() -var txHash = blob2array(txHashBlob) -var p = new Promise((resolve, reject) => { - console.log('Submit transaction...') - client.torii(protoTx, (err, data) => { - if (err) { - reject(err) - } else { - console.log('Submitted transaction successfully') - resolve() - } - }) -}) - -p - .then(() => { - console.log('Sleep 5 seconds...') - return sleep(5000) - }) - .then(() => { - console.log('Send transaction status request...') - return new Promise((resolve, reject) => { - // create status request - var endpointPb = require('iroha-lib/pb/endpoint_pb.js') - var request = new endpointPb.TxStatusRequest() - request.setTxHash(txHash) - client.status(request, (err, response) => { - if (err) { - reject(err) - } else { - let status = response.getTxStatus() - let TxStatus = require('iroha-lib/pb/endpoint_pb.js').TxStatus - let statusName = getProtoEnumName( - TxStatus, - 'iroha.protocol.TxStatus', - status - ) - console.log('Got transaction status: ' + statusName) - if (statusName !== 'COMMITTED') { - reject(new Error("Your transaction wasn't committed")) - } else { - resolve() - } - } - }) - }) - }) - .then(() => { - console.log('Query transaction...') - let query = queryBuilder - .creatorAccountId(creator) - .createdTime(Date.now()) - 
.queryCounter(startQueryCounter) - .getAssetInfo('dollar#ru') - .build() - let queryBlob = new iroha.ModelProtoQuery(query).signAndAddSignature(keys).finish().blob() - let pbQuery = require('iroha-lib/pb/queries_pb.js').Query - let queryArray = blob2array(queryBlob) - let protoQuery = pbQuery.deserializeBinary(queryArray) - let client = new endpointGrpc.QueryServiceClient( - 'localhost:50051', - grpc.credentials.createInsecure() - ) - return new Promise((resolve, reject) => { - client.find(protoQuery, (err, response) => { - if (err) { - reject(err) - } else { - console.log('Submitted transaction successfully') - let type = response.getResponseCase() - let responsePb = require('iroha-lib/pb/responses_pb.js') - let name = getProtoEnumName( - responsePb.QueryResponse.ResponseCase, - 'iroha.protocol.QueryResponse', - type - ) - if (name !== 'ASSET_RESPONSE') { - reject(new Error('Query response error')) - } else { - let asset = response.getAssetResponse().getAsset() - console.log( - 'Asset Id = ' + - asset.getAssetId() + - ' , Precision = ' + - asset.getPrecision() - ) - resolve() - } - } - }) - }) - }) - .then(() => { - console.log('done!') - }) - .catch(err => { - console.log(err) - }) - -function sleep (ms) { - return new Promise(resolve => setTimeout(resolve, ms)) -} - -var protoEnumName = {} -function getProtoEnumName (obj, key, value) { - if (protoEnumName.hasOwnProperty(key)) { - if (protoEnumName[key].length < value) { - return 'unknown' - } else { - return protoEnumName[key][value] - } - } else { - protoEnumName[key] = [] - for (var k in obj) { - let idx = obj[k] - if (isNaN(idx)) { - console.log( - 'getProtoEnumName:wrong enum value, now is type of ' + - typeof idx + - ' should be integer' - ) - } else { - protoEnumName[key][idx] = k - } - } - return getProtoEnumName(obj, key, value) - } -} diff --git a/example/node/package.json b/example/node/package.json deleted file mode 100644 index 16ee00c1d5..0000000000 --- a/example/node/package.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "name": "iroha-example", - "version": "1.0.0", - "description": "Example of how to use Iroha Library with Node.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1", - "node-gyp": "node-gyp" - }, - "repository": "git+https://github.com/hyperledger/iroha.git", - "author": "", - "license": "Apache-2.0", - "bugs": { - "url": "https://github.com/hyperledger/iroha/issues" - }, - "homepage": "https://github.com/hyperledger/iroha#readme", - "dependencies": { - "iroha-lib": "^0.0.1" - }, - "main": "index.js" -} diff --git a/shared_model/bindings/CMakeLists.txt b/shared_model/bindings/CMakeLists.txt index 0983f2a5ef..1c6a8e05e0 100644 --- a/shared_model/bindings/CMakeLists.txt +++ b/shared_model/bindings/CMakeLists.txt @@ -22,7 +22,7 @@ target_link_libraries(bindings -if (SWIG_PYTHON OR SWIG_JAVA OR SWIG_CSHARP OR SWIG_NODE) +if (SWIG_PYTHON OR SWIG_JAVA OR SWIG_CSHARP) find_package(swig REQUIRED) include(${SWIG_USE_FILE}) @@ -95,33 +95,3 @@ if (SWIG_CSHARP) swig_link_libraries(libirohacs bindings) add_custom_target(irohacs DEPENDS ${SWIG_MODULE_libirohacs_REAL_NAME}) endif() - -if (SWIG_NODE) - find_package (nodejs REQUIRED) - - set (V8_VERSION_HEX 0x0${V8_VERSION_MAJOR}${V8_VERSION_MINOR}${V8_VERSION_PATCH}) - string (LENGTH "${V8_VERSION_HEX}" V8_VERSION_HEX_length) - while (V8_VERSION_HEX_length LESS 8) - set (V8_VERSION_HEX "${V8_VERSION_HEX}0") - message (STATUS "INFO - Padded V8 version to match SWIG format") - string (LENGTH "${V8_VERSION_HEX}" V8_VERSION_HEX_length) - endwhile () - - if 
(${CMAKE_SYSTEM_NAME} STREQUAL Darwin) - set(MAC_OPTS "-flat_namespace -undefined suppress") - endif() - - set_property(SOURCE bindings.i PROPERTY SWIG_FLAGS "-node" "-DV8_VERSION=${V8_VERSION_HEX}") - - # Build SWIG library always statically for the subsequent assembly by GYP - myswig_add_library(irohanode - TYPE STATIC - LANGUAGE javascript - SOURCES bindings.i - ) - set_target_properties(irohanode PROPERTIES PREFIX ${CMAKE_STATIC_LIBRARY_PREFIX}) - target_link_libraries(irohanode bindings ${MAC_OPTS}) - target_include_directories(${SWIG_MODULE_irohanode_REAL_NAME} PUBLIC - ${NODEJS_INCLUDE_DIRS} - ) -endif() diff --git a/shared_model/packages/javascript/.gitignore b/shared_model/packages/javascript/.gitignore deleted file mode 100644 index a578af2687..0000000000 --- a/shared_model/packages/javascript/.gitignore +++ /dev/null @@ -1,67 +0,0 @@ -# Logs -logs -*.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* - -# Runtime data -pids -*.pid -*.seed -*.pid.lock - -# Directory for instrumented libs generated by jscoverage/JSCover -lib-cov - -# Coverage directory used by tools like istanbul -coverage - -# nyc test coverage -.nyc_output - -# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) -.grunt - -# Bower dependency directory (https://bower.io/) -bower_components - -# node-waf configuration -.lock-wscript - -# Compiled binary addons (http://nodejs.org/api/addons.html) -build/Release - -# Dependency directories -node_modules/ -jspm_packages/ - -# Typescript v1 declaration files -typings/ - -# Optional npm cache directory -.npm - -# Optional eslint cache -.eslintcache - -# Optional REPL history -.node_repl_history - -# Output of 'npm pack' -*.tgz - -# Yarn Integrity file -.yarn-integrity - -# dotenv environment variables file -.env - -# Build and generated directories -build/* -pb/* -lib/* - -package-lock.json - -!.npmignore diff --git a/shared_model/packages/javascript/LICENSE b/shared_model/packages/javascript/LICENSE deleted file mode 100644 index 40f9c24091..0000000000 --- a/shared_model/packages/javascript/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2018 Soramitsu LLC - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/shared_model/packages/javascript/README.md b/shared_model/packages/javascript/README.md deleted file mode 100644 index 8a5ad2f7cf..0000000000 --- a/shared_model/packages/javascript/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# iroha-js - -Official Iroha JavaScript Library. https://github.com/hyperledger/iroha - -## Usage - -You can use regular Node.js style to import **iroha-lib** package and related protobufs: - -```javascript -const iroha = require('iroha-lib') - -const blockTransaction = require('iroha-lib/pb/block_pb.js').Transaction -const endpointGrpc = require('iroha-lib/pb/endpoint_grpc_pb.js') - -... - -``` - -Watch usage in *example* folder. - -## Build - -You need this section if you want to build **iroha-lib** manually for publishing or if your architecture/OS not supported yet. - -### Prerequisities - -** -WARNING! -If you have already installed SWIG, you MUST install patched version instead using [this patch](https://github.com/swig/swig/pull/968.patch). 
-Or just delete global installed SWIG - Iroha be able to pull and compile it automatically. -** - -In order to build NPM package by `node-gyp` on your machine you need some global installed dependencies: - -1. CMake (>=3.8.2) - -2. Protobuf (>=3.5.1) - -3. Boost (>=1.65.1) - -#### For Mac users - -To build **iroha-lib** on Mac the following dependencies should be installed: - -```sh -brew install node cmake # Common dependencies -brew install autoconf automake ccache # SWIG dependencies -brew install protobuf boost # Iroha dependencies -``` - -### Build process - -1. Clone full Iroha repository - -```sh -git clone -b develop --depth=1 https://github.com/hyperledger/iroha - -``` - -2. Go to the NPM package directory and start build - -```sh -cd iroha/shared_model/packages/javascript -npm install -``` - -That's pretty all. - ---- - - -This NPM package is in deep pre-alfa phase, so if you have any troubles, feel free to create a new issue or contact contributors from *package.json*. diff --git a/shared_model/packages/javascript/binding.gyp b/shared_model/packages/javascript/binding.gyp deleted file mode 100644 index e8b0af2989..0000000000 --- a/shared_model/packages/javascript/binding.gyp +++ /dev/null @@ -1,135 +0,0 @@ -{ - 'variables': { - 'iroha_lib_dir': '../../' - }, - 'targets': [ - { - 'target_name': 'shared_model', - 'type': 'none', - 'actions': [ - { - 'action_name': 'configure', - 'message': 'Generate CMake build configuration for shared_model...', - 'inputs': [ - '<(iroha_lib_dir)/bindings/CMakeLists.txt' - ], - 'outputs': [ - '<(SHARED_INTERMEDIATE_DIR)/bindings/Makefile', - ], - 'action': [ - 'cmake', - '-H<(iroha_lib_dir)', - '-B<(SHARED_INTERMEDIATE_DIR)', - '-DSWIG_NODE=ON', - '-DENABLE_LIBS_PACKAGING=OFF', - '-DCMAKE_POSITION_INDEPENDENT_CODE=ON', - '-DCMAKE_BUILD_TYPE=Release' - ], - }, - { - 'action_name': 'build', - 'message': 'Build shared_model libraries by CMake...', - 'inputs': [ - '<(SHARED_INTERMEDIATE_DIR)/bindings/Makefile', - ], - 'outputs': [ - '<(SHARED_INTERMEDIATE_DIR)/bindings/bindingsJAVASCRIPT_wrap.cxx', - '<(SHARED_INTERMEDIATE_DIR)/bindings/libirohanode.a', - '<(SHARED_INTERMEDIATE_DIR)/bindings/libbindings.a' - ], - 'action': [ - 'cmake', - '--build', '<(SHARED_INTERMEDIATE_DIR)', - '--target', 'irohanode', - '--', - '-j { - console.log('Submit transaction...') - client.torii(protoTx, (err, data) => { - if (err) { - reject(err) - } else { - console.log('Submitted transaction successfully') - resolve() - } - }) -}) - -p - .then(() => { - console.log('Sleep 5 seconds...') - return sleep(5000) - }) - .then(() => { - console.log('Send transaction status request...') - return new Promise((resolve, reject) => { - // create status request - var endpointPb = require('../pb/endpoint_pb.js') - var request = new endpointPb.TxStatusRequest() - request.setTxHash(txHash) - client.status(request, (err, response) => { - if (err) { - reject(err) - } else { - let status = response.getTxStatus() - let TxStatus = require('../pb/endpoint_pb.js').TxStatus - let statusName = getProtoEnumName( - TxStatus, - 'iroha.protocol.TxStatus', - status - ) - console.log('Got transaction status: ' + statusName) - if (statusName !== 'COMMITTED') { - reject(new Error("Your transaction wasn't committed")) - } else { - resolve() - } - } - }) - }) - }) - .then(() => { - console.log('Query transaction...') - let query = queryBuilder - .creatorAccountId(creator) - .createdTime(Date.now()) - .queryCounter(startQueryCounter) - .getAssetInfo('dollar#ru') - .build() - let queryBlob = 
protoQueryHelper.signAndAddSignature(query, keys).blob() - let pbQuery = require('../pb/queries_pb.js').Query - let queryArray = blob2array(queryBlob) - let protoQuery = pbQuery.deserializeBinary(queryArray) - let client = new endpointGrpc.QueryServiceClient( - 'localhost:50051', - grpc.credentials.createInsecure() - ) - return new Promise((resolve, reject) => { - client.find(protoQuery, (err, response) => { - if (err) { - reject(err) - } else { - console.log('Submitted transaction successfully') - let type = response.getResponseCase() - let responsePb = require('../pb/responses_pb.js') - let name = getProtoEnumName( - responsePb.QueryResponse.ResponseCase, - 'iroha.protocol.QueryResponse', - type - ) - if (name !== 'ASSET_RESPONSE') { - reject(new Error('Query response error')) - } else { - let asset = response.getAssetResponse().getAsset() - console.log( - 'Asset Id = ' + - asset.getAssetId() + - ' , Precision = ' + - asset.getPrecision() - ) - resolve() - } - } - }) - }) - }) - .then(() => { - console.log('done!') - }) - .catch(err => { - console.log(err) - }) - -function sleep (ms) { - return new Promise(resolve => setTimeout(resolve, ms)) -} - -var protoEnumName = {} -function getProtoEnumName (obj, key, value) { - if (protoEnumName.hasOwnProperty(key)) { - if (protoEnumName[key].length < value) { - return 'unknown' - } else { - return protoEnumName[key][value] - } - } else { - protoEnumName[key] = [] - for (var k in obj) { - let idx = obj[k] - if (isNaN(idx)) { - console.log( - 'getProtoEnumName:wrong enum value, now is type of ' + - typeof idx + - ' should be integer' - ) - } else { - protoEnumName[key][idx] = k - } - } - return getProtoEnumName(obj, key, value) - } -} diff --git a/shared_model/packages/javascript/index.js b/shared_model/packages/javascript/index.js deleted file mode 100644 index 8121ce6332..0000000000 --- a/shared_model/packages/javascript/index.js +++ /dev/null @@ -1,6 +0,0 @@ -var binary = require('node-pre-gyp') -var path = require('path') -var bindingPath = binary.find(path.resolve(path.join(__dirname, './package.json'))) -var binding = require(bindingPath) - -module.exports = binding diff --git a/shared_model/packages/javascript/package.json b/shared_model/packages/javascript/package.json deleted file mode 100644 index 8776868774..0000000000 --- a/shared_model/packages/javascript/package.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "name": "iroha-lib", - "version": "0.0.4", - "description": "Modern JavaScript library for Iroha. 
https://github.com/hyperledger/iroha", - "main": "index.js", - "scripts": { - "prepare": "sh scripts/generate-protobuf.sh", - "prepublishOnly": "npm ls", - "install": "node-pre-gyp install --fallback-to-build", - "test": "tape tests/**/*.js | tap-dot", - "build": "node-pre-gyp build", - "rebuild": "node-pre-gyp rebuild", - "build-and-test": "npm install --build-from-source=iroha-lib && npm run test" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/hyperledger/iroha-javascript.git" - }, - "keywords": [ - "iroha", - "iroha-js", - "irohajs", - "iroha-node", - "iroha-nodejs", - "iroha-javascript", - "iroha-client", - "iroha-lib" - ], - "contributors": [ - "Arseniy Fokin (https://github.com/stinger112/)", - "Viacheslav Bikbaev (https://github.com/lasintez/)" - ], - "license": "Apache-2.0", - "bugs": { - "url": "https://github.com/hyperledger/iroha/issues" - }, - "homepage": "https://github.com/hyperledger/iroha#readme", - "os": [ - "darwin", - "linux" - ], - "cpu": [ - "x64" - ], - "dependencies": { - "google-protobuf": "^3.5.0", - "grpc": "^1.9.1", - "node-pre-gyp": "^0.6.39", - "npm": "^5.8.0", - "run": "^1.4.0", - "test": "^0.6.0" - }, - "devDependencies": { - "grpc-tools": "^1.6.6", - "node-gyp": "^3.6.2", - "node-pre-gyp-github": "^1.3.1", - "tap-dot": "^1.0.5", - "tape": "^4.9.0" - }, - "bundledDependencies": [ - "node-pre-gyp" - ], - "directories": { - "example": "example", - "lib": "lib" - }, - "binary": { - "module_name": "iroha_lib", - "module_path": "lib/{node_abi}-{platform}-{arch}-{libc}/", - "host": "https://github.com/hyperledger/iroha-javascript/releases/download/", - "remote_path": "{version}", - "package_name": "{node_abi}-{platform}-{arch}-{libc}.tar.gz" - } -} diff --git a/shared_model/packages/javascript/pb/.npmignore b/shared_model/packages/javascript/pb/.npmignore deleted file mode 100644 index 89771d0e7a..0000000000 --- a/shared_model/packages/javascript/pb/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -# Add all generated content into NPM package -!* diff --git a/shared_model/packages/javascript/scripts/clean.sh b/shared_model/packages/javascript/scripts/clean.sh deleted file mode 100755 index b53ed466cf..0000000000 --- a/shared_model/packages/javascript/scripts/clean.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -rm -rf build -rm -rf lib -rm -rf pb/*.js diff --git a/shared_model/packages/javascript/scripts/generate-protobuf.sh b/shared_model/packages/javascript/scripts/generate-protobuf.sh deleted file mode 100755 index bdbada99e5..0000000000 --- a/shared_model/packages/javascript/scripts/generate-protobuf.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env bash - -CURDIR="$(cd "$(dirname "$0")"; pwd)" -IROHA_HOME="$(dirname $(dirname $(dirname $(dirname "${CURDIR}"))))" - -# Check if we inside Iroha repository -if [ -d "$IROHA_HOME/schema" ]; then - echo ------------------------- - echo "Generating Protobuf JS files..." - echo "IROHA_HOME: $IROHA_HOME" - - ./node_modules/.bin/grpc_tools_node_protoc --proto_path=$IROHA_HOME/schema \ - --plugin=protoc-gen-grpc=./node_modules/grpc-tools/bin/grpc_node_plugin \ - --js_out=import_style=commonjs,binary:./pb \ - --grpc_out=./pb \ - endpoint.proto yac.proto ordering.proto loader.proto block.proto primitive.proto commands.proto queries.proto responses.proto - - echo "Success!" 
- echo ------------------------- -fi diff --git a/shared_model/packages/javascript/scripts/install-dependencies.sh b/shared_model/packages/javascript/scripts/install-dependencies.sh deleted file mode 100755 index f66543dc17..0000000000 --- a/shared_model/packages/javascript/scripts/install-dependencies.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env bash -# This script build and install source-based dependencies. -# By default it builds static versions of used libs, so it CAN BE used for NPM package publishing. - -# CMake 3.7 -git clone https://gitlab.kitware.com/cmake/cmake.git /tmp/cmake; \ - (cd /tmp/cmake ; git checkout 64130a7e793483e24c1d68bdd234f81d5edb2d51); \ - (cd /tmp/cmake ; /tmp/cmake/bootstrap --parallel="$(getconf _NPROCESSORS_ONLN)" --enable-ccache); \ - make -j"$(getconf _NPROCESSORS_ONLN)" -C /tmp/cmake; \ - make -C /tmp/cmake install; \ - rm -rf /tmp/cmake - -# Boost (static) -git clone https://github.com/boostorg/boost /tmp/boost; \ - (cd /tmp/boost ; git checkout 436ad1dfcfc7e0246141beddd11c8a4e9c10b146); \ - (cd /tmp/boost ; git submodule init); \ - (cd /tmp/boost ; git submodule update --recursive -j "$(getconf _NPROCESSORS_ONLN)"; \ - (cd /tmp/boost ; /tmp/boost/bootstrap.sh --with-libraries=thread,system,filesystem); \ - (cd /tmp/boost ; /tmp/boost/b2 headers); \ - (cd /tmp/boost ; sudo /tmp/boost/b2 link=static cxxflags="-std=c++14" -j "$(getconf _NPROCESSORS_ONLN)" install); \ - rm -rf /tmp/boost - -# Protobuf (static) v3.5.1 -git clone https://github.com/google/protobuf -cd protobuf -git checkout 106ffc04be1abf3ff3399f54ccf149815b287dd9 -cmake -Hcmake/ -Bbuild -Dprotobuf_BUILD_TESTS=OFF -Dprotobuf_BUILD_SHARED_LIBS=OFF -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_BUILD_TYPE=Release -sudo cmake --build build/ --target install -- -j"$(getconf _NPROCESSORS_ONLN)" diff --git a/shared_model/packages/javascript/tests/crypto.js b/shared_model/packages/javascript/tests/crypto.js deleted file mode 100644 index 377e94525c..0000000000 --- a/shared_model/packages/javascript/tests/crypto.js +++ /dev/null @@ -1,32 +0,0 @@ -var test = require('tape') -var iroha = require('../index') - -const publicKey = '407e57f50ca48969b08ba948171bb2435e035d82cec417e18e4a38f5fb113f83' -const privateKey = '1d7e0a32ee0affeb4d22acd73c2c6fb6bd58e266c8c2ce4fa0ffe3dd6a253ffb' -const randomKey = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' -const incorrectInputLength = 'aaaaaaaaaaaaaaaa' -const incorrectInputShorter = 'a' -const incorrectInputLonger = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' - -test('ModelCrypto tests', function (t) { - t.plan(9) - - let crypto = new iroha.ModelCrypto() - - t.throws(() => crypto.convertFromExisting(randomKey, randomKey), /Provided keypair is not correct/, 'Should throw "Provided keypair is not correct"') - t.throws(() => crypto.convertFromExisting(incorrectInputLength, incorrectInputLength), /input string has incorrect length/, 'Should throw "input string has incorrect length"') - - let keypair = crypto.convertFromExisting(publicKey, privateKey) - t.equal(keypair.publicKey().hex(), publicKey, 'Should be the same as public key was passed to convertFromExisting') - t.equal(keypair.privateKey().hex(), privateKey, 'Should be the same as private key was passed to convertFromExisting') - - let newKeypair = crypto.generateKeypair() - t.equal(newKeypair.publicKey().hex().length, publicKey.length, 'Size of generated public key should be the same as size of predefined public key') - 
t.equal(newKeypair.privateKey().hex().length, privateKey.length, 'Size of generated private key should be the same as size of predefined private key') - - t.throws(() => crypto.fromPrivateKey(incorrectInputShorter), /input string has incorrect length/, 'Should throw "input string has incorrect length" for keys shorter then 64') - t.throws(() => crypto.fromPrivateKey(incorrectInputLonger), /input string has incorrect length/, 'Should throw "input string has incorrect length" for keys longer then 64') - t.equal(crypto.fromPrivateKey(privateKey).publicKey().hex(), publicKey) - - t.end() -}) diff --git a/shared_model/packages/javascript/tests/queryBuilder.js b/shared_model/packages/javascript/tests/queryBuilder.js deleted file mode 100644 index 5f1107c050..0000000000 --- a/shared_model/packages/javascript/tests/queryBuilder.js +++ /dev/null @@ -1,106 +0,0 @@ -var test = require('tape') -var iroha = require('../index') - -const accountId = 'admin@test' -const assetId = 'coin#test' - -test('ModelQueryBuilder tests', function (t) { - t.plan(48) - - let queryBuilder = new iroha.ModelQueryBuilder() - const time = (new Date()).getTime() - - // Tests for concrete query - t.comment('Basic QueryBuilder tests') - t.throws(() => queryBuilder.build(), /Missing concrete query/, 'Should throw Missing concrete query') - t.throws(() => queryBuilder.creatorAccountId(accountId).build(), /Missing concrete query/, 'Should throw Missing concrete query') - t.throws(() => queryBuilder.creatorAccountId(accountId).createdTime(time).build(), /Missing concrete query/, 'Should throw Missing concrete query') - t.throws(() => queryBuilder.creatorAccountId(accountId).createdTime(time).queryCounter(1).build(), /Missing concrete query/, 'Should throw Missing concrete query') - t.throws(() => queryBuilder.creatorAccountId('').createdTime(time).queryCounter(1).getAccount(accountId).build(), /Wrongly formed creator_account_id, passed value: ''/, 'Should throw Wrongly formed creator_account_id') - t.throws(() => queryBuilder.creatorAccountId(accountId).createdTime(0).queryCounter(1).getAccount(accountId).build(), /bad timestamp: too old, timestamp: 0, now:/, 'Should throw bad timestamp: too old') - t.throws(() => queryBuilder.creatorAccountId(accountId).createdTime(time).queryCounter(0).getAccount(accountId).build(), /Counter should be > 0, passed value: 0/, 'Should throw Counter should be > 0') - - // Query with valid queryCounter, creatorAccountId and createdTime - let correctQuery = queryBuilder.creatorAccountId(accountId).createdTime(time).queryCounter(1) - - // getAccount() tests - t.comment('Testing getAccount()') - t.throws(() => correctQuery.getAccount(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctQuery.getAccount('').build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id,') - t.throws(() => correctQuery.getAccount('@@@').build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id,') - t.doesNotThrow(() => correctQuery.getAccount(accountId).build(), null, 'Should not throw any exceptions') - - // getSignatories() tests - t.comment('Testing getSignatories()') - t.throws(() => correctQuery.getSignatories(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctQuery.getSignatories('').build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id,') - t.throws(() => 
correctQuery.getSignatories('@@@').build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id,') - t.doesNotThrow(() => correctQuery.getSignatories(accountId).build(), null, 'Should not throw any exceptions') - - // getAccountTransactions() tests - t.comment('Testing getAccountTransactions()') - t.throws(() => correctQuery.getAccountTransactions(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctQuery.getAccountTransactions('').build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id,') - t.throws(() => correctQuery.getAccountTransactions('@@@').build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id,') - t.doesNotThrow(() => correctQuery.getAccountTransactions(accountId).build(), null, 'Should not throw any exceptions') - - // getAccountAssetTransactions() tests - t.comment('Testing getAccountAssetTransactions()') - t.throws(() => correctQuery.getAccountAssetTransactions(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctQuery.getAccountAssetTransactions(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctQuery.getAccountAssetTransactions('', assetId).build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id,') - t.throws(() => correctQuery.getAccountAssetTransactions('@@@', assetId).build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id,') - t.throws(() => correctQuery.getAccountAssetTransactions(accountId, '').build(), /Wrongly formed asset_id, passed value: ''/, 'Should throw Wrongly formed asset_id,') - t.throws(() => correctQuery.getAccountAssetTransactions(accountId, '@@@').build(), /Wrongly formed asset_id, passed value: '@@@'/, 'Should throw Wrongly formed asset_id,') - t.doesNotThrow(() => correctQuery.getAccountAssetTransactions(accountId, assetId).build(), null, 'Should not throw any exceptions') - - // getAccountAssets() tests - t.comment('Testing getAccountAssets()') - t.throws(() => correctQuery.getAccountAssets(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctQuery.getAccountAssets('').build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id,') - t.throws(() => correctQuery.getAccountAssets('@@@').build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id,') - t.doesNotThrow(() => correctQuery.getAccountAssets(accountId).build(), null, 'Should not throw any exceptions') - - // getRoles() tests - t.comment('Testing getRoles()') - t.doesNotThrow(() => correctQuery.getRoles().build(), null, 'Should not throw any exceptions') - - // getAssetInfo() tests - t.comment('Testing getAssetInfo()') - t.throws(() => correctQuery.getAssetInfo(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctQuery.getAssetInfo('').build(), /Wrongly formed asset_id, passed value: ''/, 'Should throw Wrongly formed asset_id,') - t.throws(() => correctQuery.getAssetInfo('@@@').build(), /Wrongly formed asset_id, passed value: '@@@'/, 'Should throw Wrongly formed asset_id,') - t.doesNotThrow(() => correctQuery.getAssetInfo(assetId).build(), null, 'Should not throw any exceptions') - - // getRolePermissions() tests - t.comment('Testing 
getRolePermissions()') - t.throws(() => correctQuery.getRolePermissions(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctQuery.getRolePermissions('').build(), /Wrongly formed role_id, passed value: ''/, 'Should throw Wrongly formed role_id,') - t.throws(() => correctQuery.getRolePermissions('@@@').build(), /Wrongly formed role_id, passed value: '@@@'/, 'Should throw Wrongly formed role_id,') - t.doesNotThrow(() => correctQuery.getRolePermissions('role').build(), null, 'Should not throw any exceptions') - - // getTransactions() tests - t.comment('Testing getTransactions()') - t.throws(() => correctQuery.getTransactions(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctQuery.getTransactions(''), /argument 2 of type 'std::vector< shared_model::crypto::Hash >/, 'Should throw ...argument 2 of type...') - - let hv = new iroha.HashVector() - hv.add(new iroha.Hash('11111111111111111111111111111111')) - hv.add(new iroha.Hash('22222222222222222222222222222222')) - let invalidHv = new iroha.HashVector() - invalidHv.add(new iroha.Hash('')) - let emptyHv = new iroha.HashVector() - - t.throws(() => correctQuery.getTransactions(emptyHv).build(), /tx_hashes cannot be empty/, 'Should throw tx_hashes cannot be empty') - t.throws(() => correctQuery.getTransactions(invalidHv).build(), /Hash has invalid size: 0/, 'Should throw Hash has invalid size') - t.doesNotThrow(() => correctQuery.getTransactions(hv).build(), null, 'Should not throw any exceptions') - - // getAccountDetail() tests - t.comment('Testing getAccountDetail()') - t.throws(() => correctQuery.getAccountDetail(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctQuery.getAccountDetail('').build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id,') - t.throws(() => correctQuery.getAccountDetail('@@@').build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id,') - t.doesNotThrow(() => correctQuery.getAccountDetail(accountId).build(), null, 'Should not throw any exceptions') - - t.end() -}) diff --git a/shared_model/packages/javascript/tests/txbuilder.js b/shared_model/packages/javascript/tests/txbuilder.js deleted file mode 100644 index 5642f00d53..0000000000 --- a/shared_model/packages/javascript/tests/txbuilder.js +++ /dev/null @@ -1,215 +0,0 @@ -var test = require('tape') -var iroha = require('../index') - -const publicKey = '407e57f50ca48969b08ba948171bb2435e035d82cec417e18e4a38f5fb113f83' -const privateKey = '1d7e0a32ee0affeb4d22acd73c2c6fb6bd58e266c8c2ce4fa0ffe3dd6a253ffb' - -const adminAccountId = 'admin@test' -const assetId = 'coin#test' -const testAccountId = 'test@test' - -test('ModelTransactionBuilder tests', function (t) { - t.plan(127) - - let crypto = new iroha.ModelCrypto() - let keypair = crypto.convertFromExisting(publicKey, privateKey) - - let txBuilder = new iroha.ModelTransactionBuilder() - const time = (new Date()).getTime() - const futureTime = 2400000000000 - const address = '0.0.0.0:50051' - - t.comment('Basic TransactionBuilder tests') - - t.throws(() => txBuilder.build(), /Transaction should contain at least one command(.*)Wrongly formed creator_account_id, passed value: ''(.*)bad timestamp: too old/, 'Should throw exception 0 commands in transaction, wrong creator_account_id, timestamp') - t.throws(() => txBuilder.creatorAccountId(adminAccountId).build(), /Transaction 
should contain at least one command(.*)bad timestamp: too old/, 'Should throw exception about zero commands in transaction, wrong timestamp') - t.throws(() => txBuilder.creatorAccountId(adminAccountId).createdTime(0).build(), /Transaction should contain at least one command(.*)bad timestamp: too old/, 'Should throw 0 commands + bad timestamp: too old') - t.throws(() => txBuilder.creatorAccountId(adminAccountId).createdTime(time).build(), /Transaction should contain at least one command/, 'Should throw 0 commands') - t.throws(() => txBuilder.creatorAccountId('').createdTime(time).build(), /Transaction should contain at least one command(.*)Wrongly formed creator_account_id, passed value: ''/, 'Should throw 0 commands + Wrongly formed creator_account_id') - t.throws(() => txBuilder.creatorAccountId('@@@').createdTime(time).build(), /Transaction should contain at least one command(.*)Wrongly formed creator_account_id, passed value: '@@@'/, 'Should throw 0 commands + Wrongly formed creator_account_id') - t.throws(() => txBuilder.creatorAccountId(adminAccountId).createdTime(futureTime).build(), /Transaction should contain at least one command(.*)bad timestamp: sent from future/, 'Should throw exception about zero commands in transaction, Sent from future') - t.throws(() => txBuilder.creatorAccountId(adminAccountId).createdTime(time).build(), /Transaction should contain at least one command/, 'Should throw exception about zero commands in transaction') - t.throws(() => txBuilder.quorum(0).build(), /(.*)Quorum should be within range \(0, 128\](.*)/, 'Should throw exception about zero quorum') - - // Transaction with valid creatorAccountId and createdTime - let correctTx = txBuilder.creatorAccountId(adminAccountId).createdTime(time) - - // addAssetQuantity() tests - t.comment('Testing addAssetQuantity()') - t.throws(() => correctTx.addAssetQuantity(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.addAssetQuantity(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.addAssetQuantity('', '').build(), /AddAssetQuantity: \[\[Wrongly formed asset_id, passed value: ''(.*)Amount must be greater than 0, passed value: 0 \]\]/, 'Should throw wrongly formed account_id, asset_id, Amount must be greater than 0') - t.throws(() => correctTx.addAssetQuantity(assetId, '0').build(), /AddAssetQuantity: \[\[Amount must be greater than 0, passed value: 0 \]\]/, 'Should throw Amount must be greater than 0') - t.throws(() => correctTx.addAssetQuantity('', '1000').build(), /Wrongly formed asset_id, passed value: ''/, 'Should throw Wrongly formed asset_id') - t.throws(() => correctTx.addAssetQuantity('###', '1000').build(), /Wrongly formed asset_id, passed value: '###'/, 'Should throw Wrongly formed asset_id') - t.doesNotThrow(() => correctTx.addAssetQuantity(assetId, '1000').build(), null, 'Should not throw any exceptions') - - // addPeer() tests - t.comment('Testing addPeer()') - t.throws(() => correctTx.addPeer(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.addPeer(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.addPeer('', keypair.publicKey()).build(), /Wrongly formed peer address/, 'Should throw exception about wrongly formed peer address') - t.throws(() => correctTx.addPeer(address, '').build(), /argument 3 of type 'shared_model::crypto::PublicKey const &'/, 'Should 
throw ...argument 3 of type...') - t.doesNotThrow(() => correctTx.addPeer(address, keypair.publicKey()).build(), null, 'Should not throw any exceptions') - - // addSignatory() tests - t.comment('Testing addSignatory()') - t.throws(() => correctTx.addSignatory(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.addSignatory(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.addSignatory('', keypair.publicKey()).build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.addSignatory('@@@', keypair.publicKey()).build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.addSignatory(adminAccountId, '').build(), /argument 3 of type 'shared_model::crypto::PublicKey const &'/, 'Should throw ...argument 3 of type...') - t.doesNotThrow(() => correctTx.addSignatory(adminAccountId, keypair.publicKey()).build(), null, 'Should not throw any exceptions') - - // removeSignatory() tests - t.comment('Testing removeSignatory()') - t.throws(() => correctTx.removeSignatory(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.removeSignatory(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.removeSignatory('', keypair.publicKey()).build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.removeSignatory('@@@', keypair.publicKey()).build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.removeSignatory(adminAccountId, '').build(), /argument 3 of type 'shared_model::crypto::PublicKey const &'/, 'Should throw ...argument 3 of type...') - t.doesNotThrow(() => correctTx.removeSignatory(adminAccountId, keypair.publicKey()).build(), null, 'Should not throw any exceptions') - - // appendRole() tests - t.comment('Testing appendRole()') - t.throws(() => correctTx.appendRole(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.appendRole(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.appendRole('', 'new_user_role').build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.appendRole('@@@', 'new_user_role').build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.appendRole(adminAccountId, '').build(), /Wrongly formed role_id, passed value: ''/, 'Should throw Wrongly formed role_id') - t.throws(() => correctTx.appendRole(adminAccountId, '@@@').build(), /Wrongly formed role_id, passed value: '@@@'/, 'Should throw Wrongly formed role_id') - t.doesNotThrow(() => correctTx.appendRole(adminAccountId, 'new_user_role').build(), null, 'Should not throw any exceptions') - - // createAsset() tests - t.comment('Testing createAsset()') - t.throws(() => correctTx.createAsset(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.createAsset(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.createAsset('', ''), /Error: Illegal number of arguments/, 
'Should throw Illegal number of arguments') - t.throws(() => correctTx.createAsset('', 'domain', 2).build(), /Wrongly formed asset_name, passed value: ''/, 'Should throw Wrongly formed asset_name') - t.throws(() => correctTx.createAsset('$$$', 'domain', 2).build(), /Wrongly formed asset_name, passed value: '\$\$\$'/, 'Should throw Wrongly formed asset_name') - t.throws(() => correctTx.createAsset('coin', '', 2).build(), /Wrongly formed domain_id, passed value: ''/, 'Should throw Wrongly formed domain_id') - t.throws(() => correctTx.createAsset('coin', '$$$', 2).build(), /Wrongly formed domain_id, passed value: '\$\$\$'/, 'Should throw Wrongly formed domain_id') - t.throws(() => correctTx.createAsset('coin', 'domain', -10).build(), /argument 4 of type 'shared_model::interface::types::PrecisionType'/, 'Should throw ...argument 4 of type...') - // t.throws(() => correctTx.createAsset('coin', 'domain', 1.2).build(), /argument 4 of type 'shared_model::interface::types::PrecisionType'/, 'Should throw ...argument 4 of type...') - t.doesNotThrow(() => correctTx.createAsset('coin', 'domain', 2).build(), null, 'Should not throw any exceptions') - - // createAccount() tests - t.comment('Testing createAccount()') - t.throws(() => correctTx.createAccount(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.createAccount(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.createAccount('', ''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.createAccount('', 'domain', keypair.publicKey()).build(), /Wrongly formed account_name, passed value: ''/, 'Should throw Wrongly formed asset_name') - t.throws(() => correctTx.createAccount('$$$', 'domain', keypair.publicKey()).build(), /Wrongly formed account_name, passed value: '\$\$\$'/, 'Should throw Wrongly formed asset_name') - t.throws(() => correctTx.createAccount('admin', '', keypair.publicKey()).build(), /Wrongly formed domain_id, passed value: ''/, 'Should throw Wrongly formed domain_id') - t.throws(() => correctTx.createAccount('admin', '$$$', keypair.publicKey()).build(), /Wrongly formed domain_id, passed value: '\$\$\$'/, 'Should throw Wrongly formed domain_id') - t.throws(() => correctTx.createAccount('admin', 'domain', 'aaa'), /argument 4 of type 'shared_model::crypto::PublicKey/, 'Should throw ...argument 4 of type...') - t.doesNotThrow(() => correctTx.createAccount('admin', 'domain', keypair.publicKey()).build(), null, 'Should not throw any exceptions') - - // createDomain() tests - t.comment('Testing createDomain()') - t.throws(() => correctTx.createDomain(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.createDomain(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.createDomain('', 'new_user_role').build(), /Wrongly formed domain_id, passed value: ''/, 'Should throw Wrongly formed domain_id') - t.throws(() => correctTx.createDomain('$$$', 'new_user_role').build(), /Wrongly formed domain_id, passed value: '\$\$\$'/, 'Should throw Wrongly formed domain_id') - t.throws(() => correctTx.createDomain('domain', '').build(), /Wrongly formed role_id, passed value: ''/, 'Should throw Wrongly formed role_id') - t.throws(() => correctTx.createDomain('domain', '@@@').build(), /Wrongly formed role_id, passed value: '@@@'/, 'Should throw Wrongly formed 
role_id') - t.doesNotThrow(() => correctTx.createDomain('domain', 'new_user_role').build(), null, 'Should not throw any exceptions') - - // createRole() tests - t.comment('Testing createRole()') - t.throws(() => correctTx.createRole(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.createRole(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - - let emptyPerm = new iroha.RolePermissionSet() - let validPermissions = new iroha.RolePermissionSet() - validPermissions.set(iroha.Role_kAddPeer) - validPermissions.set(iroha.Role_kAddAssetQty) - - t.doesNotThrow(() => correctTx.createRole('new_user_role', emptyPerm).build(), null, 'Should not throw any exceptions') - t.throws(() => correctTx.createRole('', validPermissions).build(), /Wrongly formed role_id, passed value: ''/, 'Should throw Wrongly formed role_id') - t.throws(() => correctTx.createRole('@@@', validPermissions).build(), /Wrongly formed role_id, passed value: '@@@'/, 'Should throw Wrongly formed role_id') - t.throws(() => correctTx.createRole('new_user_role', '').build(), /argument 3 of type 'shared_model::interface::RolePermissionSet/, 'Should throw ...argument 3 of type...') - t.doesNotThrow(() => correctTx.createRole('new_user_role', validPermissions).build(), null, 'Should not throw any exceptions') - - // detachRole() tests - t.comment('Testing detachRole()') - t.throws(() => correctTx.detachRole(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.detachRole(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.detachRole('', 'new_user_role').build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.detachRole('@@@', 'new_user_role').build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.detachRole(adminAccountId, '').build(), /Wrongly formed role_id, passed value: ''/, 'Should throw Wrongly formed role_id') - t.throws(() => correctTx.detachRole(adminAccountId, '@@@').build(), /Wrongly formed role_id, passed value: '@@@'/, 'Should throw Wrongly formed role_id') - t.doesNotThrow(() => correctTx.detachRole(adminAccountId, 'new_user_role').build(), null, 'Should not throw any exceptions') - - // grantPermission() tests - t.comment('Testing grantPermission()') - t.throws(() => correctTx.grantPermission(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.grantPermission(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.grantPermission('', iroha.Grantable_kSetMyQuorum).build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.grantPermission('@@@', iroha.Grantable_kSetMyQuorum).build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.grantPermission(adminAccountId, '').build(), /argument 3 of type 'shared_model::interface::permissions::Grantable/, 'Should throw ...argument 3 of type...') - t.doesNotThrow(() => correctTx.grantPermission(adminAccountId, iroha.Grantable_kSetMyQuorum).build(), null, 'Should not throw any exceptions') - - // revokePermission() tests - t.comment('Testing revokePermission()') - t.throws(() => 
correctTx.revokePermission(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.revokePermission(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.revokePermission('', iroha.Grantable_kSetMyQuorum).build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.revokePermission('@@@', iroha.Grantable_kSetMyQuorum).build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.revokePermission(adminAccountId, '').build(), /argument 3 of type 'shared_model::interface::permissions::Grantable/, 'Should throw ...argument 3 of type...') - t.doesNotThrow(() => correctTx.revokePermission(adminAccountId, iroha.Grantable_kSetMyQuorum).build(), null, 'Should not throw any exceptions') - - // setAccountDetail() tests - t.comment('Testing setAccountDetail()') - t.throws(() => correctTx.setAccountDetail(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.setAccountDetail(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.setAccountDetail('', ''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.setAccountDetail('', 'key', 'value').build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.setAccountDetail('@@@', 'key', 'value').build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.setAccountDetail(adminAccountId, '', 'value').build(), /Wrongly formed key, passed value: ''/, 'Should throw Wrongly formed key') - t.throws(() => correctTx.setAccountDetail(adminAccountId, '@@@', 'value').build(), /Wrongly formed key, passed value: '@@@'/, 'Should throw Wrongly formed key') - t.doesNotThrow(() => correctTx.setAccountDetail(adminAccountId, 'key', 'value').build(), null, 'Should not throw any exceptions') - - // setAccountQuorum() tests - t.comment('Testing setAccountQuorum()') - t.throws(() => correctTx.setAccountQuorum(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.setAccountQuorum(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.setAccountQuorum('', 10).build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.setAccountQuorum('@@@', 10).build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id') - t.throws(() => correctTx.setAccountQuorum(adminAccountId, 'kek').build(), /argument 3 of type 'shared_model::interface::types::QuorumType'/, 'Should throw ...argument 3 of type...') - t.throws(() => correctTx.setAccountQuorum(adminAccountId, 0).build(), /Quorum should be within range \(0, 128\]/, 'Should throw Quorum should be within range (0, 128]') - t.throws(() => correctTx.setAccountQuorum(adminAccountId, 200).build(), /Quorum should be within range \(0, 128\]/, 'Should throw Quorum should be within range (0, 128]') - t.doesNotThrow(() => correctTx.setAccountQuorum(adminAccountId, 10).build(), null, 'Should not throw any exceptions') - - // subtractAssetQuantity() tests - t.comment('Testing subtractAssetQuantity()') 
- t.throws(() => correctTx.subtractAssetQuantity(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.subtractAssetQuantity(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.subtractAssetQuantity('', '').build(), /SubtractAssetQuantity: \[\[Wrongly formed asset_id, passed value: ''(.*)Amount must be greater than 0, passed value: 0 \]\]/, 'Should throw wrongly formed account_id, asset_id, Amount must be greater than 0') - t.throws(() => correctTx.subtractAssetQuantity(assetId, '0').build(), /SubtractAssetQuantity: \[\[Amount must be greater than 0, passed value: 0 \]\]/, 'Should throw Amount must be greater than 0') - // TODO: MAYBE Throw an exception on real amount - // t.throws(() => correctTx.subtractAssetQuantity(assetId, '0.123').build(), /SubtractAssetQuantity: \[\[Amount must be integer, passed value: 0.123 \]\]/, 'Should throw Amount must be integer') - t.throws(() => correctTx.subtractAssetQuantity('', '1000').build(), /Wrongly formed asset_id, passed value: ''/, 'Should throw Wrongly formed asset_id') - t.throws(() => correctTx.subtractAssetQuantity('###', '1000').build(), /Wrongly formed asset_id, passed value: '###'/, 'Should throw Wrongly formed asset_id') - t.doesNotThrow(() => correctTx.subtractAssetQuantity(assetId, '1000').build(), null, 'Should not throw any exceptions') - - // transferAsset() tests - t.comment('Testing transferAsset()') - t.throws(() => correctTx.transferAsset(), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.transferAsset(''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.transferAsset('', ''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.transferAsset('', '', ''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.transferAsset('', '', '', ''), /Error: Illegal number of arguments/, 'Should throw Illegal number of arguments') - t.throws(() => correctTx.transferAsset('', testAccountId, assetId, 'some message', '100').build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id,') - t.throws(() => correctTx.transferAsset('@@@', testAccountId, assetId, 'some message', '100').build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id,') - t.throws(() => correctTx.transferAsset(adminAccountId, '', assetId, 'some message', '100').build(), /Wrongly formed account_id, passed value: ''/, 'Should throw Wrongly formed account_id,') - t.throws(() => correctTx.transferAsset(adminAccountId, '@@@', assetId, 'some message', '100').build(), /Wrongly formed account_id, passed value: '@@@'/, 'Should throw Wrongly formed account_id,') - t.throws(() => correctTx.transferAsset(adminAccountId, testAccountId, '', 'some message', '100').build(), /Wrongly formed asset_id, passed value: ''/, 'Should throw Wrongly formed asset_id,') - t.throws(() => correctTx.transferAsset(adminAccountId, testAccountId, '@@@', 'some message', '100').build(), /Wrongly formed asset_id, passed value: '@@@'/, 'Should throw Wrongly formed asset_id,') - t.throws(() => correctTx.transferAsset(adminAccountId, testAccountId, assetId, 'some mesage', '0').build(), /TransferAsset: \[\[Amount must be greater than 0, passed value: 0 \]\]/, 'Should throw Amount must be greater 
than 0') - // TODO: MAYBE Throw an exception on real amount - // t.throws(() => correctTx.transferAsset(adminAccountId, testAccountId, assetId, 'some mesage', '0.123').build(), /TransferAsset: \[\[Amount must be integer, passed value: 0.123 \]\]/, 'Should throw Amount must be integer') - t.doesNotThrow(() => correctTx.transferAsset(adminAccountId, testAccountId, assetId, 'some mesage', '100').build(), null, 'Should not throw any exceptions') - - t.end() -}) diff --git a/test/module/shared_model/bindings/CMakeLists.txt b/test/module/shared_model/bindings/CMakeLists.txt index d247e7d73d..666da9e81d 100644 --- a/test/module/shared_model/bindings/CMakeLists.txt +++ b/test/module/shared_model/bindings/CMakeLists.txt @@ -102,10 +102,3 @@ if (SWIG_JAVA) set_tests_properties(java_builders_test PROPERTIES DEPENDS builders) endif() - -if (SWIG_NODE) - find_package (nodejs REQUIRED) - add_test(NAME javascript_tests - COMMAND npm run build-and-test - WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/shared_model/packages/javascript) -endif() From 2feefcc6baa3366c2d63d0dae687245fd0053cb3 Mon Sep 17 00:00:00 2001 From: Bulat Saifullin Date: Thu, 31 Jan 2019 10:12:39 +0300 Subject: [PATCH 25/41] IR-255: Fix new pipeline (#2063) * fix docker problems Signed-off-by: Bulat Saifullin * fix docker build Signed-off-by: Bulat Saifullin * typo Signed-off-by: Bulat Saifullin * typo Signed-off-by: Bulat Saifullin * refactoring Signed-off-by: Bulat Saifullin * fix pr Signed-off-by: Bulat Saifullin --- .../builders/x64-linux-build-steps.groovy | 34 +++++++++------ .../utils/docker-pull-or-build.groovy | 37 +++++++---------- .jenkinsci/docker-pull-or-build.groovy | 41 +++++++------------ 3 files changed, 50 insertions(+), 62 deletions(-) diff --git a/.jenkinsci-new/builders/x64-linux-build-steps.groovy b/.jenkinsci-new/builders/x64-linux-build-steps.groovy index 633a27a5c9..40fe5d085b 100644 --- a/.jenkinsci-new/builders/x64-linux-build-steps.groovy +++ b/.jenkinsci-new/builders/x64-linux-build-steps.groovy @@ -61,7 +61,19 @@ def buildSteps(int parallelism, List compilerVersions, String build_type, boolea if (sanitize){ cmakeOptions += " -DSANITIZE='address;leak' " } - sh "docker network create ${env.IROHA_NETWORK}" + + // Create postgres + // enable prepared transactions so that 2 phase commit works + // we set it to 100 as a safe value + sh """#!/bin/bash -xe + if [ ! 
"\$(docker ps -q -f name=${env.IROHA_POSTGRES_HOST})" ]; then + docker network create ${env.IROHA_NETWORK} + docker run -td -e POSTGRES_USER=${env.IROHA_POSTGRES_USER} \ + -e POSTGRES_PASSWORD=${env.IROHA_POSTGRES_PASSWORD} --name ${env.IROHA_POSTGRES_HOST} \ + --network=${env.IROHA_NETWORK} postgres:9.5 -c 'max_prepared_transactions=100' + fi + """ + iC = dockerUtils.dockerPullOrBuild("${platform}-develop-build", "${env.GIT_RAW_BASE_URL}/${scmVars.GIT_COMMIT}/docker/develop/Dockerfile", "${env.GIT_RAW_BASE_URL}/${utils.previousCommitOrCurrent(scmVars)}/docker/develop/Dockerfile", @@ -69,11 +81,7 @@ def buildSteps(int parallelism, List compilerVersions, String build_type, boolea scmVars, environment, ['PARALLELISM': parallelism]) - // enable prepared transactions so that 2 phase commit works - // we set it to 100 as a safe value - sh "docker run -td -e POSTGRES_USER=${env.IROHA_POSTGRES_USER} \ - -e POSTGRES_PASSWORD=${env.IROHA_POSTGRES_PASSWORD} --name ${env.IROHA_POSTGRES_HOST} \ - --network=${env.IROHA_NETWORK} postgres:9.5 -c 'max_prepared_transactions=100'" + iC.inside("" + " -e IROHA_POSTGRES_HOST=${env.IROHA_POSTGRES_HOST}" + " -e IROHA_POSTGRES_PORT=${env.IROHA_POSTGRES_PORT}" @@ -102,8 +110,8 @@ def buildSteps(int parallelism, List compilerVersions, String build_type, boolea // We run coverage once, using the first compiler as it is enough coverage = false } - } - } + } //end if + } //end for stage("Analysis") { cppcheck ? build.cppCheck(buildDir, parallelism) : echo('Skipping Cppcheck...') sonar ? build.sonarScanner(scmVars, environment) : echo('Skipping Sonar Scanner...') @@ -111,11 +119,11 @@ def buildSteps(int parallelism, List compilerVersions, String build_type, boolea stage('Build docs'){ docs ? doxygen.doDoxygen(specialBranch, scmVars.GIT_LOCAL_BRANCH) : echo("Skipping Doxygen...") } - stage ('Docker ManifestPush'){ - if (specialBranch) { - utils.dockerPush(iC, "${platform}-develop-build") - dockerManifestPush(iC, "develop-build", environment) - } + } // end iC.inside + stage ('Docker ManifestPush'){ + if (specialBranch) { + utils.dockerPush(iC, "${platform}-develop-build") + dockerManifestPush(iC, "develop-build", environment) } } } diff --git a/.jenkinsci-new/utils/docker-pull-or-build.groovy b/.jenkinsci-new/utils/docker-pull-or-build.groovy index 65dab2440f..8848ad4efc 100644 --- a/.jenkinsci-new/utils/docker-pull-or-build.groovy +++ b/.jenkinsci-new/utils/docker-pull-or-build.groovy @@ -26,34 +26,25 @@ def dockerPullOrBuild(imageName, currentDockerfileURL, previousDockerfileURL, re currentDockerfile = utils.getUrl(currentDockerfileURL, "/tmp/${randDir}/currentDockerfile", true) previousDockerfile = utils.getUrl(previousDockerfileURL, "/tmp/${randDir}/previousDockerfile") referenceDockerfile = utils.getUrl(referenceDockerfileURL, "/tmp/${randDir}/referenceDockerfile") - if (utils.filesDiffer(currentDockerfile, previousDockerfile) && utils.filesDiffer(currentDockerfile, referenceDockerfile)) { - // Dockerfile has been changed compared to both the previous commit and reference Dockerfile - // Worst case scenario. 
We cannot count on the local cache - // because Dockerfile may contain apt-get entries that would try to update - // from invalid (stale) addresses - if(remoteFilesDiffer(currentDockerfileURL, referenceDockerfileURL)){ - // Dockerfile has been changed compared to the develop + if (utils.filesDiffer(currentDockerfile, referenceDockerfile)) { + // Dockerfile is differ from develop + if (utils.filesDiffer(currentDockerfile, previousDockerfile)) { + // Dockerfile has been changed compared to both the previous commit and reference Dockerfile + // Worst case scenario. We cannot count on the local cache + // because Dockerfile may contain apt-get entries that would try to update + // from invalid (stale) addresses iC = docker.build("${env.DOCKER_REGISTRY_BASENAME}:${randDir}-${BUILD_NUMBER}", "${buildOptions} --no-cache -f ${currentDockerfile} .") } else { - // Dockerfile is same as develop, we can just pull it - def testExitCode = sh(script: "docker pull ${env.DOCKER_REGISTRY_BASENAME}:${imageName}", returnStatus: true) - if (testExitCode != 0) { - // image does not (yet) exist on Dockerhub. Build it - iC = docker.build("${env.DOCKER_REGISTRY_BASENAME}:${randDir}-${BUILD_NUMBER}", "${buildOptions} --no-cache -f ${currentDockerfile} .") - } - else { - // no difference found compared to both previous and reference Dockerfile - iC = docker.image("${env.DOCKER_REGISTRY_BASENAME}:${imageName}") - } + // if we're lucky to build on the same agent, image will be built using cache + iC = docker.build("${env.DOCKER_REGISTRY_BASENAME}:${randDir}-${BUILD_NUMBER}", "${buildOptions} -f ${currentDockerfile} .") } } else { - // first commit in this branch or Dockerfile modified - if (utils.filesDiffer(currentDockerfile, referenceDockerfile)) { - // if we're lucky to build on the same agent, image will be built using cache - iC = docker.build("${env.DOCKER_REGISTRY_BASENAME}:${randDir}-${BUILD_NUMBER}", "$buildOptions -f ${currentDockerfile} .") - } - else { + // Dockerfile is same as develop + if ( scmVars.GIT_LOCAL_BRANCH == "develop" && utils.filesDiffer(currentDockerfile, previousDockerfile)) { + // we in dev branch and docker file was changed + iC = docker.build("${env.DOCKER_REGISTRY_BASENAME}:${randDir}-${BUILD_NUMBER}", "${buildOptions} --no-cache -f ${currentDockerfile} .") + } else { // try pulling image from Dockerhub, probably image is already there def testExitCode = sh(script: "docker pull ${env.DOCKER_REGISTRY_BASENAME}:${imageName}", returnStatus: true) if (testExitCode != 0) { diff --git a/.jenkinsci/docker-pull-or-build.groovy b/.jenkinsci/docker-pull-or-build.groovy index 0d221c35c9..174abcfc7a 100644 --- a/.jenkinsci/docker-pull-or-build.groovy +++ b/.jenkinsci/docker-pull-or-build.groovy @@ -20,41 +20,30 @@ def buildOptionsString(options) { def dockerPullOrUpdate(imageName, currentDockerfileURL, previousDockerfileURL, referenceDockerfileURL, buildOptions=null) { buildOptions = buildOptionsString(buildOptions) def commit = sh(script: "echo ${GIT_LOCAL_BRANCH} | md5sum | cut -c 1-8", returnStdout: true).trim() - if (remoteFilesDiffer(currentDockerfileURL, previousDockerfileURL)) { - // Dockerfile has been changed compared to the previous commit - // Worst case scenario. 
We cannot count on the local cache - // because Dockerfile may contain apt-get entries that would try to update - // from invalid (stale) addresses - if(remoteFilesDiffer(currentDockerfileURL, referenceDockerfileURL)){ - // Dockerfile has been changed compared to the develop + if(remoteFilesDiffer(currentDockerfileURL, referenceDockerfileURL)) { + // Dockerfile is differ from develop + if (remoteFilesDiffer(currentDockerfileURL, previousDockerfileURL)) { + // Dockerfile has been changed compared to the previous commit + // Worst case scenario. We cannot count on the local cache + // because Dockerfile may contain apt-get entries that would try to update + // from invalid (stale) addresses iC = docker.build("${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}", "${buildOptions} --no-cache -f /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}") } else { - // Dockerfile is same as develop, we can just pull it - def testExitCode = sh(script: "docker pull ${DOCKER_REGISTRY_BASENAME}:${imageName}", returnStatus: true) - if (testExitCode != 0) { - // image does not (yet) exist on Dockerhub. Build it - iC = docker.build("${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}", "$buildOptions --no-cache -f /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}") - } - else { - // no difference found compared to both previous and reference Dockerfile - iC = docker.image("${DOCKER_REGISTRY_BASENAME}:${imageName}") - } - } - } - else { - // first commit in this branch or Dockerfile modified - if (remoteFilesDiffer(currentDockerfileURL, referenceDockerfileURL)) { // if we're lucky to build on the same agent, image will be built using cache iC = docker.build("${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}", "$buildOptions -f /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}") } - else { - // try pulling image from Dockerhub, probably image is already there + } else { + // Dockerfile is same as develop + if (GIT_LOCAL_BRANCH == "develop" && remoteFilesDiffer(currentDockerfileURL, previousDockerfileURL)) { + // we in dev branch and docker file was changed + iC = docker.build("${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}", "${buildOptions} --no-cache -f /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}") + } else { + // Dockerfile is same as develop, we can just pull it def testExitCode = sh(script: "docker pull ${DOCKER_REGISTRY_BASENAME}:${imageName}", returnStatus: true) if (testExitCode != 0) { // image does not (yet) exist on Dockerhub. Build it iC = docker.build("${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}", "$buildOptions --no-cache -f /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}") - } - else { + } else { // no difference found compared to both previous and reference Dockerfile iC = docker.image("${DOCKER_REGISTRY_BASENAME}:${imageName}") } From a705ea95a6c89486785cc6dc01124ce9a8a41fa8 Mon Sep 17 00:00:00 2001 From: Igor Egorov Date: Fri, 1 Feb 2019 11:32:47 +0300 Subject: [PATCH 26/41] Update Python Part of Iroha (#2028) Remove old Python lib. Implement new Python lib (including crypto and network parts). Update BTF examples. Update tx-example.py. Update permissions doc compiler. 
Signed-off-by: Igor Egorov --- CMakeLists.txt | 8 +- docker/dependencies/Dockerfile | 2 +- docker/develop/Dockerfile | 2 +- docs/permissions_compiler/compiler.py | 7 +- docs/permissions_compiler/consts.py | 60 -- docs/permissions_compiler/rst.py | 23 - docs/source/guides/build.rst | 54 +- docs/source/guides/libraries/python.rst | 1 + docs/source/maintenance/permissions.rst | 252 ++----- example/python/Makefile | 25 + example/python/batch-example.py | 53 +- example/python/blocks-query.py | 65 +- example/python/ed25519.py | 5 +- example/python/irohalib.md | 1 + example/python/irohalib.py | 97 ++- .../python/permissions/can_add_asset_qty.py | 38 +- .../permissions/can_add_domain_asset_qty.py | 2 + .../permissions/can_add_my_signatory.py | 54 +- example/python/permissions/can_add_peer.py | 40 +- .../python/permissions/can_add_signatory.py | 38 +- example/python/permissions/can_append_role.py | 48 +- .../python/permissions/can_create_account.py | 37 +- .../python/permissions/can_create_asset.py | 35 +- .../python/permissions/can_create_domain.py | 35 +- example/python/permissions/can_create_role.py | 39 +- example/python/permissions/can_detach_role.py | 35 +- .../python/permissions/can_get_all_acc_ast.py | 34 +- .../permissions/can_get_all_acc_ast_txs.py | 54 +- .../permissions/can_get_all_acc_detail.py | 34 +- .../python/permissions/can_get_all_acc_txs.py | 34 +- .../permissions/can_get_all_accounts.py | 34 +- .../permissions/can_get_all_signatories.py | 34 +- example/python/permissions/can_get_all_txs.py | 74 +-- .../permissions/can_get_domain_acc_ast.py | 33 +- .../permissions/can_get_domain_acc_ast_txs.py | 47 +- .../permissions/can_get_domain_acc_detail.py | 33 +- .../permissions/can_get_domain_acc_txs.py | 33 +- .../permissions/can_get_domain_accounts.py | 33 +- .../permissions/can_get_domain_signatories.py | 33 +- .../python/permissions/can_get_my_acc_ast.py | 33 +- .../permissions/can_get_my_acc_ast_txs.py | 47 +- .../permissions/can_get_my_acc_detail.py | 33 +- .../python/permissions/can_get_my_acc_txs.py | 33 +- .../python/permissions/can_get_my_account.py | 33 +- .../permissions/can_get_my_signatories.py | 33 +- example/python/permissions/can_get_my_txs.py | 82 +-- example/python/permissions/can_get_roles.py | 45 +- .../can_grant_can_add_my_signatory.py | 51 +- .../can_grant_can_remove_my_signatory.py | 51 +- .../can_grant_can_set_my_account_detail.py | 51 +- .../can_grant_can_set_my_quorum.py | 52 +- .../can_grant_can_transfer_my_assets.py | 69 +- example/python/permissions/can_read_assets.py | 37 +- example/python/permissions/can_receive.py | 57 +- .../permissions/can_remove_my_signatory.py | 64 +- .../permissions/can_remove_signatory.py | 43 +- example/python/permissions/can_set_detail.py | 35 +- .../permissions/can_set_my_account_detail.py | 54 +- .../python/permissions/can_set_my_quorum.py | 63 +- example/python/permissions/can_set_quorum.py | 42 +- .../permissions/can_subtract_asset_qty.py | 48 +- .../can_subtract_domain_asset_qty.py | 2 +- example/python/permissions/can_transfer.py | 1 - .../permissions/can_transfer_my_assets.py | 74 ++- example/python/permissions/commons.py | 140 ++-- example/python/prepare.sh | 13 - example/python/tx-example.py | 313 ++++----- shared_model/bindings/CMakeLists.txt | 27 +- shared_model/packages/python/setup.py | 1 + test/integration/CMakeLists.txt | 5 +- test/integration/binary/CMakeLists.txt | 75 ++- test/integration/binary/binaries_test.cpp | 15 +- .../binary/binaries_test_fixture.hpp | 2 +- .../shared_model/bindings/CMakeLists.txt | 58 +- 
.../bindings/blocks-query-test.py | 138 ---- .../shared_model/bindings/builder-test.py | 624 ------------------ .../shared_model/bindings/client-test.py | 65 -- .../shared_model/bindings/query-test.py | 390 ----------- 78 files changed, 1347 insertions(+), 3218 deletions(-) delete mode 100644 docs/permissions_compiler/consts.py create mode 100644 example/python/Makefile mode change 100644 => 100755 example/python/batch-example.py delete mode 100755 example/python/prepare.sh delete mode 100644 test/module/shared_model/bindings/blocks-query-test.py delete mode 100644 test/module/shared_model/bindings/builder-test.py delete mode 100644 test/module/shared_model/bindings/client-test.py delete mode 100644 test/module/shared_model/bindings/query-test.py diff --git a/CMakeLists.txt b/CMakeLists.txt index aa7318e6aa..16088554e2 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -71,6 +71,7 @@ SET(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules) option(BENCHMARKING "Build benchmarks" OFF) option(TESTING "Build tests" ON ) +option(USE_BTF "Build Binary Testing Framework" OFF) option(COVERAGE "Enable coverage" OFF) option(FUZZING "Build fuzzing binaries" OFF) option(PACKAGE_ZIP "Create ZIP package" OFF) @@ -78,9 +79,7 @@ option(PACKAGE_TGZ "Create TGZ package" OFF) option(PACKAGE_RPM "Create RPM package" OFF) option(PACKAGE_DEB "Create DEB package" OFF) option(ENABLE_LIBS_PACKAGING "Enable libs packaging" ON) -option(SWIG_PYTHON "Generate Swig Python bindings" OFF) option(SWIG_JAVA "Generate Swig Java bindings" OFF) -option(SUPPORT_PYTHON2 "ON if Python2, OFF if python3" OFF) option(SWIG_CSHARP "Generate Swig C# bindings" OFF) option(USE_LIBIROHA "Use external model library" OFF) @@ -99,14 +98,15 @@ if(PACKAGE_TGZ OR PACKAGE_ZIP OR PACKAGE_RPM OR PACKAGE_DEB) message(STATUS "Packaging enabled: forcing non-packaging options to OFF") set(BENCHMARKING OFF) set(TESTING OFF) + set(USE_BTF OFF) set(COVERAGE OFF) set(FUZZING OFF) - set(SWIG_PYTHON OFF) set(SWIG_JAVA OFF) endif() message(STATUS "-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}") message(STATUS "-DTESTING=${TESTING}") +message(STATUS "-DUSE_BTF=${USE_BTF}") message(STATUS "-DBENCHMARKING=${BENCHMARKING}") message(STATUS "-DFUZZING=${FUZZING}") message(STATUS "-DCOVERAGE=${COVERAGE}") @@ -115,9 +115,7 @@ message(STATUS "-DPACKAGE_TGZ=${PACKAGE_TGZ}") message(STATUS "-DPACKAGE_RPM=${PACKAGE_RPM}") message(STATUS "-DPACKAGE_DEB=${PACKAGE_DEB}") message(STATUS "-DENABLE_LIBS_PACKAGING=${ENABLE_LIBS_PACKAGING}") -message(STATUS "-DSWIG_PYTHON=${SWIG_PYTHON}") message(STATUS "-DSWIG_JAVA=${SWIG_JAVA}") -message(STATUS "-DSUPPORT_PYTHON2=${SUPPORT_PYTHON2}") message(STATUS "-DSWIG_CSHARP=${SWIG_CSHARP}") set(IROHA_SCHEMA_DIR "${CMAKE_CURRENT_SOURCE_DIR}/schema") diff --git a/docker/dependencies/Dockerfile b/docker/dependencies/Dockerfile index 8c9cc2f7e9..ef457371fa 100644 --- a/docker/dependencies/Dockerfile +++ b/docker/dependencies/Dockerfile @@ -278,7 +278,7 @@ RUN set -e; \ # python bindings dependencies RUN set -e; \ pip install grpcio_tools pysha3; \ - pip3 install grpcio_tools pysha3; + pip3 install grpcio_tools pysha3 # install lcov RUN set -e; \ diff --git a/docker/develop/Dockerfile b/docker/develop/Dockerfile index f4d100b2d8..f3e3e7c64b 100644 --- a/docker/develop/Dockerfile +++ b/docker/develop/Dockerfile @@ -273,7 +273,7 @@ RUN set -e; \ # python bindings dependencies RUN set -e; \ pip install grpcio_tools pysha3; \ - pip3 install grpcio_tools pysha3; + pip3 install grpcio_tools pysha3 # install lcov RUN set -e; \ diff --git 
a/docs/permissions_compiler/compiler.py b/docs/permissions_compiler/compiler.py index b09a780a93..55373e896e 100644 --- a/docs/permissions_compiler/compiler.py +++ b/docs/permissions_compiler/compiler.py @@ -8,8 +8,6 @@ import csv import rst import glossary -import consts -import os.path import os perm_type = category = perm = "" @@ -106,8 +104,7 @@ print(row['Related Command']) raise result.append('| {}: {}'.format(related, ', '.join(links))) - - result.extend(rst.alias(perm, grantable)) + result.append('') if row['Example'].strip(): result.extend(rst.example(row['Example'])) @@ -116,8 +113,6 @@ result.extend(rst.header('Supplementary Sources', 1)) commons_path = [os.path.pardir] * 2 + ['example', 'python', 'permissions', 'commons.py'] result.extend(rst.listing(commons_path, 'commons.py')) - consts_path = [os.path.pardir, 'permissions_compiler', 'consts.py'] - result.extend(rst.listing(consts_path, 'consts.py')) with open('maintenance/permissions.rst', 'w') as output: content = "\n".join(result) diff --git a/docs/permissions_compiler/consts.py b/docs/permissions_compiler/consts.py deleted file mode 100644 index 4af449f716..0000000000 --- a/docs/permissions_compiler/consts.py +++ /dev/null @@ -1,60 +0,0 @@ -# -# Copyright Soramitsu Co., Ltd. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 -# - -grantable = { - 'can_add_my_signatory': 'kAddMySignatory', - 'can_remove_my_signatory': 'kRemoveMySignatory', - 'can_set_my_account_detail': 'kSetMyAccountDetail', - 'can_set_my_quorum': 'kSetMyQuorum', - 'can_transfer_my_assets': 'kTransferMyAssets' -} - -role = { - 'can_add_asset_qty': 'kAddAssetQty', - 'can_add_domain_asset_qty': 'kAddDomainAssetQty', - 'can_add_peer': 'kAddPeer', - 'can_add_signatory': 'kAddSignatory', - 'can_append_role': 'kAppendRole', - 'can_create_account': 'kCreateAccount', - 'can_create_asset': 'kCreateAsset', - 'can_create_domain': 'kCreateDomain', - 'can_create_role': 'kCreateRole', - 'can_detach_role': 'kDetachRole', - 'can_get_all_acc_ast': 'kGetAllAccAst', - 'can_get_all_acc_ast_txs': 'kGetAllAccAstTxs', - 'can_get_all_acc_detail': 'kGetAllAccDetail', - 'can_get_all_acc_txs': 'kGetAllAccTxs', - 'can_get_all_accounts': 'kGetAllAccounts', - 'can_get_all_signatories': 'kGetAllSignatories', - 'can_get_all_txs': 'kGetAllTxs', - 'can_get_blocks': 'kGetBlocks', - 'can_get_domain_acc_ast': 'kGetDomainAccAst', - 'can_get_domain_acc_ast_txs': 'kGetDomainAccAstTxs', - 'can_get_domain_acc_detail': 'kGetDomainAccDetail', - 'can_get_domain_acc_txs': 'kGetDomainAccTxs', - 'can_get_domain_accounts': 'kGetDomainAccounts', - 'can_get_domain_signatories': 'kGetDomainSignatories', - 'can_get_my_acc_ast': 'kGetMyAccAst', - 'can_get_my_acc_ast_txs': 'kGetMyAccAstTxs', - 'can_get_my_acc_detail': 'kGetMyAccDetail', - 'can_get_my_acc_txs': 'kGetMyAccTxs', - 'can_get_my_account': 'kGetMyAccount', - 'can_get_my_signatories': 'kGetMySignatories', - 'can_get_my_txs': 'kGetMyTxs', - 'can_get_roles': 'kGetRoles', - 'can_grant_can_add_my_signatory': 'kAddMySignatory', - 'can_grant_can_remove_my_signatory': 'kRemoveMySignatory', - 'can_grant_can_set_my_account_detail': 'kSetMyAccountDetail', - 'can_grant_can_set_my_quorum': 'kSetMyQuorum', - 'can_grant_can_transfer_my_assets': 'kTransferMyAssets', - 'can_read_assets': 'kReadAssets', - 'can_receive': 'kReceive', - 'can_remove_signatory': 'kRemoveSignatory', - 'can_set_detail': 'kSetDetail', - 'can_set_quorum': 'kSetQuorum', - 'can_subtract_asset_qty': 'kSubtractAssetQty', - 'can_subtract_domain_asset_qty': 'kSubtractDomainAssetQty', - 
'can_transfer': 'kTransfer' -} diff --git a/docs/permissions_compiler/rst.py b/docs/permissions_compiler/rst.py index 4421915afc..cb07ba94ca 100644 --- a/docs/permissions_compiler/rst.py +++ b/docs/permissions_compiler/rst.py @@ -3,7 +3,6 @@ # SPDX-License-Identifier: Apache-2.0 # -import consts import csv import os.path @@ -82,28 +81,6 @@ def linkify(term, dictionary, pop=False): return result -def alias(permission, grantable): - """ - - :param permission: string permsssion name - :param grantable: boolean True if grantable - :return: list of strings - """ - langs = {'Python': '_', 'Java': '.'} - lines = [] - perm = permission.lower().strip() - for lang, delimiter in sorted(langs.items()): - lines.append('| Usage in {} bindings: ``{}{}{}``'.format( - lang, - 'Grantable' if grantable else 'Role', - delimiter, - consts.grantable[perm] if grantable else consts.role[perm] - )) - lines.append('|') - lines.append('') - return lines - - def listing(compile_time_path, caption='', lines_range=None, lang='python'): """ Generates listing lines to include diff --git a/docs/source/guides/build.rst b/docs/source/guides/build.rst index 87cfcb8323..83cd07aa98 100644 --- a/docs/source/guides/build.rst +++ b/docs/source/guides/build.rst @@ -1,19 +1,19 @@ Building Iroha ============== -In this guide we will learn how to install all dependencies, required to build +In this guide we will learn how to install all dependencies, required to build Iroha and how to build it. Preparing the Environment ------------------------- -In order to successfully build Iroha, we need to configure the environment. +In order to successfully build Iroha, we need to configure the environment. There are several ways to do it and we will describe all of them. -Currently, we support Unix-like systems (we are basically targeting popular -Linux distros and macOS). If you happen to have Windows or you don't want to +Currently, we support Unix-like systems (we are basically targeting popular +Linux distros and macOS). If you happen to have Windows or you don't want to spend time installing all dependencies you might want to consider using Docker -environment. Also, Windows users might consider using +environment. Also, Windows users might consider using `WSL `_ .. hint:: Having troubles? Check FAQ section or communicate to us directly, in @@ -25,13 +25,13 @@ Docker .. note:: You don't need Docker to run Iroha, it is just one of the possible choices. -First of all, you need to install ``docker`` and ``docker-compose``. You can -read how to install it on a +First of all, you need to install ``docker`` and ``docker-compose``. You can +read how to install it on the `Docker's website `_ .. note:: Please, use the latest available docker daemon and docker-compose. - -Then you should clone the `Iroha repository `_ + +Then you should clone the `Iroha repository `_ to the directory of your choice. .. code-block:: shell @@ -42,7 +42,7 @@ to the directory of your choice. save some time and bandwidth. If you want to get a full commit history, you can omit this option. -After it, you need to run the development environment. Run the +After it, you need to run the development environment. Run the ``scripts/run-iroha-dev.sh`` script: .. code-block:: shell @@ -56,14 +56,14 @@ After it, you need to run the development environment. Run the After you execute this script, following things happen: 1. The script checks if you don't have containers with Iroha already running. -Successful completion finishes with the new container shell. 
+Successful completion finishes with the new container shell. 2. The script will download ``hyperledger/iroha:develop-build`` and ``postgres`` images. ``hyperledger/iroha:develop-build`` image contains all development dependencies and is -based on top of ``ubuntu:16.04``. ``postgres`` image is required for starting +based on top of ``ubuntu:16.04``. ``postgres`` image is required for starting and running Iroha. 3. Two containers are created and launched. -4. The user is attached to the interactive environment for development and -testing with ``iroha`` folder mounted from the host machine. Iroha folder +4. The user is attached to the interactive environment for development and +testing with ``iroha`` folder mounted from the host machine. Iroha folder is mounted to ``/opt/iroha`` in Docker container. Now your are ready to build Iroha! Please go to `Building Iroha` section. @@ -75,8 +75,8 @@ Boost """"" Iroha requires Boost of at least 1.65 version. -To install Boost libraries (``libboost-all-dev``), use `current release -`_ from Boost webpage. The only +To install Boost libraries (``libboost-all-dev``), use `current release +`_ from Boost webpage. The only dependencies are thread, system and filesystem, so use ``./bootstrap.sh --with-libraries=thread,system,filesystem`` when you are building the project. @@ -86,7 +86,7 @@ Other Dependencies To build Iroha, you need following packages: -``build-essential`` ``automake`` ``libtool`` ``libssl-dev`` ``zlib1g-dev`` +``build-essential`` ``automake`` ``libtool`` ``libssl-dev`` ``zlib1g-dev`` ``libc6-dbg`` ``golang`` ``git`` ``tar`` ``gzip`` ``ca-certificates`` ``wget`` ``curl`` ``file`` ``unzip`` ``python`` ``cmake`` @@ -104,13 +104,13 @@ Use this code to install dependencies on Debian-based Linux distro. python cmake .. note:: If you are willing to actively develop Iroha and to build shared - libraries, please consider installing the - `latest release `_ of CMake. + libraries, please consider installing the + `latest release `_ of CMake. macOS ^^^^^ -If you want to build it from scratch and actively develop it, please use this code +If you want to build it from scratch and actively develop it, please use this code to install all dependencies with Homebrew. .. code-block:: shell @@ -118,7 +118,7 @@ to install all dependencies with Homebrew. xcode-select --install brew install cmake boost postgres grpc autoconf automake libtool golang soci -.. hint:: To install the Homebrew itself please run +.. hint:: To install the Homebrew itself please run ``ruby -e "$(curl -fsSL https://raw.githubusercontent.com/homebrew/install/master/install)"`` @@ -157,17 +157,17 @@ though) cmake -H. -Bbuild; cmake --build build -- -j$(nproc) -.. note:: On macOS ``$(nproc)`` variable does not work. Check the number of - logical cores with ``sysctl -n hw.ncpu`` and put it explicitly in the command +.. note:: On macOS ``$(nproc)`` variable does not work. Check the number of + logical cores with ``sysctl -n hw.ncpu`` and put it explicitly in the command above, e.g. ``cmake --build build -- -j4`` CMake Parameters ^^^^^^^^^^^^^^^^ -We use CMake to build platform-dependent build files. It has numerous flags +We use CMake to build platform-dependent build files. It has numerous flags for configuring the final build. Note that besides the listed parameters cmake's variables can be useful as well. 
Also as long as this page can be -deprecated (or just not complete) you can browse custom flags via +deprecated (or just not complete) you can browse custom flags via ``cmake -L``, ``cmake-gui``, or ``ccmake``. .. hint:: You can specify parameters at the cmake configuring stage @@ -185,8 +185,6 @@ Main Parameters +--------------+ +---------+------------------------------------------------------------------------+ | COVERAGE | | OFF | Enables or disables lcov setting for code coverage generation | +--------------+ +---------+------------------------------------------------------------------------+ -| SWIG_PYTHON | | OFF | Enables of disables the library building and Python bindings | -+--------------+ +---------+------------------------------------------------------------------------+ | SWIG_JAVA | | OFF | Enables of disables the library building and Java bindings | +--------------+-----------------+---------+------------------------------------------------------------------------+ @@ -225,7 +223,7 @@ Alternatively, you can run following command in the ``build`` folder ctest . --output-on-failure .. note:: Some of the tests will fail without PostgreSQL storage running, - so if you are not using ``scripts/run-iroha-dev.sh`` script please run Docker + so if you are not using ``scripts/run-iroha-dev.sh`` script please run Docker container or create a local connection with following parameters: .. code-block:: shell diff --git a/docs/source/guides/libraries/python.rst b/docs/source/guides/libraries/python.rst index 685183fbd2..fbc89490cc 100644 --- a/docs/source/guides/libraries/python.rst +++ b/docs/source/guides/libraries/python.rst @@ -1,3 +1,4 @@ +.. TODO: IR-1847 nickaleks 05.11.18 rework documentation with native python lib Python Library -------------- diff --git a/docs/source/maintenance/permissions.rst b/docs/source/maintenance/permissions.rst index c68f9c6589..b1cd060a6a 100644 --- a/docs/source/maintenance/permissions.rst +++ b/docs/source/maintenance/permissions.rst @@ -194,9 +194,6 @@ can_create_account Allows creating new `accounts <../core_concepts/glossary.html#account>`__. | Related API method: `Create Account <../api/commands.html#create-account>`__ -| Usage in Java bindings: ``Role.kCreateAccount`` -| Usage in Python bindings: ``Role_kCreateAccount`` -| **Example** @@ -206,7 +203,7 @@ Allows creating new `accounts <../core_concepts/glossary.html#account>`__. .. literalinclude:: ../../../example/python/permissions/can_create_account.py :language: python :linenos: - :lines: 9-42 + :lines: 10-31 can_set_detail ^^^^^^^^^^^^^^ @@ -218,9 +215,6 @@ The `permission <../core_concepts/glossary.html#permission>`__ allows setting de .. Note:: Transaction creator can always set detail for own account even without that permission. | Related API method: `Set Account Detail <../api/commands.html#set-account-detail>`__ -| Usage in Java bindings: ``Role.kSetDetail`` -| Usage in Python bindings: ``Role_kSetDetail`` -| **Example** @@ -230,7 +224,7 @@ The `permission <../core_concepts/glossary.html#permission>`__ allows setting de .. literalinclude:: ../../../example/python/permissions/can_set_detail.py :language: python :linenos: - :lines: 9-39 + :lines: 10-30 can_set_my_account_detail ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -242,9 +236,6 @@ can_set_my_account_detail .. Note:: To grant the permission an account should already have a role with `can_grant_can_set_my_account_detail`_ permission. 
| Related API method: `Set Account Detail <../api/commands.html#set-account-detail>`__ -| Usage in Java bindings: ``Grantable.kSetMyAccountDetail`` -| Usage in Python bindings: ``Grantable_kSetMyAccountDetail`` -| **Example** @@ -254,7 +245,7 @@ can_set_my_account_detail .. literalinclude:: ../../../example/python/permissions/can_set_my_account_detail.py :language: python :linenos: - :lines: 9-54 + :lines: 10-44 Asset ----- @@ -265,9 +256,6 @@ can_create_asset Allows creating new `assets <../core_concepts/glossary.html#asset>`__. | Related API method: `Create Asset <../api/commands.html#create-asset>`__ -| Usage in Java bindings: ``Role.kCreateAsset`` -| Usage in Python bindings: ``Role_kCreateAsset`` -| **Example** @@ -277,7 +265,7 @@ Allows creating new `assets <../core_concepts/glossary.html#asset>`__. .. literalinclude:: ../../../example/python/permissions/can_create_asset.py :language: python :linenos: - :lines: 9-39 + :lines: 10-30 can_receive ^^^^^^^^^^^ @@ -285,9 +273,6 @@ can_receive Allows `account <../core_concepts/glossary.html#account>`__ receive `assets <../core_concepts/glossary.html#asset>`__. | Related API method: `Transfer Asset <../api/commands.html#transfer-asset>`__ -| Usage in Java bindings: ``Role.kReceive`` -| Usage in Python bindings: ``Role_kReceive`` -| **Example** @@ -297,7 +282,7 @@ Allows `account <../core_concepts/glossary.html#account>`__ receive `assets <../ .. literalinclude:: ../../../example/python/permissions/can_receive.py :language: python :linenos: - :lines: 9-47 + :lines: 10-48 can_transfer ^^^^^^^^^^^^ @@ -309,14 +294,11 @@ You can transfer an asset from one `domain <../core_concepts/glossary.html#domai .. Note:: Destination account should have `can_receive`_ permission. | Related API method: `Transfer Asset <../api/commands.html#transfer-asset>`__ -| Usage in Java bindings: ``Role.kTransfer`` -| Usage in Python bindings: ``Role_kTransfer`` -| .. literalinclude:: ../../../example/python/permissions/can_transfer.py :language: python :linenos: - :lines: 1-11 + :lines: 1-10 can_transfer_my_assets ^^^^^^^^^^^^^^^^^^^^^^ @@ -328,9 +310,6 @@ can_transfer_my_assets See the example (to be done) for the usage details. | Related API method: `Transfer Asset <../api/commands.html#transfer-asset>`__ -| Usage in Java bindings: ``Grantable.kTransferMyAssets`` -| Usage in Python bindings: ``Grantable_kTransferMyAssets`` -| **Example** @@ -340,7 +319,7 @@ See the example (to be done) for the usage details. .. literalinclude:: ../../../example/python/permissions/can_transfer_my_assets.py :language: python :linenos: - :lines: 9-59 + :lines: 10-61 Asset Quantity -------------- @@ -353,9 +332,6 @@ Allows issuing `assets <../core_concepts/glossary.html#asset>`__. The corresponding `command <../core_concepts/glossary.html#command>`__ can be executed only for an `account <../core_concepts/glossary.html#account>`__ of `transaction <../core_concepts/glossary.html#transaction>`__ creator and only if that account has a `role <../core_concepts/glossary.html#role>`__ with the `permission <../core_concepts/glossary.html#permission>`__. | Related API method: `Add Asset Quantity <../api/commands.html#add-asset-quantity>`__ -| Usage in Java bindings: ``Role.kAddAssetQty`` -| Usage in Python bindings: ``Role_kAddAssetQty`` -| **Example** @@ -365,7 +341,7 @@ The corresponding `command <../core_concepts/glossary.html#command>`__ can be ex .. 
literalinclude:: ../../../example/python/permissions/can_add_asset_qty.py :language: python :linenos: - :lines: 9-40 + :lines: 10-32 can_subtract_asset_qty ^^^^^^^^^^^^^^^^^^^^^^ @@ -375,9 +351,6 @@ Allows burning `assets <../core_concepts/glossary.html#asset>`__. The corresponding `command <../core_concepts/glossary.html#command>`__ can be executed only for an `account <../core_concepts/glossary.html#account>`__ of `transaction <../core_concepts/glossary.html#transaction>`__ creator and only if that account has a `role <../core_concepts/glossary.html#role>`__ with the `permission <../core_concepts/glossary.html#permission>`__. | Related API method: `Subtract Asset Quantity <../api/commands.html#subtract-asset-quantity>`__ -| Usage in Java bindings: ``Role.kSubtractAssetQty`` -| Usage in Python bindings: ``Role_kSubtractAssetQty`` -| **Example** @@ -387,7 +360,7 @@ The corresponding `command <../core_concepts/glossary.html#command>`__ can be ex .. literalinclude:: ../../../example/python/permissions/can_subtract_asset_qty.py :language: python :linenos: - :lines: 9-42 + :lines: 10-40 can_add_domain_asset_qty ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -397,9 +370,6 @@ Allows issuing `assets <../core_concepts/glossary.html#asset>`__ only in own `do The corresponding `command <../core_concepts/glossary.html#command>`__ can be executed only for an `account <../core_concepts/glossary.html#account>`__ of `transaction <../core_concepts/glossary.html#transaction>`__ creator and only if that account has a `role <../core_concepts/glossary.html#role>`__ with the `permission <../core_concepts/glossary.html#permission>`__ and only for assets in creator’s domain. | Related API method: `Add Asset Quantity <../api/commands.html#add-asset-quantity>`__ -| Usage in Java bindings: ``Role.kAddDomainAssetQty`` -| Usage in Python bindings: ``Role_kAddDomainAssetQty`` -| .. literalinclude:: ../../../example/python/permissions/can_add_domain_asset_qty.py :language: python @@ -414,14 +384,11 @@ Allows burning `assets <../core_concepts/glossary.html#asset>`__ only in own `do The corresponding `command <../core_concepts/glossary.html#command>`__ can be executed only for an `account <../core_concepts/glossary.html#account>`__ of `transaction <../core_concepts/glossary.html#transaction>`__ creator and only if that account has a `role <../core_concepts/glossary.html#role>`__ with the `permission <../core_concepts/glossary.html#permission>`__ and only for assets in creator’s domain. | Related API method: `Subtract Asset Quantity <../api/commands.html#subtract-asset-quantity>`__ -| Usage in Java bindings: ``Role.kSubtractDomainAssetQty`` -| Usage in Python bindings: ``Role_kSubtractDomainAssetQty`` -| .. literalinclude:: ../../../example/python/permissions/can_subtract_domain_asset_qty.py :language: python :linenos: - :lines: 1-10 + :lines: 1-8 Domain ------ @@ -432,9 +399,6 @@ can_create_domain Allows creating new `domains <../core_concepts/glossary.html#domain>`__ within the system. | Related API method: `Create Domain <../api/commands.html#create-domain>`__ -| Usage in Java bindings: ``Role.kCreateDomain`` -| Usage in Python bindings: ``Role_kCreateDomain`` -| **Example** @@ -444,7 +408,7 @@ Allows creating new `domains <../core_concepts/glossary.html#domain>`__ within t .. 
literalinclude:: ../../../example/python/permissions/can_create_domain.py :language: python :linenos: - :lines: 9-40 + :lines: 10-31 Grant ----- @@ -455,9 +419,6 @@ can_grant_can_add_my_signatory Allows `role <../core_concepts/glossary.html#role>`__ owners grant `can_add_my_signatory`_ `permission <../core_concepts/glossary.html#permission>`__. | Related API methods: `Grant Permission <../api/commands.html#grant-permission>`__, `Revoke Permission <../api/commands.html#revoke-permission>`__ -| Usage in Java bindings: ``Role.kAddMySignatory`` -| Usage in Python bindings: ``Role_kAddMySignatory`` -| **Example** @@ -467,7 +428,7 @@ Allows `role <../core_concepts/glossary.html#role>`__ owners grant `can_add_my_s .. literalinclude:: ../../../example/python/permissions/can_grant_can_add_my_signatory.py :language: python :linenos: - :lines: 9-52 + :lines: 10-43 can_grant_can_remove_my_signatory ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -475,9 +436,6 @@ can_grant_can_remove_my_signatory Allows `role <../core_concepts/glossary.html#role>`__ owners grant `can_remove_my_signatory`_ `permission <../core_concepts/glossary.html#permission>`__. | Related API methods: `Grant Permission <../api/commands.html#grant-permission>`__, `Revoke Permission <../api/commands.html#revoke-permission>`__ -| Usage in Java bindings: ``Role.kRemoveMySignatory`` -| Usage in Python bindings: ``Role_kRemoveMySignatory`` -| **Example** @@ -487,7 +445,7 @@ Allows `role <../core_concepts/glossary.html#role>`__ owners grant `can_remove_m .. literalinclude:: ../../../example/python/permissions/can_grant_can_remove_my_signatory.py :language: python :linenos: - :lines: 9-52 + :lines: 10-43 can_grant_can_set_my_account_detail ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -495,9 +453,6 @@ can_grant_can_set_my_account_detail Allows `role <../core_concepts/glossary.html#role>`__ owners grant `can_set_my_account_detail`_ `permission <../core_concepts/glossary.html#permission>`__. | Related API methods: `Grant Permission <../api/commands.html#grant-permission>`__, `Revoke Permission <../api/commands.html#revoke-permission>`__ -| Usage in Java bindings: ``Role.kSetMyAccountDetail`` -| Usage in Python bindings: ``Role_kSetMyAccountDetail`` -| **Example** @@ -507,7 +462,7 @@ Allows `role <../core_concepts/glossary.html#role>`__ owners grant `can_set_my_a .. literalinclude:: ../../../example/python/permissions/can_grant_can_set_my_account_detail.py :language: python :linenos: - :lines: 9-52 + :lines: 10-43 can_grant_can_set_my_quorum ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -515,9 +470,6 @@ can_grant_can_set_my_quorum Allows `role <../core_concepts/glossary.html#role>`__ owners grant `can_set_my_quorum`_ `permission <../core_concepts/glossary.html#permission>`__. | Related API methods: `Grant Permission <../api/commands.html#grant-permission>`__, `Revoke Permission <../api/commands.html#revoke-permission>`__ -| Usage in Java bindings: ``Role.kSetMyQuorum`` -| Usage in Python bindings: ``Role_kSetMyQuorum`` -| **Example** @@ -527,7 +479,7 @@ Allows `role <../core_concepts/glossary.html#role>`__ owners grant `can_set_my_q .. literalinclude:: ../../../example/python/permissions/can_grant_can_set_my_quorum.py :language: python :linenos: - :lines: 9-52 + :lines: 10-44 can_grant_can_transfer_my_assets ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -535,9 +487,6 @@ can_grant_can_transfer_my_assets Allows `role <../core_concepts/glossary.html#role>`__ owners grant `can_transfer_my_assets`_ `permission <../core_concepts/glossary.html#permission>`__. 
| Related API methods: `Grant Permission <../api/commands.html#grant-permission>`__, `Revoke Permission <../api/commands.html#revoke-permission>`__ -| Usage in Java bindings: ``Role.kTransferMyAssets`` -| Usage in Python bindings: ``Role_kTransferMyAssets`` -| **Example** @@ -547,7 +496,7 @@ Allows `role <../core_concepts/glossary.html#role>`__ owners grant `can_transfer .. literalinclude:: ../../../example/python/permissions/can_grant_can_transfer_my_assets.py :language: python :linenos: - :lines: 9-59 + :lines: 10-56 Peer ---- @@ -560,9 +509,6 @@ Allows adding `peers <../core_concepts/glossary.html#peer>`__ to the network. A new peer will be a valid participant in the next `consensus <../core_concepts/glossary.html#consensus>`__ round after an agreement on `transaction <../core_concepts/glossary.html#transaction>`__ containing "addPeer" `command <../core_concepts/glossary.html#command>`__. | Related API method: `Add Peer <../api/commands.html#add-peer>`__ -| Usage in Java bindings: ``Role.kAddPeer`` -| Usage in Python bindings: ``Role_kAddPeer`` -| **Example** @@ -572,7 +518,7 @@ A new peer will be a valid participant in the next `consensus <../core_concepts/ .. literalinclude:: ../../../example/python/permissions/can_add_peer.py :language: python :linenos: - :lines: 9-40 + :lines: 10-34 Role ---- @@ -585,9 +531,6 @@ Allows appending `roles <../core_concepts/glossary.html#role>`__ to another `acc You can append only that role that has lesser or the same set of privileges as `transaction <../core_concepts/glossary.html#transaction>`__ creator. | Related API method: `Append Role <../api/commands.html#append-role>`__ -| Usage in Java bindings: ``Role.kAppendRole`` -| Usage in Python bindings: ``Role_kAppendRole`` -| **Example** @@ -597,7 +540,7 @@ You can append only that role that has lesser or the same set of privileges as ` .. literalinclude:: ../../../example/python/permissions/can_append_role.py :language: python :linenos: - :lines: 9-48 + :lines: 10-40 can_create_role ^^^^^^^^^^^^^^^ @@ -608,9 +551,6 @@ Possible set of `permissions <../core_concepts/glossary.html#permission>`__ for | Related API method: `Create Role <../api/commands.html#create-role>`__ -| Usage in Java bindings: ``Role.kCreateRole`` -| Usage in Python bindings: ``Role_kCreateRole`` -| **Example** @@ -620,7 +560,7 @@ Possible set of `permissions <../core_concepts/glossary.html#permission>`__ for .. literalinclude:: ../../../example/python/permissions/can_create_role.py :language: python :linenos: - :lines: 9-44 + :lines: 10-33 can_detach_role ^^^^^^^^^^^^^^^ @@ -630,9 +570,6 @@ Allows revoking a `role <../core_concepts/glossary.html#role>`__ from a user. .. Note:: Due to a known issue the permission allows to detach any role without limitations https://soramitsu.atlassian.net/browse/IR-1468 | Related API method: `Detach Role <../api/commands.html#detach-role>`__ -| Usage in Java bindings: ``Role.kDetachRole`` -| Usage in Python bindings: ``Role_kDetachRole`` -| **Example** @@ -642,7 +579,7 @@ Allows revoking a `role <../core_concepts/glossary.html#role>`__ from a user. .. literalinclude:: ../../../example/python/permissions/can_detach_role.py :language: python :linenos: - :lines: 9-39 + :lines: 10-30 Signatory --------- @@ -655,9 +592,6 @@ can_add_my_signatory `Permission <../core_concepts/glossary.html#permission>`__ that allows a specified `account <../core_concepts/glossary.html#account>`__ to add an extra public key to the another specified account. 
| Related API method: `Add Signatory <../api/commands.html#add-signatory>`__ -| Usage in Java bindings: ``Grantable.kAddMySignatory`` -| Usage in Python bindings: ``Grantable_kAddMySignatory`` -| **Example** @@ -667,7 +601,7 @@ can_add_my_signatory .. literalinclude:: ../../../example/python/permissions/can_add_my_signatory.py :language: python :linenos: - :lines: 9-53 + :lines: 10-45 can_add_signatory ^^^^^^^^^^^^^^^^^ @@ -677,9 +611,6 @@ Allows linking additional public keys to `account <../core_concepts/glossary.htm The corresponding `command <../core_concepts/glossary.html#command>`__ can be executed only for an account of `transaction <../core_concepts/glossary.html#transaction>`__ creator and only if that account has a `role <../core_concepts/glossary.html#role>`__ with the `permission <../core_concepts/glossary.html#permission>`__. | Related API method: `Add Signatory <../api/commands.html#add-signatory>`__ -| Usage in Java bindings: ``Role.kAddSignatory`` -| Usage in Python bindings: ``Role_kAddSignatory`` -| **Example** @@ -689,7 +620,7 @@ The corresponding `command <../core_concepts/glossary.html#command>`__ can be ex .. literalinclude:: ../../../example/python/permissions/can_add_signatory.py :language: python :linenos: - :lines: 9-40 + :lines: 10-32 can_remove_my_signatory ^^^^^^^^^^^^^^^^^^^^^^^ @@ -701,9 +632,6 @@ can_remove_my_signatory See the example (to be done) for the usage details. | Related API method: `Remove Signatory <../api/commands.html#remove-signatory>`__ -| Usage in Java bindings: ``Grantable.kRemoveMySignatory`` -| Usage in Python bindings: ``Grantable_kRemoveMySignatory`` -| **Example** @@ -713,7 +641,7 @@ See the example (to be done) for the usage details. .. literalinclude:: ../../../example/python/permissions/can_remove_my_signatory.py :language: python :linenos: - :lines: 9-57 + :lines: 10-51 can_remove_signatory ^^^^^^^^^^^^^^^^^^^^ @@ -723,9 +651,6 @@ Allows unlinking additional public keys from an `account <../core_concepts/gloss The corresponding `command <../core_concepts/glossary.html#command>`__ can be executed only for an account of `transaction <../core_concepts/glossary.html#transaction>`__ creator and only if that account has a `role <../core_concepts/glossary.html#role>`__ with the `permission <../core_concepts/glossary.html#permission>`__. | Related API method: `Remove Signatory <../api/commands.html#remove-signatory>`__ -| Usage in Java bindings: ``Role.kRemoveSignatory`` -| Usage in Python bindings: ``Role_kRemoveSignatory`` -| **Example** @@ -735,7 +660,7 @@ The corresponding `command <../core_concepts/glossary.html#command>`__ can be ex .. literalinclude:: ../../../example/python/permissions/can_remove_signatory.py :language: python :linenos: - :lines: 9-41 + :lines: 10-36 can_set_my_quorum ^^^^^^^^^^^^^^^^^ @@ -747,9 +672,6 @@ can_set_my_quorum Account should have greater or equal amount of keys than quorum. | Related API method: `Set Account Quorum <../api/commands.html#set-account-quorum>`__ -| Usage in Java bindings: ``Grantable.kSetMyQuorum`` -| Usage in Python bindings: ``Grantable_kSetMyQuorum`` -| **Example** @@ -759,7 +681,7 @@ Account should have greater or equal amount of keys than quorum. .. literalinclude:: ../../../example/python/permissions/can_set_my_quorum.py :language: python :linenos: - :lines: 9-57 + :lines: 10-50 can_set_quorum ^^^^^^^^^^^^^^ @@ -769,9 +691,6 @@ Allows setting `quorum <../core_concepts/glossary.html#quorum>`__. 
At least the same number (or more) of public keys should be already linked to an `account <../core_concepts/glossary.html#account>`__. | Related API method: `Set Account Quorum <../api/commands.html#set-account-quorum>`__ -| Usage in Java bindings: ``Role.kSetQuorum`` -| Usage in Python bindings: ``Role_kSetQuorum`` -| **Example** @@ -781,7 +700,7 @@ At least the same number (or more) of public keys should be already linked to an .. literalinclude:: ../../../example/python/permissions/can_set_quorum.py :language: python :linenos: - :lines: 9-42 + :lines: 10-36 Query-related permissions ========================= @@ -795,9 +714,6 @@ can_get_all_acc_detail Allows getting all the details set to any `account <../core_concepts/glossary.html#account>`__ within the system. | Related API method: `Get Account Detail <../api/queries.html#get-account-detail>`__ -| Usage in Java bindings: ``Role.kGetAllAccDetail`` -| Usage in Python bindings: ``Role_kGetAllAccDetail`` -| **Example** @@ -807,7 +723,7 @@ Allows getting all the details set to any `account <../core_concepts/glossary.ht .. literalinclude:: ../../../example/python/permissions/can_get_all_acc_detail.py :language: python :linenos: - :lines: 9-40 + :lines: 10-28 can_get_all_accounts ^^^^^^^^^^^^^^^^^^^^ @@ -819,9 +735,6 @@ With this `permission <../core_concepts/glossary.html#permission>`__, `query <.. All the details (set by the account owner or owners of other accounts) will be returned. | Related API method: `Get Account <../api/queries.html#get-account>`__ -| Usage in Java bindings: ``Role.kGetAllAccounts`` -| Usage in Python bindings: ``Role_kGetAllAccounts`` -| **Example** @@ -831,7 +744,7 @@ All the details (set by the account owner or owners of other accounts) will be r .. literalinclude:: ../../../example/python/permissions/can_get_all_accounts.py :language: python :linenos: - :lines: 9-40 + :lines: 10-28 can_get_domain_acc_detail ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -839,9 +752,6 @@ can_get_domain_acc_detail Allows getting all the details set to any `account <../core_concepts/glossary.html#account>`__ within the same `domain <../core_concepts/glossary.html#domain>`__ as a domain of `query <../core_concepts/glossary.html#query>`__ creator account. | Related API method: `Get Account Detail <../api/queries.html#get-account-detail>`__ -| Usage in Java bindings: ``Role.kGetDomainAccDetail`` -| Usage in Python bindings: ``Role_kGetDomainAccDetail`` -| **Example** @@ -851,7 +761,7 @@ Allows getting all the details set to any `account <../core_concepts/glossary.ht .. literalinclude:: ../../../example/python/permissions/can_get_domain_acc_detail.py :language: python :linenos: - :lines: 9-39 + :lines: 10-28 can_get_domain_accounts ^^^^^^^^^^^^^^^^^^^^^^^ @@ -863,9 +773,6 @@ With this `permission <../core_concepts/glossary.html#permission>`__, `query <.. All the details (set by the account owner or owners of other accounts) will be returned. | Related API method: `Get Account <../api/queries.html#get-account>`__ -| Usage in Java bindings: ``Role.kGetDomainAccounts`` -| Usage in Python bindings: ``Role_kGetDomainAccounts`` -| **Example** @@ -875,7 +782,7 @@ All the details (set by the account owner or owners of other accounts) will be r .. 
literalinclude:: ../../../example/python/permissions/can_get_domain_accounts.py :language: python :linenos: - :lines: 9-39 + :lines: 10-28 can_get_my_acc_detail ^^^^^^^^^^^^^^^^^^^^^ @@ -883,9 +790,6 @@ can_get_my_acc_detail Allows getting all the details set to the `account <../core_concepts/glossary.html#account>`__ of `query <../core_concepts/glossary.html#query>`__ creator. | Related API method: `Get Account Detail <../api/queries.html#get-account-detail>`__ -| Usage in Java bindings: ``Role.kGetMyAccDetail`` -| Usage in Python bindings: ``Role_kGetMyAccDetail`` -| **Example** @@ -895,7 +799,7 @@ Allows getting all the details set to the `account <../core_concepts/glossary.ht .. literalinclude:: ../../../example/python/permissions/can_get_my_acc_detail.py :language: python :linenos: - :lines: 9-39 + :lines: 10-28 can_get_my_account ^^^^^^^^^^^^^^^^^^ @@ -907,9 +811,6 @@ With this `permission <../core_concepts/glossary.html#permission>`__, `query <.. All the details (set by the account owner or owners of other accounts) will be returned. | Related API method: `Get Account <../api/queries.html#get-account>`__ -| Usage in Java bindings: ``Role.kGetMyAccount`` -| Usage in Python bindings: ``Role_kGetMyAccount`` -| **Example** @@ -919,7 +820,7 @@ All the details (set by the account owner or owners of other accounts) will be r .. literalinclude:: ../../../example/python/permissions/can_get_my_account.py :language: python :linenos: - :lines: 9-39 + :lines: 10-28 Account Asset ------------- @@ -932,9 +833,6 @@ Allows getting a balance of `assets <../core_concepts/glossary.html#asset>`__ on `Query <../core_concepts/glossary.html#query>`__ response will contain information about all the assets that ever been assigned to an account. | Related API method: `Get Account Assets <../api/queries.html#get-account-assets>`__ -| Usage in Java bindings: ``Role.kGetAllAccAst`` -| Usage in Python bindings: ``Role_kGetAllAccAst`` -| **Example** @@ -944,7 +842,7 @@ Allows getting a balance of `assets <../core_concepts/glossary.html#asset>`__ on .. literalinclude:: ../../../example/python/permissions/can_get_all_acc_ast.py :language: python :linenos: - :lines: 9-40 + :lines: 10-28 can_get_domain_acc_ast ^^^^^^^^^^^^^^^^^^^^^^ @@ -954,9 +852,6 @@ Allows getting a balance of specified `asset <../core_concepts/glossary.html#ass Query response will contain information about all the assets that ever been assigned to an account. | Related API method: `Get Account Assets <../api/queries.html#get-account-assets>`__ -| Usage in Java bindings: ``Role.kGetDomainAccAst`` -| Usage in Python bindings: ``Role_kGetDomainAccAst`` -| **Example** @@ -966,7 +861,7 @@ Query response will contain information about all the assets that ever been assi .. literalinclude:: ../../../example/python/permissions/can_get_domain_acc_ast.py :language: python :linenos: - :lines: 9-39 + :lines: 10-28 can_get_my_acc_ast ^^^^^^^^^^^^^^^^^^ @@ -976,9 +871,6 @@ Allows getting a balance of specified `asset <../core_concepts/glossary.html#ass Query response will contain information about all the assets that ever been assigned to an account. | Related API method: `Get Account Assets <../api/queries.html#get-account-assets>`__ -| Usage in Java bindings: ``Role.kGetMyAccAst`` -| Usage in Python bindings: ``Role_kGetMyAccAst`` -| **Example** @@ -988,7 +880,7 @@ Query response will contain information about all the assets that ever been assi .. 
literalinclude:: ../../../example/python/permissions/can_get_my_acc_ast.py :language: python :linenos: - :lines: 9-39 + :lines: 10-28 Account Asset Transaction ------------------------- @@ -1001,9 +893,6 @@ Allows getting `transactions <../core_concepts/glossary.html#transaction>`__ ass .. Note:: Incoming asset transfers will also appear in the query response. | Related API method: `Get Account Asset Transactions <../api/queries.html#get-account-asset-transactions>`__ -| Usage in Java bindings: ``Role.kGetAllAccAstTxs`` -| Usage in Python bindings: ``Role_kGetAllAccAstTxs`` -| **Example** @@ -1013,7 +902,7 @@ Allows getting `transactions <../core_concepts/glossary.html#transaction>`__ ass .. literalinclude:: ../../../example/python/permissions/can_get_all_acc_ast_txs.py :language: python :linenos: - :lines: 9-47 + :lines: 10-43 can_get_domain_acc_ast_txs ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1023,9 +912,6 @@ Allows getting `transactions <../core_concepts/glossary.html#transaction>`__ ass .. Note:: Incoming asset transfers will also appear in the query response. | Related API method: `Get Account Asset Transactions <../api/queries.html#get-account-asset-transactions>`__ -| Usage in Java bindings: ``Role.kGetDomainAccAstTxs`` -| Usage in Python bindings: ``Role_kGetDomainAccAstTxs`` -| **Example** @@ -1035,7 +921,7 @@ Allows getting `transactions <../core_concepts/glossary.html#transaction>`__ ass .. literalinclude:: ../../../example/python/permissions/can_get_domain_acc_ast_txs.py :language: python :linenos: - :lines: 9-42 + :lines: 10-39 can_get_my_acc_ast_txs ^^^^^^^^^^^^^^^^^^^^^^ @@ -1045,9 +931,6 @@ Allows getting `transactions <../core_concepts/glossary.html#transaction>`__ ass .. Note:: Incoming asset transfers will also appear in the query response. | Related API method: `Get Account Asset Transactions <../api/queries.html#get-account-asset-transactions>`__ -| Usage in Java bindings: ``Role.kGetMyAccAstTxs`` -| Usage in Python bindings: ``Role_kGetMyAccAstTxs`` -| **Example** @@ -1057,7 +940,7 @@ Allows getting `transactions <../core_concepts/glossary.html#transaction>`__ ass .. literalinclude:: ../../../example/python/permissions/can_get_my_acc_ast_txs.py :language: python :linenos: - :lines: 9-42 + :lines: 10-39 Account Transaction ------------------- @@ -1070,9 +953,6 @@ Allows getting all `transactions <../core_concepts/glossary.html#transaction>`__ .. Note:: Incoming asset transfer inside a transaction would NOT lead to an appearance of the transaction in the command output. | Related API method: `Get Account Transactions <../api/queries.html#get-account-transactions>`__ -| Usage in Java bindings: ``Role.kGetAllAccTxs`` -| Usage in Python bindings: ``Role_kGetAllAccTxs`` -| **Example** @@ -1082,7 +962,7 @@ Allows getting all `transactions <../core_concepts/glossary.html#transaction>`__ .. literalinclude:: ../../../example/python/permissions/can_get_all_acc_txs.py :language: python :linenos: - :lines: 9-40 + :lines: 10-28 can_get_domain_acc_txs ^^^^^^^^^^^^^^^^^^^^^^ @@ -1092,9 +972,6 @@ Allows getting all `transactions <../core_concepts/glossary.html#transaction>`__ .. Note:: Incoming asset transfer inside a transaction would NOT lead to an appearance of the transaction in the command output. 
| Related API method: `Get Account Transactions <../api/queries.html#get-account-transactions>`__ -| Usage in Java bindings: ``Role.kGetDomainAccTxs`` -| Usage in Python bindings: ``Role_kGetDomainAccTxs`` -| **Example** @@ -1104,7 +981,7 @@ Allows getting all `transactions <../core_concepts/glossary.html#transaction>`__ .. literalinclude:: ../../../example/python/permissions/can_get_domain_acc_txs.py :language: python :linenos: - :lines: 9-39 + :lines: 10-28 can_get_my_acc_txs ^^^^^^^^^^^^^^^^^^ @@ -1114,9 +991,6 @@ Allows getting all `transactions <../core_concepts/glossary.html#transaction>`__ .. Note:: Incoming asset transfer inside a transaction would NOT lead to an appearance of the transaction in the command output. | Related API method: `Get Account Transactions <../api/queries.html#get-account-transactions>`__ -| Usage in Java bindings: ``Role.kGetMyAccTxs`` -| Usage in Python bindings: ``Role_kGetMyAccTxs`` -| **Example** @@ -1126,7 +1000,7 @@ Allows getting all `transactions <../core_concepts/glossary.html#transaction>`__ .. literalinclude:: ../../../example/python/permissions/can_get_my_acc_txs.py :language: python :linenos: - :lines: 9-39 + :lines: 10-28 Asset ----- @@ -1137,9 +1011,6 @@ can_read_assets Allows getting information about `asset <../core_concepts/glossary.html#asset>`__ precision. | Related API method: `Get Asset Info <../api/queries.html#get-asset-info>`__ -| Usage in Java bindings: ``Role.kReadAssets`` -| Usage in Python bindings: ``Role_kReadAssets`` -| **Example** @@ -1149,7 +1020,7 @@ Allows getting information about `asset <../core_concepts/glossary.html#asset>`_ .. literalinclude:: ../../../example/python/permissions/can_read_assets.py :language: python :linenos: - :lines: 9-40 + :lines: 10-31 Block Stream ------------ @@ -1159,10 +1030,6 @@ can_get_blocks Allows subscription to the stream of accepted `blocks <../core_concepts/glossary.html#block>`__. -| Usage in Java bindings: ``Role.kGetBlocks`` -| Usage in Python bindings: ``Role_kGetBlocks`` -| - Role ---- @@ -1173,9 +1040,6 @@ Allows getting a list of `roles <../core_concepts/glossary.html#role>`__ within Allows getting a list of `permissions <../core_concepts/glossary.html#permission>`__ associated with a role. | Related API methods: `Get Roles <../api/queries.html#get-roles>`__, `Get Role Permissions <../api/queries.html#get-role-permissions>`__ -| Usage in Java bindings: ``Role.kGetRoles`` -| Usage in Python bindings: ``Role_kGetRoles`` -| **Example** @@ -1185,7 +1049,7 @@ Allows getting a list of `permissions <../core_concepts/glossary.html#permission .. literalinclude:: ../../../example/python/permissions/can_get_roles.py :language: python :linenos: - :lines: 9-52 + :lines: 10-35 Signatory --------- @@ -1196,9 +1060,6 @@ can_get_all_signatories Allows getting a list of public keys linked to an `account <../core_concepts/glossary.html#account>`__ within the system. | Related API method: `Get Signatories <../api/queries.html#get-signatories>`__ -| Usage in Java bindings: ``Role.kGetAllSignatories`` -| Usage in Python bindings: ``Role_kGetAllSignatories`` -| **Example** @@ -1208,7 +1069,7 @@ Allows getting a list of public keys linked to an `account <../core_concepts/glo .. 
literalinclude:: ../../../example/python/permissions/can_get_all_signatories.py :language: python :linenos: - :lines: 9-40 + :lines: 10-28 can_get_domain_signatories ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1216,9 +1077,6 @@ can_get_domain_signatories Allows getting a list of public keys of any `account <../core_concepts/glossary.html#account>`__ within the same `domain <../core_concepts/glossary.html#domain>`__ as the domain of `query <../core_concepts/glossary.html#query>`__ creator account. | Related API method: `Get Signatories <../api/queries.html#get-signatories>`__ -| Usage in Java bindings: ``Role.kGetDomainSignatories`` -| Usage in Python bindings: ``Role_kGetDomainSignatories`` -| **Example** @@ -1228,7 +1086,7 @@ Allows getting a list of public keys of any `account <../core_concepts/glossary. .. literalinclude:: ../../../example/python/permissions/can_get_domain_signatories.py :language: python :linenos: - :lines: 9-39 + :lines: 10-28 can_get_my_signatories ^^^^^^^^^^^^^^^^^^^^^^ @@ -1236,9 +1094,6 @@ can_get_my_signatories Allows getting a list of public keys of `query <../core_concepts/glossary.html#query>`__ creator `account <../core_concepts/glossary.html#account>`__. | Related API method: `Get Signatories <../api/queries.html#get-signatories>`__ -| Usage in Java bindings: ``Role.kGetMySignatories`` -| Usage in Python bindings: ``Role_kGetMySignatories`` -| **Example** @@ -1248,7 +1103,7 @@ Allows getting a list of public keys of `query <../core_concepts/glossary.html#q .. literalinclude:: ../../../example/python/permissions/can_get_my_signatories.py :language: python :linenos: - :lines: 9-39 + :lines: 10-28 Transaction ----------- @@ -1259,9 +1114,6 @@ can_get_all_txs Allows getting any `transaction <../core_concepts/glossary.html#transaction>`__ by hash. | Related API method: `Get Transactions <../api/queries.html#get-transactions>`__ -| Usage in Java bindings: ``Role.kGetAllTxs`` -| Usage in Python bindings: ``Role_kGetAllTxs`` -| **Example** @@ -1271,7 +1123,7 @@ Allows getting any `transaction <../core_concepts/glossary.html#transaction>`__ .. literalinclude:: ../../../example/python/permissions/can_get_all_txs.py :language: python :linenos: - :lines: 9-72 + :lines: 11-58 can_get_my_txs ^^^^^^^^^^^^^^ @@ -1279,9 +1131,6 @@ can_get_my_txs Allows getting `transaction <../core_concepts/glossary.html#transaction>`__ (that was issued by `query <../core_concepts/glossary.html#query>`__ creator) by hash. | Related API method: `Get Transactions <../api/queries.html#get-transactions>`__ -| Usage in Java bindings: ``Role.kGetMyTxs`` -| Usage in Python bindings: ``Role_kGetMyTxs`` -| **Example** @@ -1291,7 +1140,7 @@ Allows getting `transaction <../core_concepts/glossary.html#transaction>`__ (tha .. literalinclude:: ../../../example/python/permissions/can_get_my_txs.py :language: python :linenos: - :lines: 9-76 + :lines: 11-62 Supplementary Sources ===================== @@ -1300,8 +1149,3 @@ Supplementary Sources :language: python :linenos: :caption: commons.py - -.. literalinclude:: ../../permissions_compiler/consts.py - :language: python - :linenos: - :caption: consts.py diff --git a/example/python/Makefile b/example/python/Makefile new file mode 100644 index 0000000000..0d19e05492 --- /dev/null +++ b/example/python/Makefile @@ -0,0 +1,25 @@ +#!/usr/bin/env make + +# TODO igor-egorov 21.01.2019 reimplement using cmake IR-239 + +# sources +PROTO_PATH = ../../shared_model/schema +PROTO_FILES = $(wildcard $(PROTO_PATH)/*.proto) + +# targets +PYTHON_GRPC_LIBS_PATH = . 
+PYTHON_GRPC_LIBS_FILES = $(PYTHON_GRPC_LIBS_PATH)/endpoint_pb2_grpc.py +PYTHON_PROTO_LIBS_PATH = . +PYTHON_PROTO_LIBS_FILES = $(patsubst $(PROTO_PATH)/%.proto,$(PYTHON_PROTO_LIBS_PATH)/%_pb2.py,$(PROTO_FILES)) + +.PHONY: default +default: all + +.PHONY: all +all: $(PYTHON_PROTO_LIBS_FILES) $(PYTHON_GRPC_LIBS_FILES) + +$(PYTHON_PROTO_LIBS_PATH)/%_pb2.py: $(PROTO_PATH)/%.proto + protoc --proto_path=$(PROTO_PATH) --python_out=$(PYTHON_PROTO_LIBS_PATH) $< + +$(PYTHON_GRPC_LIBS_PATH)/%_pb2_grpc.py: $(PROTO_PATH)/%.proto + python -m grpc_tools.protoc --proto_path=$(PROTO_PATH) --grpc_python_out=$(PYTHON_PROTO_LIBS_PATH) $< diff --git a/example/python/batch-example.py b/example/python/batch-example.py old mode 100644 new mode 100755 index 3ea32efeac..05d6e003a9 --- a/example/python/batch-example.py +++ b/example/python/batch-example.py @@ -4,16 +4,21 @@ # SPDX-License-Identifier: Apache-2.0 # +import binascii +from irohalib import IrohaCrypto as ic +from irohalib import Iroha, IrohaGrpc +import sys + print(""" PLEASE ENSURE THAT MST IS ENABLED IN IROHA CONFIG """) -from irohalib import Iroha, IrohaGrpc -from irohalib import IrohaCrypto as ic -import binascii +if sys.version_info[0] < 3: + raise Exception('Python 3 or a more recent version is required.') + iroha = Iroha('admin@test') net = IrohaGrpc() @@ -74,14 +79,18 @@ def send_batch_and_print_status(*transactions): def create_users(): global iroha init_cmds = [ - iroha.command('CreateAsset', asset_name='bitcoin', domain_id='test', precision=2), - iroha.command('CreateAsset', asset_name='dogecoin', domain_id='test', precision=2), - iroha.command('AddAssetQuantity', asset_id='bitcoin#test', amount='100000'), - iroha.command('AddAssetQuantity', asset_id='dogecoin#test', amount='20000'), + iroha.command('CreateAsset', asset_name='bitcoin', + domain_id='test', precision=2), + iroha.command('CreateAsset', asset_name='dogecoin', + domain_id='test', precision=2), + iroha.command('AddAssetQuantity', + asset_id='bitcoin#test', amount='100000'), + iroha.command('AddAssetQuantity', + asset_id='dogecoin#test', amount='20000'), iroha.command('CreateAccount', account_name='alice', domain_id='test', - public_key=ic.hex_key_to_bytes(alice_public_keys[0])), + public_key=alice_public_keys[0]), iroha.command('CreateAccount', account_name='bob', domain_id='test', - public_key=ic.hex_key_to_bytes(bob_public_keys[0])), + public_key=bob_public_keys[0]), iroha.command('TransferAsset', src_account_id='admin@test', dest_account_id='alice@test', asset_id='bitcoin#test', description='init top up', amount='100000'), iroha.command('TransferAsset', src_account_id='admin@test', dest_account_id='bob@test', @@ -97,8 +106,9 @@ def add_keys_and_set_quorum(): alice_iroha = Iroha('alice@test') alice_cmds = [ alice_iroha.command('AddSignatory', account_id='alice@test', - public_key=ic.hex_key_to_bytes(alice_public_keys[1])), - alice_iroha.command('SetAccountQuorum', account_id='alice@test', quorum=2) + public_key=alice_public_keys[1]), + alice_iroha.command('SetAccountQuorum', + account_id='alice@test', quorum=2) ] alice_tx = alice_iroha.transaction(alice_cmds) ic.sign_transaction(alice_tx, alice_private_keys[0]) @@ -106,7 +116,8 @@ def add_keys_and_set_quorum(): bob_iroha = Iroha('bob@test') bob_cmds = [ - bob_iroha.command('AddSignatory', account_id='bob@test', public_key=ic.hex_key_to_bytes(bob_public_keys[1])), + bob_iroha.command('AddSignatory', account_id='bob@test', + public_key=bob_public_keys[1]), bob_iroha.command('SetAccountQuorum', account_id='bob@test', quorum=2) ] 
bob_tx = bob_iroha.transaction(bob_cmds) @@ -150,10 +161,12 @@ def bob_accepts_exchange_request(): pending_transactions = net.send_query(q) for tx in pending_transactions.transactions_response.transactions: if tx.payload.reduced_payload.creator_account_id == 'alice@test': - del tx.signatures[:] # we need do this temporarily, otherwise accept will not reach MST engine + # we need do this temporarily, otherwise accept will not reach MST engine + del tx.signatures[:] else: ic.sign_transaction(tx, *bob_private_keys) - send_batch_and_print_status(*pending_transactions.transactions_response.transactions) + send_batch_and_print_status( + *pending_transactions.transactions_response.transactions) @trace @@ -162,7 +175,8 @@ def check_no_pending_txs(): print( net.send_query( ic.sign_query( - iroha.query('GetPendingTransactions', creator_account='bob@test'), + iroha.query('GetPendingTransactions', + creator_account='bob@test'), bob_private_keys[0] ) ) @@ -185,11 +199,14 @@ def bob_declines_exchange_request(): pending_transactions = net.send_query(q) for tx in pending_transactions.transactions_response.transactions: if tx.payload.reduced_payload.creator_account_id == 'alice@test': - del tx.signatures[:] # we need do this temporarily, otherwise accept will not reach MST engine + # we need do this temporarily, otherwise accept will not reach MST engine + del tx.signatures[:] else: - ic.sign_transaction(tx, *alice_private_keys) # intentionally alice keys were used to fail bob's txs + # intentionally alice keys were used to fail bob's txs + ic.sign_transaction(tx, *alice_private_keys) # zeroes as private keys are also acceptable - send_batch_and_print_status(*pending_transactions.transactions_response.transactions) + send_batch_and_print_status( + *pending_transactions.transactions_response.transactions) create_users() diff --git a/example/python/blocks-query.py b/example/python/blocks-query.py index 95c11ba804..8da5328223 100644 --- a/example/python/blocks-query.py +++ b/example/python/blocks-query.py @@ -1,45 +1,46 @@ -import sys -sys.path.insert(0, 'build/shared_model/bindings') -import iroha +# +# Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +# -import endpoint_pb2_grpc -import queries_pb2 -import grpc -import time +import sys +if sys.version_info[0] < 3: + raise Exception('Python 3 or a more recent version is required.') -blocks_query_builder = iroha.ModelBlocksQueryBuilder() -crypto = iroha.ModelCrypto() -admin_priv = open("../admin@test.priv", "r").read() -admin_pub = open("../admin@test.pub", "r").read() -key_pair = crypto.convertFromExisting(admin_pub, admin_priv) +from irohalib import IrohaCrypto +from irohalib import Iroha, IrohaGrpc -creator = "admin@test" -current_time = int(round(time.time() * 1000)) - 10**5 -def get_blocks(): - query = blocks_query_builder.creatorAccountId(creator)\ - .createdTime(current_time)\ - .queryCounter(1) \ - .build() +admin_private_key = open('../admin@test.priv').read() +iroha = Iroha('admin@test') +net = IrohaGrpc() - query_blob = iroha.ModelProtoBlocksQuery(query).signAndAddSignature(key_pair).finish().blob() - proto_query = queries_pb2.BlocksQuery() - if sys.version_info[0] == 2: - tmp = ''.join(map(chr, query_blob)) - else: - tmp = bytes(query_blob) +def trace(func): + """ + A decorator for tracing methods' begin/end execution points + """ + def tracer(*args, **kwargs): + name = func.__name__ + print('\tEntering "{}"'.format(name)) + result = func(*args, **kwargs) + print('\tLeaving "{}"'.format(name)) + return result + return tracer - proto_query.ParseFromString(tmp) - channel = grpc.insecure_channel('127.0.0.1:50051') - query_stub = endpoint_pb2_grpc.QueryServiceStub(channel) - query_response = query_stub.FetchCommits(proto_query) +@trace +def get_blocks(): + """ + Subscribe to blocks stream from the network + :return: + """ + query = iroha.blocks_query() + IrohaCrypto.sign_query(query, admin_private_key) + for block in net.send_blocks_stream_query(query): + print('The next block arrived:', block) - for block in query_response: - print("block:") - print(block) get_blocks() diff --git a/example/python/ed25519.py b/example/python/ed25519.py index 0aa9a7f33e..dc68f81d4d 100644 --- a/example/python/ed25519.py +++ b/example/python/ed25519.py @@ -43,6 +43,9 @@ import operator import sys +if sys.version_info < (3, 6): + import sha3 + __version__ = "1.0.dev0" # Useful for very coarse version differentiation. @@ -56,11 +59,9 @@ int2byte = chr range = xrange - def indexbytes(buf, i): return ord(buf[i]) - def intlist2bytes(l): return b"".join(chr(c) for c in l) diff --git a/example/python/irohalib.md b/example/python/irohalib.md index fe7255db8c..9b36aef016 100644 --- a/example/python/irohalib.md +++ b/example/python/irohalib.md @@ -12,6 +12,7 @@ All you have to do before the first usage is: ```sh pip3 install grpcio-tools +pip3 install pysha3 protoc --proto_path=../../shared_model/schema --python_out=. ../../shared_model/schema/*.proto python -m grpc_tools.protoc --proto_path=../../shared_model/schema --python_out=. --grpc_python_out=. 
../../shared_model/schema/endpoint.proto diff --git a/example/python/irohalib.py b/example/python/irohalib.py index ac79f4ca03..3df09a5a6c 100644 --- a/example/python/irohalib.py +++ b/example/python/irohalib.py @@ -44,7 +44,15 @@ def hash(proto_with_payload): :proto_with_payload: proto transaction or query :return: bytes representation of hash """ - bytes = proto_with_payload.payload.SerializeToString() + obj = None + if hasattr(proto_with_payload, 'payload'): + obj = getattr(proto_with_payload, 'payload') + # hash of meta is implemented for block streaming queries, + # because they do not have a payload in their schema + elif hasattr(proto_with_payload, 'meta'): + obj = getattr(proto_with_payload, 'meta') + + bytes = obj.SerializeToString() hash = hashlib.sha3_256(bytes).digest() return hash @@ -62,8 +70,8 @@ def _signature(message, private_key): message_hash = IrohaCrypto.hash(message) signature_bytes = ed25519.signature_unsafe(message_hash, sk, pk) signature = primitive_pb2.Signature() - signature.public_key = pk - signature.signature = signature_bytes + signature.public_key = public_key + signature.signature = binascii.hexlify(signature_bytes) return signature @staticmethod @@ -99,11 +107,12 @@ def reduced_hash(transaction): """ Calculates hash of reduced payload of a transaction :param transaction: transaction to be processed - :return: bytes representation of hash + :return: hex representation of hash """ bytes = transaction.payload.reduced_payload.SerializeToString() hash = hashlib.sha3_256(bytes).digest() - return hash + hex_hash = binascii.hexlify(hash) + return hex_hash @staticmethod def private_key(): @@ -113,11 +122,6 @@ def private_key(): """ return binascii.b2a_hex(os.urandom(32)) - @staticmethod - def hex_key_to_bytes(key): - """Convert hex string to bytes string. 
The string is just a container""" - return binascii.unhexlify(key) - class Iroha(object): """ @@ -176,24 +180,41 @@ def command(name, **kwargs): field_name = Iroha._camel_case_to_snake_case(name) internal_command = getattr(command_wrapper, field_name) for key, value in kwargs.items(): + if 'permissions' == key: + permissions_attr = getattr(internal_command, key) + permissions_attr.extend(value) + continue + if 'peer' == key: + peer_attr = getattr(internal_command, key) + peer_attr.CopyFrom(value) + continue setattr(internal_command, key, value) return command_wrapper - def query(self, name, counter=1, creator_account=None, created_time=None, **kwargs): + def query(self, name, counter=1, creator_account=None, created_time=None, page_size=None, first_tx_hash=None, + **kwargs): """ Creates a protobuf query with specified set of entities :param name: CamelCased name of query to be executed :param counter: query counter, should be incremented for each new query :param creator_account: account id of query creator :param created_time: query creation timestamp in milliseconds + :param page_size: a non-zero positive number, size of result rowset for queries with pagination + :param first_tx_hash: optional hash of a transaction that will be the beginning of the next page :param kwargs: query arguments as they defined in schema :return: a proto query """ assert creator_account or self.creator_account, "No account name specified as query creator id" + pagination_meta = None if not created_time: created_time = self.now() if not creator_account: creator_account = self.creator_account + if page_size or first_tx_hash: + pagination_meta = queries_pb2.TxPaginationMeta() + pagination_meta.page_size = page_size + if first_tx_hash: + pagination_meta.first_tx_hash = first_tx_hash meta = queries_pb2.QueryPayloadMeta() meta.created_time = created_time @@ -205,12 +226,41 @@ def query(self, name, counter=1, creator_account=None, created_time=None, **kwar field_name = Iroha._camel_case_to_snake_case(name) internal_query = getattr(query_wrapper.payload, field_name) for key, value in kwargs.items(): + if 'tx_hashes' == key: + hashes_attr = getattr(internal_query, key) + hashes_attr.extend(value) + continue setattr(internal_query, key, value) + if pagination_meta: + pagination_meta_attr = getattr(internal_query, 'pagination_meta') + pagination_meta_attr.CopyFrom(pagination_meta) if not len(kwargs): message = getattr(queries_pb2, name)() internal_query.CopyFrom(message) return query_wrapper + def blocks_query(self, counter=1, creator_account=None, created_time=None): + """ + Creates a protobuf query for a blocks stream + :param counter: query counter, should be incremented for each new query + :param creator_account: account id of query creator + :param created_time: query creation timestamp in milliseconds + :return: a proto blocks query + """ + if not created_time: + created_time = self.now() + if not creator_account: + creator_account = self.creator_account + + meta = queries_pb2.QueryPayloadMeta() + meta.created_time = created_time + meta.creator_account_id = creator_account + meta.query_counter = counter + + query_wrapper = queries_pb2.BlocksQuery() + query_wrapper.meta.CopyFrom(meta) + return query_wrapper + @staticmethod def batch(*transactions, atomic=True): """ @@ -240,8 +290,10 @@ class IrohaGrpc(object): def __init__(self, address=None): self._address = address if address else '127.0.0.1:50051' self._channel = grpc.insecure_channel(self._address) - self._command_service_stub = 
endpoint_pb2_grpc.CommandServiceStub(self._channel) - self._query_service_stub = endpoint_pb2_grpc.QueryServiceStub(self._channel) + self._command_service_stub = endpoint_pb2_grpc.CommandService_v1Stub( + self._channel) + self._query_service_stub = endpoint_pb2_grpc.QueryService_v1Stub( + self._channel) def send_tx(self, transaction): """ @@ -284,6 +336,17 @@ def send_query(self, query): response = self._query_service_stub.Find(query) return response + def send_blocks_stream_query(self, query): + """ + Send a query for blocks stream to Iroha + :param query: protobuf BlocksQuery + :return: an iterable over a stream of blocks + :raise: grpc.RpcError with .code() available in case of any error + """ + response = self._query_service_stub.FetchCommits(query) + for block in response: + yield block + def tx_status(self, transaction): """ Request a status of a transaction @@ -292,7 +355,7 @@ def tx_status(self, transaction): integral status code, and error message string (will be empty if no error occurred) """ request = endpoint_pb2.TxStatusRequest() - request.tx_hash = IrohaCrypto.hash(transaction) + request.tx_hash = binascii.hexlify(IrohaCrypto.hash(transaction)) response = self._command_service_stub.Status(request) status_code = response.tx_status status_name = endpoint_pb2.TxStatus.Name(response.tx_status) @@ -307,10 +370,10 @@ def tx_status_stream(self, transaction): integral status code, and error message string (will be empty if no error occurred) """ request = endpoint_pb2.TxStatusRequest() - request.tx_hash = IrohaCrypto.hash(transaction) + request.tx_hash = binascii.hexlify(IrohaCrypto.hash(transaction)) response = self._command_service_stub.StatusStream(request) for status in response: status_name = endpoint_pb2.TxStatus.Name(status.tx_status) status_code = status.tx_status - error_message = status.error_message - yield status_name, status_code, error_message + error_code = status.error_code + yield status_name, status_code, error_code diff --git a/example/python/permissions/can_add_asset_qty.py b/example/python/permissions/can_add_asset_qty.py index afdb8d3521..58e820cdad 100644 --- a/example/python/permissions/can_add_asset_qty.py +++ b/example/python/permissions/can_add_asset_qty.py @@ -3,38 +3,30 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kAddAssetQty]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .createAsset('coin', 'test', 2) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_add_asset_qty] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.append( + iroha.command('CreateAsset', asset_name='coin', domain_id='test', precision=2)) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def 
add_asset_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .addAssetQuantity('coin#test', '5000.99') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('AddAssetQuantity', asset_id='coin#test', amount='5000.99') + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_add_domain_asset_qty.py b/example/python/permissions/can_add_domain_asset_qty.py index e556592766..9f9103fb68 100644 --- a/example/python/permissions/can_add_domain_asset_qty.py +++ b/example/python/permissions/can_add_domain_asset_qty.py @@ -6,3 +6,5 @@ import can_add_asset_qty # Please see example for can_add_asset_qty permission. + +# TODO igor-egorov 21.01.2019 IR-240 diff --git a/example/python/permissions/can_add_my_signatory.py b/example/python/permissions/can_add_my_signatory.py index 2b23f0a63d..d2bf41ed69 100644 --- a/example/python/permissions/can_add_my_signatory.py +++ b/example/python/permissions/can_add_my_signatory.py @@ -3,51 +3,43 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kAddMySignatory]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_grant_can_add_my_signatory] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.append( + iroha.command('CreateAccount', account_name='bob', domain_id='test', + public_key=irohalib.IrohaCrypto.derive_public_key(bob['key']))) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def grant_can_add_my_signatory_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .grantPermission(bob['id'], iroha.Grantable_kAddMySignatory) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('GrantPermission', account_id=bob['id'], permission=primitive_pb2.can_add_my_signatory) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx @commons.hex def add_signatory_tx(): - extra_key = iroha.ModelCrypto().generateKeypair() - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(bob['id']) \ - .addSignatory(alice['id'], extra_key.publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(bob['key']).finish() + extra_key = irohalib.IrohaCrypto.private_key() + 
tx = iroha.transaction([ + iroha.command('AddSignatory', account_id=alice['id'], + public_key=irohalib.IrohaCrypto.derive_public_key(extra_key)) + ], creator_account=bob['id']) + irohalib.IrohaCrypto.sign_transaction(tx, bob['key']) + return tx diff --git a/example/python/permissions/can_add_peer.py b/example/python/permissions/can_add_peer.py index 98c238f7c1..8184f5f54b 100644 --- a/example/python/permissions/can_add_peer.py +++ b/example/python/permissions/can_add_peer.py @@ -3,38 +3,32 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kAddPeer]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_add_peer] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def add_peer_tx(): - peer_key = iroha.ModelCrypto().generateKeypair() - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .addPeer('192.168.10.10:50541', peer_key.publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + peer_key = irohalib.IrohaCrypto.private_key() + peer = primitive_pb2.Peer() + peer.address = '192.168.10.10:50541' + peer.peer_key = irohalib.IrohaCrypto.derive_public_key(peer_key) + tx = iroha.transaction([ + iroha.command('AddPeer', peer=peer) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_add_signatory.py b/example/python/permissions/can_add_signatory.py index 1adcff3e9a..e2bfdbd2cf 100644 --- a/example/python/permissions/can_add_signatory.py +++ b/example/python/permissions/can_add_signatory.py @@ -3,38 +3,30 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kAddSignatory]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = 
[primitive_pb2.can_add_signatory] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def add_signatory_tx(): - extra_key = iroha.ModelCrypto().generateKeypair() - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .addSignatory(alice['id'], extra_key.publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + extra_key = irohalib.IrohaCrypto.private_key() + tx = iroha.transaction([ + iroha.command('AddSignatory', account_id=alice['id'], + public_key=irohalib.IrohaCrypto.derive_public_key(extra_key)) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_append_role.py b/example/python/permissions/can_append_role.py index 0c9bbf778e..83066af926 100644 --- a/example/python/permissions/can_append_role.py +++ b/example/python/permissions/can_append_role.py @@ -3,46 +3,38 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet( - [iroha.Role_kAppendRole, iroha.Role_kAddPeer] - ) - second_role = iroha.RolePermissionSet([iroha.Role_kAddPeer]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createRole('second_role', second_role) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .appendRole(alice['id'], 'second_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_append_role, primitive_pb2.can_add_peer] + second_role_permissions = [primitive_pb2.can_add_peer] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.extend([ + iroha.command('CreateRole', role_name='second_role', permissions=second_role_permissions), + iroha.command('CreateAccount', account_name='bob', domain_id='test', + public_key=irohalib.IrohaCrypto.derive_public_key(bob['key'])), + iroha.command('AppendRole', account_id=alice['id'], role_name='second_role') + ]) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def append_role_tx(): # Note that you can append only that role that has # lesser or the same set of permissions as transaction creator. 
- tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .appendRole(bob['id'], 'second_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('AppendRole', account_id=bob['id'], role_name='second_role') + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_create_account.py b/example/python/permissions/can_create_account.py index bd13821bec..3a4dd0769e 100644 --- a/example/python/permissions/can_create_account.py +++ b/example/python/permissions/can_create_account.py @@ -3,40 +3,29 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet( - [iroha.Role_kCreateAccount] - ) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_create_account] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def create_account_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('CreateAccount', account_name='bob', domain_id='test', public_key=bob['key']) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_create_asset.py b/example/python/permissions/can_create_asset.py index 6d52788dbc..a3f00a6c52 100644 --- a/example/python/permissions/can_create_asset.py +++ b/example/python/permissions/can_create_asset.py @@ -3,37 +3,28 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kCreateAsset]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return 
iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_create_asset] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def create_asset_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .createAsset('coin', 'test', 2) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('CreateAsset', asset_name='coin', domain_id='test', precision=2) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_create_domain.py b/example/python/permissions/can_create_domain.py index 06d6032edf..4390d1a461 100644 --- a/example/python/permissions/can_create_domain.py +++ b/example/python/permissions/can_create_domain.py @@ -3,38 +3,29 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kCreateDomain]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_create_domain] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def create_domain_tx(): # 'test_role' was created in genesis transaction - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .createDomain('another-domain', 'test_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('CreateDomain', domain_id='another-domain', default_role='test_role') + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_create_role.py b/example/python/permissions/can_create_role.py index 474e4f2a2f..da410c2310 100644 --- a/example/python/permissions/can_create_role.py +++ b/example/python/permissions/can_create_role.py @@ -3,42 +3,31 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet( - [iroha.Role_kCreateRole, iroha.Role_kCreateDomain] - ) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', 
admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_create_role, primitive_pb2.can_create_domain] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def create_role_tx(): # You can pick only those permissions that # already belong to account of transaction creator. - role_permissions = iroha.RolePermissionSet([iroha.Role_kCreateDomain]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .createRole('newrole', role_permissions) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + role_permissions = [primitive_pb2.can_create_domain] + tx = iroha.transaction([ + iroha.command('CreateRole', role_name='newrole', permissions=role_permissions) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_detach_role.py b/example/python/permissions/can_detach_role.py index 4b5cee6463..03847ba538 100644 --- a/example/python/permissions/can_detach_role.py +++ b/example/python/permissions/can_detach_role.py @@ -3,37 +3,28 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kDetachRole]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_detach_role] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def detach_role_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .detachRole(admin['id'], 'test_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('DetachRole', account_id=admin['id'], role_name='test_role') + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_get_all_acc_ast.py b/example/python/permissions/can_get_all_acc_ast.py index 96aaa55e8f..a50eaad91f 100644 --- a/example/python/permissions/can_get_all_acc_ast.py +++ 
b/example/python/permissions/can_get_all_acc_ast.py @@ -3,38 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@first') alice = commons.new_user('alice@second') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetAllAccAst]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('first', 'admin_role') \ - .createDomain('second', 'test_role') \ - .createAccount('admin', 'first', admin['key'].publicKey()) \ - .createAccount('alice', 'second', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_all_acc_ast] + genesis_commands = commons.genesis_block(admin, alice, test_permissions, multidomain=True) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_assets_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccountAssets(admin['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccountAssets', creator_account=alice['id'], account_id=admin['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_all_acc_ast_txs.py b/example/python/permissions/can_get_all_acc_ast_txs.py index af6f3d29e5..632d1aa163 100644 --- a/example/python/permissions/can_get_all_acc_ast_txs.py +++ b/example/python/permissions/can_get_all_acc_ast_txs.py @@ -3,45 +3,41 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@first') alice = commons.new_user('alice@second') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([ - iroha.Role_kGetAllAccAstTxs, - iroha.Role_kReceive, - iroha.Role_kTransfer + test_permissions = [ + primitive_pb2.can_get_all_acc_ast_txs, + primitive_pb2.can_receive, + primitive_pb2.can_transfer + ] + genesis_commands = commons.genesis_block(admin, alice, test_permissions, multidomain=True) + genesis_commands.extend([ + iroha.command('CreateAsset', asset_name='coin', domain_id='first', precision=2), + iroha.command('AddAssetQuantity', asset_id='coin#first', amount='300.00'), + iroha.command('TransferAsset', + src_account_id=admin['id'], + dest_account_id=alice['id'], + asset_id='coin#first', + description='top up', + amount='200.00') ]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('first', 'admin_role') \ - .createDomain('second', 'test_role') \ - .createAccount('admin', 'first', admin['key'].publicKey()) \ - .createAccount('alice', 'second', alice['key'].publicKey()) \ - .createAsset('coin', 'first', 2) \ - .addAssetQuantity('coin#first', '300.00') \ - .transferAsset(admin['id'], alice['id'], 
'coin#first', 'top up', '200.00') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_asset_transactions_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccountAssetTransactions(admin['id'], 'coin#first') \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccountAssetTransactions', creator_account=alice['id'], page_size=10, + account_id=admin['id'], asset_id='coin#first') + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_all_acc_detail.py b/example/python/permissions/can_get_all_acc_detail.py index 0a62055576..f1642556cb 100644 --- a/example/python/permissions/can_get_all_acc_detail.py +++ b/example/python/permissions/can_get_all_acc_detail.py @@ -3,38 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@first') alice = commons.new_user('alice@second') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetAllAccDetail]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('first', 'admin_role') \ - .createDomain('second', 'test_role') \ - .createAccount('admin', 'first', admin['key'].publicKey()) \ - .createAccount('alice', 'second', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_all_acc_detail] + genesis_commands = commons.genesis_block(admin, alice, test_permissions, multidomain=True) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_detail_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccountDetail(admin['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccountDetail', creator_account=alice['id'], account_id=admin['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_all_acc_txs.py b/example/python/permissions/can_get_all_acc_txs.py index 18c41042f7..f787d6ed96 100644 --- a/example/python/permissions/can_get_all_acc_txs.py +++ b/example/python/permissions/can_get_all_acc_txs.py @@ -3,38 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@first') alice = commons.new_user('alice@second') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetAllAccTxs]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - 
.createRole('test_role', test_permissions) \ - .createDomain('first', 'admin_role') \ - .createDomain('second', 'test_role') \ - .createAccount('admin', 'first', admin['key'].publicKey()) \ - .createAccount('alice', 'second', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_all_acc_txs] + genesis_commands = commons.genesis_block(admin, alice, test_permissions, multidomain=True) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_transactions_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccountTransactions(admin['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccountTransactions', creator_account=alice['id'], account_id=admin['id'], page_size=10) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_all_accounts.py b/example/python/permissions/can_get_all_accounts.py index 3adeec05a4..c44bab3925 100644 --- a/example/python/permissions/can_get_all_accounts.py +++ b/example/python/permissions/can_get_all_accounts.py @@ -3,38 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@first') alice = commons.new_user('alice@second') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetAllAccounts]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('first', 'admin_role') \ - .createDomain('second', 'test_role') \ - .createAccount('admin', 'first', admin['key'].publicKey()) \ - .createAccount('alice', 'second', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_all_accounts] + genesis_commands = commons.genesis_block(admin, alice, test_permissions, multidomain=True) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccount(admin['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccount', creator_account=alice['id'], account_id=admin['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_all_signatories.py b/example/python/permissions/can_get_all_signatories.py index bf4b3b7c54..2db8e02fc8 100644 --- a/example/python/permissions/can_get_all_signatories.py +++ b/example/python/permissions/can_get_all_signatories.py @@ -3,38 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@first') alice = commons.new_user('alice@second') +iroha = irohalib.Iroha(admin['id']) @commons.hex def 
genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetAllSignatories]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('first', 'admin_role') \ - .createDomain('second', 'test_role') \ - .createAccount('admin', 'first', admin['key'].publicKey()) \ - .createAccount('alice', 'second', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_all_signatories] + genesis_commands = commons.genesis_block(admin, alice, test_permissions, multidomain=True) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def signatories_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getSignatories(admin['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetSignatories', creator_account=alice['id'], account_id=admin['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_all_txs.py b/example/python/permissions/can_get_all_txs.py index b17e76fea9..7988690b5e 100644 --- a/example/python/permissions/can_get_all_txs.py +++ b/example/python/permissions/can_get_all_txs.py @@ -3,70 +3,56 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib +import binascii import commons +import primitive_pb2 admin = commons.new_user('admin@first') alice = commons.new_user('alice@second') +iroha = irohalib.Iroha(admin['id']) admin_tx1_hash = None -admin_tx2_hash_blob = None +admin_tx2_hash = None @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetAllTxs]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('first', 'admin_role') \ - .createDomain('second', 'test_role') \ - .createAccount('admin', 'first', admin['key'].publicKey()) \ - .createAccount('alice', 'second', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_all_txs] + genesis_commands = commons.genesis_block(admin, alice, test_permissions, multidomain=True) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def admin_action_1_tx(): global admin_tx1_hash - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .createAsset('coin', 'second', 2) \ - .build() - admin_tx1_hash = tx.hash() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + tx = iroha.transaction([ + iroha.command('CreateAsset', asset_name='coin', domain_id='second', precision=2) + ]) + admin_tx1_hash = irohalib.IrohaCrypto.hash(tx) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def admin_action_2_tx(): - global admin_tx2_hash_blob - tx = 
iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .setAccountDetail(admin['id'], 'hyperledger', 'iroha') \ - .build() - admin_tx2_hash_blob = tx.hash().blob() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + global admin_tx2_hash + tx = iroha.transaction([ + iroha.command('SetAccountDetail', account_id=admin['id'], key='hyperledger', value='iroha') + ]) + admin_tx2_hash = irohalib.IrohaCrypto.hash(tx) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def transactions_query(): - hashes = iroha.HashVector() - hashes.append(admin_tx1_hash) - hashes.append(iroha.Hash(iroha.Blob(admin_tx2_hash_blob))) - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getTransactions(hashes) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + hashes = [ + binascii.hexlify(admin_tx1_hash), + binascii.hexlify(admin_tx2_hash) + ] + query = iroha.query('GetTransactions', tx_hashes=hashes, creator_account=alice['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_domain_acc_ast.py b/example/python/permissions/can_get_domain_acc_ast.py index 67cddb006c..15f814f744 100644 --- a/example/python/permissions/can_get_domain_acc_ast.py +++ b/example/python/permissions/can_get_domain_acc_ast.py @@ -3,37 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetDomainAccAst]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_domain_acc_ast] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_assets_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccountAssets(admin['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccountAssets', account_id=admin['id'], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_domain_acc_ast_txs.py b/example/python/permissions/can_get_domain_acc_ast_txs.py index 2feeab8334..612d7602db 100644 --- a/example/python/permissions/can_get_domain_acc_ast_txs.py +++ b/example/python/permissions/can_get_domain_acc_ast_txs.py @@ -3,40 +3,37 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = 
commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetDomainAccAstTxs]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAsset('coin', 'test', 2) \ - .addAssetQuantity('coin#test', '500.69') \ - .transferAsset(admin['id'], alice['id'], 'coin#test', 'top up', '10.00') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_domain_acc_ast_txs] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.extend([ + iroha.command('CreateAsset', asset_name='coin', domain_id='test', precision=2), + iroha.command('AddAssetQuantity', asset_id='coin#test', amount='500.69'), + iroha.command('TransferAsset', + src_account_id=admin['id'], + dest_account_id=alice['id'], + asset_id='coin#test', + description='top up', + amount='10.00') + ]) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_asset_transactions_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccountAssetTransactions(admin['id'], 'coin#test') \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccountAssetTransactions', account_id=admin['id'], + asset_id='coin#test', creator_account=alice['id'], page_size=10) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_domain_acc_detail.py b/example/python/permissions/can_get_domain_acc_detail.py index b3612a62c2..a03cac555a 100644 --- a/example/python/permissions/can_get_domain_acc_detail.py +++ b/example/python/permissions/can_get_domain_acc_detail.py @@ -3,37 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetDomainAccDetail]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_domain_acc_detail] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_detail_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - 
.queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccountDetail(admin['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccountDetail', creator_account=alice['id'], account_id=admin['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_domain_acc_txs.py b/example/python/permissions/can_get_domain_acc_txs.py index ea67addb14..8111f066ed 100644 --- a/example/python/permissions/can_get_domain_acc_txs.py +++ b/example/python/permissions/can_get_domain_acc_txs.py @@ -3,37 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetDomainAccTxs]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_domain_acc_txs] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_transactions_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccountTransactions(admin['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccountTransactions', creator_account=alice['id'], account_id=admin['id'], page_size=10) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_domain_accounts.py b/example/python/permissions/can_get_domain_accounts.py index d043998732..e604043c1a 100644 --- a/example/python/permissions/can_get_domain_accounts.py +++ b/example/python/permissions/can_get_domain_accounts.py @@ -3,37 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetDomainAccounts]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_domain_accounts] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = 
iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccount(admin['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccount', creator_account=alice['id'], account_id=admin['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_domain_signatories.py b/example/python/permissions/can_get_domain_signatories.py index ffbbe909dd..381ed0f069 100644 --- a/example/python/permissions/can_get_domain_signatories.py +++ b/example/python/permissions/can_get_domain_signatories.py @@ -3,37 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetDomainSignatories]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_domain_signatories] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def signatories_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getSignatories(admin['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetSignatories', creator_account=alice['id'], account_id=admin['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_my_acc_ast.py b/example/python/permissions/can_get_my_acc_ast.py index bb08774303..492d63b0fc 100644 --- a/example/python/permissions/can_get_my_acc_ast.py +++ b/example/python/permissions/can_get_my_acc_ast.py @@ -3,37 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetMyAccAst]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - 
.signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_my_acc_ast] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_assets_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccountAssets(alice['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccountAssets', creator_account=alice['id'], account_id=alice['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_my_acc_ast_txs.py b/example/python/permissions/can_get_my_acc_ast_txs.py index c9170f4a35..6675d5f991 100644 --- a/example/python/permissions/can_get_my_acc_ast_txs.py +++ b/example/python/permissions/can_get_my_acc_ast_txs.py @@ -3,40 +3,37 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetMyAccAstTxs]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAsset('coin', 'test', 2) \ - .addAssetQuantity('coin#test', '500.69') \ - .transferAsset(admin['id'], alice['id'], 'coin#test', 'top up', '10.00') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_my_acc_ast_txs] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.extend([ + iroha.command('CreateAsset', asset_name='coin', domain_id='test', precision=2), + iroha.command('AddAssetQuantity', asset_id='coin#test', amount='500.69'), + iroha.command('TransferAsset', + src_account_id=admin['id'], + dest_account_id=alice['id'], + asset_id='coin#test', + description='top up', + amount='10.00') + ]) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_asset_transactions_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccountAssetTransactions(alice['id'], 'coin#test') \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccountAssetTransactions', creator_account=alice['id'], account_id=alice['id'], + asset_id='coin#test', page_size=10) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_my_acc_detail.py b/example/python/permissions/can_get_my_acc_detail.py index 0e24b3e0f0..75603a0026 100644 --- a/example/python/permissions/can_get_my_acc_detail.py +++ b/example/python/permissions/can_get_my_acc_detail.py @@ -3,37 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import 
irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetMyAccDetail]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_my_acc_detail] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_detail_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccountDetail(alice['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccountDetail', creator_account=alice['id'], account_id=alice['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_my_acc_txs.py b/example/python/permissions/can_get_my_acc_txs.py index b1611fd23d..d8d6461772 100644 --- a/example/python/permissions/can_get_my_acc_txs.py +++ b/example/python/permissions/can_get_my_acc_txs.py @@ -3,37 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetMyAccTxs]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_my_acc_txs] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_transactions_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccountTransactions(alice['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccountTransactions', creator_account=alice['id'], account_id=alice['id'], page_size=10) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_my_account.py b/example/python/permissions/can_get_my_account.py index 522099d4df..eddf3f4782 100644 --- 
a/example/python/permissions/can_get_my_account.py +++ b/example/python/permissions/can_get_my_account.py @@ -3,37 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetMyAccount]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_my_account] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def account_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAccount(alice['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAccount', creator_account=alice['id'], account_id=alice['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_get_my_signatories.py b/example/python/permissions/can_get_my_signatories.py index ec8f5b4499..df549bbad3 100644 --- a/example/python/permissions/can_get_my_signatories.py +++ b/example/python/permissions/can_get_my_signatories.py @@ -3,37 +3,26 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetMySignatories]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_my_signatories] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def signatories_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getSignatories(alice['id']) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetSignatories', creator_account=alice['id'], account_id=alice['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git 
a/example/python/permissions/can_get_my_txs.py b/example/python/permissions/can_get_my_txs.py index 4c281a96a1..67fe80978c 100644 --- a/example/python/permissions/can_get_my_txs.py +++ b/example/python/permissions/can_get_my_txs.py @@ -3,74 +3,60 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import binascii +import primitive_pb2 admin = commons.new_user('admin@first') alice = commons.new_user('alice@second') +iroha = irohalib.Iroha(admin['id']) alice_tx1_hash = None -alice_tx2_hash_blob = None +alice_tx2_hash = None @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([ - iroha.Role_kGetMyTxs, - iroha.Role_kAddAssetQty, - iroha.Role_kCreateAsset - ]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('first', 'admin_role') \ - .createDomain('second', 'test_role') \ - .createAccount('admin', 'first', admin['key'].publicKey()) \ - .createAccount('alice', 'second', alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [ + primitive_pb2.can_get_my_txs, + primitive_pb2.can_add_asset_qty, + primitive_pb2.can_create_asset + ] + genesis_commands = commons.genesis_block(admin, alice, test_permissions, multidomain=True) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def alice_action_1_tx(): global alice_tx1_hash - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .createAsset('coin', 'first', 2) \ - .build() - alice_tx1_hash = tx.hash() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('CreateAsset', asset_name='coin', domain_id='first', precision=2) + ], creator_account=alice['id']) + alice_tx1_hash = irohalib.IrohaCrypto.hash(tx) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx @commons.hex def alice_action_2_tx(): - global alice_tx2_hash_blob - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .addAssetQuantity('coin#first', '600.30') \ - .build() - alice_tx2_hash_blob = tx.hash().blob() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + global alice_tx2_hash + tx = iroha.transaction([ + iroha.command('AddAssetQuantity', asset_id='coin#first', amount='600.30') + ], creator_account=alice['id']) + alice_tx2_hash = irohalib.IrohaCrypto.hash(tx) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx @commons.hex def transactions_query(): - hashes = iroha.HashVector() - hashes.append(alice_tx1_hash) - hashes.append(iroha.Hash(iroha.Blob(alice_tx2_hash_blob))) - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getTransactions(hashes) \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + hashes = [ + binascii.hexlify(alice_tx1_hash), + binascii.hexlify(alice_tx2_hash) + ] + query = iroha.query('GetTransactions', creator_account=alice['id'], tx_hashes=hashes) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git 
a/example/python/permissions/can_get_roles.py b/example/python/permissions/can_get_roles.py index defcc3de61..ff95c9a205 100644 --- a/example/python/permissions/can_get_roles.py +++ b/example/python/permissions/can_get_roles.py @@ -3,50 +3,33 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kGetRoles]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAsset('coin', 'test', 2) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_get_roles] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def get_system_roles_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getRoles() \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetRoles', creator_account=alice['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query @commons.hex def get_role_permissions_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(2) \ - .creatorAccountId(alice['id']) \ - .getRolePermissions('admin_role') \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetRolePermissions', creator_account=alice['id'], counter=2, role_id='admin_role') + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_grant_can_add_my_signatory.py b/example/python/permissions/can_grant_can_add_my_signatory.py index c81f91f799..199e667f25 100644 --- a/example/python/permissions/can_grant_can_add_my_signatory.py +++ b/example/python/permissions/can_grant_can_add_my_signatory.py @@ -3,50 +3,41 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kAddMySignatory]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - 
.signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_grant_can_add_my_signatory] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.append( + iroha.command('CreateAccount', account_name='bob', domain_id='test', + public_key=irohalib.IrohaCrypto.derive_public_key(bob['key']))) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def grant_can_add_my_signatory_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .grantPermission(bob['id'], iroha.Grantable_kAddMySignatory) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('GrantPermission', account_id=bob['id'], permission=primitive_pb2.can_add_my_signatory) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx @commons.hex def revoke_can_add_my_signatory_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .revokePermission(bob['id'], iroha.Grantable_kAddMySignatory) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('RevokePermission', account_id=bob['id'], permission=primitive_pb2.can_add_my_signatory) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_grant_can_remove_my_signatory.py b/example/python/permissions/can_grant_can_remove_my_signatory.py index a9d8cc8f1b..2bf3804aa9 100644 --- a/example/python/permissions/can_grant_can_remove_my_signatory.py +++ b/example/python/permissions/can_grant_can_remove_my_signatory.py @@ -3,50 +3,41 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kRemoveMySignatory]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_grant_can_remove_my_signatory] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.append( + iroha.command('CreateAccount', account_name='bob', domain_id='test', + public_key=irohalib.IrohaCrypto.derive_public_key(bob['key']))) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def grant_can_remove_my_signatory_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .grantPermission(bob['id'], 
iroha.Grantable_kRemoveMySignatory) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('GrantPermission', account_id=bob['id'], permission=primitive_pb2.can_remove_my_signatory) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx @commons.hex def revoke_can_remove_my_signatory_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .revokePermission(bob['id'], iroha.Grantable_kRemoveMySignatory) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('RevokePermission', account_id=bob['id'], permission=primitive_pb2.can_remove_my_signatory) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_grant_can_set_my_account_detail.py b/example/python/permissions/can_grant_can_set_my_account_detail.py index d4f9dac9d0..35c496fdac 100644 --- a/example/python/permissions/can_grant_can_set_my_account_detail.py +++ b/example/python/permissions/can_grant_can_set_my_account_detail.py @@ -3,50 +3,41 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kSetMyAccountDetail]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_grant_can_set_my_account_detail] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.append( + iroha.command('CreateAccount', account_name='bob', domain_id='test', + public_key=irohalib.IrohaCrypto.derive_public_key(bob['key']))) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def grant_can_set_my_account_detail_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .grantPermission(bob['id'], iroha.Grantable_kSetMyAccountDetail) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('GrantPermission', account_id=bob['id'], permission=primitive_pb2.can_set_my_account_detail) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx @commons.hex def revoke_can_set_my_account_detail_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .revokePermission(bob['id'], iroha.Grantable_kSetMyAccountDetail) \ - .build() - return 
iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('RevokePermission', account_id=bob['id'], permission=primitive_pb2.can_set_my_account_detail) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_grant_can_set_my_quorum.py b/example/python/permissions/can_grant_can_set_my_quorum.py index b42ee3a410..a9b1c21e7f 100644 --- a/example/python/permissions/can_grant_can_set_my_quorum.py +++ b/example/python/permissions/can_grant_can_set_my_quorum.py @@ -3,50 +3,42 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kSetMyQuorum]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_grant_can_set_my_quorum] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.append( + iroha.command('CreateAccount', account_name='bob', domain_id='test', + public_key=irohalib.IrohaCrypto.derive_public_key(bob['key'])) + ) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def grant_can_set_my_quorum_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .grantPermission(bob['id'], iroha.Grantable_kSetMyQuorum) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('GrantPermission', account_id=bob['id'], permission=primitive_pb2.can_set_my_quorum) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx @commons.hex def revoke_can_set_my_quorum_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .revokePermission(bob['id'], iroha.Grantable_kSetMyQuorum) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('RevokePermission', account_id=bob['id'], permission=primitive_pb2.can_set_my_quorum) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_grant_can_transfer_my_assets.py b/example/python/permissions/can_grant_can_transfer_my_assets.py index 817fbd10c6..b87671dbe5 100644 --- a/example/python/permissions/can_grant_can_transfer_my_assets.py +++ b/example/python/permissions/can_grant_can_transfer_my_assets.py @@ -3,57 +3,54 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib 
import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([ - iroha.Role_kTransferMyAssets, - iroha.Role_kReceive, - iroha.Role_kTransfer + test_permissions = [ + primitive_pb2.can_grant_can_transfer_my_assets, + primitive_pb2.can_receive, + primitive_pb2.can_transfer + ] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.extend([ + iroha.command('CreateAccount', account_name='bob', domain_id='test', + public_key=irohalib.IrohaCrypto.derive_public_key(bob['key'])), + iroha.command('CreateAsset', asset_name='coin', domain_id='test', precision=2), + iroha.command('AddAssetQuantity', asset_id='coin#test', amount='100.00'), + iroha.command('TransferAsset', + src_account_id=admin['id'], + dest_account_id=alice['id'], + asset_id='coin#test', + description='init top up', + amount='90.00') ]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .createAsset('coin', 'test', 2) \ - .addAssetQuantity('coin#test', '100.00') \ - .transferAsset(admin['id'], alice['id'], 'coin#test', 'init top up', '90.00') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def grant_can_transfer_my_assets_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .grantPermission(bob['id'], iroha.Grantable_kTransferMyAssets) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('GrantPermission', account_id=bob['id'], permission=primitive_pb2.can_transfer_my_assets) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx @commons.hex def revoke_can_transfer_my_assets_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .revokePermission(bob['id'], iroha.Grantable_kTransferMyAssets) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('RevokePermission', account_id=bob['id'], permission=primitive_pb2.can_transfer_my_assets) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_read_assets.py b/example/python/permissions/can_read_assets.py index 58eeb3d3c8..2899ad29a9 100644 --- a/example/python/permissions/can_read_assets.py +++ b/example/python/permissions/can_read_assets.py @@ -3,38 +3,29 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = 
irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kReadAssets]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAsset('coin', 'test', 2) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_read_assets] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.append( + iroha.command('CreateAsset', asset_name='coin', domain_id='test', precision=2) + ) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def get_asset_query(): - tx = iroha.ModelQueryBuilder() \ - .createdTime(commons.now()) \ - .queryCounter(1) \ - .creatorAccountId(alice['id']) \ - .getAssetInfo('coin#test') \ - .build() - return iroha.ModelProtoQuery(tx) \ - .signAndAddSignature(alice['key']).finish() + query = iroha.query('GetAssetInfo', asset_id='coin#test', creator_account=alice['id']) + irohalib.IrohaCrypto.sign_query(query, alice['key']) + return query diff --git a/example/python/permissions/can_receive.py b/example/python/permissions/can_receive.py index 0f9552e15f..bb67ae6c32 100644 --- a/example/python/permissions/can_receive.py +++ b/example/python/permissions/can_receive.py @@ -3,45 +3,46 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([ - iroha.Role_kReceive, - iroha.Role_kTransfer + test_permissions = [primitive_pb2.can_transfer, primitive_pb2.can_receive] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.extend([ + iroha.command('CreateAccount', account_name='bob', domain_id='test', + public_key=irohalib.IrohaCrypto.derive_public_key(bob['key'])), + iroha.command('CreateAsset', asset_name='coin', domain_id='test', precision=2), + iroha.command('AddAssetQuantity', asset_id='coin#test', amount='90.00'), + iroha.command('TransferAsset', + src_account_id=admin['id'], + dest_account_id=alice['id'], + asset_id='coin#test', + description='init top up', + amount='90.00') ]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .createAsset('coin', 'test', 2) \ - .addAssetQuantity('coin#test', '100.00') \ - .transferAsset(admin['id'], alice['id'], 'coin#test', 'init top up', '90.00') \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - 
.signAndAddSignature(admin['key']).finish() + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def transfer_asset_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .transferAsset(alice['id'], bob['id'], 'coin#test', 'transfer to Bob', '60.00') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('TransferAsset', + src_account_id=alice['id'], + dest_account_id=bob['id'], + asset_id='coin#test', + description='transfer to Bob', + amount='60.00') + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_remove_my_signatory.py b/example/python/permissions/can_remove_my_signatory.py index 1f17ebd338..f9726534c0 100644 --- a/example/python/permissions/can_remove_my_signatory.py +++ b/example/python/permissions/can_remove_my_signatory.py @@ -3,55 +3,49 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([ - iroha.Role_kRemoveMySignatory, - iroha.Role_kAddSignatory - ]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [ + primitive_pb2.can_grant_can_remove_my_signatory, + primitive_pb2.can_add_signatory + ] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.append( + iroha.command('CreateAccount', account_name='bob', domain_id='test', + public_key=irohalib.IrohaCrypto.derive_public_key(bob['key'])) + ) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def grant_can_remove_my_signatory_tx(): - extra_key = iroha.ModelCrypto().generateKeypair() - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .grantPermission(bob['id'], iroha.Grantable_kRemoveMySignatory) \ - .addSignatory(alice['id'], extra_key.publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + extra_key = irohalib.IrohaCrypto.private_key() + tx = iroha.transaction([ + iroha.command('GrantPermission', account_id=bob['id'], permission=primitive_pb2.can_remove_my_signatory), + iroha.command('AddSignatory', account_id=alice['id'], + public_key=irohalib.IrohaCrypto.derive_public_key(extra_key)) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx @commons.hex def remove_signatory_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - 
.creatorAccountId(bob['id']) \ - .removeSignatory(alice['id'], alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(bob['key']).finish() + tx = iroha.transaction([ + iroha.command('RemoveSignatory', account_id=alice['id'], + public_key=irohalib.IrohaCrypto.derive_public_key(alice['key'])) + ], creator_account=bob['id']) + irohalib.IrohaCrypto.sign_transaction(tx, bob['key']) + return tx diff --git a/example/python/permissions/can_remove_signatory.py b/example/python/permissions/can_remove_signatory.py index 03bf922f93..b24b6708c5 100644 --- a/example/python/permissions/can_remove_signatory.py +++ b/example/python/permissions/can_remove_signatory.py @@ -3,39 +3,34 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kRemoveSignatory]) - extra_key = iroha.ModelCrypto().generateKeypair() - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .addSignatory(alice['id'], extra_key.publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_remove_signatory] + extra_key = irohalib.IrohaCrypto.private_key() + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.append( + iroha.command('AddSignatory', account_id=alice['id'], + public_key=irohalib.IrohaCrypto.derive_public_key(extra_key)) + ) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def remove_signatory_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .removeSignatory(alice['id'], alice['key'].publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('RemoveSignatory', account_id=alice['id'], + public_key=irohalib.IrohaCrypto.derive_public_key(alice['key'])) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_set_detail.py b/example/python/permissions/can_set_detail.py index 577bee5571..a0db306d0e 100644 --- a/example/python/permissions/can_set_detail.py +++ b/example/python/permissions/can_set_detail.py @@ -3,37 +3,28 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kSetDetail]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ 
- .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_set_detail] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def set_account_detail_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .setAccountDetail(admin['id'], 'fav_color', 'red') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('SetAccountDetail', account_id=admin['id'], key='fav_color', value='red') + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_set_my_account_detail.py b/example/python/permissions/can_set_my_account_detail.py index 43477d0a5a..370e905011 100644 --- a/example/python/permissions/can_set_my_account_detail.py +++ b/example/python/permissions/can_set_my_account_detail.py @@ -3,52 +3,42 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([ - iroha.Role_kSetMyAccountDetail, - ]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_grant_can_set_my_account_detail] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.append( + iroha.command('CreateAccount', account_name='bob', domain_id='test', + public_key=irohalib.IrohaCrypto.derive_public_key(bob['key'])) + ) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def grant_permission_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .grantPermission(bob['id'], iroha.Grantable_kSetMyAccountDetail) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('GrantPermission', account_id=bob['id'], permission=primitive_pb2.can_set_my_account_detail) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx @commons.hex def set_detail_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(bob['id']) 
\ - .setAccountDetail(alice['id'], 'bobs', 'call') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(bob['key']).finish() + tx = iroha.transaction([ + iroha.command('SetAccountDetail', account_id=alice['id'], key='fav_year', value='2019') + ], creator_account=bob['id']) + irohalib.IrohaCrypto.sign_transaction(tx, bob['key']) + return tx diff --git a/example/python/permissions/can_set_my_quorum.py b/example/python/permissions/can_set_my_quorum.py index 4310d8f55f..169f820065 100644 --- a/example/python/permissions/can_set_my_quorum.py +++ b/example/python/permissions/can_set_my_quorum.py @@ -3,55 +3,48 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([ - iroha.Role_kSetMyQuorum, - iroha.Role_kAddSignatory - ]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [ + primitive_pb2.can_grant_can_set_my_quorum, + primitive_pb2.can_add_signatory + ] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.append( + iroha.command('CreateAccount', account_name='bob', domain_id='test', + public_key=irohalib.IrohaCrypto.derive_public_key(bob['key'])) + ) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def grant_can_set_my_quorum_tx(): - extra_key = iroha.ModelCrypto().generateKeypair() - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .grantPermission(bob['id'], iroha.Grantable_kSetMyQuorum) \ - .addSignatory(alice['id'], extra_key.publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + extra_key = irohalib.IrohaCrypto.private_key() + tx = iroha.transaction([ + iroha.command('GrantPermission', account_id=bob['id'], permission=primitive_pb2.can_set_my_quorum), + iroha.command('AddSignatory', account_id=alice['id'], + public_key=irohalib.IrohaCrypto.derive_public_key(extra_key)) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx @commons.hex def set_quorum_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(bob['id']) \ - .setAccountQuorum(alice['id'], 2) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(bob['key']).finish() + tx = iroha.transaction([ + iroha.command('SetAccountQuorum', account_id=alice['id'], quorum=2) + ], creator_account=bob['id']) + irohalib.IrohaCrypto.sign_transaction(tx, bob['key']) + return tx diff --git a/example/python/permissions/can_set_quorum.py b/example/python/permissions/can_set_quorum.py index 
33d0b6235b..e26afc5bde 100644 --- a/example/python/permissions/can_set_quorum.py +++ b/example/python/permissions/can_set_quorum.py @@ -3,40 +3,34 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kSetQuorum]) - extra_key = iroha.ModelCrypto().generateKeypair() - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .addSignatory(alice['id'], extra_key.publicKey()) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = [primitive_pb2.can_set_quorum] + extra_key = irohalib.IrohaCrypto.private_key() + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.append( + iroha.command('AddSignatory', account_id=alice['id'], + public_key=irohalib.IrohaCrypto.derive_public_key(extra_key)) + ) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def set_quorum_tx(): # Quourum cannot be greater than amount of keys linked to an account - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .setAccountQuorum(alice['id'], 2) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('SetAccountQuorum', account_id=alice['id'], quorum=2) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_subtract_asset_qty.py b/example/python/permissions/can_subtract_asset_qty.py index e57dc4dd0a..f2b5d906b1 100644 --- a/example/python/permissions/can_subtract_asset_qty.py +++ b/example/python/permissions/can_subtract_asset_qty.py @@ -3,40 +3,38 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([iroha.Role_kSubtractAssetQty]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .createAsset('coin', 'test', 2) \ - .addAssetQuantity('coin#test', '1000.00') \ - .transferAsset(admin['id'], alice['id'], 'coin#test', 'init top up', '999.99') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + test_permissions = 
[primitive_pb2.can_subtract_asset_qty] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.extend([ + iroha.command('CreateAsset', asset_name='coin', domain_id='test', precision=2), + iroha.command('AddAssetQuantity', asset_id='coin#test', amount='1000.00'), + iroha.command('TransferAsset', + src_account_id=admin['id'], + dest_account_id=alice['id'], + asset_id='coin#test', + description='init top up', + amount='999.99') + ]) + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def subtract_asset_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .subtractAssetQuantity('coin#test', '999.99') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('SubtractAssetQuantity', asset_id='coin#test', amount='999.99') + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx diff --git a/example/python/permissions/can_subtract_domain_asset_qty.py b/example/python/permissions/can_subtract_domain_asset_qty.py index 1e3ad3c561..462d44af25 100644 --- a/example/python/permissions/can_subtract_domain_asset_qty.py +++ b/example/python/permissions/can_subtract_domain_asset_qty.py @@ -5,6 +5,6 @@ import can_subtract_asset_qty - # Please see example for can_subtract_asset_qty permission. +# TODO igor-egorov 21.01.2019 IR-240 diff --git a/example/python/permissions/can_transfer.py b/example/python/permissions/can_transfer.py index ddb9d069b9..6121daeffe 100644 --- a/example/python/permissions/can_transfer.py +++ b/example/python/permissions/can_transfer.py @@ -5,7 +5,6 @@ import can_receive - # Please see example for can_receive permission. # By design can_receive and can_transfer permissions # can be tested only together. 
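
All of the migrated permission examples above follow one pattern with the pure-python client: assemble commands with Iroha.command, wrap them into a transaction with Iroha.transaction, sign with IrohaCrypto.sign_transaction, and print the hex-serialized protobuf through the @commons.hex decorator. The following minimal sketch is not part of the patch; it only restates that pattern using calls that already appear in this change set, and the irohalib module and function names are assumptions tied to the library version targeted here.

#!/usr/bin/env python3
#
# Minimal sketch (not part of the patch): the common transaction-building
# pattern used by the migrated examples above. Only calls that appear in
# this change set are used; names may differ in other library versions.
#
import binascii
import irohalib

admin_private_key = irohalib.IrohaCrypto.private_key()   # hex key bytes, as in commons.new_user
admin_public_key = irohalib.IrohaCrypto.derive_public_key(admin_private_key)
iroha = irohalib.Iroha('admin@test')

# Commands are protobuf messages assembled by command name and keyword arguments.
tx = iroha.transaction([
    iroha.command('CreateDomain', domain_id='demo', default_role='user'),
    iroha.command('CreateAsset', asset_name='coin', domain_id='demo', precision=2),
    iroha.command('CreateAccount', account_name='admin', domain_id='demo',
                  public_key=admin_public_key),
])
irohalib.IrohaCrypto.sign_transaction(tx, admin_private_key)

# The @commons.hex decorator prints transactions in this form (with a 'T' prefix,
# 'Q' for queries) so the Binary Testing Framework can consume them.
print('T{}'.format(binascii.hexlify(tx.SerializeToString()).decode('utf-8')))

Queries follow the same shape (iroha.query(...) signed with IrohaCrypto.sign_query and sent through IrohaGrpc), as the rewritten tx-example.py later in this patch shows.
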
diff --git a/example/python/permissions/can_transfer_my_assets.py b/example/python/permissions/can_transfer_my_assets.py index 547bc6c2a7..c9a369a101 100644 --- a/example/python/permissions/can_transfer_my_assets.py +++ b/example/python/permissions/can_transfer_my_assets.py @@ -3,57 +3,59 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib import commons +import primitive_pb2 admin = commons.new_user('admin@test') alice = commons.new_user('alice@test') bob = commons.new_user('bob@test') +iroha = irohalib.Iroha(admin['id']) @commons.hex def genesis_tx(): - test_permissions = iroha.RolePermissionSet([ - iroha.Role_kTransferMyAssets, - iroha.Role_kReceive, - iroha.Role_kTransfer + test_permissions = [ + primitive_pb2.can_grant_can_transfer_my_assets, + primitive_pb2.can_receive, + primitive_pb2.can_transfer + ] + genesis_commands = commons.genesis_block(admin, alice, test_permissions) + genesis_commands.extend([ + iroha.command('CreateAccount', account_name='bob', domain_id='test', + public_key=irohalib.IrohaCrypto.derive_public_key(bob['key'])), + iroha.command('CreateAsset', asset_name='coin', domain_id='test', precision=2), + iroha.command('AddAssetQuantity', asset_id='coin#test', amount='100.00'), + iroha.command('TransferAsset', + src_account_id=admin['id'], + dest_account_id=alice['id'], + asset_id='coin#test', + description='init top up', + amount='90.00') ]) - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(admin['id']) \ - .addPeer('0.0.0.0:50541', admin['key'].publicKey()) \ - .createRole('admin_role', commons.all_permissions()) \ - .createRole('test_role', test_permissions) \ - .createDomain('test', 'test_role') \ - .createAccount('admin', 'test', admin['key'].publicKey()) \ - .createAccount('alice', 'test', alice['key'].publicKey()) \ - .createAccount('bob', 'test', bob['key'].publicKey()) \ - .appendRole(admin['id'], 'admin_role') \ - .createAsset('coin', 'test', 2) \ - .addAssetQuantity('coin#test', '100.00') \ - .transferAsset(admin['id'], alice['id'], 'coin#test', 'init top up', '90.00') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(admin['key']).finish() + tx = iroha.transaction(genesis_commands) + irohalib.IrohaCrypto.sign_transaction(tx, admin['key']) + return tx @commons.hex def grant_permission_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(alice['id']) \ - .grantPermission(bob['id'], iroha.Grantable_kTransferMyAssets) \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(alice['key']).finish() + tx = iroha.transaction([ + iroha.command('GrantPermission', account_id=bob['id'], permission=primitive_pb2.can_transfer_my_assets) + ], creator_account=alice['id']) + irohalib.IrohaCrypto.sign_transaction(tx, alice['key']) + return tx @commons.hex def transfer_asset_tx(): - tx = iroha.ModelTransactionBuilder() \ - .createdTime(commons.now()) \ - .creatorAccountId(bob['id']) \ - .transferAsset(alice['id'], admin['id'], 'coin#test', 'transfer from alice to admin done by bob', '60.00') \ - .build() - return iroha.ModelProtoTransaction(tx) \ - .signAndAddSignature(bob['key']).finish() + tx = iroha.transaction([ + iroha.command('TransferAsset', + src_account_id=alice['id'], + dest_account_id=admin['id'], + asset_id='coin#test', + description='transfer from Alice to Admin by Bob', + amount='60.00') + ], creator_account=bob['id']) + irohalib.IrohaCrypto.sign_transaction(tx, bob['key']) + return tx diff --git 
a/example/python/permissions/commons.py b/example/python/permissions/commons.py index dcac6f2517..b1d950a419 100644 --- a/example/python/permissions/commons.py +++ b/example/python/permissions/commons.py @@ -3,69 +3,111 @@ # SPDX-License-Identifier: Apache-2.0 # -import iroha +import irohalib +import primitive_pb2 +import binascii from time import time +command = irohalib.Iroha.command + def now(): return int(time() * 1000) def all_permissions(): - return iroha.RolePermissionSet([ - iroha.Role_kAppendRole, - iroha.Role_kCreateRole, - iroha.Role_kDetachRole, - iroha.Role_kAddAssetQty, - iroha.Role_kSubtractAssetQty, - iroha.Role_kAddPeer, - iroha.Role_kAddSignatory, - iroha.Role_kRemoveSignatory, - iroha.Role_kSetQuorum, - iroha.Role_kCreateAccount, - iroha.Role_kSetDetail, - iroha.Role_kCreateAsset, - iroha.Role_kTransfer, - iroha.Role_kReceive, - iroha.Role_kCreateDomain, - iroha.Role_kReadAssets, - iroha.Role_kGetRoles, - iroha.Role_kGetMyAccount, - iroha.Role_kGetAllAccounts, - iroha.Role_kGetDomainAccounts, - iroha.Role_kGetMySignatories, - iroha.Role_kGetAllSignatories, - iroha.Role_kGetDomainSignatories, - iroha.Role_kGetMyAccAst, - iroha.Role_kGetAllAccAst, - iroha.Role_kGetDomainAccAst, - iroha.Role_kGetMyAccDetail, - iroha.Role_kGetAllAccDetail, - iroha.Role_kGetDomainAccDetail, - iroha.Role_kGetMyAccTxs, - iroha.Role_kGetAllAccTxs, - iroha.Role_kGetDomainAccTxs, - iroha.Role_kGetMyAccAstTxs, - iroha.Role_kGetAllAccAstTxs, - iroha.Role_kGetDomainAccAstTxs, - iroha.Role_kGetMyTxs, - iroha.Role_kGetAllTxs, - iroha.Role_kSetMyQuorum, - iroha.Role_kAddMySignatory, - iroha.Role_kRemoveMySignatory, - iroha.Role_kTransferMyAssets, - iroha.Role_kSetMyAccountDetail, - iroha.Role_kGetBlocks + return [ + primitive_pb2.can_append_role, + primitive_pb2.can_create_role, + primitive_pb2.can_detach_role, + primitive_pb2.can_add_asset_qty, + primitive_pb2.can_subtract_asset_qty, + primitive_pb2.can_add_peer, + primitive_pb2.can_add_signatory, + primitive_pb2.can_remove_signatory, + primitive_pb2.can_set_quorum, + primitive_pb2.can_create_account, + primitive_pb2.can_set_detail, + primitive_pb2.can_create_asset, + primitive_pb2.can_transfer, + primitive_pb2.can_receive, + primitive_pb2.can_create_domain, + primitive_pb2.can_read_assets, + primitive_pb2.can_get_roles, + primitive_pb2.can_get_my_account, + primitive_pb2.can_get_all_accounts, + primitive_pb2.can_get_domain_accounts, + primitive_pb2.can_get_my_signatories, + primitive_pb2.can_get_all_signatories, + primitive_pb2.can_get_domain_signatories, + primitive_pb2.can_get_my_acc_ast, + primitive_pb2.can_get_all_acc_ast, + primitive_pb2.can_get_domain_acc_ast, + primitive_pb2.can_get_my_acc_detail, + primitive_pb2.can_get_all_acc_detail, + primitive_pb2.can_get_domain_acc_detail, + primitive_pb2.can_get_my_acc_txs, + primitive_pb2.can_get_all_acc_txs, + primitive_pb2.can_get_domain_acc_txs, + primitive_pb2.can_get_my_acc_ast_txs, + primitive_pb2.can_get_all_acc_ast_txs, + primitive_pb2.can_get_domain_acc_ast_txs, + primitive_pb2.can_get_my_txs, + primitive_pb2.can_get_all_txs, + primitive_pb2.can_get_blocks, + primitive_pb2.can_grant_can_set_my_quorum, + primitive_pb2.can_grant_can_add_my_signatory, + primitive_pb2.can_grant_can_remove_my_signatory, + primitive_pb2.can_grant_can_transfer_my_assets, + primitive_pb2.can_grant_can_set_my_account_detail + ] + + +def genesis_block(admin, alice, test_permissions, multidomain=False): + """ + Compose a set of common for all tests' genesis block transactions + :param admin: dict of id and private key of 
admin + :param alice: dict of id and private key of alice + :param test_permissions: permissions for users in test domain + :param multidomain: admin and alice accounts will be created in + different domains and the first domain users will have admin right + by default if True + :return: a list of irohalib.Iroha.command's + """ + peer = primitive_pb2.Peer() + peer.address = '0.0.0.0:50541' + peer.peer_key = admin['key'] + commands = [ + command('AddPeer', peer=peer), + command('CreateRole', role_name='admin_role', permissions=all_permissions()), + command('CreateRole', role_name='test_role', permissions=test_permissions)] + if multidomain: + commands.append(command('CreateDomain', domain_id='first', default_role='admin_role')) + commands.extend([ + command('CreateDomain', + domain_id='second' if multidomain else 'test', + default_role='test_role'), + command('CreateAccount', + account_name='admin', + domain_id='first' if multidomain else 'test', + public_key=irohalib.IrohaCrypto.derive_public_key(admin['key'])), + command('CreateAccount', + account_name='alice', + domain_id='second' if multidomain else 'test', + public_key=irohalib.IrohaCrypto.derive_public_key(alice['key'])) ]) + if not multidomain: + commands.append(command('AppendRole', account_id=admin['id'], role_name='admin_role')) + return commands def new_user(user_id): - key = iroha.ModelCrypto().generateKeypair() + private_key = irohalib.IrohaCrypto.private_key() if user_id.lower().startswith('admin'): - print('K{}'.format(key.privateKey().hex())) + print('K{}'.format(private_key.decode('utf-8'))) return { 'id': user_id, - 'key': key + 'key': private_key } @@ -76,4 +118,4 @@ def hex(generator): Allows preserving the type of binaries for Binary Testing Framework. """ prefix = 'T' if generator.__name__.lower().endswith('tx') else 'Q' - print('{}{}'.format(prefix, generator().hex())) + print('{}{}'.format(prefix, binascii.hexlify(generator().SerializeToString()).decode('utf-8'))) diff --git a/example/python/prepare.sh b/example/python/prepare.sh deleted file mode 100755 index 6af9714c0f..0000000000 --- a/example/python/prepare.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env bash - -cd $(dirname $0) - -# generate iroha lib -CURDIR="$(cd "$(dirname "$0")"; pwd)" -IROHA_HOME="$(dirname $(dirname "${CURDIR}"))" -cmake -H$IROHA_HOME -Bbuild -DSWIG_PYTHON=ON -DSUPPORT_PYTHON2=ON; -cmake --build build/ --target irohapy -- -j"$(getconf _NPROCESSORS_ONLN)" - -# generate proto files in current dir -protoc --proto_path=../../shared_model/schema --python_out=. ../../shared_model/schema/*.proto -python -m grpc_tools.protoc --proto_path=../../shared_model/schema --python_out=. --grpc_python_out=. 
../../shared_model/schema/endpoint.proto diff --git a/example/python/tx-example.py b/example/python/tx-example.py index 3b45962ba4..324bee128b 100644 --- a/example/python/tx-example.py +++ b/example/python/tx-example.py @@ -1,253 +1,180 @@ -import sys -sys.path.insert(0, 'build/shared_model/bindings') -import iroha - -import transaction_pb2 -import endpoint_pb2 -import endpoint_pb2_grpc -import queries_pb2 -import grpc -import time - - -tx_builder = iroha.ModelTransactionBuilder() -query_builder = iroha.ModelQueryBuilder() -crypto = iroha.ModelCrypto() - -admin_priv = open("../admin@test.priv", "r").read() -admin_pub = open("../admin@test.pub", "r").read() -key_pair = crypto.convertFromExisting(admin_pub, admin_priv) - -user1_kp = crypto.generateKeypair() - -def current_time(): - return int(round(time.time() * 1000)) - -creator = "admin@test" - -query_counter = 1 - -def get_status(tx): - # Create status request +#!/usr/bin/env python3 +# +# Copyright Soramitsu Co., Ltd. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# - print("Hash of the transaction: ", tx.hash().hex()) - tx_hash = tx.hash().blob() - - if sys.version_info[0] == 2: - tx_hash = ''.join(map(chr, tx_hash)) - else: - tx_hash = bytes(tx_hash) - - - request = endpoint_pb2.TxStatusRequest() - request.tx_hash = tx_hash - - channel = grpc.insecure_channel('127.0.0.1:50051') - stub = endpoint_pb2_grpc.CommandService_v1Stub(channel) - - response = stub.Status(request) - status = endpoint_pb2.TxStatus.Name(response.tx_status) - print("Status of transaction is:", status) +import sys - if status != "COMMITTED": - print("Your transaction wasn't committed") - exit(1) +if sys.version_info[0] < 3: + raise Exception('Python 3 or a more recent version is required.') +from primitive_pb2 import can_set_my_account_detail +from irohalib import Iroha, IrohaGrpc +from irohalib import IrohaCrypto +import binascii -def print_status_streaming(tx): - # Create status request - print("Hash of the transaction: ", tx.hash().hex()) - tx_hash = tx.hash().hex() +admin_private_key = open('../admin@test.priv').read() +user_private_key = IrohaCrypto.private_key() +user_public_key = IrohaCrypto.derive_public_key(user_private_key) +iroha = Iroha('admin@test') +net = IrohaGrpc() - # Create request - request = endpoint_pb2.TxStatusRequest() - request.tx_hash = tx_hash - # Create connection to Iroha - channel = grpc.insecure_channel('127.0.0.1:50051') - stub = endpoint_pb2_grpc.CommandService_v1Stub(channel) +def trace(func): + """ + A decorator for tracing methods' begin/end execution points + """ + def tracer(*args, **kwargs): + name = func.__name__ + print('\tEntering "{}"'.format(name)) + result = func(*args, **kwargs) + print('\tLeaving "{}"'.format(name)) + return result + return tracer - # Send request - response = stub.StatusStream(request) - for status in response: - print("Status of transaction:") +@trace +def send_transaction_and_print_status(transaction): + hex_hash = binascii.hexlify(IrohaCrypto.hash(transaction)) + print('Transaction hash = {}, creator = {}'.format( + hex_hash, transaction.payload.reduced_payload.creator_account_id)) + net.send_tx(transaction) + for status in net.tx_status_stream(transaction): print(status) -def send_tx(tx, key_pair): - tx_blob = iroha.ModelProtoTransaction(tx).signAndAddSignature(key_pair).finish().blob() - proto_tx = transaction_pb2.Transaction() - - if sys.version_info[0] == 2: - tmp = ''.join(map(chr, tx_blob)) - else: - tmp = bytes(tx_blob) - - proto_tx.ParseFromString(tmp) - - channel = 
grpc.insecure_channel('127.0.0.1:50051') - stub = endpoint_pb2_grpc.CommandService_v1Stub(channel) - - stub.Torii(proto_tx) - - -def send_query(query, key_pair): - query_blob = iroha.ModelProtoQuery(query).signAndAddSignature(key_pair).finish().blob() - - proto_query = queries_pb2.Query() - - if sys.version_info[0] == 2: - tmp = ''.join(map(chr, query_blob)) - else: - tmp = bytes(query_blob) - - proto_query.ParseFromString(tmp) - - channel = grpc.insecure_channel('127.0.0.1:50051') - query_stub = endpoint_pb2_grpc.QueryService_v1Stub(channel) - query_response = query_stub.Find(proto_query) - - return query_response - - -def create_asset_coin(): +@trace +def create_domain_and_asset(): """ - Create domain "domain" and asset "coin#domain" with precision 2 + Creates domain 'domain' and asset 'coin#domain' with precision 2 """ - tx = tx_builder.creatorAccountId(creator) \ - .createdTime(current_time()) \ - .createDomain("domain", "user") \ - .createAsset("coin", "domain", 2).build() - - send_tx(tx, key_pair) - print_status_streaming(tx) + commands = [ + iroha.command('CreateDomain', domain_id='domain', default_role='user'), + iroha.command('CreateAsset', asset_name='coin', + domain_id='domain', precision=2) + ] + tx = IrohaCrypto.sign_transaction( + iroha.transaction(commands), admin_private_key) + send_transaction_and_print_status(tx) +@trace def add_coin_to_admin(): """ - Add 1000.00 asset quantity of asset coin to admin + Add 1000.00 units of 'coin#domain' to 'admin@test' """ - tx = tx_builder.creatorAccountId(creator) \ - .createdTime(current_time()) \ - .addAssetQuantity("coin#domain", "1000.00").build() - - send_tx(tx, key_pair) - print_status_streaming(tx) + tx = iroha.transaction([ + iroha.command('AddAssetQuantity', + asset_id='coin#domain', amount='1000.00') + ]) + IrohaCrypto.sign_transaction(tx, admin_private_key) + send_transaction_and_print_status(tx) +@trace def create_account_userone(): """ - Create account "userone@domain" + Create account 'userone@domain' """ - tx = tx_builder.creatorAccountId(creator) \ - .createdTime(current_time()) \ - .createAccount("userone", "domain", user1_kp.publicKey()).build() + tx = iroha.transaction([ + iroha.command('CreateAccount', account_name='userone', domain_id='domain', + public_key=user_public_key) + ]) + IrohaCrypto.sign_transaction(tx, admin_private_key) + send_transaction_and_print_status(tx) - send_tx(tx, key_pair) - print_status_streaming(tx) +@trace def transfer_coin_from_admin_to_userone(): """ - Transfer 2.00 of coin from admin@test to userone@domain + Transfer 2.00 'coin#domain' from 'admin@test' to 'userone@domain' """ - tx = tx_builder.creatorAccountId(creator) \ - .createdTime(current_time()) \ - .transferAsset("admin@test", "userone@domain", "coin#domain", "Some message", "2.00").build() + tx = iroha.transaction([ + iroha.command('TransferAsset', src_account_id='admin@test', dest_account_id='userone@domain', + asset_id='coin#domain', description='init top up', amount='2.00') + ]) + IrohaCrypto.sign_transaction(tx, admin_private_key) + send_transaction_and_print_status(tx) - send_tx(tx, key_pair) - print_status_streaming(tx) -def grant_admin_to_add_detail_to_userone(): +@trace +def userone_grants_to_admin_set_account_detail_permission(): """ - Grant admin@test to be able to set details information to userone@domain + Make admin@test able to set detail to userone@domain """ - tx = tx_builder.creatorAccountId("userone@domain") \ - .createdTime(current_time()) \ - .grantPermission(creator, iroha.Grantable_kSetMyAccountDetail) \ - 
.build() + tx = iroha.transaction([ + iroha.command('GrantPermission', account_id='admin@test', + permission=can_set_my_account_detail) + ], creator_account='userone@domain') + IrohaCrypto.sign_transaction(tx, user_private_key) + send_transaction_and_print_status(tx) - send_tx(tx, user1_kp) - print_status_streaming(tx) -def set_age_to_userone_by_admin(): +@trace +def set_age_to_userone(): """ Set age to userone@domain by admin@test """ - tx = tx_builder.creatorAccountId(creator) \ - .createdTime(current_time()) \ - .setAccountDetail("userone@domain", "age", "18") \ - .build() + tx = iroha.transaction([ + iroha.command('SetAccountDetail', + account_id='userone@domain', key='age', value='18') + ]) + IrohaCrypto.sign_transaction(tx, admin_private_key) + send_transaction_and_print_status(tx) - send_tx(tx, key_pair) - print_status_streaming(tx) +@trace def get_coin_info(): """ - Get information about asset coin#domain + Get asset info for coin#domain + :return: """ - global query_counter - query_counter += 1 - query = query_builder.creatorAccountId(creator) \ - .createdTime(current_time()) \ - .queryCounter(query_counter) \ - .getAssetInfo("coin#domain") \ - .build() + query = iroha.query('GetAssetInfo', asset_id='coin#domain') + IrohaCrypto.sign_query(query, admin_private_key) - query_response = send_query(query, key_pair) + response = net.send_query(query) + data = response.asset_response.asset + print('Asset id = {}, precision = {}'.format(data.asset_id, data.precision)) - if not query_response.HasField("asset_response"): - print("Query response error") - exit(1) - else: - print("Query responded with asset response") - asset_info = query_response.asset_response.asset - print("Asset Id =", asset_info.asset_id) - print("Precision =", asset_info.precision) - - -def get_account_asset(): +@trace +def get_account_assets(): """ - Get list of transactions done by userone@domain with asset coin#domain + List all the assets of userone@domain """ - global query_counter - query_counter += 1 - query = query_builder.creatorAccountId(creator) \ - .createdTime(current_time()) \ - .queryCounter(query_counter) \ - .getAccountAssets("userone@domain") \ - .build() + query = iroha.query('GetAccountAssets', account_id='userone@domain') + IrohaCrypto.sign_query(query, admin_private_key) - query_response = send_query(query, key_pair) + response = net.send_query(query) + data = response.account_assets_response.account_assets + for asset in data: + print('Asset id = {}, balance = {}'.format( + asset.asset_id, asset.balance)) - print(query_response) -def get_userone_info(): +@trace +def get_userone_details(): """ - Get userone's key value information + Get all the kv-storage entries for userone@domain """ - global query_counter - query_counter += 1 - query = query_builder.creatorAccountId(creator) \ - .createdTime(current_time()) \ - .queryCounter(query_counter) \ - .getAccountDetail("userone@domain") \ - .build() + query = iroha.query('GetAccountDetail', account_id='userone@domain') + IrohaCrypto.sign_query(query, admin_private_key) - query_response = send_query(query, key_pair) - print(query_response.account_detail_response.detail) + response = net.send_query(query) + data = response.account_detail_response + print('Account id = {}, details = {}'.format('userone@domain', data.detail)) - -create_asset_coin() +create_domain_and_asset() add_coin_to_admin() create_account_userone() transfer_coin_from_admin_to_userone() -grant_admin_to_add_detail_to_userone() -set_age_to_userone_by_admin() 
+userone_grants_to_admin_set_account_detail_permission() +set_age_to_userone() get_coin_info() -get_account_asset() -get_userone_info() -print("done!") +get_account_assets() +get_userone_details() + +print('done') diff --git a/shared_model/bindings/CMakeLists.txt b/shared_model/bindings/CMakeLists.txt index 1c6a8e05e0..74a933baf1 100644 --- a/shared_model/bindings/CMakeLists.txt +++ b/shared_model/bindings/CMakeLists.txt @@ -22,7 +22,7 @@ target_link_libraries(bindings -if (SWIG_PYTHON OR SWIG_JAVA OR SWIG_CSHARP) +if (SWIG_JAVA OR SWIG_CSHARP) find_package(swig REQUIRED) include(${SWIG_USE_FILE}) @@ -46,31 +46,6 @@ if (SWIG_PYTHON OR SWIG_JAVA OR SWIG_CSHARP) endmacro() endif() -if (SWIG_PYTHON) - if(SUPPORT_PYTHON2) - set(PYTHON_VER 2.7) - else() - set(PYTHON_VER 3.5) - endif() - - if(CMAKE_GENERATOR MATCHES "Visual Studio") - find_package(PythonInterp ${PYTHON_VER} REQUIRED) - endif() - find_package(PythonLibs ${PYTHON_VER} REQUIRED) - - if (${CMAKE_SYSTEM_NAME} STREQUAL Darwin) - set(MAC_OPTS "-flat_namespace -undefined suppress") - endif() - - myswig_add_library(iroha LANGUAGE python SOURCES bindings.i) - swig_link_libraries(iroha ${PYTHON_LIBRARIES} bindings ${MAC_OPTS}) - add_custom_target(irohapy DEPENDS ${SWIG_MODULE_iroha_REAL_NAME}) - # path to where Python.h is found - target_include_directories(${SWIG_MODULE_iroha_REAL_NAME} PUBLIC - ${PYTHON_INCLUDE_DIRS} - ) -endif() - if (SWIG_JAVA) find_package(JNI REQUIRED) diff --git a/shared_model/packages/python/setup.py b/shared_model/packages/python/setup.py index e6c7e6da39..b76c834b6a 100644 --- a/shared_model/packages/python/setup.py +++ b/shared_model/packages/python/setup.py @@ -6,6 +6,7 @@ from setuptools import setup, Extension from setuptools.command.build_ext import build_ext +# TODO: IR-1848 nickaleks 06.11.18 rework packaging with native library IROHA_REPO = "https://github.com/hyperledger/iroha" IROHA_BRANCH = "develop" diff --git a/test/integration/CMakeLists.txt b/test/integration/CMakeLists.txt index b081cb9157..a6c1789d0d 100644 --- a/test/integration/CMakeLists.txt +++ b/test/integration/CMakeLists.txt @@ -2,7 +2,10 @@ # SPDX-License-Identifier: Apache-2.0 add_subdirectory(acceptance) -add_subdirectory(binary) add_subdirectory(consensus) add_subdirectory(pipeline) add_subdirectory(validation) + +if (USE_BTF) + add_subdirectory(binary) +endif() diff --git a/test/integration/binary/CMakeLists.txt b/test/integration/binary/CMakeLists.txt index 0630d91523..e60bd631c2 100644 --- a/test/integration/binary/CMakeLists.txt +++ b/test/integration/binary/CMakeLists.txt @@ -3,42 +3,47 @@ # SPDX-License-Identifier: Apache-2.0 # +find_package(PythonInterp 3.5 REQUIRED) -if (SWIG_PYTHON OR SWIG_JAVA) - get_property(SWIG_BUILD_DIR GLOBAL PROPERTY SWIG_BUILD_DIR) -endif () +set(PYLIB_PATH "${PROJECT_SOURCE_DIR}/example/python") +set(PYTHON_PROTO_INCLUDES + "${PYLIB_PATH}/block_pb2.py" + "${PYLIB_PATH}/commands_pb2.py" + "${PYLIB_PATH}/endpoint_pb2.py" + "${PYLIB_PATH}/endpoint_pb2_grpc.py" + "${PYLIB_PATH}/primitive_pb2.py" + "${PYLIB_PATH}/proposal_pb2.py" + "${PYLIB_PATH}/qry_responses_pb2.py" + "${PYLIB_PATH}/queries_pb2.py" + "${PYLIB_PATH}/transaction_pb2.py" + ) -if (SWIG_PYTHON) - if (SUPPORT_PYTHON2) - find_package(PythonInterp 2.7 REQUIRED) - else () - find_package(PythonInterp 3.5 REQUIRED) - endif () +add_custom_command( + COMMAND make + WORKING_DIRECTORY ${PYLIB_PATH} + OUTPUT ${PYTHON_PROTO_INCLUDES} + COMMENT "Generating python protobuf includes" + VERBATIM +) +add_custom_target(python_proto_includes DEPENDS 
${PYTHON_PROTO_INCLUDES}) - # TODO, igor-egorov, 2018-06-27, IR-1481, move that foreach to a separate target - foreach (item "block" "commands" "primitive" "queries") - compile_proto_to_python("${item}.proto") - list(APPEND PROTO_SWIG_DEPS "${SWIG_BUILD_DIR}/${item}_pb2.py") - endforeach (item) +add_executable(binary_test + launchers.cpp + binaries_test.cpp + ) +target_link_libraries(binary_test + gtest::main + integration_framework + bindings + ) +add_dependencies(binary_test python_proto_includes) +target_include_directories(binary_test PUBLIC ${PROJECT_SOURCE_DIR}/test) - add_executable(binary_test - launchers.cpp - binaries_test.cpp - ) - target_link_libraries(binary_test - gtest::main - shared_model_proto_backend - integration_framework - bindings - ) - target_include_directories(binary_test PUBLIC ${PROJECT_SOURCE_DIR}/test) - - add_dependencies(binary_test irohapy) - add_test( - NAME "python_binary_test" - COMMAND ${CMAKE_COMMAND} -E - env "PYTHON_INTERPRETER=${PYTHON_EXECUTABLE}" "PYTHONPATH=${SWIG_BUILD_DIR}" - "ROOT_DIR=${PROJECT_SOURCE_DIR}" $ --gtest_filter=*/0* - ) - -endif () +add_test( + NAME python_binary_test + COMMAND ${CMAKE_COMMAND} -E env + "PYTHON_INTERPRETER=${PYTHON_EXECUTABLE}" + "PYTHONPATH=${PYLIB_PATH}" + "ROOT_DIR=${PROJECT_SOURCE_DIR}" + $ --gtest_filter=*/0* +) diff --git a/test/integration/binary/binaries_test.cpp b/test/integration/binary/binaries_test.cpp index 5076447269..a6b4174c45 100644 --- a/test/integration/binary/binaries_test.cpp +++ b/test/integration/binary/binaries_test.cpp @@ -7,7 +7,8 @@ using namespace shared_model::interface; -using BinaryTestTypes = ::testing::Types; +using BinaryTestTypes = ::testing::Types< + binary_test::PythonLauncher>; //, binary_test::JavaLauncher>; TYPED_TEST_CASE(BinaryTestFixture, BinaryTestTypes); @@ -152,27 +153,27 @@ TYPED_TEST(BinaryTestFixture, can_get_my_acc_ast) { } TYPED_TEST(BinaryTestFixture, can_get_all_acc_ast_txs) { - this->template doTest(1, 1); + this->template doTest(1, 1); } TYPED_TEST(BinaryTestFixture, can_get_domain_acc_ast_txs) { - this->template doTest(1, 1); + this->template doTest(1, 1); } TYPED_TEST(BinaryTestFixture, can_get_my_acc_ast_txs) { - this->template doTest(1, 1); + this->template doTest(1, 1); } TYPED_TEST(BinaryTestFixture, can_get_all_acc_txs) { - this->template doTest(1, 1); + this->template doTest(1, 1); } TYPED_TEST(BinaryTestFixture, can_get_domain_acc_txs) { - this->template doTest(1, 1); + this->template doTest(1, 1); } TYPED_TEST(BinaryTestFixture, can_get_my_acc_txs) { - this->template doTest(1, 1); + this->template doTest(1, 1); } TYPED_TEST(BinaryTestFixture, can_read_assets) { diff --git a/test/integration/binary/binaries_test_fixture.hpp b/test/integration/binary/binaries_test_fixture.hpp index a47a71021a..ce3435066a 100644 --- a/test/integration/binary/binaries_test_fixture.hpp +++ b/test/integration/binary/binaries_test_fixture.hpp @@ -57,7 +57,7 @@ namespace query_validation { template inline void checkQueryResponseType( const shared_model::proto::QueryResponse &response) { - ASSERT_NO_THROW(boost::get(response.get())); + ASSERT_NO_THROW(boost::get(response.get())); } /** diff --git a/test/module/shared_model/bindings/CMakeLists.txt b/test/module/shared_model/bindings/CMakeLists.txt index 666da9e81d..6e09ea7f70 100644 --- a/test/module/shared_model/bindings/CMakeLists.txt +++ b/test/module/shared_model/bindings/CMakeLists.txt @@ -19,64 +19,8 @@ target_link_libraries(model_crypto_test bindings ) -if (SWIG_PYTHON OR SWIG_JAVA) - get_property(SWIG_BUILD_DIR GLOBAL 
PROPERTY SWIG_BUILD_DIR) -endif() - -if (SWIG_PYTHON) - if(SUPPORT_PYTHON2) - find_package(PythonInterp 2.7 REQUIRED) - else() - find_package(PythonInterp 3.5 REQUIRED) - endif() - - file(TO_NATIVE_PATH ${SWIG_BUILD_DIR} SWIG_LIB_DIR) - if(CMAKE_GENERATOR MATCHES "Visual Studio") - set(SWIG_LIB_DIR "${SWIG_LIB_DIR}\\$") - endif() - - if (MSVC) - set(SEPARATOR $) - else () - set(SEPARATOR :) - endif() - - add_test(NAME python_transaction_test - COMMAND ${CMAKE_COMMAND} -E - env "PYTHONPATH=${SWIG_LIB_DIR}${SEPARATOR}$ENV{PYTHONPATH}" - ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/builder-test.py - WORKING_DIRECTORY ${SWIG_BUILD_DIR}) - add_test(NAME python_query_test - COMMAND ${CMAKE_COMMAND} -E - env "PYTHONPATH=${SWIG_LIB_DIR}${SEPARATOR}$ENV{PYTHONPATH}" - ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/query-test.py - WORKING_DIRECTORY ${SWIG_BUILD_DIR}) - add_test(NAME python_blocks_query_test - COMMAND ${CMAKE_COMMAND} -E - env "PYTHONPATH=${SWIG_LIB_DIR}${SEPARATOR}$ENV{PYTHONPATH}" - ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/blocks-query-test.py - WORKING_DIRECTORY ${SWIG_BUILD_DIR}) - add_test(NAME python_client_test - COMMAND ${CMAKE_COMMAND} -E - env "PYTHONPATH=${SWIG_LIB_DIR}${SEPARATOR}$ENV{PYTHONPATH}" - ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/client-test.py - WORKING_DIRECTORY ${SWIG_BUILD_DIR}) - - foreach(item "block" "transaction" "commands" "primitive" "queries") - compile_proto_to_python("${item}.proto") - list(APPEND PROTO_SWIG_DEPS "${SWIG_BUILD_DIR}/${item}_pb2.py") - endforeach(item) - - add_custom_target(python_tests ALL - DEPENDS "${PROTO_SWIG_DEPS}") - foreach(test "python_transaction_test" "python_query_test" "python_blocks_query_test" "python_client_test") - set_tests_properties(${test} - PROPERTIES REQUIRED_FILES "${PROTO_SWIG_DEPS}" - DEPENDS python_tests) - endforeach(test) -endif() - if (SWIG_JAVA) + get_property(SWIG_BUILD_DIR GLOBAL PROPERTY SWIG_BUILD_DIR) find_package(Java REQUIRED) get_target_property(PROTOC_EXEC_CMAKE protoc IMPORTED_LOCATION) diff --git a/test/module/shared_model/bindings/blocks-query-test.py b/test/module/shared_model/bindings/blocks-query-test.py deleted file mode 100644 index cddf853ef9..0000000000 --- a/test/module/shared_model/bindings/blocks-query-test.py +++ /dev/null @@ -1,138 +0,0 @@ -# -# Copyright Soramitsu Co., Ltd. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 -# - -import iroha -import unittest -import time -import sys -from google.protobuf.message import DecodeError -import queries_pb2 as qry - -# TODO luckychess 8.08.2018 add test for number of methods -# in interface and proto implementation IR-1080 - -# Symbols of type 1 (format [a-z_0-9]{1,32}) are used -# as account_name, asset_name and role_id. -VALID_NAMES_1 = [ - "a", - "asset", - "234234", - "_", - "_123", - "123_23", - "234asset_", - "__", - "12345678901234567890123456789012" -] - -INVALID_NAMES_1 = [ - "", - "A", - "assetV", - "asSet", - "asset%", - "^123", - "verylongassetname_thenameislonger", - "verylongassetname_thenameislongerthanitshouldbe", - "assset-01" -] - -VALID_DOMAINS = [ - "test", - "u9EEA432F", - "a-hyphen", - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad", - "endWith0", - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." 
+ - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad" -] - -INVALID_DOMAINS = [ - "", - " ", - " ", - "9start.with.digit", - "-startWithDash", - "@.is.not.allowed", - "no space is allowed", - "endWith-", - "label.endedWith-.is.not.allowed", - "aLabelMustNotExceeds63charactersALabelMustNotExceeds63characters", - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPadP", - "257.257.257.257", - "domain#domain", - "asd@asd", - "ab..cd" -] - -class BuilderTest(unittest.TestCase): - - def setUp(self): - self.keys = iroha.ModelCrypto().generateKeypair() - self.builder = self.base() - - def base(self): - return iroha.ModelBlocksQueryBuilder().queryCounter(123) \ - .createdTime(int(time.time() * 1000)) \ - .creatorAccountId("admin@test") - - def proto(self, query): - return iroha.ModelProtoBlocksQuery(query).signAndAddSignature(self.keys).finish() - - def check_proto_query(self, blob): - try: - if sys.version_info[0] == 2: - tmp = ''.join(map(chr, blob.blob())) - else: - tmp = bytes(blob.blob()) - qry.Query.FromString(tmp) - except DecodeError as e: - print(e) - return False - return True - - def test_empty_query(self): - with self.assertRaises(ValueError): - iroha.ModelBlocksQueryBuilder().build() - - # ====================== BlocksQuery Tests ====================== - - def test_creator_account_id(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - query = self.builder.creatorAccountId("{}@{}".format(name, domain)).build() - self.assertTrue(self.check_proto_query(self.proto(query))) - - def test_invalid_creator_account_id(self): - for domain in INVALID_DOMAINS: - for name in VALID_NAMES_1: - with self.assertRaises(ValueError): - self.builder.creatorAccountId("{}@{}".format(name, domain)).build() - - for domain in VALID_DOMAINS: - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.builder.creatorAccountId("{}@{}".format(name, domain)).build() - - def test_valid_created_time(self): - query = self.builder.createdTime(int(time.time() * 1000)).build() - self.assertTrue(self.check_proto_query(self.proto(query))) - - - - def test_outdated_created_time(self): - for i in [0, int((time.time() - 100000) * 1000), int((time.time() + 100000) * 1000)]: - with self.assertRaises(ValueError): - self.builder.createdTime(i).build() - - # ====================== BlocksQuery Tests ====================== - -if __name__ == '__main__': - unittest.main() diff --git a/test/module/shared_model/bindings/builder-test.py b/test/module/shared_model/bindings/builder-test.py deleted file mode 100644 index bad443bff3..0000000000 --- a/test/module/shared_model/bindings/builder-test.py +++ /dev/null @@ -1,624 +0,0 @@ -# -# Copyright Soramitsu Co., Ltd. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 -# - -import iroha -import unittest -import time -import sys - -from google.protobuf.message import DecodeError -import transaction_pb2 as trx - -# TODO luckychess 8.08.2018 add test for number of methods -# in interface and proto implementation IR-1080 - -# Symbols of type 1 (format [a-z_0-9]{1,32}) are used -# as account_name, asset_name and role_id. 
-VALID_NAMES_1 = [ - "a", - "asset", - "234234", - "_", - "_123", - "123_23", - "234asset_", - "__", - "12345678901234567890123456789012" -] - -INVALID_NAMES_1 = [ - "", - "A", - "assetV", - "asSet", - "asset%", - "^123", - "verylongassetname_thenameislonger", - "verylongassetname_thenameislongerthanitshouldbe", - "assset-01" -] - -# Symbols of type 2 (format [A-Za-z0-9_]{1,64}) -# are used as key identifier for setAccountDetail command -VALID_NAMES_2 = [ - "a", - "A", - "1", - "_", - "Key", - "Key0_", - "verylongAndValidKeyName___1110100010___veryveryveryverylongvalid" -] - -INVALID_NAMES_2 = [ - "", - "Key&", - "key-30", - "verylongAndValidKeyName___1110100010___veryveryveryverylongvalid1", - "@@@" -] - -VALID_DOMAINS = [ - "test", - "u9EEA432F", - "a-hyphen", - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad", - "endWith0", - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad" -] - -INVALID_DOMAINS = [ - "", - " ", - " ", - "9start.with.digit", - "-startWithDash", - "@.is.not.allowed", - "no space is allowed", - "endWith-", - "label.endedWith-.is.not.allowed", - "aLabelMustNotExceeds63charactersALabelMustNotExceeds63characters", - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPadP", - "257.257.257.257", - "domain#domain", - "asd@asd", - "ab..cd" -] - -INVALID_KEYS = [ - "", - "a", - "1" * 31, - "1" * 33 -] - -class BuilderTest(unittest.TestCase): - - def setUp(self): - self.keys = iroha.ModelCrypto().generateKeypair() - self.pub_key = self.keys.publicKey() - self.builder = self.base() - - def base(self): - return iroha.ModelTransactionBuilder()\ - .createdTime(int(time.time() * 1000))\ - .creatorAccountId("admin@test") - - def proto(self, tx): - return iroha.ModelProtoTransaction(tx).signAndAddSignature(self.keys).finish() - - def check_proto_tx(self, blob): - try: - if sys.version_info[0] == 2: - tmp = ''.join(map(chr, blob.blob())) - else: - tmp = bytes(blob.blob()) - trx.Transaction.FromString(tmp) - except DecodeError as e: - print(e) - return False - return True - - def set_add_peer(self): - self.builder.addPeer("123.123.123.123", self.keys.publicKey()) - - def test_empty_tx(self): - with self.assertRaises(ValueError): - iroha.ModelTransactionBuilder().build() - - # ====================== AddPeer Tests ====================== - - def test_add_peer(self): - tx = self.builder.addPeer("123.123.123.123:123", self.keys.publicKey()).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_add_peer_valid_domains(self): - for domain in VALID_DOMAINS: - tx = self.builder.addPeer("{}:123".format(domain), self.keys.publicKey()).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_outdated_add_peer(self): - self.set_add_peer() - for i in [0, int((time.time() - 100000) * 1000), int((time.time() + 1) * 1000)]: - with self.assertRaises(ValueError): - self.builder.createdTime(i).build() - - def test_add_peer_with_invalid_creator(self): - self.set_add_peer() - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.builder.creatorAccountId("{}@test".format(name)).build() - - 
def test_add_peer_with_invalid_creator_domain(self): - self.set_add_peer() - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.builder.creatorAccountId("admin@{}".format(domain)).build() - - def test_add_peer_with_empty_creator(self): - self.set_add_peer() - with self.assertRaises(ValueError): - self.builder.creatorAccountId("").build() - - def test_add_peer_with_invalid_key_size(self): - for k in INVALID_KEYS: - with self.assertRaises(ValueError): - self.base().addPeer("123.123.123.123", iroha.PublicKey(k)).build() - - def test_add_peer_with_invalid_domain(self): - for k in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().addPeer(k, self.keys.publicKey()).build() - - # ====================== AddSignatory Tests ====================== - - def test_add_signatory(self): - for name in VALID_NAMES_1: - for domain in VALID_DOMAINS: - tx = self.builder.addSignatory("{}@{}".format(name, domain), self.keys.publicKey()).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_add_signatory_invalid_account_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().addSignatory("{}@test".format(name), self.keys.publicKey()).build() - - def test_add_signatory_empty_account(self): - with self.assertRaises(ValueError): - self.base().addSignatory("", self.keys.publicKey()).build() - - def test_add_signatory_invalid_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().addSignatory("admin@{}".format(domain), self.keys.publicKey()).build() - - def test_add_signatory_invalid_key(self): - for key in INVALID_KEYS: - with self.assertRaises(ValueError): - self.base().addSignatory("admin@test", iroha.PublicKey(key)).build() - - # ====================== AddAssetQuantity Tests ====================== - - def test_add_asset_quantity(self): - tx = self.builder.addAssetQuantity("asset#domain", "12.345").build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_add_asset_quantity_valid_asset(self): - for name in VALID_NAMES_1: - for domain in VALID_DOMAINS: - tx = self.builder.addAssetQuantity("{}#{}".format(name, domain), "100").build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_add_asset_quantity_invalid_asset(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().addAssetQuantity("{}#test".format(name), "10").build() - - def test_add_asset_quantity_invalid_asset_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().addAssetQuantity("coin#{}".format(domain), "10").build() - - def test_add_asset_quantity_empty_asset(self): - with self.assertRaises(ValueError): - self.base().addAssetQuantity("", "10").build() - - def test_add_asset_quantity_invalid_amount(self): - for amount in ["", "-12", "-13.45", "chars", "chars10"]: - with self.assertRaises(ValueError): - self.base().addAssetQuantity("coin#test", amount).build() - - # ====================== RemoveSignatory Tests ====================== - - def test_remove_signatory(self): - for name in VALID_NAMES_1: - for domain in VALID_DOMAINS: - tx = self.builder.removeSignatory("{}@{}".format(name, domain), self.keys.publicKey()).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_remove_signatory_empty_account(self): - with self.assertRaises(ValueError): - self.base().removeSignatory("", self.keys.publicKey()).build() - - def test_remove_signatory_invalid_account(self): - for name in 
INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().removeSignatory("{}@test".format(name), self.keys.publicKey()).build() - - def test_remove_signatory_invalid_account_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().removeSignatory("admin@{}".format(domain), self.keys.publicKey()).build() - - def test_remove_signatory_invalid_key(self): - for key in INVALID_KEYS: - with self.assertRaises(ValueError): - self.base().removeSignatory("admin@test", iroha.PublicKey(key)).build() - - # ====================== CreateAccount Tests ====================== - - def test_create_account(self): - for name in VALID_NAMES_1: - for domain in VALID_DOMAINS: - tx = self.builder.createAccount(name, domain, self.keys.publicKey()).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_create_account_invalid_account(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().createAccount(name, "domain", self.keys.publicKey()).build() - - def test_create_account_invalid_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().createAccount("admin", domain, self.keys.publicKey()).build() - - def test_create_account_invalid_key(self): - for key in INVALID_KEYS: - with self.assertRaises(ValueError): - self.base().createAccount("admin", "test", iroha.PublicKey(key)).build() - - # ====================== CreateDomain Tests ====================== - - def test_create_domain(self): - for domain in VALID_DOMAINS: - for role in VALID_NAMES_1: - tx = self.builder.createDomain(domain, role).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_create_domain_with_invalid_name(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().createDomain(domain, "role").build() - - def test_create_domain_invalid_role(self): - for role in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().createDomain("test", role).build() - - # ====================== SetAccountQuorum Tests ====================== - - def test_set_account_quorum(self): - for name in VALID_NAMES_1: - for domain in VALID_DOMAINS: - tx = self.builder.setAccountQuorum("{}@{}".format(name, domain), 128).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_set_account_quorum_invalid_account(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().setAccountQuorum("{}@test".format(name), 123).build() - - def test_set_account_quorum_invalid_account_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().setAccountQuorum("admin@{}".format(domain), 123).build() - - def test_set_account_quorum_empty_account(self): - with self.assertRaises(ValueError): - self.base().setAccountQuorum("", 123).build() - - def test_set_account_quorum_invalid_quantity(self): - with self.assertRaises(OverflowError): - self.base().setAccountQuorum("admin@test", -100).build() - - for amount in [0, 129]: - with self.assertRaises(ValueError): - self.base().setAccountQuorum("admin@test", amount).build() - - # ====================== TransferAsset Tests ====================== - - def test_transfer_asset(self): - for domain in VALID_DOMAINS: - for i in range(0, len(VALID_NAMES_1)): - from_acc = "{}@{}".format(VALID_NAMES_1[i], domain) - to = "{}@{}".format(VALID_NAMES_1[(i + 1) % len(VALID_NAMES_1)], domain) - asset = "{}#{}".format(VALID_NAMES_1[(i + 2) % len(VALID_NAMES_1)], domain) 
- tx = self.builder.transferAsset(from_acc, to, asset, "description", "123.456").build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_transfer_asset_with_invalid_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().transferAsset("from@test", "to@test", "{}#test".format(name), "description", "100").build() - - def test_transfer_asset_with_invalid_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().transferAsset("from@test", "to@test", "coin#{}".format(domain), "description", "100").build() - - def test_transfer_asset_with_empty_name(self): - with self.assertRaises(ValueError): - self.base().transferAsset("from@test", "to@test", "", "description", "100").build() - - def test_transfer_asset_invalid_from_account(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().transferAsset("{}@test".format(name), "to@test", "coin#test", "description", "100").build() - - def test_transfer_asset_invalid_from_account_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().transferAsset("from@{}".format(domain), "to@test", "coin#test", "description", "100").build() - - def test_transfer_asset_empty_from_account(self): - with self.assertRaises(ValueError): - self.base().transferAsset("", "to@test", "coin#test", "description", "100").build() - - def test_transfer_asset_invalid_to_account(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().transferAsset("from@test", "{}@test".format(name), "coin#test", "description", "100").build() - - def test_transfer_asset_invalid_to_account_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().transferAsset("from@test", "to@{}".format(domain), "coin#test", "description", "1").build() - - def test_transfer_asset_empty_to_account(self): - with self.assertRaises(ValueError): - self.base().transferAsset("from@test", "", "coin#test", "description", "1").build() - - def test_transfer_asset_description_valid_values(self): - for descr in ["", "a" * 64]: - tx = self.builder.transferAsset("from@test", "to@test", "coin#test", descr, "1").build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_transfer_asset_invalid_description(self): - descr = "a" * 65 - with self.assertRaises(ValueError): - self.base().transferAsset("from@test", "to@test", "coin#test", descr, "1").build() - - def test_transfer_asset_maximum_amount(self): - max_uint_256 = str(2 ** 256 - 1) - max_uint_256_2 = max_uint_256[:10] + '.' 
+ max_uint_256[10:] - oversized = str(2 ** 256) - - for amount in [max_uint_256, max_uint_256_2]: - tx = self.builder.transferAsset("from@test", "to@test", "coin#test", "descr", amount).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - with self.assertRaises(ValueError): - self.base().transferAsset("from@test", "to@test", "coin#test", "descr", oversized).build() - - # ====================== SetAccountDetail Tests ====================== - - def test_set_account_detail(self): - for name in VALID_NAMES_1: - for domain in VALID_DOMAINS: - tx = self.builder.setAccountDetail("{}@{}".format(name, domain), "fyodor", "kek").build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_set_account_detail_invalid_account(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().setAccountDetail("{}@test".format(name), "fyodor", "true").build() - - def test_set_account_detail_invalid_account_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().setAccountDetail("admin@{}".format(domain), "fyodor", "true").build() - - def test_set_account_detail_empty_account(self): - with self.assertRaises(ValueError): - self.base().setAccountDetail("", "fyodor", "true").build() - - def test_set_account_detail_valid_key(self): - for key in VALID_NAMES_2: - tx = self.builder.setAccountDetail("admin@test", key, "true").build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_set_account_detail_invalid_key(self): - for key in INVALID_NAMES_2: - with self.assertRaises(ValueError): - self.base().setAccountDetail("admin@test", key, "true").build() - - def test_set_account_detail_valid_value(self): - length = 4 * 1024 * 1024 - value = "a" * length - - for v in ["", value]: - tx = self.builder.setAccountDetail("admin@test", "fyodor", v).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_set_account_detail_oversized_value(self): - length = 4 * 1024 * 1024 + 1 - value = "a" * length - - with self.assertRaises(ValueError): - self.base().setAccountDetail("admin@test", "fyodor", value).build() - - # ====================== AppendRole Tests ====================== - - def test_append_role(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - tx = self.builder.appendRole("{}@{}".format(name, domain), name).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_append_role_invalid_account(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().appendRole("{}@test".format(name), "user").build() - - def test_append_role_invalid_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().appendRole("admin@{}".format(domain), "user").build() - - def test_append_role_empty_account(self): - with self.assertRaises(ValueError): - self.base().appendRole("", "user").build() - - def test_append_role_with_invalid_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().appendRole("admin@test", name).build() - - # ====================== CreateAsset Tests ====================== - - def test_create_asset(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - tx = self.builder.createAsset(name, domain, 6).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_create_asset_invalid_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().createAsset(name, "test", 6).build() - - def 
test_create_asset_invalid_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().createAsset("asset", domain, 6).build() - - def test_create_asset_zero_precision(self): - tx = self.builder.createAsset("asset", "test", 0).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - # ====================== CreateRole Tests ====================== - - def test_create_role(self): - for name in VALID_NAMES_1: - tx = self.builder.createRole(name, iroha.RolePermissionSet([iroha.Role_kReceive, iroha.Role_kGetRoles])).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_create_role_with_invalid_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().createRole(name, iroha.RolePermissionSet([iroha.Role_kReceive, iroha.Role_kGetRoles])).build() - - def test_create_role_with_empty_permissions(self): - tx = self.builder.createRole("user", iroha.RolePermissionSet([])).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - # ====================== DetachRole Tests ====================== - - def test_detach_role(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - tx = self.builder.detachRole("{}@{}".format(name, domain), "role").build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_detach_role_with_invalid_account(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().detachRole("{}@test".format(name), "role").build() - - def test_detach_role_with_invalid_account_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().detachRole("admin@{}".format(domain), "role").build() - - def test_detach_role_with_empty_account(self): - with self.assertRaises(ValueError): - self.base().detachRole("", "role").build() - - def test_detach_role_with_invalid_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().detachRole("admin@test", name).build() - - # ====================== GrantPermission Tests ====================== - - def test_grant_permission(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - tx = self.builder.grantPermission("{}@{}".format(name, domain), iroha.Grantable_kSetMyQuorum).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_grant_permission_empty_account(self): - with self.assertRaises(ValueError): - self.base().grantPermission("", iroha.Grantable_kSetMyQuorum).build() - - def test_grant_permission_invalid_account(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().grantPermission("{}@test".format(name), iroha.Grantable_kSetMyQuorum).build() - - def test_grant_permission_invalid_account_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().grantPermission("admin@{}".format(domain), iroha.Grantable_kSetMyQuorum).build() - - # ====================== RevokePermission Tests ====================== - - def test_revoke_permission(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - tx = self.builder.revokePermission("{}@{}".format(name, domain), iroha.Grantable_kSetMyQuorum).build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_revoke_permission_invalid_account(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().revokePermission("{}@test".format(name), iroha.Grantable_kSetMyQuorum).build() - - def 
test_revoke_permission_invalid_account_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().revokePermission("admin@{}".format(domain), iroha.Grantable_kSetMyQuorum).build() - - def test_revoke_permission_empty_account(self): - with self.assertRaises(ValueError): - self.base().revokePermission("", iroha.Grantable_kSetMyQuorum).build() - - # ====================== SubtractAssetQuantity Tests ====================== - - def test_subtract_asset_quantity(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - tx = self.builder.subtractAssetQuantity("{}#{}".format(name, domain), "10").build() - self.assertTrue(self.check_proto_tx(self.proto(tx))) - - def test_subtract_asset_quantity_invalid_asset_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().subtractAssetQuantity("{}#test".format(name), "10").build() - - def test_subtract_asset_quantity_invalid_asset_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().subtractAssetQuantity("coin#{}".format(domain), "10").build() - - def test_subtract_asset_quantity_empty_account(self): - with self.assertRaises(ValueError): - self.base().subtractAssetQuantity("", "10").build() - - def test_subtract_asset_quantity_invalid_amount(self): - amounts = ["", "0", "chars", "-10", "10chars", "10.10.10"] - for amount in amounts: - with self.assertRaises(ValueError): - self.base().subtractAssetQuantity("coin#test", amount).build() - -if __name__ == '__main__': - unittest.main() diff --git a/test/module/shared_model/bindings/client-test.py b/test/module/shared_model/bindings/client-test.py deleted file mode 100644 index 3e184ba63c..0000000000 --- a/test/module/shared_model/bindings/client-test.py +++ /dev/null @@ -1,65 +0,0 @@ -# -# Copyright Soramitsu Co., Ltd. All Rights Reserved. 
-# SPDX-License-Identifier: Apache-2.0 -# - -from time import time -import unittest - -import transaction_pb2 as trx -import commands_pb2 as cmd -import iroha - -class ClientTest(unittest.TestCase): - def setUp(self): - self.keys = iroha.ModelCrypto().generateKeypair() - - def valid_add_peer_command(self): - command = cmd.Command() - command.add_peer.peer.address = "127.0.0.1:50500" - command.add_peer.peer.peer_key = b'A' * 32 - return command - - def unsigned_tx(self): - tx = trx.Transaction() - tx.payload.reduced_payload.creator_account_id = "admin@test" - tx.payload.reduced_payload.created_time = int(time() * 1000) - tx.payload.reduced_payload.quorum = 1 - return tx - - def test_hash(self): - tx = self.unsigned_tx() - tx.payload.reduced_payload.commands.extend([self.valid_add_peer_command()]) - h = iroha.hashTransaction(iroha.Blob(tx.SerializeToString()).blob()) - self.assertEqual(len(h), 32) - - def test_sign(self): - tx = self.unsigned_tx() - tx.payload.reduced_payload.commands.extend([self.valid_add_peer_command()]) - self.assertEqual(len(tx.signatures), 0) - tx_blob = iroha.signTransaction(iroha.Blob(tx.SerializeToString()).blob(), self.keys) - signed_tx = trx.Transaction() - signed_tx.ParseFromString(bytearray(tx_blob)) - self.assertEqual(len(signed_tx.signatures), 1) - - def test_validate_without_cmd(self): - tx = self.unsigned_tx() - tx_blob = iroha.signTransaction(iroha.Blob(tx.SerializeToString()).blob(), self.keys) - with self.assertRaises(ValueError): - iroha.validateTransaction(tx_blob) - - def test_validate_unsigned_tx(self): - tx = self.unsigned_tx() - tx.payload.reduced_payload.commands.extend([self.valid_add_peer_command()]) - self.assertEqual(len(tx.signatures), 0) - with self.assertRaises(ValueError): - iroha.validateTransaction(iroha.Blob(tx.SerializeToString()).blob()) - - def test_validate_correct_tx(self): - tx = self.unsigned_tx() - tx.payload.reduced_payload.commands.extend([self.valid_add_peer_command()]) - self.assertEqual(len(tx.signatures), 0) - iroha.signTransaction(iroha.Blob(tx.SerializeToString()).blob(), self.keys) - -if __name__ == '__main__': - unittest.main() diff --git a/test/module/shared_model/bindings/query-test.py b/test/module/shared_model/bindings/query-test.py deleted file mode 100644 index 265073f849..0000000000 --- a/test/module/shared_model/bindings/query-test.py +++ /dev/null @@ -1,390 +0,0 @@ -# -# Copyright Soramitsu Co., Ltd. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 -# - -import iroha -import unittest -import time -import sys -from google.protobuf.message import DecodeError -import queries_pb2 as qry - -# TODO luckychess 8.08.2018 add test for number of methods -# in interface and proto implementation IR-1080 - -# Symbols of type 1 (format [a-z_0-9]{1,32}) are used -# as account_name, asset_name and role_id. 
-VALID_NAMES_1 = [ - "a", - "asset", - "234234", - "_", - "_123", - "123_23", - "234asset_", - "__", - "12345678901234567890123456789012" -] - -INVALID_NAMES_1 = [ - "", - "A", - "assetV", - "asSet", - "asset%", - "^123", - "verylongassetname_thenameislonger", - "verylongassetname_thenameislongerthanitshouldbe", - "assset-01" -] - -# Symbols of type 2 (format [A-Za-z0-9_]{1,64}) -# are used as key identifier for setAccountDetail command -VALID_NAMES_2 = [ - "a", - "A", - "1", - "_", - "Key", - "Key0_", - "verylongAndValidKeyName___1110100010___veryveryveryverylongvalid" -] - -INVALID_NAMES_2 = [ - "", - "Key&", - "key-30", - "verylongAndValidKeyName___1110100010___veryveryveryverylongvalid1", - "@@@" -] - -VALID_DOMAINS = [ - "test", - "u9EEA432F", - "a-hyphen", - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad", - "endWith0", - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad" -] - -INVALID_DOMAINS = [ - "", - " ", - " ", - "9start.with.digit", - "-startWithDash", - "@.is.not.allowed", - "no space is allowed", - "endWith-", - "label.endedWith-.is.not.allowed", - "aLabelMustNotExceeds63charactersALabelMustNotExceeds63characters", - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPad." + - "maxLabelLengthIs63paddingPaddingPaddingPaddingPaddingPaddingPadP", - "257.257.257.257", - "domain#domain", - "asd@asd", - "ab..cd" -] - -INVALID_KEYS = [ - "", - "a", - "1" * 31, - "1" * 33 -] - -class BuilderTest(unittest.TestCase): - - def setUp(self): - self.keys = iroha.ModelCrypto().generateKeypair() - self.builder = self.base() - - def set_get_account(self): - self.builder.getAccount("user@test") - - def base(self): - return iroha.ModelQueryBuilder().queryCounter(123)\ - .createdTime(int(time.time() * 1000))\ - .creatorAccountId("admin@test") - - def proto(self, query): - return iroha.ModelProtoQuery(query).signAndAddSignature(self.keys).finish() - - def check_proto_query(self, blob): - try: - if sys.version_info[0] == 2: - tmp = ''.join(map(chr, blob.blob())) - else: - tmp = bytes(blob.blob()) - qry.Query.FromString(tmp) - except DecodeError as e: - print(e) - return False - return True - - def test_empty_query(self): - with self.assertRaises(ValueError): - iroha.ModelQueryBuilder().build() - - # ====================== GetAccount Tests ====================== - - def test_get_account(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - query = self.builder.getAccount("{}@{}".format(name, domain)).build() - self.assertTrue(self.check_proto_query(self.proto(query))) - - def test_outdated_get_account(self): - self.set_get_account() - for i in [0, int((time.time() - 100000) * 1000), int((time.time() + 1) * 1000)]: - with self.assertRaises(ValueError): - self.builder.createdTime(i).build() - - def test_get_account_with_invalid_creator(self): - self.set_get_account() - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.builder.creatorAccountId("{}@test".format(name)).build() - - def test_get_account_with_invalid_creator_domain(self): - self.set_get_account() - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - 
self.builder.creatorAccountId("admin@{}".format(domain)).build() - - def test_get_account_with_empty_creator_domain(self): - self.set_get_account() - with self.assertRaises(ValueError): - self.builder.creatorAccountId("").build() - - def test_get_account_invalid_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().getAccount("{}@test".format(name)).build() - - def test_get_account_invalid_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().getAccount("admin@{}".format(domain)).build() - - def test_get_account_with_empty_name(self): - with self.assertRaises(ValueError): - self.base().getAccount("").build() - - # ====================== GetSignatories Tests ====================== - - def test_get_signatories(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - query = self.builder.getSignatories("{}@{}".format(name, domain)).build() - self.assertTrue(self.check_proto_query(self.proto(query))) - - def test_get_signatories_invalid_account(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().getSignatories("{}@test".format(name)).build() - - def test_get_signatories_invalid_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().getSignatories("admin@{}".format(domain)).build() - - def test_get_signatories_empty_account(self): - with self.assertRaises(ValueError): - self.base().getSignatories("").build() - - # ====================== GetAccountTransactions Tests ====================== - - def test_get_account_transactions(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - query = self.builder.getAccountTransactions("{}@{}".format(name, domain)).build() - self.assertTrue(self.check_proto_query(self.proto(query))) - - def test_get_account_transactions_invalid_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().getAccountTransactions("{}@test".format(name)).build() - - def test_get_account_transactions_invalid_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().getAccountTransactions("admin@{}".format(domain)).build() - - def test_get_account_transactions_with_empty_account(self): - with self.assertRaises(ValueError): - self.base().getAccountTransactions("").build() - - # ====================== GetAccountAssetTransactions Tests ====================== - - def test_get_account_asset_transactions(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - query = self.builder.getAccountAssetTransactions("{}@{}".format(name, domain), "{}#{}".format(name, domain)).build() - self.assertTrue(self.check_proto_query(self.proto(query))) - - def test_get_account_asset_transactions_invalid_account(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().getAccountAssetTransactions("{}@test".format(name), "coin#test").build() - - def test_get_account_asset_transactions_invalid_account_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().getAccountAssetTransactions("admin@{}".format(domain), "coin#test").build() - - def test_get_account_asset_transactions_empty_account(self): - with self.assertRaises(ValueError): - self.base().getAccountAssetTransactions("", "coin#test").build() - - def test_get_account_asset_transactions_invalid_asset_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - 
self.base().getAccountAssetTransactions("admin@test", "{}#test".format(name)).build() - - def test_get_account_asset_transactions_invalid_asset_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().getAccountAssetTransactions("admin@test", "admin#{}".format(domain)).build() - - def test_get_account_asset_transactions_empty_asset(self): - with self.assertRaises(ValueError): - self.base().getAccountAssetTransactions("admin@test", "").build() - - # ====================== GetAccountAssets Tests ====================== - - def test_get_account_assets(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - query = self.builder.getAccountAssets("{}@{}".format(name, domain)).build() - self.assertTrue(self.check_proto_query(self.proto(query))) - - def test_get_account_assets_invalid_account(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().getAccountAssets("{}@test".format(name)).build() - - def test_get_account_assets_invalid_account_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().getAccountAssets("admin@{}".format(domain)).build() - - def test_get_account_assets_empty_account(self): - with self.assertRaises(ValueError): - self.base().getAccountAssets("").build() - - # ====================== GetRoles Tests ====================== - - def test_get_roles(self): - query = self.builder.getRoles().build() - self.assertTrue(self.check_proto_query(self.proto(query))) - - # ====================== GetAssetInfo Tests ====================== - - def test_get_asset_info(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - query = self.builder.getAssetInfo("{}#{}".format(name, domain)).build() - self.assertTrue(self.check_proto_query(self.proto(query))) - - def test_get_asset_info_invalid_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().getAssetInfo("{}#test".format(name)).build() - - def test_get_asset_info_invalid_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().getAssetInfo("admin#{}".format(domain)).build() - - def test_get_asset_info_empty_asset_name(self): - with self.assertRaises(ValueError): - self.base().getAssetInfo("").build() - - # ====================== GetRolePermissions Tests ====================== - - def test_get_role_permissions(self): - for name in VALID_NAMES_1: - query = self.builder.getRolePermissions(name).build() - self.assertTrue(self.check_proto_query(self.proto(query))) - - def test_get_role_permissions_with_invalid_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().getRolePermissions(name).build() - - # ====================== GetTransactions Tests ====================== - - def test_get_transactions(self): - hv = iroha.HashVector() - hv.append(iroha.Hash("1" * 32)) - hv.append(iroha.Hash("2" * 32)) - self.assertTrue(hv.size() == 2) - - query = self.builder.getTransactions(hv).build() - self.assertTrue(self.check_proto_query(self.proto(query))) - - def test_get_transactions_with_empty_vector(self): - with self.assertRaises(ValueError): - self.base().getTransactions(iroha.HashVector()).build() - - def test_get_transactions_with_invalid_hash_sizes(self): - hashes = [ - "", - "1", - "1" * 31, - "1" * 33 - ] - for h in hashes: - hv = iroha.HashVector() - hv.append(iroha.Hash(h)) - with self.assertRaises(ValueError): - self.base().getTransactions(hv).build() - - def 
test_get_transactions_with_one_valid_and_one_invalid_hash_1(self): - hv = iroha.HashVector() - hv.append(iroha.Hash("1" * 32)) - hv.append(iroha.Hash("1")) - - with self.assertRaises(ValueError): - self.base().getTransactions(hv).build() - - def test_get_transactions_with_one_valid_and_one_invalid_hash_2(self): - hv = iroha.HashVector() - hv.append(iroha.Hash("1")) - hv.append(iroha.Hash("1" * 32)) - - with self.assertRaises(ValueError): - self.base().getTransactions(hv).build() - - - # ====================== GetAccountDetail Tests ====================== - - def test_get_account_detail(self): - for domain in VALID_DOMAINS: - for name in VALID_NAMES_1: - query = self.builder.getAccountDetail("{}@{}".format(name, domain)).build() - self.assertTrue(self.check_proto_query(self.proto(query))) - - def test_get_account_detail_invalid_name(self): - for name in INVALID_NAMES_1: - with self.assertRaises(ValueError): - self.base().getAccountDetail("{}@test".format(name)).build() - - def test_get_account_detail_invalid_domain(self): - for domain in INVALID_DOMAINS: - with self.assertRaises(ValueError): - self.base().getAccountDetail("admin@{}".format(domain)).build() - - def test_get_account_detail_with_no_args(self): - query = self.builder.getAccountDetail().build() - self.assertTrue(self.check_proto_query(self.proto(query))) - -if __name__ == '__main__': - unittest.main() From 7a9a3d17313259ae6bbd5a7cabe70b8b92902ab7 Mon Sep 17 00:00:00 2001 From: Fedor Muratov Date: Fri, 1 Feb 2019 14:50:45 +0300 Subject: [PATCH 27/41] Refactoring/yac mocks (#2026) * Refactoring of yac_mocks file: - separate different mocks by own files - move util functions to own file - reduces dependencies of chain_validator and yac_gate tests Fix dependencies in YAC: - separate messages for vote_message and Commit&Reject files - fix header bugs in yac_vote_storage and yac_gate * - Rename yac_mocks with yac_fixture - Cleanup dependencies of yac-related tests - Fix YacPeerOrder forward declaration bug Signed-off-by: Fedor Muratov --- .../consensus/yac/impl/peer_orderer_impl.hpp | 4 - irohad/consensus/yac/impl/yac_gate_impl.cpp | 2 +- .../{messages.hpp => outcome_messages.hpp} | 28 +- .../consensus/yac/storage/impl/yac_common.cpp | 2 +- .../yac/storage/yac_block_storage.hpp | 2 +- .../yac/storage/yac_vote_storage.hpp | 20 +- .../yac/transport/impl/network_impl.cpp | 1 - .../yac/transport/impl/network_impl.hpp | 3 +- .../yac/transport/yac_pb_converters.hpp | 2 +- irohad/consensus/yac/vote_message.hpp | 48 ++++ irohad/consensus/yac/yac.hpp | 4 +- irohad/consensus/yac/yac_gate.hpp | 1 + irohad/consensus/yac/yac_peer_orderer.hpp | 3 +- irohad/main/impl/consensus_init.hpp | 2 +- .../unsafe_proposal_factory.hpp | 1 + .../fake_peer/yac_network_notifier.cpp | 2 +- test/fuzzing/consensus_fuzz.cpp | 4 +- .../consensus/consensus_sunny_day.cpp | 10 +- .../consensus/yac/cluster_order_test.cpp | 9 +- .../yac/mock_yac_crypto_provider.hpp | 66 +++++ .../consensus/yac/mock_yac_hash_gate.hpp | 38 +++ .../consensus/yac/mock_yac_hash_provider.hpp | 41 +++ .../irohad/consensus/yac/mock_yac_network.hpp | 62 ++++ .../consensus/yac/mock_yac_peer_orderer.hpp | 39 +++ .../yac/mock_yac_supermajority_checker.hpp | 41 +++ .../irohad/consensus/yac/mock_yac_timer.hpp | 37 +++ .../irohad/consensus/yac/network_test.cpp | 11 +- .../consensus/yac/peer_orderer_test.cpp | 9 +- .../yac/supermajority_checker_test.cpp | 2 + .../irohad/consensus/yac/timer_test.cpp | 4 +- .../consensus/yac/yac_block_storage_test.cpp | 6 +- .../irohad/consensus/yac/yac_common_test.cpp | 4 
+- .../yac/yac_crypto_provider_test.cpp | 4 +- .../irohad/consensus/yac/yac_fixture.hpp | 65 +++++ .../irohad/consensus/yac/yac_gate_test.cpp | 15 +- .../consensus/yac/yac_hash_provider_test.cpp | 1 + .../module/irohad/consensus/yac/yac_mocks.hpp | 266 ------------------ .../yac/yac_proposal_storage_test.cpp | 4 +- .../consensus/yac/yac_rainy_day_test.cpp | 4 +- .../yac/yac_simple_cold_case_test.cpp | 5 +- .../consensus/yac/yac_sunny_day_test.cpp | 5 +- .../irohad/consensus/yac/yac_test_util.hpp | 46 +++ .../consensus/yac/yac_unknown_peer_test.cpp | 3 +- .../validation/chain_validation_test.cpp | 2 +- test/module/shared_model/interface_mocks.hpp | 2 + 45 files changed, 567 insertions(+), 363 deletions(-) rename irohad/consensus/yac/{messages.hpp => outcome_messages.hpp} (66%) create mode 100644 irohad/consensus/yac/vote_message.hpp create mode 100644 test/module/irohad/consensus/yac/mock_yac_crypto_provider.hpp create mode 100644 test/module/irohad/consensus/yac/mock_yac_hash_gate.hpp create mode 100644 test/module/irohad/consensus/yac/mock_yac_hash_provider.hpp create mode 100644 test/module/irohad/consensus/yac/mock_yac_network.hpp create mode 100644 test/module/irohad/consensus/yac/mock_yac_peer_orderer.hpp create mode 100644 test/module/irohad/consensus/yac/mock_yac_supermajority_checker.hpp create mode 100644 test/module/irohad/consensus/yac/mock_yac_timer.hpp create mode 100644 test/module/irohad/consensus/yac/yac_fixture.hpp delete mode 100644 test/module/irohad/consensus/yac/yac_mocks.hpp create mode 100644 test/module/irohad/consensus/yac/yac_test_util.hpp diff --git a/irohad/consensus/yac/impl/peer_orderer_impl.hpp b/irohad/consensus/yac/impl/peer_orderer_impl.hpp index 099af38be6..25947d6856 100644 --- a/irohad/consensus/yac/impl/peer_orderer_impl.hpp +++ b/irohad/consensus/yac/impl/peer_orderer_impl.hpp @@ -13,10 +13,6 @@ namespace iroha { - namespace ametsuchi { - class PeerQuery; - } - namespace consensus { namespace yac { diff --git a/irohad/consensus/yac/impl/yac_gate_impl.cpp b/irohad/consensus/yac/impl/yac_gate_impl.cpp index 7ba3977b19..42020bb044 100644 --- a/irohad/consensus/yac/impl/yac_gate_impl.cpp +++ b/irohad/consensus/yac/impl/yac_gate_impl.cpp @@ -8,7 +8,7 @@ #include #include "common/visitor.hpp" #include "consensus/yac/cluster_order.hpp" -#include "consensus/yac/messages.hpp" +#include "consensus/yac/outcome_messages.hpp" #include "consensus/yac/storage/yac_common.hpp" #include "consensus/yac/yac_hash_provider.hpp" #include "consensus/yac/yac_peer_orderer.hpp" diff --git a/irohad/consensus/yac/messages.hpp b/irohad/consensus/yac/outcome_messages.hpp similarity index 66% rename from irohad/consensus/yac/messages.hpp rename to irohad/consensus/yac/outcome_messages.hpp index 5ba74a0b29..b3052ef88e 100644 --- a/irohad/consensus/yac/messages.hpp +++ b/irohad/consensus/yac/outcome_messages.hpp @@ -8,39 +8,13 @@ #include -#include "consensus/yac/yac_hash_provider.hpp" // for YacHash -#include "interfaces/common_objects/signature.hpp" +#include "consensus/yac/vote_message.hpp" #include "utils/string_builder.hpp" namespace iroha { namespace consensus { namespace yac { - /** - * VoteMessage represents voting for some block; - */ - struct VoteMessage { - YacHash hash; - std::shared_ptr signature; - - bool operator==(const VoteMessage &rhs) const { - return hash == rhs.hash and *signature == *rhs.signature; - } - - bool operator!=(const VoteMessage &rhs) const { - return not(*this == rhs); - } - - std::string toString() const { - return 
shared_model::detail::PrettyStringBuilder() - .init("VoteMessage") - .append("yac hash", hash.toString()) - .append("signature", - signature ? signature->toString() : "not set") - .finalize(); - } - }; - /** * CommitMsg means consensus on cluster achieved. * All nodes deals on some solution diff --git a/irohad/consensus/yac/storage/impl/yac_common.cpp b/irohad/consensus/yac/storage/impl/yac_common.cpp index 75e461226e..29f165337e 100644 --- a/irohad/consensus/yac/storage/impl/yac_common.cpp +++ b/irohad/consensus/yac/storage/impl/yac_common.cpp @@ -7,7 +7,7 @@ #include -#include "consensus/yac/messages.hpp" +#include "consensus/yac/outcome_messages.hpp" namespace iroha { namespace consensus { diff --git a/irohad/consensus/yac/storage/yac_block_storage.hpp b/irohad/consensus/yac/storage/yac_block_storage.hpp index 564f21340c..169ecc7f05 100644 --- a/irohad/consensus/yac/storage/yac_block_storage.hpp +++ b/irohad/consensus/yac/storage/yac_block_storage.hpp @@ -23,7 +23,7 @@ #include #include "consensus/yac/impl/supermajority_checker_impl.hpp" -#include "consensus/yac/messages.hpp" +#include "consensus/yac/outcome_messages.hpp" #include "consensus/yac/storage/storage_result.hpp" #include "consensus/yac/yac_types.hpp" #include "logger/logger.hpp" diff --git a/irohad/consensus/yac/storage/yac_vote_storage.hpp b/irohad/consensus/yac/storage/yac_vote_storage.hpp index fcc32afdc4..a198a7ab64 100644 --- a/irohad/consensus/yac/storage/yac_vote_storage.hpp +++ b/irohad/consensus/yac/storage/yac_vote_storage.hpp @@ -1,18 +1,6 @@ /** - * Copyright Soramitsu Co., Ltd. 2017 All Rights Reserved. - * http://soramitsu.co.jp - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 */ #ifndef IROHA_YAC_VOTE_STORAGE_HPP @@ -23,15 +11,15 @@ #include #include -#include "consensus/yac/messages.hpp" // because messages passed by value +#include "consensus/yac/outcome_messages.hpp" // because messages passed by value #include "consensus/yac/storage/storage_result.hpp" // for Answer #include "consensus/yac/storage/yac_common.hpp" // for ProposalHash +#include "consensus/yac/storage/yac_proposal_storage.hpp" #include "consensus/yac/yac_types.hpp" namespace iroha { namespace consensus { namespace yac { - class YacProposalStorage; /** * Proposal outcome states for multicast propagation strategy diff --git a/irohad/consensus/yac/transport/impl/network_impl.cpp b/irohad/consensus/yac/transport/impl/network_impl.cpp index 4db621d36a..e89aaa3896 100644 --- a/irohad/consensus/yac/transport/impl/network_impl.cpp +++ b/irohad/consensus/yac/transport/impl/network_impl.cpp @@ -8,7 +8,6 @@ #include #include -#include "consensus/yac/messages.hpp" #include "consensus/yac/storage/yac_common.hpp" #include "consensus/yac/transport/yac_pb_converters.hpp" #include "interfaces/common_objects/peer.hpp" diff --git a/irohad/consensus/yac/transport/impl/network_impl.hpp b/irohad/consensus/yac/transport/impl/network_impl.hpp index e094c841a8..0390dbfa39 100644 --- a/irohad/consensus/yac/transport/impl/network_impl.hpp +++ b/irohad/consensus/yac/transport/impl/network_impl.hpp @@ -12,7 +12,8 @@ #include #include -#include "consensus/yac/messages.hpp" +#include "consensus/yac/outcome_messages.hpp" +#include "interfaces/common_objects/peer.hpp" #include "interfaces/common_objects/types.hpp" #include "logger/logger.hpp" #include "network/impl/async_grpc_client.hpp" diff --git a/irohad/consensus/yac/transport/yac_pb_converters.hpp b/irohad/consensus/yac/transport/yac_pb_converters.hpp index c3bc6839ac..4ca8f4babc 100644 --- a/irohad/consensus/yac/transport/yac_pb_converters.hpp +++ b/irohad/consensus/yac/transport/yac_pb_converters.hpp @@ -8,7 +8,7 @@ #include "backend/protobuf/common_objects/proto_common_objects_factory.hpp" #include "common/byteutils.hpp" -#include "consensus/yac/messages.hpp" +#include "consensus/yac/outcome_messages.hpp" #include "cryptography/crypto_provider/crypto_defaults.hpp" #include "interfaces/common_objects/signature.hpp" #include "logger/logger.hpp" diff --git a/irohad/consensus/yac/vote_message.hpp b/irohad/consensus/yac/vote_message.hpp new file mode 100644 index 0000000000..09bc3ed541 --- /dev/null +++ b/irohad/consensus/yac/vote_message.hpp @@ -0,0 +1,48 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_VOTE_MESSAGE_HPP +#define IROHA_VOTE_MESSAGE_HPP + +#include + +#include "consensus/yac/yac_hash_provider.hpp" // for YacHash +#include "interfaces/common_objects/signature.hpp" +#include "utils/string_builder.hpp" + +namespace iroha { + namespace consensus { + namespace yac { + + /** + * VoteMessage represents voting for some block; + */ + struct VoteMessage { + YacHash hash; + std::shared_ptr signature; + + bool operator==(const VoteMessage &rhs) const { + return hash == rhs.hash and *signature == *rhs.signature; + } + + bool operator!=(const VoteMessage &rhs) const { + return not(*this == rhs); + } + + std::string toString() const { + return shared_model::detail::PrettyStringBuilder() + .init("VoteMessage") + .append("yac hash", hash.toString()) + .append("signature", + signature ? 
signature->toString() : "not set") + .finalize(); + } + }; + + } // namespace yac + } // namespace consensus +} // namespace iroha + +#endif // IROHA_VOTE_MESSAGE_HPP diff --git a/irohad/consensus/yac/yac.hpp b/irohad/consensus/yac/yac.hpp index 542f14d891..ff8d0f57c8 100644 --- a/irohad/consensus/yac/yac.hpp +++ b/irohad/consensus/yac/yac.hpp @@ -11,8 +11,8 @@ #include #include -#include "consensus/yac/cluster_order.hpp" // for ClusterOrdering -#include "consensus/yac/messages.hpp" // because messages passed by value +#include "consensus/yac/cluster_order.hpp" // for ClusterOrdering +#include "consensus/yac/outcome_messages.hpp" // because messages passed by value #include "consensus/yac/storage/yac_vote_storage.hpp" // for VoteStorage #include "consensus/yac/transport/yac_network_interface.hpp" // for YacNetworkNotifications #include "consensus/yac/yac_gate.hpp" // for HashGate diff --git a/irohad/consensus/yac/yac_gate.hpp b/irohad/consensus/yac/yac_gate.hpp index c84bff8943..a263568a94 100644 --- a/irohad/consensus/yac/yac_gate.hpp +++ b/irohad/consensus/yac/yac_gate.hpp @@ -7,6 +7,7 @@ #define IROHA_YAC_GATE_HPP #include +#include "consensus/yac/cluster_order.hpp" #include "consensus/yac/storage/storage_result.hpp" #include "network/consensus_gate.hpp" diff --git a/irohad/consensus/yac/yac_peer_orderer.hpp b/irohad/consensus/yac/yac_peer_orderer.hpp index 6eaa19cc4c..c3755808e6 100644 --- a/irohad/consensus/yac/yac_peer_orderer.hpp +++ b/irohad/consensus/yac/yac_peer_orderer.hpp @@ -8,11 +8,12 @@ #include +#include "consensus/yac/cluster_order.hpp" + namespace iroha { namespace consensus { namespace yac { - class ClusterOrdering; class YacHash; /** diff --git a/irohad/main/impl/consensus_init.hpp b/irohad/main/impl/consensus_init.hpp index 0e62976501..8648f8b3af 100644 --- a/irohad/main/impl/consensus_init.hpp +++ b/irohad/main/impl/consensus_init.hpp @@ -24,7 +24,7 @@ #include "ametsuchi/peer_query_factory.hpp" #include "consensus/consensus_block_cache.hpp" -#include "consensus/yac/messages.hpp" +#include "consensus/yac/outcome_messages.hpp" #include "consensus/yac/timer.hpp" #include "consensus/yac/transport/impl/network_impl.hpp" #include "consensus/yac/yac.hpp" diff --git a/shared_model/interfaces/iroha_internal/unsafe_proposal_factory.hpp b/shared_model/interfaces/iroha_internal/unsafe_proposal_factory.hpp index 9390defb92..5a28df3928 100644 --- a/shared_model/interfaces/iroha_internal/unsafe_proposal_factory.hpp +++ b/shared_model/interfaces/iroha_internal/unsafe_proposal_factory.hpp @@ -10,6 +10,7 @@ #include #include "interfaces/common_objects/types.hpp" +#include "interfaces/iroha_internal/proposal.hpp" namespace shared_model { namespace interface { diff --git a/test/framework/integration_framework/fake_peer/yac_network_notifier.cpp b/test/framework/integration_framework/fake_peer/yac_network_notifier.cpp index 1197484e40..32d080a4aa 100644 --- a/test/framework/integration_framework/fake_peer/yac_network_notifier.cpp +++ b/test/framework/integration_framework/fake_peer/yac_network_notifier.cpp @@ -5,7 +5,7 @@ #include "framework/integration_framework/fake_peer/yac_network_notifier.hpp" -#include "consensus/yac/messages.hpp" +#include "consensus/yac/outcome_messages.hpp" #include "consensus/yac/transport/impl/network_impl.hpp" #include "consensus/yac/transport/yac_network_interface.hpp" diff --git a/test/fuzzing/consensus_fuzz.cpp b/test/fuzzing/consensus_fuzz.cpp index 4255484202..3ec36a19b8 100644 --- a/test/fuzzing/consensus_fuzz.cpp +++ b/test/fuzzing/consensus_fuzz.cpp @@ 
-5,11 +5,11 @@ #include -#include #include #include "consensus/yac/transport/impl/network_impl.hpp" -#include "module/irohad/consensus/yac/yac_mocks.hpp" + +#include "module/irohad/consensus/yac/mock_yac_network.hpp" using namespace testing; diff --git a/test/integration/consensus/consensus_sunny_day.cpp b/test/integration/consensus/consensus_sunny_day.cpp index 46e21bbc2f..e875837f63 100644 --- a/test/integration/consensus/consensus_sunny_day.cpp +++ b/test/integration/consensus/consensus_sunny_day.cpp @@ -5,12 +5,18 @@ #include #include + +#include "consensus/yac/cluster_order.hpp" #include "consensus/yac/impl/timer_impl.hpp" #include "consensus/yac/storage/yac_proposal_storage.hpp" +#include "consensus/yac/storage/yac_vote_storage.hpp" #include "consensus/yac/transport/impl/network_impl.hpp" +#include "consensus/yac/yac.hpp" #include "cryptography/crypto_provider/crypto_defaults.hpp" + #include "framework/test_subscriber.hpp" -#include "module/irohad/consensus/yac/yac_mocks.hpp" +#include "module/irohad/consensus/yac/mock_yac_crypto_provider.hpp" +#include "module/irohad/consensus/yac/yac_test_util.hpp" #include "module/shared_model/interface_mocks.hpp" using ::testing::_; @@ -25,7 +31,7 @@ static size_t num_peers = 1, my_num = 0; auto mk_local_peer(uint64_t num) { auto address = "0.0.0.0:" + std::to_string(num); - return iroha::consensus::yac::mk_peer(address); + return iroha::consensus::yac::makePeer(address); } class FixedCryptoProvider : public MockYacCryptoProvider { diff --git a/test/module/irohad/consensus/yac/cluster_order_test.cpp b/test/module/irohad/consensus/yac/cluster_order_test.cpp index 50761ec1c5..f3033f04cb 100644 --- a/test/module/irohad/consensus/yac/cluster_order_test.cpp +++ b/test/module/irohad/consensus/yac/cluster_order_test.cpp @@ -3,16 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ +#include + #include "consensus/yac/cluster_order.hpp" -#include -#include "module/irohad/consensus/yac/yac_mocks.hpp" +#include "module/irohad/consensus/yac/yac_test_util.hpp" class ClusterOrderTest : public ::testing::Test { protected: void SetUp() override { - p1 = iroha::consensus::yac::mk_peer("1"); - p2 = iroha::consensus::yac::mk_peer("2"); + p1 = iroha::consensus::yac::makePeer("1"); + p2 = iroha::consensus::yac::makePeer("2"); peers_list = {p1, p2}; } diff --git a/test/module/irohad/consensus/yac/mock_yac_crypto_provider.hpp b/test/module/irohad/consensus/yac/mock_yac_crypto_provider.hpp new file mode 100644 index 0000000000..4e7eb24628 --- /dev/null +++ b/test/module/irohad/consensus/yac/mock_yac_crypto_provider.hpp @@ -0,0 +1,66 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_YAC_CRYPTO_PROVIDER_HPP +#define IROHA_MOCK_YAC_CRYPTO_PROVIDER_HPP + +#include + +#include "consensus/yac/yac_crypto_provider.hpp" +#include "cryptography/crypto_provider/crypto_defaults.hpp" + +#include "module/shared_model/interface_mocks.hpp" + +namespace iroha { + namespace consensus { + namespace yac { + + /** + * Creates test signature with empty signed data, and provided pubkey + * @param pub_key - public key to put in the signature + * @return new signature + */ + std::shared_ptr createSig( + const std::string &pub_key) { + auto tmp = + shared_model::crypto::DefaultCryptoAlgorithmType::generateKeypair() + .publicKey(); + std::string key(tmp.blob().size(), 0); + std::copy(pub_key.begin(), pub_key.end(), key.begin()); + auto sig = std::make_shared(); + EXPECT_CALL(*sig, publicKey()) + .WillRepeatedly(::testing::ReturnRefOfCopy( + shared_model::crypto::PublicKey(key))); + EXPECT_CALL(*sig, signedData()) + .WillRepeatedly( + ::testing::ReturnRefOfCopy(shared_model::crypto::Signed(""))); + + return sig; + } + + class MockYacCryptoProvider : public YacCryptoProvider { + public: + MOCK_METHOD1(verify, bool(const std::vector &)); + + VoteMessage getVote(YacHash hash) override { + VoteMessage vote; + vote.hash = std::move(hash); + vote.signature = createSig(""); + return vote; + } + + MockYacCryptoProvider() = default; + + MockYacCryptoProvider(const MockYacCryptoProvider &) {} + + MockYacCryptoProvider &operator=(const MockYacCryptoProvider &) { + return *this; + } + }; + + } // namespace yac + } // namespace consensus +} // namespace iroha +#endif // IROHA_MOCK_YAC_CRYPTO_PROVIDER_HPP diff --git a/test/module/irohad/consensus/yac/mock_yac_hash_gate.hpp b/test/module/irohad/consensus/yac/mock_yac_hash_gate.hpp new file mode 100644 index 0000000000..fb15608e8f --- /dev/null +++ b/test/module/irohad/consensus/yac/mock_yac_hash_gate.hpp @@ -0,0 +1,38 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_YAC_HASH_GATE_HPP +#define IROHA_MOCK_YAC_HASH_GATE_HPP + +#include + +#include "consensus/yac/yac_gate.hpp" + +namespace iroha { + namespace consensus { + namespace yac { + + class MockHashGate : public HashGate { + public: + MOCK_METHOD2(vote, void(YacHash, ClusterOrdering)); + + MOCK_METHOD0(onOutcome, rxcpp::observable()); + + MockHashGate() = default; + + MockHashGate(const MockHashGate &rhs) {} + + MockHashGate(MockHashGate &&rhs) {} + + MockHashGate &operator=(const MockHashGate &rhs) { + return *this; + } + }; + + } // namespace yac + } // namespace consensus +} // namespace iroha + +#endif // IROHA_MOCK_YAC_HASH_GATE_HPP diff --git a/test/module/irohad/consensus/yac/mock_yac_hash_provider.hpp b/test/module/irohad/consensus/yac/mock_yac_hash_provider.hpp new file mode 100644 index 0000000000..b69ee4f653 --- /dev/null +++ b/test/module/irohad/consensus/yac/mock_yac_hash_provider.hpp @@ -0,0 +1,41 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_YAC_HASH_PROVIDER_HPP +#define IROHA_MOCK_YAC_HASH_PROVIDER_HPP + +#include + +#include "consensus/yac/yac_hash_provider.hpp" + +namespace iroha { + namespace consensus { + namespace yac { + + class MockYacHashProvider : public YacHashProvider { + public: + MOCK_CONST_METHOD1(makeHash, + YacHash(const simulator::BlockCreatorEvent &event)); + + MOCK_CONST_METHOD1( + toModelHash, + shared_model::interface::types::HashType(const YacHash &)); + + MockYacHashProvider() = default; + + MockYacHashProvider(const MockYacHashProvider &rhs){}; + + MockYacHashProvider(MockYacHashProvider &&rhs){}; + + MockYacHashProvider &operator=(const MockYacHashProvider &rhs) { + return *this; + }; + }; + + } // namespace yac + } // namespace consensus +} // namespace iroha + +#endif // IROHA_MOCK_YAC_HASH_PROVIDER_HPP diff --git a/test/module/irohad/consensus/yac/mock_yac_network.hpp b/test/module/irohad/consensus/yac/mock_yac_network.hpp new file mode 100644 index 0000000000..01a4875091 --- /dev/null +++ b/test/module/irohad/consensus/yac/mock_yac_network.hpp @@ -0,0 +1,62 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_YAC_NETWORK_HPP +#define IROHA_MOCK_YAC_NETWORK_HPP + +#include + +#include "consensus/yac/transport/yac_network_interface.hpp" + +namespace iroha { + namespace consensus { + namespace yac { + + class MockYacNetwork : public YacNetwork { + public: + void subscribe( + std::shared_ptr handler) override { + notification = std::move(handler); + }; + + void release() { + notification.reset(); + } + + MOCK_METHOD2(sendState, + void(const shared_model::interface::Peer &, + const std::vector &)); + + MockYacNetwork() = default; + + MockYacNetwork(const MockYacNetwork &rhs) + : notification(rhs.notification) {} + + MockYacNetwork &operator=(const MockYacNetwork &rhs) { + notification = rhs.notification; + return *this; + } + + MockYacNetwork(MockYacNetwork &&rhs) { + std::swap(notification, rhs.notification); + } + + MockYacNetwork &operator=(MockYacNetwork &&rhs) { + std::swap(notification, rhs.notification); + return *this; + } + + std::shared_ptr notification; + }; + + class MockYacNetworkNotifications : public YacNetworkNotifications { + public: + MOCK_METHOD1(onState, void(std::vector)); + }; + + } // namespace yac + } // namespace consensus +} // namespace iroha +#endif // IROHA_MOCK_YAC_NETWORK_HPP diff --git a/test/module/irohad/consensus/yac/mock_yac_peer_orderer.hpp b/test/module/irohad/consensus/yac/mock_yac_peer_orderer.hpp new file mode 100644 index 0000000000..6740efd6b0 --- /dev/null +++ b/test/module/irohad/consensus/yac/mock_yac_peer_orderer.hpp @@ -0,0 +1,39 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_YAC_PEER_ORDERER_HPP +#define IROHA_MOCK_YAC_PEER_ORDERER_HPP + +#include + +#include "consensus/yac/yac_peer_orderer.hpp" + +namespace iroha { + namespace consensus { + namespace yac { + + class MockYacPeerOrderer : public YacPeerOrderer { + public: + MOCK_METHOD0(getInitialOrdering, boost::optional()); + + MOCK_METHOD1(getOrdering, + boost::optional(const YacHash &)); + + MockYacPeerOrderer() = default; + + MockYacPeerOrderer(const MockYacPeerOrderer &rhs){}; + + MockYacPeerOrderer(MockYacPeerOrderer &&rhs){}; + + MockYacPeerOrderer &operator=(const MockYacPeerOrderer &rhs) { + return *this; + } + }; + + } // namespace yac + } // namespace consensus +} // namespace iroha + +#endif // IROHA_MOCK_YAC_PEER_ORDERER_HPP diff --git a/test/module/irohad/consensus/yac/mock_yac_supermajority_checker.hpp b/test/module/irohad/consensus/yac/mock_yac_supermajority_checker.hpp new file mode 100644 index 0000000000..840db36210 --- /dev/null +++ b/test/module/irohad/consensus/yac/mock_yac_supermajority_checker.hpp @@ -0,0 +1,41 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_YAC_SUPERMAJORITY_CHECKER_HPP +#define IROHA_MOCK_YAC_SUPERMAJORITY_CHECKER_HPP + +#include + +#include "consensus/yac/supermajority_checker.hpp" + +namespace iroha { + namespace consensus { + namespace yac { + + class MockSupermajorityChecker : public SupermajorityChecker { + public: + MOCK_CONST_METHOD2( + hasSupermajority, + bool(const shared_model::interface::types::SignatureRangeType &, + const std::vector< + std::shared_ptr> &)); + + MOCK_CONST_METHOD2(checkSize, bool(PeersNumberType, PeersNumberType)); + + MOCK_CONST_METHOD2( + peersSubset, + bool(const shared_model::interface::types::SignatureRangeType &, + const std::vector< + std::shared_ptr> &)); + + MOCK_CONST_METHOD3( + hasReject, bool(PeersNumberType, PeersNumberType, PeersNumberType)); + }; + + } // namespace yac + } // namespace consensus +} // namespace iroha + +#endif // IROHA_MOCK_YAC_SUPERMAJORITY_CHECKER_HPP diff --git a/test/module/irohad/consensus/yac/mock_yac_timer.hpp b/test/module/irohad/consensus/yac/mock_yac_timer.hpp new file mode 100644 index 0000000000..3f9b339087 --- /dev/null +++ b/test/module/irohad/consensus/yac/mock_yac_timer.hpp @@ -0,0 +1,37 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_MOCK_YAC_TIMER_HPP +#define IROHA_MOCK_YAC_TIMER_HPP + +#include + +#include "consensus/yac/timer.hpp" + +namespace iroha { + namespace consensus { + namespace yac { + + class MockTimer : public Timer { + public: + void invokeAfterDelay(std::function handler) override { + handler(); + } + + MOCK_METHOD0(deny, void()); + + MockTimer() = default; + + MockTimer(const MockTimer &rhs) {} + + MockTimer &operator=(const MockTimer &rhs) { + return *this; + } + }; + + } // namespace yac + } // namespace consensus +} // namespace iroha +#endif // IROHA_MOCK_YAC_TIMER_HPP diff --git a/test/module/irohad/consensus/yac/network_test.cpp b/test/module/irohad/consensus/yac/network_test.cpp index 772dc34a34..e6200546c3 100644 --- a/test/module/irohad/consensus/yac/network_test.cpp +++ b/test/module/irohad/consensus/yac/network_test.cpp @@ -3,11 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ -#include "module/irohad/consensus/yac/yac_mocks.hpp" +#include "consensus/yac/transport/impl/network_impl.hpp" #include -#include "consensus/yac/transport/impl/network_impl.hpp" +#include "module/irohad/consensus/yac/mock_yac_crypto_provider.hpp" +#include "module/irohad/consensus/yac/mock_yac_network.hpp" +#include "module/irohad/consensus/yac/yac_test_util.hpp" using ::testing::_; using ::testing::DoAll; @@ -45,7 +47,7 @@ namespace iroha { ASSERT_TRUE(server); ASSERT_NE(port, 0); - peer = mk_peer(std::string(default_ip) + ":" + std::to_string(port)); + peer = makePeer(std::string(default_ip) + ":" + std::to_string(port)); } void TearDown() override { @@ -61,7 +63,8 @@ namespace iroha { std::unique_ptr server; std::mutex mtx; std::condition_variable cv; - shared_model::crypto::PublicKey pubkey = shared_model::crypto::PublicKey{""}; + shared_model::crypto::PublicKey pubkey = + shared_model::crypto::PublicKey{""}; }; /** diff --git a/test/module/irohad/consensus/yac/peer_orderer_test.cpp b/test/module/irohad/consensus/yac/peer_orderer_test.cpp index 775f7a722e..5c5ab9513b 100644 --- a/test/module/irohad/consensus/yac/peer_orderer_test.cpp +++ b/test/module/irohad/consensus/yac/peer_orderer_test.cpp @@ -3,6 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ +#include "consensus/yac/impl/peer_orderer_impl.hpp" + #include #include @@ -11,11 +13,12 @@ #include #include #include -#include "consensus/yac/impl/peer_orderer_impl.hpp" #include "consensus/yac/storage/yac_proposal_storage.hpp" + #include "module/irohad/ametsuchi/mock_peer_query.hpp" #include "module/irohad/ametsuchi/mock_peer_query_factory.hpp" -#include "module/irohad/consensus/yac/yac_mocks.hpp" +#include "module/irohad/consensus/yac/yac_test_util.hpp" +#include "module/shared_model/interface_mocks.hpp" using namespace boost::adaptors; using namespace iroha::ametsuchi; @@ -61,7 +64,7 @@ class YacPeerOrdererTest : public ::testing::Test { std::vector s_peers = [] { std::vector result; for (size_t i = 1; i <= N_PEERS; ++i) { - auto tmp = iroha::consensus::yac::mk_peer(std::to_string(i)); + auto tmp = iroha::consensus::yac::makePeer(std::to_string(i)); auto key = tmp->pubkey(); diff --git a/test/module/irohad/consensus/yac/supermajority_checker_test.cpp b/test/module/irohad/consensus/yac/supermajority_checker_test.cpp index e618c8ce15..4a2e95f2a3 100644 --- a/test/module/irohad/consensus/yac/supermajority_checker_test.cpp +++ b/test/module/irohad/consensus/yac/supermajority_checker_test.cpp @@ -6,8 +6,10 @@ #include "consensus/yac/impl/supermajority_checker_impl.hpp" #include + #include #include 
"logger/logger.hpp" + #include "module/shared_model/interface_mocks.hpp" using namespace iroha::consensus::yac; diff --git a/test/module/irohad/consensus/yac/timer_test.cpp b/test/module/irohad/consensus/yac/timer_test.cpp index 736f0a4bad..a42d78a4c2 100644 --- a/test/module/irohad/consensus/yac/timer_test.cpp +++ b/test/module/irohad/consensus/yac/timer_test.cpp @@ -5,10 +5,10 @@ #include "consensus/yac/impl/timer_impl.hpp" -#include - #include +#include + using namespace iroha::consensus::yac; class TimerTest : public ::testing::Test { diff --git a/test/module/irohad/consensus/yac/yac_block_storage_test.cpp b/test/module/irohad/consensus/yac/yac_block_storage_test.cpp index e555903ab2..61ecd320d5 100644 --- a/test/module/irohad/consensus/yac/yac_block_storage_test.cpp +++ b/test/module/irohad/consensus/yac/yac_block_storage_test.cpp @@ -6,11 +6,11 @@ #include "consensus/yac/storage/yac_block_storage.hpp" #include -#include + #include "consensus/yac/storage/yac_proposal_storage.hpp" -#include "consensus/yac/storage/yac_vote_storage.hpp" #include "logger/logger.hpp" -#include "module/irohad/consensus/yac/yac_mocks.hpp" + +#include "module/irohad/consensus/yac/yac_test_util.hpp" using namespace iroha::consensus::yac; diff --git a/test/module/irohad/consensus/yac/yac_common_test.cpp b/test/module/irohad/consensus/yac/yac_common_test.cpp index b2eb98d6ea..006e30237e 100644 --- a/test/module/irohad/consensus/yac/yac_common_test.cpp +++ b/test/module/irohad/consensus/yac/yac_common_test.cpp @@ -6,9 +6,11 @@ #include "consensus/yac/storage/yac_common.hpp" #include + #include "consensus/yac/storage/yac_proposal_storage.hpp" #include "logger/logger.hpp" -#include "module/irohad/consensus/yac/yac_mocks.hpp" + +#include "module/irohad/consensus/yac/yac_test_util.hpp" using namespace iroha::consensus; using namespace iroha::consensus::yac; diff --git a/test/module/irohad/consensus/yac/yac_crypto_provider_test.cpp b/test/module/irohad/consensus/yac/yac_crypto_provider_test.cpp index b6c7d64aa2..3f777f1348 100644 --- a/test/module/irohad/consensus/yac/yac_crypto_provider_test.cpp +++ b/test/module/irohad/consensus/yac/yac_crypto_provider_test.cpp @@ -6,8 +6,10 @@ #include "consensus/yac/impl/yac_crypto_provider_impl.hpp" #include -#include "consensus/yac/messages.hpp" + +#include "consensus/yac/outcome_messages.hpp" #include "cryptography/crypto_provider/crypto_defaults.hpp" + #include "module/shared_model/interface_mocks.hpp" using ::testing::_; diff --git a/test/module/irohad/consensus/yac/yac_fixture.hpp b/test/module/irohad/consensus/yac/yac_fixture.hpp new file mode 100644 index 0000000000..7de6d3997d --- /dev/null +++ b/test/module/irohad/consensus/yac/yac_fixture.hpp @@ -0,0 +1,65 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_YAC_FIXTURE_HPP +#define IROHA_YAC_FIXTURE_HPP + +#include + +#include "consensus/yac/cluster_order.hpp" +#include "consensus/yac/yac.hpp" + +#include "module/irohad/consensus/yac/mock_yac_crypto_provider.hpp" +#include "module/irohad/consensus/yac/mock_yac_network.hpp" +#include "module/irohad/consensus/yac/mock_yac_timer.hpp" +#include "module/irohad/consensus/yac/yac_test_util.hpp" + +namespace iroha { + namespace consensus { + namespace yac { + + class YacTest : public ::testing::Test { + public: + // ------|Network|------ + std::shared_ptr network; + std::shared_ptr crypto; + std::shared_ptr timer; + std::shared_ptr yac; + + // ------|One round|------ + std::vector> + default_peers = [] { + std::vector> + result; + for (size_t i = 1; i <= 7; ++i) { + result.push_back(makePeer(std::to_string(i))); + } + return result; + }(); + + void SetUp() override { + network = std::make_shared(); + crypto = std::make_shared(); + timer = std::make_shared(); + auto ordering = ClusterOrdering::create(default_peers); + ASSERT_TRUE(ordering); + initYac(ordering.value()); + } + + void TearDown() override { + network->release(); + } + + void initYac(ClusterOrdering ordering) { + yac = Yac::create(YacVoteStorage(), network, crypto, timer, ordering); + network->subscribe(yac); + } + }; + + } // namespace yac + } // namespace consensus +} // namespace iroha + +#endif // IROHA_YAC_FIXTURE_HPP diff --git a/test/module/irohad/consensus/yac/yac_gate_test.cpp b/test/module/irohad/consensus/yac/yac_gate_test.cpp index cfee5f8b00..de3428045e 100644 --- a/test/module/irohad/consensus/yac/yac_gate_test.cpp +++ b/test/module/irohad/consensus/yac/yac_gate_test.cpp @@ -12,8 +12,11 @@ #include "consensus/yac/storage/yac_proposal_storage.hpp" #include "cryptography/crypto_provider/crypto_defaults.hpp" #include "framework/test_subscriber.hpp" -#include "module/irohad/consensus/yac/yac_mocks.hpp" -#include "module/irohad/network/network_mocks.hpp" + +#include "module/irohad/consensus/yac/mock_yac_hash_gate.hpp" +#include "module/irohad/consensus/yac/mock_yac_hash_provider.hpp" +#include "module/irohad/consensus/yac/mock_yac_peer_orderer.hpp" +#include "module/irohad/consensus/yac/yac_test_util.hpp" #include "module/irohad/simulator/simulator_mocks.hpp" #include "module/shared_model/interface_mocks.hpp" @@ -126,7 +129,7 @@ TEST_F(YacGateTest, YacGateSubscriptionTest) { // generate order of peers EXPECT_CALL(*peer_orderer, getOrdering(_)) - .WillOnce(Return(ClusterOrdering::create({mk_peer("fake_node")}))); + .WillOnce(Return(ClusterOrdering::create({makePeer("fake_node")}))); // make hash from block EXPECT_CALL(*hash_provider, makeHash(_)).WillOnce(Return(expected_hash)); @@ -182,7 +185,7 @@ TEST_F(YacGateTest, AgreementOnNone) { EXPECT_CALL(*hash_gate, vote(_, _)).Times(1); EXPECT_CALL(*peer_orderer, getOrdering(_)) - .WillOnce(Return(ClusterOrdering::create({mk_peer("fake_node")}))); + .WillOnce(Return(ClusterOrdering::create({makePeer("fake_node")}))); ASSERT_EQ(block_cache->get(), nullptr); @@ -202,7 +205,7 @@ TEST_F(YacGateTest, DifferentCommit) { // generate order of peers EXPECT_CALL(*peer_orderer, getOrdering(_)) - .WillOnce(Return(ClusterOrdering::create({mk_peer("fake_node")}))); + .WillOnce(Return(ClusterOrdering::create({makePeer("fake_node")}))); EXPECT_CALL(*hash_gate, vote(expected_hash, _)).Times(1); @@ -254,7 +257,7 @@ class YacGateOlderTest : public YacGateTest { // generate order of peers ON_CALL(*peer_orderer, getOrdering(_)) - 
.WillByDefault(Return(ClusterOrdering::create({mk_peer("fake_node")}))); + .WillByDefault(Return(ClusterOrdering::create({makePeer("fake_node")}))); // make hash from block ON_CALL(*hash_provider, makeHash(_)).WillByDefault(Return(expected_hash)); diff --git a/test/module/irohad/consensus/yac/yac_hash_provider_test.cpp b/test/module/irohad/consensus/yac/yac_hash_provider_test.cpp index ae6d47179c..b753d106ce 100644 --- a/test/module/irohad/consensus/yac/yac_hash_provider_test.cpp +++ b/test/module/irohad/consensus/yac/yac_hash_provider_test.cpp @@ -8,6 +8,7 @@ #include #include + #include #include #include diff --git a/test/module/irohad/consensus/yac/yac_mocks.hpp b/test/module/irohad/consensus/yac/yac_mocks.hpp deleted file mode 100644 index db0c7c4a22..0000000000 --- a/test/module/irohad/consensus/yac/yac_mocks.hpp +++ /dev/null @@ -1,266 +0,0 @@ -/** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ - -#ifndef IROHA_YAC_MOCKS_HPP -#define IROHA_YAC_MOCKS_HPP - -#include -#include "common/byteutils.hpp" -#include "consensus/yac/cluster_order.hpp" -#include "consensus/yac/messages.hpp" -#include "consensus/yac/storage/yac_proposal_storage.hpp" -#include "consensus/yac/supermajority_checker.hpp" -#include "consensus/yac/timer.hpp" -#include "consensus/yac/yac.hpp" -#include "consensus/yac/yac_crypto_provider.hpp" -#include "consensus/yac/yac_gate.hpp" -#include "consensus/yac/yac_hash_provider.hpp" -#include "consensus/yac/yac_peer_orderer.hpp" -#include "cryptography/crypto_provider/crypto_defaults.hpp" -#include "interfaces/iroha_internal/block.hpp" -#include "module/shared_model/interface_mocks.hpp" - -namespace iroha { - namespace consensus { - namespace yac { - std::shared_ptr mk_peer( - const std::string &address) { - auto key = std::string(32, '0'); - std::copy(address.begin(), address.end(), key.begin()); - auto peer = std::make_shared(); - EXPECT_CALL(*peer, address()) - .WillRepeatedly(::testing::ReturnRefOfCopy(address)); - EXPECT_CALL(*peer, pubkey()) - .WillRepeatedly(::testing::ReturnRefOfCopy( - shared_model::interface::types::PubkeyType(key))); - - return peer; - } - - /** - * Creates test signature with empty signed data, and provided pubkey - * @param pub_key - public key to put in the signature - * @return new signature - */ - std::shared_ptr createSig( - const std::string &pub_key) { - auto tmp = - shared_model::crypto::DefaultCryptoAlgorithmType::generateKeypair() - .publicKey(); - std::string key(tmp.blob().size(), 0); - std::copy(pub_key.begin(), pub_key.end(), key.begin()); - auto sig = std::make_shared(); - EXPECT_CALL(*sig, publicKey()) - .WillRepeatedly(::testing::ReturnRefOfCopy( - shared_model::crypto::PublicKey(key))); - EXPECT_CALL(*sig, signedData()) - .WillRepeatedly( - ::testing::ReturnRefOfCopy(shared_model::crypto::Signed(""))); - - return sig; - } - - VoteMessage createVote(YacHash hash, std::string pub_key) { - VoteMessage vote; - vote.hash = hash; - vote.signature = createSig(pub_key); - return vote; - } - - class MockYacCryptoProvider : public YacCryptoProvider { - public: - MOCK_METHOD1(verify, bool(const std::vector &)); - - VoteMessage getVote(YacHash hash) override { - VoteMessage vote; - vote.hash = hash; - vote.signature = createSig(""); - return vote; - } - - MockYacCryptoProvider() = default; - - MockYacCryptoProvider(const MockYacCryptoProvider &) {} - - MockYacCryptoProvider &operator=(const MockYacCryptoProvider &) { - return *this; - } - }; - - class MockTimer : public Timer { - public: - 
void invokeAfterDelay(std::function handler) override { - handler(); - } - - MOCK_METHOD0(deny, void()); - - MockTimer() = default; - - MockTimer(const MockTimer &rhs) {} - - MockTimer &operator=(const MockTimer &rhs) { - return *this; - } - }; - - class MockYacNetwork : public YacNetwork { - public: - void subscribe( - std::shared_ptr handler) override { - notification = handler; - }; - - void release() { - notification.reset(); - } - - MOCK_METHOD2(sendState, - void(const shared_model::interface::Peer &, - const std::vector &)); - - MockYacNetwork() = default; - - MockYacNetwork(const MockYacNetwork &rhs) - : notification(rhs.notification) {} - - MockYacNetwork &operator=(const MockYacNetwork &rhs) { - notification = rhs.notification; - return *this; - } - - MockYacNetwork(MockYacNetwork &&rhs) { - std::swap(notification, rhs.notification); - } - - MockYacNetwork &operator=(MockYacNetwork &&rhs) { - std::swap(notification, rhs.notification); - return *this; - } - - std::shared_ptr notification; - }; - - class MockHashGate : public HashGate { - public: - MOCK_METHOD2(vote, void(YacHash, ClusterOrdering)); - - MOCK_METHOD0(onOutcome, rxcpp::observable()); - - MockHashGate() = default; - - MockHashGate(const MockHashGate &rhs) {} - - MockHashGate(MockHashGate &&rhs) {} - - MockHashGate &operator=(const MockHashGate &rhs) { - return *this; - }; - }; - - class MockYacPeerOrderer : public YacPeerOrderer { - public: - MOCK_METHOD0(getInitialOrdering, boost::optional()); - - MOCK_METHOD1(getOrdering, - boost::optional(const YacHash &)); - - MockYacPeerOrderer() = default; - - MockYacPeerOrderer(const MockYacPeerOrderer &rhs){}; - - MockYacPeerOrderer(MockYacPeerOrderer &&rhs){}; - - MockYacPeerOrderer &operator=(const MockYacPeerOrderer &rhs) { - return *this; - }; - }; - - class MockYacHashProvider : public YacHashProvider { - public: - MOCK_CONST_METHOD1(makeHash, - YacHash(const simulator::BlockCreatorEvent &event)); - - MOCK_CONST_METHOD1( - toModelHash, - shared_model::interface::types::HashType(const YacHash &)); - - MockYacHashProvider() = default; - - MockYacHashProvider(const MockYacHashProvider &rhs){}; - - MockYacHashProvider(MockYacHashProvider &&rhs){}; - - MockYacHashProvider &operator=(const MockYacHashProvider &rhs) { - return *this; - }; - }; - - class MockYacNetworkNotifications : public YacNetworkNotifications { - public: - MOCK_METHOD1(onState, void(std::vector)); - }; - - class MockSupermajorityChecker : public SupermajorityChecker { - public: - MOCK_CONST_METHOD2( - hasSupermajority, - bool(const shared_model::interface::types::SignatureRangeType - &signatures, - const std::vector< - std::shared_ptr> &peers)); - MOCK_CONST_METHOD2(checkSize, bool(PeersNumberType, PeersNumberType)); - MOCK_CONST_METHOD2( - peersSubset, - bool(const shared_model::interface::types::SignatureRangeType - &signatures, - const std::vector< - std::shared_ptr> &peers)); - MOCK_CONST_METHOD3( - hasReject, bool(PeersNumberType, PeersNumberType, PeersNumberType)); - }; - - class YacTest : public ::testing::Test { - public: - // ------|Network|------ - std::shared_ptr network; - std::shared_ptr crypto; - std::shared_ptr timer; - std::shared_ptr yac; - - // ------|Round|------ - std::vector> - default_peers = [] { - std::vector> - result; - for (size_t i = 1; i <= 7; ++i) { - result.push_back(mk_peer(std::to_string(i))); - } - return result; - }(); - - void SetUp() override { - network = std::make_shared(); - crypto = std::make_shared(); - timer = std::make_shared(); - auto ordering = 
ClusterOrdering::create(default_peers); - ASSERT_TRUE(ordering); - initYac(ordering.value()); - } - - void TearDown() override { - network->release(); - } - - void initYac(ClusterOrdering ordering) { - yac = Yac::create(YacVoteStorage(), network, crypto, timer, ordering); - network->subscribe(yac); - } - }; - } // namespace yac - } // namespace consensus -} // namespace iroha - -#endif // IROHA_YAC_MOCKS_HPP diff --git a/test/module/irohad/consensus/yac/yac_proposal_storage_test.cpp b/test/module/irohad/consensus/yac/yac_proposal_storage_test.cpp index 7af6e0a541..a5629df948 100644 --- a/test/module/irohad/consensus/yac/yac_proposal_storage_test.cpp +++ b/test/module/irohad/consensus/yac/yac_proposal_storage_test.cpp @@ -6,9 +6,11 @@ #include "consensus/yac/storage/yac_proposal_storage.hpp" #include + #include "consensus/yac/storage/yac_common.hpp" #include "logger/logger.hpp" -#include "module/irohad/consensus/yac/yac_mocks.hpp" + +#include "module/irohad/consensus/yac/yac_test_util.hpp" using namespace iroha::consensus::yac; diff --git a/test/module/irohad/consensus/yac/yac_rainy_day_test.cpp b/test/module/irohad/consensus/yac/yac_rainy_day_test.cpp index e1b0973b7c..bb1bc6e1f0 100644 --- a/test/module/irohad/consensus/yac/yac_rainy_day_test.cpp +++ b/test/module/irohad/consensus/yac/yac_rainy_day_test.cpp @@ -3,10 +3,10 @@ * SPDX-License-Identifier: Apache-2.0 */ -#include #include "consensus/yac/storage/yac_proposal_storage.hpp" #include "framework/test_subscriber.hpp" -#include "module/irohad/consensus/yac/yac_mocks.hpp" + +#include "module/irohad/consensus/yac/yac_fixture.hpp" using ::testing::_; using ::testing::An; diff --git a/test/module/irohad/consensus/yac/yac_simple_cold_case_test.cpp b/test/module/irohad/consensus/yac/yac_simple_cold_case_test.cpp index 16051d9c4d..d2122aa0b2 100644 --- a/test/module/irohad/consensus/yac/yac_simple_cold_case_test.cpp +++ b/test/module/irohad/consensus/yac/yac_simple_cold_case_test.cpp @@ -9,11 +9,10 @@ #include #include -#include -#include #include "consensus/yac/storage/yac_proposal_storage.hpp" + #include "framework/test_subscriber.hpp" -#include "yac_mocks.hpp" +#include "module/irohad/consensus/yac/yac_fixture.hpp" using ::testing::_; using ::testing::An; diff --git a/test/module/irohad/consensus/yac/yac_sunny_day_test.cpp b/test/module/irohad/consensus/yac/yac_sunny_day_test.cpp index b00d3884aa..3b80e47b66 100644 --- a/test/module/irohad/consensus/yac/yac_sunny_day_test.cpp +++ b/test/module/irohad/consensus/yac/yac_sunny_day_test.cpp @@ -6,11 +6,10 @@ #include #include -#include -#include #include "consensus/yac/storage/yac_proposal_storage.hpp" + #include "framework/test_subscriber.hpp" -#include "module/irohad/consensus/yac/yac_mocks.hpp" +#include "module/irohad/consensus/yac/yac_fixture.hpp" using ::testing::_; using ::testing::An; diff --git a/test/module/irohad/consensus/yac/yac_test_util.hpp b/test/module/irohad/consensus/yac/yac_test_util.hpp new file mode 100644 index 0000000000..c2731dffc3 --- /dev/null +++ b/test/module/irohad/consensus/yac/yac_test_util.hpp @@ -0,0 +1,46 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_YAC_TEST_UTIL_HPP +#define IROHA_YAC_TEST_UTIL_HPP + +#include + +#include "consensus/yac/vote_message.hpp" +#include "consensus/yac/yac_hash_provider.hpp" + +#include "module/irohad/consensus/yac/mock_yac_crypto_provider.hpp" +#include "module/shared_model/interface_mocks.hpp" + +namespace iroha { + namespace consensus { + namespace yac { + + inline std::shared_ptr makePeer( + const std::string &address) { + auto key = std::string(32, '0'); + std::copy(address.begin(), address.end(), key.begin()); + auto peer = std::make_shared(); + EXPECT_CALL(*peer, address()) + .WillRepeatedly(::testing::ReturnRefOfCopy(address)); + EXPECT_CALL(*peer, pubkey()) + .WillRepeatedly(::testing::ReturnRefOfCopy( + shared_model::interface::types::PubkeyType(key))); + + return peer; + } + + inline VoteMessage createVote(YacHash hash, const std::string &pub_key) { + VoteMessage vote; + vote.hash = std::move(hash); + vote.signature = createSig(pub_key); + return vote; + } + + } // namespace yac + } // namespace consensus +} // namespace iroha + +#endif // IROHA_YAC_TEST_UTIL_HPP diff --git a/test/module/irohad/consensus/yac/yac_unknown_peer_test.cpp b/test/module/irohad/consensus/yac/yac_unknown_peer_test.cpp index c4e2a2e340..11086c2435 100644 --- a/test/module/irohad/consensus/yac/yac_unknown_peer_test.cpp +++ b/test/module/irohad/consensus/yac/yac_unknown_peer_test.cpp @@ -4,8 +4,9 @@ */ #include "consensus/yac/storage/yac_proposal_storage.hpp" + #include "framework/test_subscriber.hpp" -#include "yac_mocks.hpp" +#include "module/irohad/consensus/yac/yac_fixture.hpp" using ::testing::_; using ::testing::An; diff --git a/test/module/irohad/validation/chain_validation_test.cpp b/test/module/irohad/validation/chain_validation_test.cpp index 98998e2915..0d7560dfa9 100644 --- a/test/module/irohad/validation/chain_validation_test.cpp +++ b/test/module/irohad/validation/chain_validation_test.cpp @@ -8,7 +8,7 @@ #include #include "module/irohad/ametsuchi/mock_mutable_storage.hpp" #include "module/irohad/ametsuchi/mock_peer_query.hpp" -#include "module/irohad/consensus/yac/yac_mocks.hpp" +#include "module/irohad/consensus/yac/mock_yac_supermajority_checker.hpp" #include "module/shared_model/interface_mocks.hpp" using namespace iroha; diff --git a/test/module/shared_model/interface_mocks.hpp b/test/module/shared_model/interface_mocks.hpp index 85d42c2fb8..5ac8f8abee 100644 --- a/test/module/shared_model/interface_mocks.hpp +++ b/test/module/shared_model/interface_mocks.hpp @@ -18,6 +18,8 @@ #include "interfaces/iroha_internal/unsafe_proposal_factory.hpp" #include "interfaces/transaction.hpp" + +// TODO: 2019-01-18 @muratovv Separate file by classes IR-229 struct MockBlock : public shared_model::interface::Block { MOCK_CONST_METHOD0(txsNumber, shared_model::interface::types::TransactionsNumberType()); From 4feddbe024273321b67b64907fef97b180e2977c Mon Sep 17 00:00:00 2001 From: Mikhail Boldyrev Date: Mon, 4 Feb 2019 09:47:11 +0300 Subject: [PATCH 28/41] small fix to ansible docs (#2068) Signed-off-by: Mikhail Boldyrev --- deploy/ansible/roles/iroha-docker/README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/deploy/ansible/roles/iroha-docker/README.md b/deploy/ansible/roles/iroha-docker/README.md index b5a8aa2536..4b0a92c3f9 100644 --- a/deploy/ansible/roles/iroha-docker/README.md +++ b/deploy/ansible/roles/iroha-docker/README.md @@ -7,7 +7,7 @@ The first one is easier to implement since it does not require preliminary confi This 
option is enabled by default. -The second one can be used when there is an overlay network exists between the hosts. In short, overlay network allows for Docker containers to communicate using a single subnet. Such that each container would have a unique IP address in that subnet. Learn more in official Docker documentation (https://docs.docker.com/network/overlay). We recommend to use Calico for setting up Docker overlay network since it can be used as a network plugin (https://docs.projectcalico.org/v1.5/getting-started/docker/tutorials/basic). +The second one can be used when there exists an overlay network between the hosts. In short, overlay network allows for Docker containers to communicate using a single subnet. Such that each container would have a unique IP address in that subnet. Learn more in official Docker documentation (https://docs.docker.com/network/overlay). We recommend to use Calico for setting up Docker overlay network since it can be used as a network plugin (https://docs.projectcalico.org/v1.5/getting-started/docker/tutorials/basic). The second way is also suitable for local-only deployments. @@ -20,6 +20,7 @@ The second way is also suitable for local-only deployments. - Docker (>=17.12) - python3 - PIP modules: docker, docker-compose + There is a role for setting up a remote part of the dependencies named `docker`. It works for Ubuntu OS only. Check `iroha-docker` playbook. ### Note: @@ -42,8 +43,8 @@ The second way is also suitable for local-only deployments. `cd ../../ && ansible-playbook -b -e 'ansible_ssh_user=ubuntu' -i inventory/iroha.list playbooks/iroha-docker/main.yml` -This will deploy 6 Iroha Docker containers along with 6 Postgres containers on a remote host. Remote user is `ubuntu`. Torii port of each container is exposed on the host. Iroha peer can be communicated over port defined in `iroha_torii_port` variable (50051 by default). Overall, each host will listen the following port range: `iroha_torii_port` + *number-of-containers*. -During installation it will also install Docker along with required python modules. If you want to skip this step, comment out `docker` role in the playbook (`playbooks/iroha-docker/main.yml`) +This will deploy 6 Iroha Docker containers along with 6 Postgres containers on the remote host specified in `iroha.list` file. Remote user is `ubuntu`. Torii port of each container is exposed on the host. Iroha peer can be communicated over port defined in `iroha_torii_port` variable (50051 by default). Overall, each host will listen the following port range: `iroha_torii_port` ... `iroha_torii_port` + *number-of-containers* - 1. +It will also install Docker along with required python modules. If you want to skip this step, comment out `docker` role in the playbook (`playbooks/iroha-docker/main.yml`) ### Note: > This command escalates privileges on a remote host during the run. It is required to be able to spin up Docker containers. We recommend to run the playbook using a passwordless remote sudo user. 
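For reference, the port-range statement in the README above can be checked once the playbook has finished. The following Python sketch is not part of the role; the host address is a placeholder, and the base port and container count are assumptions that must match your `iroha_torii_port` setting and the number of containers in your inventory (50051 and 6 by default).

```python
#!/usr/bin/env python3
# Minimal reachability check for the Torii ports exposed by one deployment host.
# Assumes the role defaults: iroha_torii_port = 50051 and 6 containers per host.
import socket

HOST = "203.0.113.10"   # placeholder: address of the deployment host
BASE_PORT = 50051       # iroha_torii_port
CONTAINERS = 6          # number of Iroha containers deployed on the host

for i in range(CONTAINERS):
    port = BASE_PORT + i  # ports run from BASE_PORT to BASE_PORT + CONTAINERS - 1
    try:
        with socket.create_connection((HOST, port), timeout=3):
            print("peer %d: %s:%d is reachable" % (i, HOST, port))
    except OSError as err:
        print("peer %d: %s:%d is NOT reachable (%s)" % (i, HOST, port, err))
```

Each reachable port corresponds to the Torii endpoint of one Iroha container on that host.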
From 861212f980d109b3b76b561eacdcabec51493400 Mon Sep 17 00:00:00 2001 From: Nikolay Yushkevich Date: Mon, 4 Feb 2019 14:06:25 +0300 Subject: [PATCH 29/41] IR-150 Fix security vulnerability (#2039) Signed-off-by: Nikolay Yushkevich --- docs/source/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/requirements.txt b/docs/source/requirements.txt index ce32077476..645caa0034 100644 --- a/docs/source/requirements.txt +++ b/docs/source/requirements.txt @@ -17,7 +17,7 @@ port-for==0.3.1 protobuf==3.5.1 Pygments==2.2.0 pytz==2017.3 -PyYAML==3.13 +PyYAML==4.2b1 requests==2.20.1 restructuredtext-lint==1.1.2 singledispatch==3.4.0.3 @@ -30,6 +30,6 @@ sphinx-rtd-theme==0.4.2 sphinxcontrib-websupport==1.0.1 tornado==4.5.3 typing==3.6.2 -urllib3==1.22 +urllib3==1.23 watchdog==0.8.3 yarg==0.1.9 From a182754c774b66657b789e3db1ec0401da05aa95 Mon Sep 17 00:00:00 2001 From: Andrei Lebedev Date: Tue, 5 Feb 2019 07:42:14 +0300 Subject: [PATCH 30/41] Add send-tx load test based on locust.io (#2062) Signed-off-by: Andrei Lebedev --- test/load/Dockerfile | 10 ++++++ test/load/README.md | 32 +++++++++++++++++ test/load/docker-compose.yml | 27 ++++++++++++++ test/load/docker_start.sh | 25 +++++++++++++ test/load/locustfile.py | 68 ++++++++++++++++++++++++++++++++++++ 5 files changed, 162 insertions(+) create mode 100644 test/load/Dockerfile create mode 100644 test/load/README.md create mode 100644 test/load/docker-compose.yml create mode 100755 test/load/docker_start.sh create mode 100644 test/load/locustfile.py diff --git a/test/load/Dockerfile b/test/load/Dockerfile new file mode 100644 index 0000000000..7243bf9f96 --- /dev/null +++ b/test/load/Dockerfile @@ -0,0 +1,10 @@ +FROM python:3.6.6-alpine3.8 + +RUN apk --no-cache add g++ \ + && pip install locustio pyzmq + +EXPOSE 8089 5557 5558 + +ENTRYPOINT ["/usr/local/bin/locust"] + +RUN pip install grpcio-tools diff --git a/test/load/README.md b/test/load/README.md new file mode 100644 index 0000000000..f059f8665b --- /dev/null +++ b/test/load/README.md @@ -0,0 +1,32 @@ +# Load tests + +See [locustfile.py](locustfile.py) for descriptions of task sets implemented using [locust](https://github.com/locustio/locust) framework. + +## Prerequisites + + * Docker + * Docker Compose + * Python 3 + +## Build steps + +1. Create a Docker image with Python, locust, and gRPC. +```sh +docker build -t iroha-locust . +``` + +2. Copy [irohalib.py](https://github.com/hyperledger/iroha/blob/master/example/python/irohalib.py) and [ed25519.py](https://github.com/hyperledger/iroha/blob/master/example/python/ed25519.py) and follow the preparation steps in [libiroha.md](https://github.com/hyperledger/iroha/blob/master/example/python/irohalib.md). + +## Running the tests + +1. Specify Iroha node address and port in `TARGET_URL` in Compose [file](docker-compose.yml). + +**Note for Mac hosts** If you are running Iroha on the same host as locust, you most likely need to use `docker.for.mac.localhost:50051`, where `50051` is Torii port. + +2. Run locust +```sh +docker-compose up +``` +4 slaves can be spawned by adding `--scale locust-slave=4` to the previous command. + +3. Access locust web interface as specified in [documentation](https://docs.locust.io/en/stable/quickstart.html#open-up-locust-s-web-interface). 
diff --git a/test/load/docker-compose.yml b/test/load/docker-compose.yml new file mode 100644 index 0000000000..e789b86cb2 --- /dev/null +++ b/test/load/docker-compose.yml @@ -0,0 +1,27 @@ +version: "3.4" + +x-common: &common + image: iroha-locust + environment: &common-env + TARGET_URL: 127.0.0.1:50051 + LOCUSTFILE_PATH: /tests/locustfile.py + volumes: + - ./:/tests + entrypoint: + - /tests/docker_start.sh + +services: + locust-master: + <<: *common + ports: + - 8089:8089 + environment: + <<: *common-env + LOCUST_MODE: master + + locust-slave: + <<: *common + environment: + <<: *common-env + LOCUST_MODE: slave + LOCUST_MASTER_HOST: locust-master diff --git a/test/load/docker_start.sh b/test/load/docker_start.sh new file mode 100755 index 0000000000..66dd5e1aaf --- /dev/null +++ b/test/load/docker_start.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env ash + +if [ -z "${TARGET_URL}" ]; then + echo "ERROR: TARGET_URL not configured" >&2 + exit 1 +fi + +LOCUST_MODE="${LOCUST_MODE:=standalone}" +LOCUST_OPTS="-f ${LOCUSTFILE_PATH:-/locustfile.py} -H ${TARGET_URL}" + +if [ "${LOCUST_MODE}" = "master" ]; then + LOCUST_OPTS="${LOCUST_OPTS} --master" +elif [ "${LOCUST_MODE}" = "slave" ]; then + if [ -z "${LOCUST_MASTER_HOST}" ]; then + echo "ERROR: MASTER_HOST is empty. Slave mode requires a master" >&2 + exit 1 + fi + + LOCUST_OPTS="${LOCUST_OPTS} --slave --master-host=${LOCUST_MASTER_HOST} --master-port=${LOCUST_MASTER_PORT:-5557}" +fi + +echo "Starting Locust..." +echo "$ locust ${LOCUST_OPTS}" + +locust ${LOCUST_OPTS} diff --git a/test/load/locustfile.py b/test/load/locustfile.py new file mode 100644 index 0000000000..0e4e1260a1 --- /dev/null +++ b/test/load/locustfile.py @@ -0,0 +1,68 @@ +import os +import time +import binascii +import grpc +from irohalib import Iroha, IrohaGrpc +from irohalib import IrohaCrypto as ic + +from locust import Locust, TaskSet, events, task + +HOSTNAME = os.environ['HOSTNAME'] + +class IrohaClient(IrohaGrpc): + """ + Simple, sample Iroha gRPC client implementation that wraps IrohaGrpc and + fires locust events on request_success and request_failure, so that all requests + gets tracked in locust's statistics. + """ + def __getattribute__(self, name): + func = IrohaGrpc.__getattribute__(self, name) + if hasattr(func, '__call__'): + def wrapper(*args, **kwargs): + start_time = time.time() + try: + result = func(*args, **kwargs) + except grpc.RpcError as e: + total_time = int((time.time() - start_time) * 1000) + events.request_failure.fire(request_type="grpc", name=name, response_time=total_time, exception=e) + else: + total_time = int((time.time() - start_time) * 1000) + events.request_success.fire(request_type="grpc", name=name, response_time=total_time, response_length=0) + # In this example, I've hardcoded response_length=0. If we would want the response length to be + # reported correctly in the statistics, we would probably need to hook in at a lower level + return result + + return wrapper + else: + return func + + +class IrohaLocust(Locust): + """ + This is the abstract Locust class which should be subclassed. It provides an Iroha gRPC client + that can be used to make gRPC requests that will be tracked in Locust's statistics. 
+ """ + def __init__(self, *args, **kwargs): + super(IrohaLocust, self).__init__(*args, **kwargs) + self.client = IrohaClient(self.host) + + +class ApiUser(IrohaLocust): + + host = "127.0.0.1:50051" + min_wait = 1 + max_wait = 10 + + class task_set(TaskSet): + @task + def send_tx(self): + iroha = Iroha('admin@test') + admin_private_key = 'f101537e319568c765b2cc89698325604991dca57b9716b58016b253506cab70' + + tx = iroha.transaction([iroha.command( + 'TransferAsset', src_account_id='admin@test', dest_account_id='test@test', asset_id='coin#test', + amount='0.01', description=HOSTNAME + )]) + ic.sign_transaction(tx, admin_private_key) + + self.client.send_tx(tx) From 8f01d94198b4904c232512fb527ea505dedfe942 Mon Sep 17 00:00:00 2001 From: Andrei Lebedev Date: Tue, 5 Feb 2019 11:31:43 +0300 Subject: [PATCH 31/41] Fix load test documentation (#2077) Signed-off-by: Andrei Lebedev --- test/load/README.md | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/test/load/README.md b/test/load/README.md index f059f8665b..52eb3a73df 100644 --- a/test/load/README.md +++ b/test/load/README.md @@ -11,22 +11,23 @@ See [locustfile.py](locustfile.py) for descriptions of task sets implemented usi ## Build steps 1. Create a Docker image with Python, locust, and gRPC. -```sh -docker build -t iroha-locust . -``` + ```sh + docker build -t iroha-locust . + ``` -2. Copy [irohalib.py](https://github.com/hyperledger/iroha/blob/master/example/python/irohalib.py) and [ed25519.py](https://github.com/hyperledger/iroha/blob/master/example/python/ed25519.py) and follow the preparation steps in [libiroha.md](https://github.com/hyperledger/iroha/blob/master/example/python/irohalib.md). +2. Copy [irohalib.py](https://github.com/hyperledger/iroha/blob/master/example/python/irohalib.py) and [ed25519.py](https://github.com/hyperledger/iroha/blob/master/example/python/ed25519.py) to the current directory and follow the preparation steps in [libiroha.md](https://github.com/hyperledger/iroha/blob/master/example/python/irohalib.md). ## Running the tests 1. Specify Iroha node address and port in `TARGET_URL` in Compose [file](docker-compose.yml). -**Note for Mac hosts** If you are running Iroha on the same host as locust, you most likely need to use `docker.for.mac.localhost:50051`, where `50051` is Torii port. + **Note for Mac hosts** If you are running Iroha on the same host as locust, you most likely need to use `docker.for.mac.localhost:50051`, where `50051` is Torii port. 2. Run locust -```sh -docker-compose up -``` -4 slaves can be spawned by adding `--scale locust-slave=4` to the previous command. + ```sh + docker-compose up + ``` + + 4 slaves can be spawned by adding `--scale locust-slave=4` to the previous command. 3. Access locust web interface as specified in [documentation](https://docs.locust.io/en/stable/quickstart.html#open-up-locust-s-web-interface). 
From 7e28e912a8a7e2ac96409f0ec91b47ae971d3b4f Mon Sep 17 00:00:00 2001 From: Akvinikym Date: Tue, 5 Feb 2019 21:26:23 +0300 Subject: [PATCH 32/41] Selective block streaming (#2031) Signed-off-by: Akvinikym --- docs/source/api/queries.rst | 55 ++++++++++ .../ametsuchi/impl/postgres_block_query.hpp | 2 +- .../impl/postgres_query_executor.cpp | 93 +++++++++++++--- .../impl/postgres_query_executor.hpp | 13 +++ irohad/ametsuchi/query_executor.hpp | 5 + .../processor/impl/query_processor_impl.cpp | 4 +- shared_model/backend/protobuf/CMakeLists.txt | 2 + .../impl/proto_query_response_factory.cpp | 16 +++ .../protobuf/proto_query_response_factory.hpp | 6 +- .../protobuf/queries/impl/proto_get_block.cpp | 29 +++++ .../protobuf/queries/impl/proto_query.cpp | 4 +- .../protobuf/queries/proto_blocks_query.hpp | 2 +- .../protobuf/queries/proto_get_block.hpp | 36 ++++++ .../impl/proto_get_block_response.cpp | 35 ++++++ .../impl/proto_query_response.cpp | 4 +- .../proto_get_block_response.hpp | 38 +++++++ .../builder_templates/query_template.hpp | 7 ++ shared_model/interfaces/CMakeLists.txt | 1 + .../iroha_internal/query_response_factory.hpp | 10 ++ shared_model/interfaces/queries/get_block.hpp | 28 +++++ .../interfaces/queries/impl/get_block.cpp | 23 ++++ .../interfaces/queries/impl/query.cpp | 1 + shared_model/interfaces/queries/query.hpp | 4 +- .../query_responses/impl/query_response.cpp | 1 + .../query_responses/query_response.hpp | 4 +- shared_model/schema/qry_responses.proto | 1 + shared_model/schema/queries.proto | 5 + shared_model/validators/query_validator.hpp | 10 ++ .../postgres_query_executor_test.cpp | 103 ++++++++++++++++++ .../torii/processor/query_processor_test.cpp | 6 +- .../validators/field_validator_test.cpp | 4 + .../validators/validators_fixture.hpp | 3 + 32 files changed, 529 insertions(+), 26 deletions(-) create mode 100644 shared_model/backend/protobuf/queries/impl/proto_get_block.cpp create mode 100644 shared_model/backend/protobuf/queries/proto_get_block.hpp create mode 100644 shared_model/backend/protobuf/query_responses/impl/proto_get_block_response.cpp create mode 100644 shared_model/backend/protobuf/query_responses/proto_get_block_response.hpp create mode 100644 shared_model/interfaces/queries/get_block.hpp create mode 100644 shared_model/interfaces/queries/impl/get_block.cpp diff --git a/docs/source/api/queries.rst b/docs/source/api/queries.rst index d7bc660e94..a7e446bd65 100644 --- a/docs/source/api/queries.rst +++ b/docs/source/api/queries.rst @@ -81,6 +81,61 @@ Possible Stateful Validation Errors "2", "No such permissions", "Query's creator does not have any of the permissions to get account", "Grant the necessary permission: individual, global or domain one" "3", "Invalid signatures", "Signatures of this query did not pass validation", "Add more signatures and make sure query's signatures are a subset of account's signatories" +Get Block +^^^^^^^^^ + +Purpose +------- + +Purpose of get block query is to get a specific block, using its height as an identifier + +Request Schema +-------------- + +.. code-block:: proto + + message GetBlock { + uint64 height = 1; + } + + +Request Structure +----------------- + +.. csv-table:: + :header: "Field", "Description", "Constraint", "Example" + :widths: 15, 30, 20, 15 + + "Height", "height of the block to be retrieved", "0 < height < 2^64", "42" + +Response Schema +--------------- + +.. code-block:: proto + + message BlockResponse { + Block block = 1; + } + +Response Structure +------------------ + +.. 
csv-table:: + :header: "Field", "Description", "Constraint", "Example" + :widths: 15, 30, 20, 15 + + "Block", "the retrieved block", "block structure", "block" + +Possible Stateful Validation Errors +----------------------------------- + +.. csv-table:: + :header: "Code", "Error Name", "Description", "How to solve" + + "1", "Could not get block", "Internal error happened", "Try again or contact developers" + "2", "No such permissions", "Query's creator does not have a permission to get block", "Grant the necessary permission" + "3", "Invalid height", "Supplied height is not uint_64 or greater than the ledger's height", "Check the height and try again" + Get Signatories ^^^^^^^^^^^^^^^ diff --git a/irohad/ametsuchi/impl/postgres_block_query.hpp b/irohad/ametsuchi/impl/postgres_block_query.hpp index aae6ed8360..26ef79533e 100644 --- a/irohad/ametsuchi/impl/postgres_block_query.hpp +++ b/irohad/ametsuchi/impl/postgres_block_query.hpp @@ -56,7 +56,7 @@ namespace iroha { private: /** - * Retrieve block with given id block storage + * Retrieve block with given id from block storage * @param id - height of a block to retrieve * @return block with given height */ diff --git a/irohad/ametsuchi/impl/postgres_query_executor.cpp b/irohad/ametsuchi/impl/postgres_query_executor.cpp index 8a0d3658b0..3f9293f8bb 100644 --- a/irohad/ametsuchi/impl/postgres_query_executor.cpp +++ b/irohad/ametsuchi/impl/postgres_query_executor.cpp @@ -26,6 +26,7 @@ #include "interfaces/queries/get_account_detail.hpp" #include "interfaces/queries/get_account_transactions.hpp" #include "interfaces/queries/get_asset_info.hpp" +#include "interfaces/queries/get_block.hpp" #include "interfaces/queries/get_pending_transactions.hpp" #include "interfaces/queries/get_role_permissions.hpp" #include "interfaces/queries/get_roles.hpp" @@ -48,7 +49,7 @@ namespace { return res.at(1); } - std::string checkAccountRolePermission( + std::string getAccountRolePermissionCheckSql( shared_model::interface::permissions::Role permission, const std::string &account_alias = "role_account_id") { const auto perm_str = @@ -312,14 +313,25 @@ namespace iroha { log_->error("query signatories did not pass validation"); return false; } + if (not visitor_.hasAccountRolePermission(Role::kGetBlocks, + query.creatorAccountId())) { + log_->error("query creator does not have enough permissions"); + return false; + } + + return true; + } + + bool PostgresQueryExecutorVisitor::hasAccountRolePermission( + shared_model::interface::permissions::Role permission, + const std::string &account_id) const { using T = boost::tuple; boost::format cmd(R"(%s)"); try { soci::rowset st = - (sql_->prepare - << (cmd % checkAccountRolePermission(Role::kGetBlocks)).str(), - soci::use(query.creatorAccountId(), "role_account_id")); - + (sql_.prepare + << (cmd % getAccountRolePermissionCheckSql(permission)).str(), + soci::use(account_id, "role_account_id")); return st.begin()->get<0>(); } catch (const std::exception &e) { log_->error("Failed to validate query: {}", e.what()); @@ -576,6 +588,57 @@ namespace iroha { Role::kGetDomainAccounts)); } + QueryExecutorResult PostgresQueryExecutorVisitor::operator()( + const shared_model::interface::GetBlock &q) { + if (not hasAccountRolePermission(Role::kGetBlocks, creator_id_)) { + // no permission + return query_response_factory_->createErrorQueryResponse( + shared_model::interface::QueryResponseFactory::ErrorQueryType:: + kStatefulFailed, + notEnoughPermissionsResponse(perm_converter_, Role::kGetBlocks)(), + 2, + query_hash_); + } + + auto 
ledger_height = block_store_.last_id(); + if (q.height() > ledger_height) { + // invalid height + return logAndReturnErrorResponse( + QueryErrorType::kStatefulFailed, + "requested height (" + std::to_string(q.height()) + + ") is greater than the ledger's one (" + + std::to_string(ledger_height) + ")", + 3); + } + + auto block_deserialization_msg = [height = q.height()] { + return "could not retrieve block with given height: " + + std::to_string(height); + }; + auto serialized_block = block_store_.get(q.height()); + if (not serialized_block) { + // for some reason, block with such height was not retrieved + return logAndReturnErrorResponse( + QueryErrorType::kStatefulFailed, block_deserialization_msg(), 1); + } + + return converter_->deserialize(bytesToString(*serialized_block)) + .match( + [this](iroha::expected::Value< + std::unique_ptr> &block) { + return this->query_response_factory_->createBlockResponse( + std::move(block.value), query_hash_); + }, + [this, err_msg = block_deserialization_msg()](const auto &err) { + auto extended_error = + err_msg + ", because it was not deserialized: " + err.error; + return this->logAndReturnErrorResponse( + QueryErrorType::kStatefulFailed, + std::move(extended_error), + 1); + }); + } + QueryExecutorResult PostgresQueryExecutorVisitor::operator()( const shared_model::interface::GetSignatories &q) { using QueryTuple = QueryType; @@ -650,7 +713,8 @@ namespace iroha { }; auto check_query = [this](const auto &q) { - if (this->existsInDb("account", "account_id", "quorum", q.accountId())) { + if (this->existsInDb( + "account", "account_id", "quorum", q.accountId())) { return QueryFallbackCheckResult{}; } return QueryFallbackCheckResult{ @@ -679,7 +743,8 @@ namespace iroha { QueryType; using PermissionTuple = boost::tuple; - auto cmd = (boost::format(R"(WITH has_my_perm AS (%s), + auto cmd = + (boost::format(R"(WITH has_my_perm AS (%s), has_all_perm AS (%s), t AS ( SELECT height, hash FROM position_by_hash WHERE hash IN (%s) @@ -687,10 +752,10 @@ namespace iroha { SELECT height, hash, has_my_perm.perm, has_all_perm.perm FROM t RIGHT OUTER JOIN has_my_perm ON TRUE RIGHT OUTER JOIN has_all_perm ON TRUE - )") % checkAccountRolePermission(Role::kGetMyTxs, "account_id") - % checkAccountRolePermission(Role::kGetAllTxs, "account_id") - % hash_str) - .str(); + )") % getAccountRolePermissionCheckSql(Role::kGetMyTxs, "account_id") + % getAccountRolePermissionCheckSql(Role::kGetAllTxs, "account_id") + % hash_str) + .str(); return executeQuery( [&] { @@ -921,7 +986,7 @@ namespace iroha { R"(WITH has_perms AS (%s) SELECT role_id, perm FROM role RIGHT OUTER JOIN has_perms ON TRUE - )") % checkAccountRolePermission(Role::kGetRoles)) + )") % getAccountRolePermissionCheckSql(Role::kGetRoles)) .str(); return executeQuery( @@ -953,7 +1018,7 @@ namespace iroha { WHERE role_id = :role_name) SELECT permission, perm FROM perms RIGHT OUTER JOIN has_perms ON TRUE - )") % checkAccountRolePermission(Role::kGetRoles)) + )") % getAccountRolePermissionCheckSql(Role::kGetRoles)) .str(); return executeQuery( @@ -991,7 +1056,7 @@ namespace iroha { WHERE asset_id = :asset_id) SELECT domain_id, precision, perm FROM perms RIGHT OUTER JOIN has_perms ON TRUE - )") % checkAccountRolePermission(Role::kReadAssets)) + )") % getAccountRolePermissionCheckSql(Role::kReadAssets)) .str(); return executeQuery( diff --git a/irohad/ametsuchi/impl/postgres_query_executor.hpp b/irohad/ametsuchi/impl/postgres_query_executor.hpp index 57fff9aa57..6115e515b5 100644 --- 
a/irohad/ametsuchi/impl/postgres_query_executor.hpp +++ b/irohad/ametsuchi/impl/postgres_query_executor.hpp @@ -65,9 +65,22 @@ namespace iroha { void setQueryHash(const shared_model::crypto::Hash &query_hash); + /** + * Check that account has a specific role permission + * @param permission to be in that account + * @param account_id of account to be checked + * @return true, if account has that permission, false otherwise + */ + bool hasAccountRolePermission( + shared_model::interface::permissions::Role permission, + const std::string &account_id) const; + QueryExecutorResult operator()( const shared_model::interface::GetAccount &q); + QueryExecutorResult operator()( + const shared_model::interface::GetBlock &q); + QueryExecutorResult operator()( const shared_model::interface::GetSignatories &q); diff --git a/irohad/ametsuchi/query_executor.hpp b/irohad/ametsuchi/query_executor.hpp index c2764ecd2b..6e0406cb0f 100644 --- a/irohad/ametsuchi/query_executor.hpp +++ b/irohad/ametsuchi/query_executor.hpp @@ -27,6 +27,9 @@ namespace iroha { virtual ~QueryExecutor() = default; /** * Execute and validate query. + * @param query to validate and execute + * @param validate_signatories - if signatories should be validated + * @return pointer to query response */ virtual QueryExecutorResult validateAndExecute( const shared_model::interface::Query &query, @@ -34,6 +37,8 @@ namespace iroha { /** * Perform BlocksQuery validation + * @param query to validate + * @param validate_signatories - if signatories should be validated * @return true if valid, false otherwise */ virtual bool validate(const shared_model::interface::BlocksQuery &query, diff --git a/irohad/torii/processor/impl/query_processor_impl.cpp b/irohad/torii/processor/impl/query_processor_impl.cpp index 1dd14a1625..90641a8cb6 100644 --- a/irohad/torii/processor/impl/query_processor_impl.cpp +++ b/irohad/torii/processor/impl/query_processor_impl.cpp @@ -6,11 +6,11 @@ #include "torii/processor/query_processor_impl.hpp" #include - #include "common/bind.hpp" #include "interfaces/queries/blocks_query.hpp" #include "interfaces/queries/query.hpp" #include "interfaces/query_responses/block_query_response.hpp" +#include "interfaces/query_responses/block_response.hpp" #include "interfaces/query_responses/query_response.hpp" #include "validation/utils.hpp" @@ -61,7 +61,7 @@ namespace iroha { })) { std::shared_ptr response = response_factory_->createBlockQueryResponse("stateful invalid"); - return rxcpp::observable<>::just(response); + return rxcpp::observable<>::just(std::move(response)); } return blocks_query_subject_.get_observable(); } diff --git a/shared_model/backend/protobuf/CMakeLists.txt b/shared_model/backend/protobuf/CMakeLists.txt index 52d1db960b..27fababca3 100644 --- a/shared_model/backend/protobuf/CMakeLists.txt +++ b/shared_model/backend/protobuf/CMakeLists.txt @@ -37,6 +37,7 @@ add_library(shared_model_proto_backend queries/impl/proto_get_account_detail.cpp queries/impl/proto_get_account_transactions.cpp queries/impl/proto_get_asset_info.cpp + queries/impl/proto_get_block.cpp queries/impl/proto_get_role_permissions.cpp queries/impl/proto_get_roles.cpp queries/impl/proto_get_signatories.cpp @@ -63,6 +64,7 @@ if (IROHA_ROOT_PROJECT) query_responses/impl/proto_block_query_response.cpp query_responses/impl/proto_block_response.cpp query_responses/impl/proto_block_error_response.cpp + query_responses/impl/proto_get_block_response.cpp transaction_responses/impl/proto_tx_response.cpp ) endif () diff --git 
a/shared_model/backend/protobuf/impl/proto_query_response_factory.cpp b/shared_model/backend/protobuf/impl/proto_query_response_factory.cpp index 653c425e08..206c2db74a 100644 --- a/shared_model/backend/protobuf/impl/proto_query_response_factory.cpp +++ b/shared_model/backend/protobuf/impl/proto_query_response_factory.cpp @@ -118,6 +118,22 @@ shared_model::proto::ProtoQueryResponseFactory::createAccountResponse( query_hash); } +std::unique_ptr +shared_model::proto::ProtoQueryResponseFactory::createBlockResponse( + std::unique_ptr block, + const crypto::Hash &query_hash) const { + return createQueryResponse( + [block = std::move(block)]( + iroha::protocol::QueryResponse &protocol_query_response) { + iroha::protocol::BlockResponse *protocol_specific_response = + protocol_query_response.mutable_block_response(); + *protocol_specific_response->mutable_block()->mutable_block_v1() = + static_cast(block.get()) + ->getTransport(); + }, + query_hash); +} + std::unique_ptr shared_model::proto::ProtoQueryResponseFactory::createErrorQueryResponse( ErrorQueryType error_type, diff --git a/shared_model/backend/protobuf/proto_query_response_factory.hpp b/shared_model/backend/protobuf/proto_query_response_factory.hpp index fab03233f7..c97e14c201 100644 --- a/shared_model/backend/protobuf/proto_query_response_factory.hpp +++ b/shared_model/backend/protobuf/proto_query_response_factory.hpp @@ -31,6 +31,10 @@ namespace shared_model { std::vector roles, const crypto::Hash &query_hash) const override; + std::unique_ptr createBlockResponse( + std::unique_ptr block, + const crypto::Hash &query_hash) const override; + std::unique_ptr createErrorQueryResponse( ErrorQueryType error_type, interface::ErrorQueryResponse::ErrorMessageType error_msg, @@ -55,7 +59,7 @@ namespace shared_model { std::unique_ptr createTransactionsPageResponse( std::vector> - transactions, + transactions, interface::types::TransactionsNumberType all_transactions_size, const crypto::Hash &query_hash) const override; diff --git a/shared_model/backend/protobuf/queries/impl/proto_get_block.cpp b/shared_model/backend/protobuf/queries/impl/proto_get_block.cpp new file mode 100644 index 0000000000..033ed35cca --- /dev/null +++ b/shared_model/backend/protobuf/queries/impl/proto_get_block.cpp @@ -0,0 +1,29 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#include "backend/protobuf/queries/proto_get_block.hpp" + +namespace shared_model { + namespace proto { + + template + GetBlock::GetBlock(QueryType &&query) + : CopyableProto(std::forward(query)), + get_block_{proto_->payload().get_block()} {} + + template GetBlock::GetBlock(GetBlock::TransportType &); + template GetBlock::GetBlock(const GetBlock::TransportType &); + template GetBlock::GetBlock(GetBlock::TransportType &&); + + GetBlock::GetBlock(const GetBlock &o) : GetBlock(o.proto_) {} + + GetBlock::GetBlock(GetBlock &&o) noexcept : GetBlock(std::move(o.proto_)) {} + + interface::types::HeightType GetBlock::height() const { + return get_block_.height(); + } + + } // namespace proto +} // namespace shared_model diff --git a/shared_model/backend/protobuf/queries/impl/proto_query.cpp b/shared_model/backend/protobuf/queries/impl/proto_query.cpp index c9bd5f5e03..bf039ed0d1 100644 --- a/shared_model/backend/protobuf/queries/impl/proto_query.cpp +++ b/shared_model/backend/protobuf/queries/impl/proto_query.cpp @@ -12,6 +12,7 @@ #include "backend/protobuf/queries/proto_get_account_detail.hpp" #include "backend/protobuf/queries/proto_get_account_transactions.hpp" #include "backend/protobuf/queries/proto_get_asset_info.hpp" +#include "backend/protobuf/queries/proto_get_block.hpp" #include "backend/protobuf/queries/proto_get_pending_transactions.hpp" #include "backend/protobuf/queries/proto_get_role_permissions.hpp" #include "backend/protobuf/queries/proto_get_roles.hpp" @@ -33,7 +34,8 @@ namespace { shared_model::proto::GetRoles, shared_model::proto::GetRolePermissions, shared_model::proto::GetAssetInfo, - shared_model::proto::GetPendingTransactions>; + shared_model::proto::GetPendingTransactions, + shared_model::proto::GetBlock>; /// list of types in proto variant using ProtoQueryListType = ProtoQueryVariantType::types; diff --git a/shared_model/backend/protobuf/queries/proto_blocks_query.hpp b/shared_model/backend/protobuf/queries/proto_blocks_query.hpp index f4469ef493..b8ab93752d 100644 --- a/shared_model/backend/protobuf/queries/proto_blocks_query.hpp +++ b/shared_model/backend/protobuf/queries/proto_blocks_query.hpp @@ -7,9 +7,9 @@ #define IROHA_SHARED_MODEL_PROTO_BLOCKS_QUERY_HPP #include "backend/protobuf/common_objects/signature.hpp" +#include "backend/protobuf/util.hpp" #include "interfaces/queries/blocks_query.hpp" #include "queries.pb.h" -#include "backend/protobuf/util.hpp" namespace shared_model { namespace proto { diff --git a/shared_model/backend/protobuf/queries/proto_get_block.hpp b/shared_model/backend/protobuf/queries/proto_get_block.hpp new file mode 100644 index 0000000000..f5fba36176 --- /dev/null +++ b/shared_model/backend/protobuf/queries/proto_get_block.hpp @@ -0,0 +1,36 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_PROTO_GET_BLOCK_HPP +#define IROHA_PROTO_GET_BLOCK_HPP + +#include "backend/protobuf/common_objects/trivial_proto.hpp" +#include "interfaces/queries/get_block.hpp" +#include "queries.pb.h" + +namespace shared_model { + namespace proto { + class GetBlock final : public CopyableProto { + public: + template + explicit GetBlock(QueryType &&query); + + GetBlock(const GetBlock &o); + + GetBlock(GetBlock &&o) noexcept; + + interface::types::HeightType height() const override; + + private: + // ------------------------------| fields |------------------------------- + const iroha::protocol::GetBlock &get_block_; + }; + + } // namespace proto +} // namespace shared_model + +#endif // IROHA_PROTO_GET_BLOCK_HPP diff --git a/shared_model/backend/protobuf/query_responses/impl/proto_get_block_response.cpp b/shared_model/backend/protobuf/query_responses/impl/proto_get_block_response.cpp new file mode 100644 index 0000000000..1dcc1b8852 --- /dev/null +++ b/shared_model/backend/protobuf/query_responses/impl/proto_get_block_response.cpp @@ -0,0 +1,35 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#include "backend/protobuf/query_responses/proto_get_block_response.hpp" + +namespace shared_model { + namespace proto { + + template + GetBlockResponse::GetBlockResponse(QueryResponseType &&queryResponse) + : CopyableProto(std::forward(queryResponse)), + block_response_{proto_->block_response()}, + block_{block_response_.block().block_v1()} {} + + template GetBlockResponse::GetBlockResponse( + GetBlockResponse::TransportType &); + template GetBlockResponse::GetBlockResponse( + const GetBlockResponse::TransportType &); + template GetBlockResponse::GetBlockResponse( + GetBlockResponse::TransportType &&); + + GetBlockResponse::GetBlockResponse(const GetBlockResponse &o) + : GetBlockResponse(o.proto_) {} + + GetBlockResponse::GetBlockResponse(GetBlockResponse &&o) + : GetBlockResponse(std::move(o.proto_)) {} + + const interface::Block &GetBlockResponse::block() const { + return block_; + } + + } // namespace proto +} // namespace shared_model diff --git a/shared_model/backend/protobuf/query_responses/impl/proto_query_response.cpp b/shared_model/backend/protobuf/query_responses/impl/proto_query_response.cpp index 955c107b13..adb3142034 100644 --- a/shared_model/backend/protobuf/query_responses/impl/proto_query_response.cpp +++ b/shared_model/backend/protobuf/query_responses/impl/proto_query_response.cpp @@ -9,6 +9,7 @@ #include "backend/protobuf/query_responses/proto_account_detail_response.hpp" #include "backend/protobuf/query_responses/proto_account_response.hpp" #include "backend/protobuf/query_responses/proto_asset_response.hpp" +#include "backend/protobuf/query_responses/proto_get_block_response.hpp" #include "backend/protobuf/query_responses/proto_error_query_response.hpp" #include "backend/protobuf/query_responses/proto_role_permissions_response.hpp" #include "backend/protobuf/query_responses/proto_roles_response.hpp" @@ -30,7 +31,8 @@ namespace { shared_model::proto::AssetResponse, shared_model::proto::RolesResponse, shared_model::proto::RolePermissionsResponse, - shared_model::proto::TransactionsPageResponse>; + shared_model::proto::TransactionsPageResponse, + shared_model::proto::GetBlockResponse>; /// list of types in variant using ProtoQueryResponseListType = ProtoQueryResponseVariantType::types; diff --git a/shared_model/backend/protobuf/query_responses/proto_get_block_response.hpp 
b/shared_model/backend/protobuf/query_responses/proto_get_block_response.hpp new file mode 100644 index 0000000000..f61e398b7c --- /dev/null +++ b/shared_model/backend/protobuf/query_responses/proto_get_block_response.hpp @@ -0,0 +1,38 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_SHARED_MODEL_GET_BLOCK_RESPONSE_HPP +#define IROHA_SHARED_MODEL_GET_BLOCK_RESPONSE_HPP + +#include "backend/protobuf/block.hpp" +#include "backend/protobuf/common_objects/trivial_proto.hpp" +#include "interfaces/query_responses/block_response.hpp" +#include "qry_responses.pb.h" + +namespace shared_model { + namespace proto { + class GetBlockResponse final + : public CopyableProto { + public: + template + explicit GetBlockResponse(QueryResponseType &&queryResponse); + + GetBlockResponse(const GetBlockResponse &o); + + GetBlockResponse(GetBlockResponse &&o); + + const interface::Block &block() const override; + + private: + const iroha::protocol::BlockResponse &block_response_; + + const Block block_; + }; + } // namespace proto +} // namespace shared_model + +#endif // IROHA_SHARED_MODEL_GET_BLOCK_RESPONSE_HPP diff --git a/shared_model/builders/protobuf/builder_templates/query_template.hpp b/shared_model/builders/protobuf/builder_templates/query_template.hpp index f3d61fb72e..bf44f78231 100644 --- a/shared_model/builders/protobuf/builder_templates/query_template.hpp +++ b/shared_model/builders/protobuf/builder_templates/query_template.hpp @@ -185,6 +185,13 @@ namespace shared_model { }); } + auto getBlock(interface::types::HeightType height) const { + return queryField([&](auto proto_query) { + auto query = proto_query->mutable_get_block(); + query->set_height(height); + }); + } + auto getRoles() const { return queryField( [&](auto proto_query) { proto_query->mutable_get_roles(); }); diff --git a/shared_model/interfaces/CMakeLists.txt b/shared_model/interfaces/CMakeLists.txt index 8062b314bc..6cefc70c8d 100644 --- a/shared_model/interfaces/CMakeLists.txt +++ b/shared_model/interfaces/CMakeLists.txt @@ -30,6 +30,7 @@ add_library(shared_model_interfaces queries/impl/get_account_detail.cpp queries/impl/get_account_transactions.cpp queries/impl/get_asset_info.cpp + queries/impl/get_block.cpp queries/impl/get_role_permissions.cpp queries/impl/get_roles.cpp queries/impl/get_signatories.cpp diff --git a/shared_model/interfaces/iroha_internal/query_response_factory.hpp b/shared_model/interfaces/iroha_internal/query_response_factory.hpp index d2e278c217..97b3160c07 100644 --- a/shared_model/interfaces/iroha_internal/query_response_factory.hpp +++ b/shared_model/interfaces/iroha_internal/query_response_factory.hpp @@ -75,6 +75,16 @@ namespace shared_model { std::vector roles, const crypto::Hash &query_hash) const = 0; + /** + * Create response for get block query + * @param block to be inserted into the response + * @param query_hash - hash of the query, for which response is created + * @return block response + */ + virtual std::unique_ptr createBlockResponse( + std::unique_ptr block, + const crypto::Hash &query_hash) const = 0; + /** * Describes type of error to be placed inside the error query response */ diff --git a/shared_model/interfaces/queries/get_block.hpp b/shared_model/interfaces/queries/get_block.hpp new file mode 100644 index 0000000000..62aceea4c2 --- /dev/null +++ b/shared_model/interfaces/queries/get_block.hpp @@ -0,0 +1,28 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +#ifndef IROHA_SHARED_MODEL_GET_BLOCK_HPP +#define IROHA_SHARED_MODEL_GET_BLOCK_HPP + +#include "interfaces/base/model_primitive.hpp" +#include "interfaces/common_objects/types.hpp" + +namespace shared_model { + namespace interface { + class GetBlock : public ModelPrimitive { + public: + /** + * Get height of the block to be returned + * @return block's height + */ + virtual types::HeightType height() const = 0; + + std::string toString() const override; + + bool operator==(const ModelType &rhs) const override; + }; + } // namespace interface +} // namespace shared_model +#endif // IROHA_SHARED_MODEL_GET_BLOCK_HPP diff --git a/shared_model/interfaces/queries/impl/get_block.cpp b/shared_model/interfaces/queries/impl/get_block.cpp new file mode 100644 index 0000000000..40f6cbd581 --- /dev/null +++ b/shared_model/interfaces/queries/impl/get_block.cpp @@ -0,0 +1,23 @@ +/** + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +#include "interfaces/queries/get_block.hpp" + +namespace shared_model { + namespace interface { + + std::string GetBlock::toString() const { + return detail::PrettyStringBuilder() + .init("GetBlock") + .append("height", std::to_string(height())) + .finalize(); + } + + bool GetBlock::operator==(const ModelType &rhs) const { + return height() == rhs.height(); + } + + } // namespace interface +} // namespace shared_model diff --git a/shared_model/interfaces/queries/impl/query.cpp b/shared_model/interfaces/queries/impl/query.cpp index 3778754037..b4cea5f534 100644 --- a/shared_model/interfaces/queries/impl/query.cpp +++ b/shared_model/interfaces/queries/impl/query.cpp @@ -11,6 +11,7 @@ #include "interfaces/queries/get_account_detail.hpp" #include "interfaces/queries/get_account_transactions.hpp" #include "interfaces/queries/get_asset_info.hpp" +#include "interfaces/queries/get_block.hpp" #include "interfaces/queries/get_pending_transactions.hpp" #include "interfaces/queries/get_role_permissions.hpp" #include "interfaces/queries/get_roles.hpp" diff --git a/shared_model/interfaces/queries/query.hpp b/shared_model/interfaces/queries/query.hpp index da40747072..ce5f52da1c 100644 --- a/shared_model/interfaces/queries/query.hpp +++ b/shared_model/interfaces/queries/query.hpp @@ -14,6 +14,7 @@ namespace shared_model { namespace interface { class GetAccount; + class GetBlock; class GetSignatories; class GetAccountTransactions; class GetAccountAssetTransactions; @@ -48,7 +49,8 @@ namespace shared_model { GetRoles, GetRolePermissions, GetAssetInfo, - GetPendingTransactions>; + GetPendingTransactions, + GetBlock>; /** * @return reference to const variant with concrete command diff --git a/shared_model/interfaces/query_responses/impl/query_response.cpp b/shared_model/interfaces/query_responses/impl/query_response.cpp index e0b479183c..3070705926 100644 --- a/shared_model/interfaces/query_responses/impl/query_response.cpp +++ b/shared_model/interfaces/query_responses/impl/query_response.cpp @@ -9,6 +9,7 @@ #include "interfaces/query_responses/account_detail_response.hpp" #include "interfaces/query_responses/account_response.hpp" #include "interfaces/query_responses/asset_response.hpp" +#include "interfaces/query_responses/block_response.hpp" #include "interfaces/query_responses/error_query_response.hpp" #include "interfaces/query_responses/role_permissions.hpp" #include "interfaces/query_responses/roles_response.hpp" diff --git a/shared_model/interfaces/query_responses/query_response.hpp 
b/shared_model/interfaces/query_responses/query_response.hpp index 46f5880815..76db91f829 100644 --- a/shared_model/interfaces/query_responses/query_response.hpp +++ b/shared_model/interfaces/query_responses/query_response.hpp @@ -17,6 +17,7 @@ namespace shared_model { class AccountAssetResponse; class AccountDetailResponse; class AccountResponse; + class BlockResponse; class ErrorQueryResponse; class SignatoriesResponse; class TransactionsResponse; @@ -46,7 +47,8 @@ namespace shared_model { AssetResponse, RolesResponse, RolePermissionsResponse, - TransactionsPageResponse>; + TransactionsPageResponse, + BlockResponse>; /** * @return reference to const variant with concrete qr diff --git a/shared_model/schema/qry_responses.proto b/shared_model/schema/qry_responses.proto index 12890a05d6..6d6e93f155 100644 --- a/shared_model/schema/qry_responses.proto +++ b/shared_model/schema/qry_responses.proto @@ -105,6 +105,7 @@ message QueryResponse { RolesResponse roles_response = 8; RolePermissionsResponse role_permissions_response = 9; TransactionsPageResponse transactions_page_response = 11; + BlockResponse block_response = 12; } string query_hash = 10; } diff --git a/shared_model/schema/queries.proto b/shared_model/schema/queries.proto index cf96e54e7d..370343c0ec 100644 --- a/shared_model/schema/queries.proto +++ b/shared_model/schema/queries.proto @@ -19,6 +19,10 @@ message GetAccount { string account_id = 1; } +message GetBlock { + uint64 height = 1; +} + message GetSignatories { string account_id = 1; } @@ -93,6 +97,7 @@ message Query { GetRolePermissions get_role_permissions = 11; GetAssetInfo get_asset_info = 12; GetPendingTransactions get_pending_transactions = 13; + GetBlock get_block = 14; } } diff --git a/shared_model/validators/query_validator.hpp b/shared_model/validators/query_validator.hpp index 7657cd8305..2c1318f166 100644 --- a/shared_model/validators/query_validator.hpp +++ b/shared_model/validators/query_validator.hpp @@ -9,6 +9,7 @@ #include #include "backend/protobuf/queries/proto_get_account.hpp" +#include "backend/protobuf/queries/proto_get_block.hpp" #include "backend/protobuf/queries/proto_get_account_asset_transactions.hpp" #include "backend/protobuf/queries/proto_get_account_assets.hpp" #include "backend/protobuf/queries/proto_get_account_detail.hpp" @@ -47,6 +48,15 @@ namespace shared_model { return reason; } + ReasonsGroupType operator()(const interface::GetBlock &qry) const { + ReasonsGroupType reason; + reason.first = "GetBlock"; + + validator_.validateHeight(reason, qry.height()); + + return reason; + } + ReasonsGroupType operator()(const interface::GetSignatories &qry) const { ReasonsGroupType reason; reason.first = "GetSignatories"; diff --git a/test/module/irohad/ametsuchi/postgres_query_executor_test.cpp b/test/module/irohad/ametsuchi/postgres_query_executor_test.cpp index 35b362d13d..5696ad62ca 100644 --- a/test/module/irohad/ametsuchi/postgres_query_executor_test.cpp +++ b/test/module/irohad/ametsuchi/postgres_query_executor_test.cpp @@ -26,6 +26,7 @@ #include "interfaces/query_responses/account_detail_response.hpp" #include "interfaces/query_responses/account_response.hpp" #include "interfaces/query_responses/asset_response.hpp" +#include "interfaces/query_responses/block_response.hpp" #include "interfaces/query_responses/role_permissions.hpp" #include "interfaces/query_responses/roles_response.hpp" #include "interfaces/query_responses/signatories_response.hpp" @@ -218,6 +219,8 @@ namespace iroha { ErrorCodeType kInvalidAccountId = 5; static constexpr 
shared_model::interface::ErrorQueryResponse:: ErrorCodeType kInvalidAssetId = 6; + static constexpr shared_model::interface::ErrorQueryResponse:: + ErrorCodeType kInvalidHeight = 3; void createDefaultAccount() { execute(*mock_command_factory->constructCreateAccount( @@ -775,6 +778,106 @@ namespace iroha { }); } + class GetBlockExecutorTest : public QueryExecutorTest { + public: + // TODO [IR-257] Akvinikym 30.01.19: remove the method and use mocks + /** + * Commit some number of blocks to the storage + * @param blocks_amount - number of blocks to be committed + */ + void commitBlocks(shared_model::interface::types::HeightType + number_of_blocks = kLedgerHeight) { + std::unique_ptr ms; + auto storageResult = storage->createMutableStorage(); + storageResult.match( + [&ms](iroha::expected::Value> + &storage) { ms = std::move(storage.value); }, + [](iroha::expected::Error &error) { + FAIL() << "MutableStorage: " << error.error; + }); + + auto prev_hash = shared_model::crypto::Hash(zero_string); + for (decltype(number_of_blocks) i = 1; i < number_of_blocks; ++i) { + auto block = + TestBlockBuilder() + .transactions(std::vector{ + TestTransactionBuilder() + .creatorAccountId(account_id) + .createAsset(std::to_string(i), domain_id, 1) + .build()}) + .height(i) + .prevHash(prev_hash) + .build(); + prev_hash = block.hash(); + + if (not ms->apply(block)) { + FAIL() << "could not apply block to the storage"; + } + } + storage->commit(std::move(ms)); + } + + static constexpr shared_model::interface::types::HeightType + kLedgerHeight = 3; + }; + + /** + * @given initialized storage @and permission to get block + * @when get block of valid height + * @then return block + */ + TEST_F(GetBlockExecutorTest, Valid) { + const shared_model::interface::types::HeightType valid_height = 2; + + addPerms({shared_model::interface::permissions::Role::kGetBlocks}); + commitBlocks(); + auto query = TestQueryBuilder() + .creatorAccountId(account_id) + .getBlock(valid_height) + .build(); + auto result = executeQuery(query); + checkSuccessfulResult( + std::move(result), [valid_height](const auto &cast_resp) { + ASSERT_EQ(cast_resp.block().height(), valid_height); + }); + } + + /** + * @given initialized storage @and permission to get block + * @when get block of height, greater than supposed ledger's one + * @then return error + */ + TEST_F(GetBlockExecutorTest, InvalidHeight) { + const shared_model::interface::types::HeightType invalid_height = 123; + + commitBlocks(); + addPerms({shared_model::interface::permissions::Role::kGetBlocks}); + auto query = TestQueryBuilder() + .creatorAccountId(account_id) + .getBlock(invalid_height) + .build(); + auto result = executeQuery(query); + checkStatefulError( + std::move(result), kInvalidHeight); + } + + /** + * @given initialized storage @and no permission to get block + * @when get block + * @then return error + */ + TEST_F(GetBlockExecutorTest, NoPermission) { + const shared_model::interface::types::HeightType height = 123; + + auto query = TestQueryBuilder() + .creatorAccountId(account_id) + .getBlock(height) + .build(); + auto result = executeQuery(query); + checkStatefulError( + std::move(result), kNoPermissions); + } + class GetRolesExecutorTest : public QueryExecutorTest { public: void SetUp() override { diff --git a/test/module/irohad/torii/processor/query_processor_test.cpp b/test/module/irohad/torii/processor/query_processor_test.cpp index fdb1d87324..19e88d6a19 100644 --- a/test/module/irohad/torii/processor/query_processor_test.cpp +++ 
b/test/module/irohad/torii/processor/query_processor_test.cpp @@ -136,8 +136,8 @@ TEST_F(QueryProcessorTest, QueryProcessorWithWrongKey) { /** * @given account, ametsuchi queries - * @when valid block query is send - * @then Query Processor should start emitting BlockQueryRespones to the + * @when valid block query is sent + * @then Query Processor should start emitting BlockQueryResponses to the * observable */ TEST_F(QueryProcessorTest, GetBlocksQuery) { @@ -164,7 +164,7 @@ TEST_F(QueryProcessorTest, GetBlocksQuery) { /** * @given account, ametsuchi queries * @when valid block query is invalid (no can_get_blocks permission) - * @then Query Processor should return an observable with blockError + * @then Query Processor should return an observable with BlockError */ TEST_F(QueryProcessorTest, GetBlocksQueryNoPerms) { auto block_number = 5; diff --git a/test/module/shared_model/validators/field_validator_test.cpp b/test/module/shared_model/validators/field_validator_test.cpp index ad36393fa6..ef2f178960 100644 --- a/test/module/shared_model/validators/field_validator_test.cpp +++ b/test/module/shared_model/validators/field_validator_test.cpp @@ -659,6 +659,10 @@ class FieldValidatorTest : public ValidatorsTest { &FieldValidator::validateAssetName, &FieldValidatorTest::asset_name, asset_name_test_cases), + makeValidator("height", + &FieldValidator::validateHeight, + &FieldValidatorTest::counter, + counter_test_cases), makeValidator( "created_time", static_castMutableMessage(msg, field)->CopyFrom(tx_pagination_meta); }; + field_setters["height"] = setUInt64(height); } /** @@ -193,6 +195,7 @@ class ValidatorsTest : public ::testing::Test { size_t public_key_size{0}; size_t hash_size{0}; uint64_t counter{0}; + uint64_t height{42}; std::string account_id; std::string dest_id; std::string asset_name; From 15bcd8cba2dc78ae8164674015fec07ae008965b Mon Sep 17 00:00:00 2001 From: Sara Date: Wed, 6 Feb 2019 20:56:13 +0300 Subject: [PATCH 33/41] Fix. Contributing instructions (#2071) * Update contributing guidelines Signed-off-by: Nikolay Yushkevich * Update links Signed-off-by: neewy * Update links Signed-off-by: neewy * Update CONTRIBUTING.md Signed-off-by: Sara Fixed the table about bugs * Update CONTRIBUTING.md Signed-off-by: Sara fixed c++ links --- CONTRIBUTING.md | 181 ++++++++++++++---------------------------------- 1 file changed, 51 insertions(+), 130 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index dbe3b132c8..a72ec81d64 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,8 +2,7 @@ :star::tada: First off, thanks for taking the time to contribute! :tada::star: -The following is a short set of guidelines for contributing to Iroha. - +The following is a short set of guidelines for contributing to Iroha. #### Table Of Contents @@ -11,136 +10,100 @@ The following is a short set of guidelines for contributing to Iroha. 
##### [How Can I Contribute?](#how-can-i-contribute-1) - [Reporting bugs](#reporting-bugs) -- [Suggesting Enhancements](#suggesting-enhancements) +- [Suggesting Improvements](#suggesting-improvements) - [Asking Questions](#asking-questions) - [Your First Code Contribution](#your-first-code-contribution) - [Pull Requests](#pull-requests) ##### [Styleguides](#styleguides-1) -- [Git Commit Messages](#git-commit-messages) -- [C++ StyleGuide](#C++-styleguide) +- [Git Style Guide](#git-style-guide) +- [C++ Style Guide](#c-style-guide) - [Documentation Styleguide](#documentation-styleguide) -##### [Additional Notes](#additional-notes) - -- [Informational Labels](#informational-labels) -- [Pull Request and Issue Labels](#pull-request-and-issue-labels) -- [Issue Labels](#issue-labels) -- [Pull Request Labels](#pull-request-labels) -- [Contact Developers](#contact-developers) - +### [Contact Developers](#places-where-community-is-active) ## How Can I Contribute? ### Reporting bugs -*Bug* is an error, design flaw, failure or fault in Iroha that causes it to produce an incorrect or unexpected result, or to behave in unintended ways. +*Bug* is an error, design flaw, failure or fault in Iroha that causes it to produce an incorrect or unexpected result, or to behave in unintended ways. -Bugs are tracked as [GitHub Issues](https://guides.github.com/features/issues/). To submit a bug, create new Issue and include these details: -- **Title** - - Write prefix `[Bug]` for the title - - Use a clear and descriptive title -- **Body** - include the following sections: - - System environment (OS, iroha version) - - Steps to reproduce - - Expected behavior - - Actual behavior +Bugs are tracked as [JIRA issues](https://jira.hyperledger.org/projects/IR/issues/IR-275?filter=allopenissues&orderby=issuetype+ASC%2C+priority+DESC%2C+updated+DESC) in Hyperledger Jira. +To submit a bug, [create new issue](https://jira.hyperledger.org/secure/CreateIssue.jspa) and include these details: +| Field | What to enter | +| :---------------------- | ---------------------------------------------------------------- | +| Project | Iroha (IR) | +| Issue Type | Bug | +| Summary | Essence of the problem | +| Description | What the issue is about; if you have any logs, please provide them| +| Priority | You can use Medium though if you see the issue as a high priority, please choose that| +| Environment | Your OS, device's specs, Virtual Environment if you use one, version of Iroha etc. | -### Suggesting Enhancements -An *enhancement* is a code or idea, which makes **existing** code or design faster, more stable, portable, secure or better in any other way. -Enhancements are tracked as [GitHub Issues](https://guides.github.com/features/issues/). To submit new enhancement, create new Issue and incllude these details: +### Suggesting Improvements -- **Title** - - Write prefix `[Enhancement]` - - Use a clear and descriptive title -- **Body** - include the following sections: - - *Target* - what is going to be improved? - - *Motivation* - why do we need it? - - *Description* - how to implement it? +An *improvement* is a code or idea, which makes **existing** code or design faster, more stable, portable, secure or better in any other way. +Improvements are tracked as [JIRA improvements](https://jira.hyperledger.org/browse/IR-184?jql=project%20%3D%20IR%20and%20issuetype%20%3D%20Improvement%20ORDER%20BY%20updated%20DESC). 
To submit new improvement, [create new issue](https://jira.hyperledger.org/secure/CreateIssue.jspa) and include these details: +| Field | What to enter | +| :---------------------- | ---------------------------------------------------------------- | +| Project | Iroha (IR) | +| Issue Type | Improvement | +| Summary | Essence of the idea | +| Description | What the idea is about; if you have any code suggestions, you are welcome to add them here | +| Priority | You can use Medium | +| Assign | You can assign the task to yourself if you are planning on working on it| ### Asking Questions -A *question* is any discussion that is typically neigher a bug, nor feature request, nor improvement - "How do I do X?". - -Questions are tracked as [Github Issues](https://guides.github.com/features/issues/) or via private messages in [your favourite messenger](#contact-developers). - -To submit new question in GitHub Issues, it must include these details: - -- **Title** - - Write prefix `[Question]` - - Use a clear and descriptive title -- **Body** - describe your question with as many details as possible. - +A *question* is any discussion that is typically neigher a bug, nor feature request or improvement. If you have a question like "How do I do X?" - this paragraph is for you. +Please post your question in [your favourite messenger](#places-where-community-is-active) so members of the community could help you. You can also help others! ### Your First Code Contribution -Read our [C++ Style Guide](#c++-style-guide) and start with beginner-friendly issues with label [`[good-first-issue]`](https://github.com/hyperledger/iroha/issues?q=is:open+is:issue+label:good-first-issue ). Indicate somehow that you are working on this task. - - +Read our [C++ Style Guide](#c-style-guide) and start with beginner-friendly issues with JIRA label [`[good-first-issue]`](https://jira.hyperledger.org/issues/?jql=project%20%3D%20IR%20and%20labels%20%3D%20good-first-issue%20ORDER%20BY%20updated%20DESC) <- click here. Indicate somehow that you are working on this task: get in touch with maintainers team, community or simply assign this issue to yourself. ### Pull Requests - Fill in [the required template](.github/PULL_REQUEST_TEMPLATE.md) -- **Write tests** for new code. Test coverage for new code must be at least 70%. +- End all files with a newline -- Every pull request should be reviewed and **get at least two approvals**. +- **Write tests** for new code. Test coverage for new code must be at least 70% -- Do not include issue numbers in the PR title or commit messages. +- Every pull request should be reviewed and **get at least two approvals from maintainers team**. Check who is a current maintainer in [MAINTAINERS.md](https://github.com/hyperledger/iroha/blob/master/MAINTAINERS.md) file -- Use [keywords for closing issues](https://help.github.com/articles/closing-issues-using-keywords/). +- When you've finished work make sure that you've got all passing CI checks — after that **squash and merge** your pull request -- Include issue numbers in Pull Request body only. +- Follow the [C++ Style Guide](#c-style-guide) -- When finished work, **rebase onto base branch** with - ```bash - $ git fetch - $ git rebase -i - ``` - - [Step-by-step guide](https://soramitsu.atlassian.net/wiki/spaces/IS/pages/11173889/Rebase+and+merge+guide). - -- Follow the [C++ Style Guide](#C++-style-guide). - -- Follow the [Git Style Guide](#git-commit-messages) . 
+- Follow the [Git Style Guide](#git-style-guide) - **Document new code** based on the [Documentation Styleguide](#documentation-styleguide) -- End all files with a newline. - - - ## Styleguides ### Git Style Guide +- **Sign-off every commit** with [DCO](https://github.com/apps/dco): `Signed-off-by: $NAME <$EMAIL>`. You can do it automatically using `git commit -s` - **Use present tense** ("Add feature", not "Added feature"). - **Use imperative mood** ("Deploy docker to..." not "Deploys docker to..."). - Write meaningful commit message. -- **Signed-off every commit** with [DCO](https://github.com/apps/dco): `Signed-off-by: $NAME <$EMAIL>`. - You can do it automatically using `git commit -s`. -- Do not include PR or Issue number in commit message. -- Limit the first line of commit message to 50 characters or less. -- First line of commit message must contain summary of work done, second line must contain empty line, third and other lines can contain list of commit changes. -- When only changing documentation, include `[ci skip]` in the commit description. -- We use mixed approach of [Github Flow](https://guides.github.com/introduction/flow/) and [Git Flow](http://nvie.com/posts/a-successful-git-branching-model/). More at [Iroha Working Agreement](https://github.com/hyperledger/iroha/wiki/Iroha-working-agreement#2-version-control-system). - - +- Limit the first line of commit message to 50 characters or less +- First line of commit message must contain summary of work done, second line must contain empty line, third and other lines can contain list of commit changes ### C++ Style Guide -- Use [clang-format](http://clang.llvm.org/docs/ClangFormat.html) for code formatting (we use google code style). +- Use clang-format [settings](https://github.com/hyperledger/iroha/blob/master/.clang-format) file. There are guides available on the internet (e.g. [Kratos wiki](https://github.com/KratosMultiphysics/Kratos/wiki/How-to-configure-clang%E2%80%90format)) - Follow [CppCoreGuidelines](http://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines) and [Cpp Best Practices](https://lefticus.gitbooks.io/cpp-best-practices). - Avoid [platform-dependent](https://stackoverflow.com/questions/1558194/learning-and-cross-platform-development-c) code. - Use [C++14](https://en.wikipedia.org/wiki/C%2B%2B14). @@ -154,60 +117,18 @@ Read our [C++ Style Guide](#c++-style-guide) and start with beginner-friendly is - Document all public API: methods, functions, members, templates, classes... -## Additional Notes +### Places where community is active -### Informational Labels -| Label Name | Description | -| :---------------------- | ---------------------------------------------------------------- | -| `pri:low` | Low priority. | -| `pri:normal` | Normal priority. | -| `pri:important` | Important issue. | -| `pri:critical` | Critical issue. Must be fixed immediately. | -| `pri:blocker` | Issue blocked by other issues. | -| `status:in-progress` | Work in progress. | -| `status:inactive` | Inactive PR or Issue. Likely to become a `candidate-for-closing` | -| `status:wontfix` | Core team has decided not to fix these issue for now. | - -### Issue and Pull Request labels - -| Label Name | Description | -| :---------------------- | ------------------------------------------------------------ | -| `enhancement:code` | Any improvements in **existing** code. | -| `enhancement:idea` | Fresh ideas to enhance existing architecture, design. | -| `bug:needs-reproduction`| Bugs or reports that are very likely to be bugs. 
| -| `bug:confirmed` | Confirmed bug by maintainers. | -| `feature` | Feature requests -- completely new functionality. | -| `accepted` | Pull request is accepted and can be merged. | -| `candidate-for-closing` | Outdated Pull Request / Issue. Lasts for more than 14 days. | -| `needs-correction` | Pull Request / Issue that should be corrected by author. | -| `needs-review` | Pull Request / Issue that should be reviewed by maintainer. | - - -### Issue Labels - -| Label Name | Description | -| :---------------------- | --------------------------------------------------------------------- | -| `question` | Questions more than bug reports or feature requests - "How do I do X" | -| `good-first-issue` | Good starting point to begin contributing. | -| `help-wanted` | Maintainers ask for help to work on this issue. | - -### Pull Request Labels - -| Label Name | Description | -| :---------------------- | ------------------------------------------- | -| `accepted` | Pull request is accepted and can be merged. | - - -### Contact Developers - -Developers are available at: - -| Service | Link | -| ------------ | ---------------------------------------- | -| RocketChat | https://chat.hyperledger.org/channel/iroha | -| Mailing List | [hyperledger-iroha@lists.hyperledger.org](mailto:hyperledger-iroha@lists.hyperledger.org) | -| Gitter | https://gitter.im/hyperledger-iroha/Lobby | -| Telegram | https://t.me/joinchat/Al-9jkCZ6eePL9JMQtoOJw | +Our community members are active at: + +| Service | Link | +| ------------ | ------------------------------------------------------------ | +| RocketChat | https://chat.hyperledger.org/channel/iroha | +| StackOverflow| https://stackoverflow.com/questions/tagged/hyperledger-iroha | +| Mailing List | [hyperledger-iroha@lists.hyperledger.org](mailto:hyperledger-iroha@lists.hyperledger.org) | +| Gitter | https://gitter.im/hyperledger-iroha/Lobby | +| Telegram | https://t.me/hl_iroha | +| YouTube | https://www.youtube.com/channel/UCYlK9OrZo9hvNYFuf0vrwww | From e2a82db798c890e5ef071e36f4118c41dcf99898 Mon Sep 17 00:00:00 2001 From: Mikhail Boldyrev Date: Thu, 7 Feb 2019 18:11:26 +0300 Subject: [PATCH 34/41] Storage: fixed drop deadlock (#2076) * fixed drop deadlock * reuse sessions in StorageImpl * simplified PeerQueryWsv::getLedgerPeers() Signed-off-by: Mikhail Boldyrev --- irohad/ametsuchi/impl/peer_query_wsv.cpp | 7 +---- irohad/ametsuchi/impl/storage_impl.cpp | 36 +++++++++++------------- 2 files changed, 17 insertions(+), 26 deletions(-) diff --git a/irohad/ametsuchi/impl/peer_query_wsv.cpp b/irohad/ametsuchi/impl/peer_query_wsv.cpp index a10832edcd..b73089c79d 100644 --- a/irohad/ametsuchi/impl/peer_query_wsv.cpp +++ b/irohad/ametsuchi/impl/peer_query_wsv.cpp @@ -17,12 +17,7 @@ namespace iroha { boost::optional> PeerQueryWsv::getLedgerPeers() { - auto peers = wsv_->getPeers(); - if (peers) { - return boost::make_optional(peers.value()); - } else { - return boost::none; - } + return wsv_->getPeers(); } } // namespace ametsuchi diff --git a/irohad/ametsuchi/impl/storage_impl.cpp b/irohad/ametsuchi/impl/storage_impl.cpp index 1460cddb91..3594f02e4b 100644 --- a/irohad/ametsuchi/impl/storage_impl.cpp +++ b/irohad/ametsuchi/impl/storage_impl.cpp @@ -406,13 +406,12 @@ namespace iroha { try { *(storage->sql_) << "COMMIT"; storage->committed = true; - return createPeerQuery() | - [](const auto &peer_query) { return peer_query->getLedgerPeers(); } - | [](auto &&peers) { - return boost::optional>( - std::make_unique( - std::make_shared(std::move(peers)))); - }; + return 
PostgresWsvQuery(*(storage->sql_), factory_).getPeers() | + [](auto &&peers) { + return boost::optional>( + std::make_unique( + std::make_shared(std::move(peers)))); + }; } catch (std::exception &e) { storage->committed = false; log_->warn("Mutable storage is not committed. Reason: {}", e.what()); @@ -444,25 +443,22 @@ namespace iroha { PostgresBlockIndex block_index(sql); block_index.index(block); block_is_prepared = false; + return PostgresWsvQuery(sql, factory_).getPeers() | + [this, &block](auto &&peers) + -> boost::optional> { + if (this->storeBlock(block)) { + return boost::optional>( + std::make_unique( + std::make_shared(std::move(peers)))); + } + return boost::none; + }; } catch (const std::exception &e) { log_->warn("failed to apply prepared block {}: {}", block.hash().hex(), e.what()); return boost::none; } - return createPeerQuery() | - [](const auto &peer_query) { - return peer_query->getLedgerPeers(); - } - | [this, &block](auto &&peers) - -> boost::optional> { - if (this->storeBlock(block)) { - return boost::optional>( - std::make_unique( - std::make_shared(std::move(peers)))); - } - return boost::none; - }; } std::shared_ptr StorageImpl::getWsvQuery() const { From 0d7a6ea4cac2ca4f49911fd15b247c4d48d145cc Mon Sep 17 00:00:00 2001 From: Igor Egorov Date: Fri, 8 Feb 2019 14:06:52 +0300 Subject: [PATCH 35/41] Fix Memory Leak in irohad (#2082) Fix a mem leak, that could not lead to a linear increase of memory consumption. Usage Examples or Tests valgrind irohad you have to use gcc + valgrind on Ubuntu-like linux (not bsd), other configurations are poorly supported by valgrind Signed-off-by: Igor Egorov --- irohad/main/application.cpp | 5 ++++- irohad/main/application.hpp | 1 + irohad/main/impl/consensus_init.cpp | 33 ++++------------------------- irohad/main/impl/consensus_init.hpp | 26 +++++++++++------------ 4 files changed, 21 insertions(+), 44 deletions(-) diff --git a/irohad/main/application.cpp b/irohad/main/application.cpp index e2ccf34b55..05c9bde6ef 100644 --- a/irohad/main/application.cpp +++ b/irohad/main/application.cpp @@ -95,7 +95,9 @@ Irohad::Irohad(const std::string &block_store_dir, initStorage(); } -Irohad::~Irohad() = default; +Irohad::~Irohad() { + consensus_gate_events_subscription.unsubscribe(); +} /** * Initializing iroha daemon @@ -400,6 +402,7 @@ void Irohad::initConsensusGate() { async_call_, common_objects_factory_); consensus_gate->onOutcome().subscribe( + consensus_gate_events_subscription, consensus_gate_objects.get_subscriber()); log_->info("[Init] => consensus gate"); } diff --git a/irohad/main/application.hpp b/irohad/main/application.hpp index 0f5813afcc..6bb60fb576 100644 --- a/irohad/main/application.hpp +++ b/irohad/main/application.hpp @@ -263,6 +263,7 @@ class Irohad { // consensus gate std::shared_ptr consensus_gate; rxcpp::subjects::subject consensus_gate_objects; + rxcpp::composite_subscription consensus_gate_events_subscription; // synchronizer std::shared_ptr synchronizer; diff --git a/irohad/main/impl/consensus_init.cpp b/irohad/main/impl/consensus_init.cpp index e2ec97896d..6ab9fae4cd 100644 --- a/irohad/main/impl/consensus_init.cpp +++ b/irohad/main/impl/consensus_init.cpp @@ -1,18 +1,6 @@ /** - * Copyright Soramitsu Co., Ltd. 2018 All Rights Reserved. - * http://soramitsu.co.jp - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 */ #include "main/impl/consensus_init.hpp" @@ -52,7 +40,7 @@ namespace iroha { } auto YacInit::createTimer(std::chrono::milliseconds delay_milliseconds) { - return std::make_shared([delay_milliseconds] { + return std::make_shared([delay_milliseconds, this] { // static factory with a single thread // // observe_on_new_thread -- coordination which creates new thread with @@ -63,21 +51,8 @@ namespace iroha { // scheduler is also a factory for workers in that timeline. // // coordination is a factory for coordinators and has a scheduler. - // - // coordinator has a worker, and is a factory for coordinated - // observables, subscribers and schedulable functions. - // - // A new thread scheduler is created - // by calling .create_coordinator().get_scheduler() - // - // static allows to reuse the same thread in subsequent calls to this - // lambda - static rxcpp::observe_on_one_worker coordination( - rxcpp::observe_on_new_thread() - .create_coordinator() - .get_scheduler()); return rxcpp::observable<>::timer( - std::chrono::milliseconds(delay_milliseconds), coordination); + std::chrono::milliseconds(delay_milliseconds), coordination_); }); } diff --git a/irohad/main/impl/consensus_init.hpp b/irohad/main/impl/consensus_init.hpp index 8648f8b3af..c3408e9595 100644 --- a/irohad/main/impl/consensus_init.hpp +++ b/irohad/main/impl/consensus_init.hpp @@ -1,18 +1,6 @@ /** - * Copyright Soramitsu Co., Ltd. 2018 All Rights Reserved. - * http://soramitsu.co.jp - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Copyright Soramitsu Co., Ltd. All Rights Reserved. + * SPDX-License-Identifier: Apache-2.0 */ #ifndef IROHA_CONSENSUS_INIT_HPP @@ -69,6 +57,16 @@ namespace iroha { std::shared_ptr common_objects_factory); + // coordinator has a worker, and a factory for coordinated + // observables, subscribers and schedulable functions. 
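  // Keeping this coordination as a class member (rather than the
  // function-local static previously created inside createTimer) lets the
  // worker be released together with YacInit instead of persisting for the
  // whole process lifetime, which is what this memory-leak fix relies on.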
+ // + // A new thread scheduler is created + // by calling .create_coordinator().get_scheduler() + rxcpp::observe_on_one_worker coordination_{ + rxcpp::observe_on_new_thread() + .create_coordinator() + .get_scheduler()}; + public: std::shared_ptr initConsensusGate( std::shared_ptr peer_query_factory, From ee03b2c5369f11414542ef81587fe4539b89d6f1 Mon Sep 17 00:00:00 2001 From: Mikhail Boldyrev Date: Fri, 8 Feb 2019 16:56:13 +0300 Subject: [PATCH 36/41] micro fix: move transactions (#2079) Signed-off-by: Mikhail Boldyrev --- irohad/ordering/impl/on_demand_ordering_gate.cpp | 2 +- irohad/ordering/impl/on_demand_os_server_grpc.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/irohad/ordering/impl/on_demand_ordering_gate.cpp b/irohad/ordering/impl/on_demand_ordering_gate.cpp index efaf882aef..5f0e2c7801 100644 --- a/irohad/ordering/impl/on_demand_ordering_gate.cpp +++ b/irohad/ordering/impl/on_demand_ordering_gate.cpp @@ -65,7 +65,7 @@ OnDemandOrderingGate::OnDemandOrderingGate( network_client_->onRequestProposal(current_round_)); // vote for the object received from the network proposal_notifier_.get_subscriber().on_next( - network::OrderingEvent{proposal, current_round_}); + network::OrderingEvent{std::move(proposal), current_round_}); })), cache_(std::move(cache)), proposal_factory_(std::move(factory)), diff --git a/irohad/ordering/impl/on_demand_os_server_grpc.cpp b/irohad/ordering/impl/on_demand_os_server_grpc.cpp index f059d090c5..cd9f8b1bc1 100644 --- a/irohad/ordering/impl/on_demand_os_server_grpc.cpp +++ b/irohad/ordering/impl/on_demand_os_server_grpc.cpp @@ -68,7 +68,7 @@ grpc::Status OnDemandOsServerGrpc::SendBatches( request->round().reject_round()}; auto transactions = deserializeTransactions(request); - auto batch_candidates = batch_parser_->parseBatches(transactions); + auto batch_candidates = batch_parser_->parseBatches(std::move(transactions)); auto batches = std::accumulate( std::begin(batch_candidates), From e7e42e09eb8c65e5a7e758ee8294655af06beaac Mon Sep 17 00:00:00 2001 From: Mikhail Boldyrev Date: Fri, 8 Feb 2019 16:56:42 +0300 Subject: [PATCH 37/41] OrderingService: fixed old proposals wipe (#2084) Signed-off-by: Mikhail Boldyrev --- irohad/ordering/impl/on_demand_ordering_service_impl.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/irohad/ordering/impl/on_demand_ordering_service_impl.cpp b/irohad/ordering/impl/on_demand_ordering_service_impl.cpp index fede5789a1..109a6e1a8b 100644 --- a/irohad/ordering/impl/on_demand_ordering_service_impl.cpp +++ b/irohad/ordering/impl/on_demand_ordering_service_impl.cpp @@ -208,7 +208,7 @@ OnDemandOrderingServiceImpl::emitProposal(const consensus::Round &round) { } void OnDemandOrderingServiceImpl::tryErase() { - if (round_queue_.size() >= number_of_proposals_) { + while (round_queue_.size() > number_of_proposals_) { auto &round = round_queue_.front(); proposal_map_.erase(round); log_->info("tryErase: erased {}", round); From 4b23f34fbe2728f8d577710ae547d8f5e1142a8e Mon Sep 17 00:00:00 2001 From: Igor Egorov Date: Sat, 9 Feb 2019 14:30:38 +0300 Subject: [PATCH 38/41] Fix the rest of memory leaks (#2086) Fix the rest of memory leaks in irohad. 
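A note for readers following these leak fixes: the recurring pattern in this patch series is to hold an rxcpp::composite_subscription in the owning object and unsubscribe it in the destructor, so no subscriber state outlives its owner. The sketch below only illustrates that pattern with made-up names; it is not Iroha's actual code:

    #include <rxcpp/rx.hpp>

    class StatusEmitter {
     public:
      StatusEmitter() {
        // Register the handler against a subscription owned by this object.
        subject_.get_observable().subscribe(
            subscription_, [](int status) {
              (void)status;  // handle the emitted status here
            });
      }
      ~StatusEmitter() {
        // Tear the subscription down together with the object; otherwise the
        // subscriber state can outlive the emitter and is reported as a leak.
        subscription_.unsubscribe();
      }
      void publish(int status) { subject_.get_subscriber().on_next(status); }

     private:
      rxcpp::subjects::subject<int> subject_;
      rxcpp::composite_subscription subscription_;
    };

Leaks of this kind are the ones valgrind --leak-check=full reports on a GCC build, as noted below.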
valgrind irohad (under Ubuntu-like environment with GNU compiler) Signed-off-by: Igor Egorov --- irohad/main/impl/consensus_init.hpp | 4 +--- irohad/torii/impl/status_bus_impl.cpp | 6 +++++- irohad/torii/impl/status_bus_impl.hpp | 3 +++ 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/irohad/main/impl/consensus_init.hpp b/irohad/main/impl/consensus_init.hpp index c3408e9595..fdd4201f22 100644 --- a/irohad/main/impl/consensus_init.hpp +++ b/irohad/main/impl/consensus_init.hpp @@ -63,9 +63,7 @@ namespace iroha { // A new thread scheduler is created // by calling .create_coordinator().get_scheduler() rxcpp::observe_on_one_worker coordination_{ - rxcpp::observe_on_new_thread() - .create_coordinator() - .get_scheduler()}; + rxcpp::observe_on_new_thread()}; public: std::shared_ptr initConsensusGate( diff --git a/irohad/torii/impl/status_bus_impl.cpp b/irohad/torii/impl/status_bus_impl.cpp index e24a2edf8e..ffcde15f31 100644 --- a/irohad/torii/impl/status_bus_impl.cpp +++ b/irohad/torii/impl/status_bus_impl.cpp @@ -8,7 +8,11 @@ namespace iroha { namespace torii { StatusBusImpl::StatusBusImpl(rxcpp::observe_on_one_worker worker) - : worker_(worker), subject_(worker_) {} + : worker_(worker), subject_(worker_, cs_) {} + + StatusBusImpl::~StatusBusImpl() { + cs_.unsubscribe(); + } void StatusBusImpl::publish(StatusBus::Objects resp) { subject_.get_subscriber().on_next(resp); diff --git a/irohad/torii/impl/status_bus_impl.hpp b/irohad/torii/impl/status_bus_impl.hpp index f56e99405a..be7e047f39 100644 --- a/irohad/torii/impl/status_bus_impl.hpp +++ b/irohad/torii/impl/status_bus_impl.hpp @@ -18,12 +18,15 @@ namespace iroha { StatusBusImpl( rxcpp::observe_on_one_worker worker = rxcpp::observe_on_new_thread()); + ~StatusBusImpl() override; + void publish(StatusBus::Objects) override; /// Subscribers will be invoked in separate thread rxcpp::observable statuses() override; // Need to create once, otherwise will create thread for each subscriber rxcpp::observe_on_one_worker worker_; + rxcpp::composite_subscription cs_; rxcpp::subjects::synchronize subject_; }; From 69e4a323114e50c581060be694d3083fc03bb612 Mon Sep 17 00:00:00 2001 From: Sara Date: Mon, 11 Feb 2019 15:21:08 +0300 Subject: [PATCH 39/41] fixed broken links (#2078) Signed-off-by: Sara --- docs/source/guides/deployment.rst | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/docs/source/guides/deployment.rst b/docs/source/guides/deployment.rst index be568ee434..1354767847 100644 --- a/docs/source/guides/deployment.rst +++ b/docs/source/guides/deployment.rst @@ -221,24 +221,24 @@ Checking Iroha peer status Dealing with troubles ^^^^^^^^^^^^^^^^^^^^^ -—"Please, help me, because I am…" +—"Please, help me, because I…" -Not having Iroha daemon binary ------------------------------- +Do not have Iroha daemon binary +------------------------------- -You can build Iroha daemon binary from sources, following API section from the `website `__ +You can build Iroha daemon binary from sources. 
You can get binaries `here `__ -Not having config file ----------------------- +Do not have a config file +------------------------- -Check how to create a configuration file by following this `link `__ +Check how to create a configuration file by following this `link <./configuration.html>`__ -Not having genesis block ------------------------- +Do not have a genesis block +--------------------------- -Create genesis block by generating it via `iroha-cli` or manually, as it is described `here `__ +Create genesis block by generating it via `iroha-cli` or manually, using the `example `__ and checking out `permissions `__ -Not having a keypair for a peer -------------------------------- +Do not have a keypair for a peer +-------------------------------- In order to create a keypair for an account or a peer, use iroha-cli binary by passing the name of the peer with `--new_account` option. From f30497f4fc1de1d5dd1aaeaa20aaff3bfc06b39b Mon Sep 17 00:00:00 2001 From: Andrei Lebedev Date: Mon, 11 Feb 2019 15:45:16 +0300 Subject: [PATCH 40/41] Fix locking in OnDemandOrderingGate (#2088) Signed-off-by: Andrei Lebedev --- .../ordering/impl/on_demand_ordering_gate.cpp | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/irohad/ordering/impl/on_demand_ordering_gate.cpp b/irohad/ordering/impl/on_demand_ordering_gate.cpp index 5f0e2c7801..01eabc426e 100644 --- a/irohad/ordering/impl/on_demand_ordering_gate.cpp +++ b/irohad/ordering/impl/on_demand_ordering_gate.cpp @@ -31,14 +31,12 @@ OnDemandOrderingGate::OnDemandOrderingGate( network_client_(std::move(network_client)), events_subscription_(events.subscribe([this](auto event) { // exclusive lock - std::lock_guard lock(mutex_); - + std::unique_lock lock(mutex_); visit_in_place(event, [this](const BlockEvent &block_event) { // block committed, increment block round log_->debug("BlockEvent. 
{}", block_event.round); current_round_ = block_event.round; - cache_->remove(block_event.hashes); }, [this](const EmptyEvent &empty_event) { // no blocks committed, increment reject round @@ -46,6 +44,16 @@ OnDemandOrderingGate::OnDemandOrderingGate( current_round_ = empty_event.round; }); log_->debug("Current: {}", current_round_); + lock.unlock(); + + visit_in_place(event, + [this](const BlockEvent &block_event) { + // block committed, remove transactions from cache + cache_->remove(block_event.hashes); + }, + [this](const EmptyEvent &) { + // no blocks committed, no transactions to remove + }); auto batches = cache_->pop(); @@ -78,9 +86,9 @@ OnDemandOrderingGate::~OnDemandOrderingGate() { void OnDemandOrderingGate::propagateBatch( std::shared_ptr batch) { - std::shared_lock lock(mutex_); - cache_->addToBack({batch}); + + std::shared_lock lock(mutex_); network_client_->onBatches( current_round_, transport::OdOsNotification::CollectionType{batch}); } From 228c49c32c4336a5cb6d6a438929fdb8cf87b605 Mon Sep 17 00:00:00 2001 From: Andrei Lebedev Date: Mon, 11 Feb 2019 16:03:20 +0300 Subject: [PATCH 41/41] Fix rejected status emitting (#2083) Signed-off-by: Andrei Lebedev --- irohad/torii/impl/command_service_impl.cpp | 2 +- .../impl/transaction_processor_impl.cpp | 41 +++++++------- .../processor/transaction_processor_impl.hpp | 5 +- .../proto_concrete_tx_response.hpp | 2 +- .../rejected_tx_response.hpp | 2 +- .../transaction_responses/tx_response.hpp | 4 +- .../tx_response_variant.hpp | 28 +++++----- .../acceptance/revoke_permission_test.cpp | 15 ++---- .../acceptance/set_account_quorum_test.cpp | 3 +- .../processor/transaction_processor_test.cpp | 53 +++++++++---------- 10 files changed, 73 insertions(+), 82 deletions(-) diff --git a/irohad/torii/impl/command_service_impl.cpp b/irohad/torii/impl/command_service_impl.cpp index 790532e08c..b768b1b06f 100644 --- a/irohad/torii/impl/command_service_impl.cpp +++ b/irohad/torii/impl/command_service_impl.cpp @@ -99,7 +99,7 @@ namespace iroha { shared_model::interface::StatefulFailedTxResponse, shared_model::interface::CommittedTxResponse, shared_model::interface::MstExpiredResponse, - shared_model::interface::RejectTxResponse>::value; + shared_model::interface::RejectedTxResponse>::value; rxcpp::observable< std::shared_ptr> diff --git a/irohad/torii/processor/impl/transaction_processor_impl.cpp b/irohad/torii/processor/impl/transaction_processor_impl.cpp index 0786328638..cf009a0e86 100644 --- a/irohad/torii/processor/impl/transaction_processor_impl.cpp +++ b/irohad/torii/processor/impl/transaction_processor_impl.cpp @@ -65,7 +65,6 @@ namespace iroha { // notify about failed txs const auto &errors = proposal_and_errors->rejected_transactions; - std::lock_guard lock(notifier_mutex_); for (const auto &tx_error : errors) { log_->info(composeErrorMessage(tx_error)); this->publishStatus(TxStatusType::kStatefulFailed, @@ -75,7 +74,7 @@ namespace iroha { // notify about success txs for (const auto &successful_tx : proposal_and_errors->verified_proposal->transactions()) { - log_->info("on stateful validation success: {}", + log_->info("VerifiedProposalCreatorEvent StatefulValid: {}", successful_tx.hash().hex()); this->publishStatus(TxStatusType::kStatefulValid, successful_tx.hash()); @@ -85,27 +84,29 @@ namespace iroha { // commit transactions pcs_->on_commit().subscribe( [this](synchronizer::SynchronizationEvent sync_event) { + bool has_at_least_one_committed = false; sync_event.synced_blocks.subscribe( // on next - [this](auto model_block) { - 
current_txs_hashes_.reserve( - model_block->transactions().size()); - std::transform(model_block->transactions().begin(), - model_block->transactions().end(), - std::back_inserter(current_txs_hashes_), - [](const auto &tx) { return tx.hash(); }); + [this, &has_at_least_one_committed](auto model_block) { + for (const auto &tx : model_block->transactions()) { + const auto &hash = tx.hash(); + log_->info("SynchronizationEvent Committed: {}", + hash.hex()); + this->publishStatus(TxStatusType::kCommitted, hash); + has_at_least_one_committed = true; + } + for (const auto &rejected_tx_hash : + model_block->rejected_transactions_hashes()) { + log_->info("SynchronizationEvent Rejected: {}", + rejected_tx_hash.hex()); + this->publishStatus(TxStatusType::kRejected, + rejected_tx_hash); + } }, // on complete - [this] { - if (current_txs_hashes_.empty()) { + [this, &has_at_least_one_committed] { + if (not has_at_least_one_committed) { log_->info("there are no transactions to be committed"); - } else { - std::lock_guard lock(notifier_mutex_); - for (const auto &tx_hash : current_txs_hashes_) { - log_->info("on commit committed: {}", tx_hash.hex()); - this->publishStatus(TxStatusType::kCommitted, tx_hash); - } - current_txs_hashes_.clear(); } }); }); @@ -175,6 +176,10 @@ namespace iroha { status_factory_->makeStatefulValid(hash, tx_error)); return; }; + case TxStatusType::kRejected: { + status_bus_->publish(status_factory_->makeRejected(hash, tx_error)); + return; + }; case TxStatusType::kCommitted: { status_bus_->publish(status_factory_->makeCommitted(hash, tx_error)); return; diff --git a/irohad/torii/processor/transaction_processor_impl.hpp b/irohad/torii/processor/transaction_processor_impl.hpp index 5a5fbef488..107071ef6a 100644 --- a/irohad/torii/processor/transaction_processor_impl.hpp +++ b/irohad/torii/processor/transaction_processor_impl.hpp @@ -64,10 +64,6 @@ namespace iroha { logger::Logger log_; - /// prevents from emitting new tx statuses from different threads - /// in parallel - std::mutex notifier_mutex_; - // TODO: [IR-1665] Akvinikym 29.08.18: Refactor method publishStatus(..) 
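  // Note: the status set handled by publishStatus now includes kRejected,
  // published when a transaction's hash appears in a block's rejected
  // transactions list (i.e. it was rejected on consensus), as opposed to
  // failing stateful validation.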
/** * Complementary class for publishStatus method @@ -77,6 +73,7 @@ namespace iroha { kStatelessValid, kStatefulFailed, kStatefulValid, + kRejected, kCommitted, kMstExpired, kNotReceived, diff --git a/shared_model/backend/protobuf/transaction_responses/proto_concrete_tx_response.hpp b/shared_model/backend/protobuf/transaction_responses/proto_concrete_tx_response.hpp index c4d2e38d99..0abb843a9f 100644 --- a/shared_model/backend/protobuf/transaction_responses/proto_concrete_tx_response.hpp +++ b/shared_model/backend/protobuf/transaction_responses/proto_concrete_tx_response.hpp @@ -40,7 +40,7 @@ namespace shared_model { using CommittedTxResponse = TrivialProto; - using RejectedTxResponse = TrivialProto; // ---------------------------| Rest statuses |----------------------------- diff --git a/shared_model/interfaces/transaction_responses/rejected_tx_response.hpp b/shared_model/interfaces/transaction_responses/rejected_tx_response.hpp index b2fd585db1..b80424afb8 100644 --- a/shared_model/interfaces/transaction_responses/rejected_tx_response.hpp +++ b/shared_model/interfaces/transaction_responses/rejected_tx_response.hpp @@ -13,7 +13,7 @@ namespace shared_model { /** * Status shows that transaction was rejected on consensus */ - class RejectTxResponse : public AbstractTxResponse { + class RejectedTxResponse : public AbstractTxResponse { private: std::string className() const override { return "RejectedTxResponse"; diff --git a/shared_model/interfaces/transaction_responses/tx_response.hpp b/shared_model/interfaces/transaction_responses/tx_response.hpp index 12fab10d00..ceac72a4a6 100644 --- a/shared_model/interfaces/transaction_responses/tx_response.hpp +++ b/shared_model/interfaces/transaction_responses/tx_response.hpp @@ -18,7 +18,7 @@ namespace shared_model { class StatelessValidTxResponse; class StatefulFailedTxResponse; class StatefulValidTxResponse; - class RejectTxResponse; + class RejectedTxResponse; class CommittedTxResponse; class MstExpiredResponse; class NotReceivedTxResponse; @@ -47,7 +47,7 @@ namespace shared_model { StatelessValidTxResponse, StatefulFailedTxResponse, StatefulValidTxResponse, - RejectTxResponse, + RejectedTxResponse, CommittedTxResponse, MstExpiredResponse, NotReceivedTxResponse, diff --git a/shared_model/interfaces/transaction_responses/tx_response_variant.hpp b/shared_model/interfaces/transaction_responses/tx_response_variant.hpp index 2ae42edad7..262cd34aae 100644 --- a/shared_model/interfaces/transaction_responses/tx_response_variant.hpp +++ b/shared_model/interfaces/transaction_responses/tx_response_variant.hpp @@ -1,7 +1,7 @@ /** - * Copyright Soramitsu Co., Ltd. All Rights Reserved. - * SPDX-License-Identifier: Apache-2.0 - */ + * Copyright Soramitsu Co., Ltd. All Rights Reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ #ifndef IROHA_SHARED_MODEL_TX_RESPONSE_VARIANT_HPP #define IROHA_SHARED_MODEL_TX_RESPONSE_VARIANT_HPP @@ -11,17 +11,17 @@ #include namespace boost { - extern template class variant< - const shared_model::interface::StatelessFailedTxResponse &, - const shared_model::interface::StatelessValidTxResponse &, - const shared_model::interface::StatefulFailedTxResponse &, - const shared_model::interface::StatefulValidTxResponse &, - const shared_model::interface::RejectTxResponse &, - const shared_model::interface::CommittedTxResponse &, - const shared_model::interface::MstExpiredResponse &, - const shared_model::interface::NotReceivedTxResponse &, - const shared_model::interface::MstPendingResponse &, - const shared_model::interface::EnoughSignaturesCollectedResponse &>; + extern template class variant< + const shared_model::interface::StatelessFailedTxResponse &, + const shared_model::interface::StatelessValidTxResponse &, + const shared_model::interface::StatefulFailedTxResponse &, + const shared_model::interface::StatefulValidTxResponse &, + const shared_model::interface::RejectedTxResponse &, + const shared_model::interface::CommittedTxResponse &, + const shared_model::interface::MstExpiredResponse &, + const shared_model::interface::NotReceivedTxResponse &, + const shared_model::interface::MstPendingResponse &, + const shared_model::interface::EnoughSignaturesCollectedResponse &>; } #endif // IROHA_SHARED_MODEL_TX_RESPONSE_VARIANT_HPP diff --git a/test/integration/acceptance/revoke_permission_test.cpp b/test/integration/acceptance/revoke_permission_test.cpp index c9adf9ade4..cd8651023d 100644 --- a/test/integration/acceptance/revoke_permission_test.cpp +++ b/test/integration/acceptance/revoke_permission_test.cpp @@ -94,8 +94,8 @@ TEST_F(GrantablePermissionsFixture, RevokeWithoutPermission) { IntegrationTestFramework itf(1); itf.setInitialState(kAdminKeypair); createTwoAccounts(itf, {}, {Role::kReceive}) - .sendTxAwait(makeUserWithPerms( - {interface::permissions::Role::kSetMyQuorum}), + .sendTxAwait( + makeUserWithPerms({interface::permissions::Role::kSetMyQuorum}), [](auto &block) { ASSERT_EQ(block->transactions().size(), 1); }) .sendTxAwait( grantPermission(kUser, @@ -373,15 +373,8 @@ namespace grantables { ASSERT_EQ(proposal->transactions().size(), 1); }) .skipVerifiedProposal() - .skipBlock() - .getTxStatus(last_check_tx.hash(), - [&last_check_tx](auto &status) { - auto err_cmd_name = status.statelessErrorOrCommandName(); - auto cmd_in_tx = last_check_tx.commands()[0].toString(); - auto cmd_in_tx_name = - cmd_in_tx.substr(0, cmd_in_tx.find(":")); - ASSERT_EQ(err_cmd_name, cmd_in_tx_name); - }) + .checkBlock( + [](auto &block) { ASSERT_EQ(block->transactions().size(), 0); }) .done(); } diff --git a/test/integration/acceptance/set_account_quorum_test.cpp b/test/integration/acceptance/set_account_quorum_test.cpp index 16b4e942b5..5332e651ca 100644 --- a/test/integration/acceptance/set_account_quorum_test.cpp +++ b/test/integration/acceptance/set_account_quorum_test.cpp @@ -48,8 +48,7 @@ TEST_F(QuorumFixture, CannotRaiseQuorumMoreThanSignatures) { const auto new_quorum = 3; auto raise_quorum_tx = complete( baseTx(kAdminId).setAccountQuorum(kAdminId, new_quorum), kAdminKeypair); - itf.sendTxAwait(raise_quorum_tx, CHECK_TXS_QUANTITY(0)) - .getTxStatus(raise_quorum_tx.hash(), CHECK_STATEFUL_INVALID); + itf.sendTxAwait(raise_quorum_tx, CHECK_TXS_QUANTITY(0)); } /** diff --git a/test/module/irohad/torii/processor/transaction_processor_test.cpp 
b/test/module/irohad/torii/processor/transaction_processor_test.cpp index 049497b810..efbf9a4a70 100644 --- a/test/module/irohad/torii/processor/transaction_processor_test.cpp +++ b/test/module/irohad/torii/processor/transaction_processor_test.cpp @@ -6,6 +6,7 @@ #include "torii/processor/transaction_processor_impl.hpp" #include +#include #include #include #include "builders/default_builders.hpp" @@ -233,10 +234,10 @@ TEST_F(TransactionProcessorTest, TransactionProcessorOnProposalBatchTest) { /** * @given transaction processor * @when transactions compose proposal which is sent to peer - * communication service @and all transactions composed the block - * @then for every transaction in bathces STATEFUL_VALID status is returned + * communication service + * @then for every transaction in batches STATEFUL_VALID status is returned */ -TEST_F(TransactionProcessorTest, TransactionProcessorBlockCreatedTest) { +TEST_F(TransactionProcessorTest, TransactionProcessorVerifiedProposalTest) { std::vector txs; for (size_t i = 0; i < proposal_size; i++) { auto &&tx = addSignaturesFromKeyPairs(baseTestTx(), makeKey()); @@ -268,23 +269,6 @@ TEST_F(TransactionProcessorTest, TransactionProcessorBlockCreatedTest) { verified_prop_notifier.get_subscriber().on_next( simulator::VerifiedProposalCreatorEvent{validation_result, round}); - auto block = TestBlockBuilder().transactions(txs).build(); - - // 2. Create block and notify transaction processor about it - rxcpp::subjects::subject> - blocks_notifier; - - commit_notifier.get_subscriber().on_next( - SynchronizationEvent{blocks_notifier.get_observable(), - SynchronizationOutcomeType::kCommit, - {}, - {}}); - - blocks_notifier.get_subscriber().on_next( - std::shared_ptr(clone(block))); - // Note blocks_notifier hasn't invoked on_completed, so - // transactions are not commited - SCOPED_TRACE("Stateful Valid status verification"); validateStatuses(txs); } @@ -349,8 +333,8 @@ TEST_F(TransactionProcessorTest, TransactionProcessorOnCommitTest) { * communication service @and some transactions became part of block, while some * were not committed, failing stateful validation * @then for every transaction from block COMMIT status is returned @and - * for every transaction, which failed stateful validation, - * STATEFUL_INVALID_STATUS status is returned + * for every transaction, which failed stateful validation, REJECTED status is + * returned */ TEST_F(TransactionProcessorTest, TransactionProcessorInvalidTxsTest) { std::vector block_txs; @@ -377,8 +361,9 @@ TEST_F(TransactionProcessorTest, TransactionProcessorInvalidTxsTest) { // passed or not stateful validation) // Plus all transactions from block will // be committed and corresponding status will be sent + // Rejected statuses will be published for invalid transactions EXPECT_CALL(*status_bus, publish(_)) - .Times(proposal_size + block_size) + .Times(proposal_size + block_size + invalid_txs.size()) .WillRepeatedly(testing::Invoke([this](auto response) { status_map[response->transactionHash()] = response; })); @@ -403,7 +388,20 @@ TEST_F(TransactionProcessorTest, TransactionProcessorInvalidTxsTest) { verified_prop_notifier.get_subscriber().on_next( simulator::VerifiedProposalCreatorEvent{validation_result, round}); - auto block = TestBlockBuilder().transactions(block_txs).build(); + { + SCOPED_TRACE("Stateful invalid status verification"); + // check that all invalid transactions will have stateful invalid status + validateStatuses( + invalid_txs); + } + + auto block = TestBlockBuilder() + .transactions(block_txs) + 
.rejectedTransactions( + invalid_txs | boost::adaptors::transformed([](auto &tx) { + return tx.hash(); + })) + .build(); SynchronizationEvent commit_event{ rxcpp::observable<>::just( @@ -414,10 +412,9 @@ TEST_F(TransactionProcessorTest, TransactionProcessorInvalidTxsTest) { commit_notifier.get_subscriber().on_next(commit_event); { - SCOPED_TRACE("Stateful invalid status verification"); - // check that all invalid transactions will have stateful invalid status - validateStatuses( - invalid_txs); + SCOPED_TRACE("Rejected status verification"); + // check that all invalid transactions will have rejected status + validateStatuses(invalid_txs); } { SCOPED_TRACE("Committed status verification");
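    // Every transaction placed in the block body is expected to reach the
    // COMMITTED status at this point, while the hashes passed through
    // rejectedTransactions() above should already have produced REJECTED
    // notifications on the status bus.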