From 8609eb05df54a6f53a1bc6a2658cdb333fde4822 Mon Sep 17 00:00:00 2001 From: Jovonni Pharr Date: Thu, 12 Mar 2020 00:09:29 -0400 Subject: [PATCH] saved from core --- Makefile | 2 +- README.md | 2 +- core/.DS_Store | Bin 10244 -> 6148 bytes core/Cargo.lock | 35 +++ core/DockerfileAOS | 38 +++ core/DockerfileAlice | 21 ++ core/DockerfileBob | 21 ++ core/DockerfileCici | 22 ++ core/Makefile | 64 ++++- core/README.md | 106 +++++++- core/block/.DS_Store | Bin 6148 -> 6148 bytes core/block/src/lib.rs | 129 +++++++++- core/cryptoutil/src/lib.rs | 5 +- core/db/Cargo.lock | 62 +++++ core/db/Cargo.toml | 1 + core/db/src/lib.rs | 123 ++++++++- core/encode/src/lib.rs | 16 +- core/{storage/proposal => executor}/.DS_Store | Bin core/executor/Cargo.lock | 24 ++ core/executor/Cargo.toml | 9 + core/executor/src/lib.rs | 65 +++++ core/lock/.DS_Store | Bin 0 -> 6148 bytes core/lock/Cargo.lock | 48 ++++ core/lock/Cargo.toml | 8 + core/lock/src/lib.rs | 86 +++++++ core/macros/.DS_Store | Bin 0 -> 6148 bytes core/macros/Cargo.lock | 16 ++ core/macros/Cargo.toml | 8 + core/macros/src/lib.rs | 101 ++++++++ core/network/src/lib.rs | 139 ++++++----- core/node/src/lib.rs | 227 +++++++++++++---- core/proposal/Cargo.lock | 124 ++++++++++ core/proposal/Cargo.toml | 1 + core/proposal/src/lib.rs | 233 +++++++++++++----- core/signature/src/lib.rs | 20 +- core/src/Cargo.toml | 2 + core/src/main.rs | 7 +- core/storage/.DS_Store | Bin 10244 -> 6148 bytes core/storage/chain/.DS_Store | Bin 8196 -> 6148 bytes core/storage/states.db | 2 +- core/timestamp/src/lib.rs | 1 - core/transaction/Cargo.toml | 1 + core/transaction/src/lib.rs | 77 +++--- 43 files changed, 1569 insertions(+), 277 deletions(-) create mode 100644 core/DockerfileAOS create mode 100644 core/DockerfileAlice create mode 100644 core/DockerfileBob create mode 100644 core/DockerfileCici create mode 100644 core/db/Cargo.lock rename core/{storage/proposal => executor}/.DS_Store (100%) create mode 100644 core/executor/Cargo.lock create mode 100644 core/executor/Cargo.toml create mode 100644 core/executor/src/lib.rs create mode 100644 core/lock/.DS_Store create mode 100644 core/lock/Cargo.lock create mode 100644 core/lock/Cargo.toml create mode 100644 core/lock/src/lib.rs create mode 100644 core/macros/.DS_Store create mode 100644 core/macros/Cargo.lock create mode 100644 core/macros/Cargo.toml create mode 100644 core/macros/src/lib.rs diff --git a/Makefile b/Makefile index b879ec9..196d4f2 100644 --- a/Makefile +++ b/Makefile @@ -1,2 +1,2 @@ save: - git add * ; git commit -am "checkpoint" -v ; git push origin master -v ; + git add * ; git commit -am "checkpoint from root" ; git push origin master:development -v diff --git a/README.md b/README.md index 54a1598..d7bd713 100644 --- a/README.md +++ b/README.md @@ -50,7 +50,7 @@ This will submit a first proposal to alice, which will initiate proposal generat make p_alice ``` -# Stress with input/female transaction +# Stress with output/female transaction To create/submit a new transaction every m minutes ``` make stress diff --git a/core/.DS_Store b/core/.DS_Store index aacfd33e32e4d73739f7a783688b2639be7f610b..cd3e843b26470e297dc2ecb7f5017d11a8d23bcb 100644 GIT binary patch literal 6148 zcmeHKOK!q25PgP}LfNqFGFRvgB1BKn3lstZ)R06;QQKXP-Zx{bjRjq{QmPqg{F3>3 z{ET7@z%9?)2VenUN);T`*eRqGQ*0OzC*1=31sg(|0B~V2u`s*m$L*2~LcZ zt}3+s=vz*^V}oyWH1R4Dx~}w7Ymr(Cv1GKY8BIs*nYU)iGXtZwjOlKW$J4jUQNxV= zceL#POldo@I|*5H0d+rfPNQU%+D5Ly70Q0d5Pn70qcLC%42^+sNKfZOYux%Z28@AU zXF$FWDOE5F*m=}X2M4_Z5EGhJ_$+@ZB_|D-1?)W1LvcQp=u=&|VmO~pdm7@hfSpI5 
z4i_#TF3jx04aMo~tef^Zfq-NcbyGePqEfqHTb3+XWx;u$cP3|)hSre_|7?DFnHoCAQ>u^YD!Y_KP4d^R-VJ*ZH{ZO4A%TM{WdJJnW7zK<1 zMggOMQNSqhVJLuawrKXSvgT+MFbWt2x(bN(!9f=`r)@`Bxpd$lO90q3F3W~<)B!Ta z(Ke@TM_Cz)d#dWe(xH|vF)S4hza!wVIc+=2s&KMYI9WQgr7ILmW=EVO;bi5MHAka> zQ6R2>sNL64fvV)wRJ49i9sB2Q)LZi>Cks`-fUEZH-+#S0_jKW4FQT{#qFxSn2vU`K zgwqf*2Xv0=)C7wbjuom#cM`12{cxO9@v}Sk%JY&_;yr7A$Zj<(=SgZY8lWN-qP)I2 z177kv=kaLcp~Wi?BoFNi^Y;3pdE~~)S}KtTO?>1l@qE_Oj$e>3;^%pn26sQXx+wGZ zV8Zy53NkxHph`%Wf5}!ybG}5^1YgsP0!9I& zfKk9GU=;Z16i6w?j@bYIs{il*AKh~q!bSn3z}qWer6==~6TmN5dcC1yuiZgEjV_wN zb(EDMILPgISh*dKKg4$YZghc_n+Ezkr)@`B84ipLhgG}z@BGIAwvQRF@OfDj_eRSg YQ~yUcIii)SssC5nDc}?7J=Fhi02hAQBLDyZ diff --git a/core/Cargo.lock b/core/Cargo.lock index 7e0d474..da37436 100644 --- a/core/Cargo.lock +++ b/core/Cargo.lock @@ -233,6 +233,7 @@ name = "db" version = "0.1.0" dependencies = [ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lock 0.1.0", ] [[package]] @@ -269,6 +270,14 @@ dependencies = [ "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "executor" +version = "0.1.0" +dependencies = [ + "json 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "macros 0.1.0", +] + [[package]] name = "failure" version = "0.1.6" @@ -318,6 +327,15 @@ name = "foreign-types-shared" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "fs2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "fuchsia-cprng" version = "0.1.1" @@ -519,6 +537,13 @@ name = "libc" version = "0.2.62" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "lock" +version = "0.1.0" +dependencies = [ + "fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "lock_api" version = "0.3.1" @@ -535,6 +560,13 @@ dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "macros" +version = "0.1.0" +dependencies = [ + "json 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "matches" version = "0.1.8" @@ -772,6 +804,7 @@ version = "0.1.0" dependencies = [ "block 0.1.0", "db 0.1.0", + "executor 0.1.0", "hash 0.1.0", "json 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1348,6 +1381,7 @@ version = "0.1.0" dependencies = [ "db 0.1.0", "encode 0.1.0", + "executor 0.1.0", "hash 0.1.0", "json 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", "timestamp 0.1.0", @@ -1530,6 +1564,7 @@ dependencies = [ "checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" "checksum foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" "checksum foreign-types-shared 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" +"checksum fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" "checksum fuchsia-cprng 0.1.1 
(registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" diff --git a/core/DockerfileAOS b/core/DockerfileAOS new file mode 100644 index 0000000..5ab1027 --- /dev/null +++ b/core/DockerfileAOS @@ -0,0 +1,38 @@ +#FROM rust:latest +#FROM rust:1.36.0 AS build +FROM clux/muslrust AS build + +ARG nodeType + +RUN mkdir /environment/ + +COPY . /environment/ + +WORKDIR /environment/ + +RUN rustup target add x86_64-unknown-linux-musl + +RUN cargo build --release + +# Copy the source and build the application. +COPY ./src ./src + +#### +#RUN apt-get install pkg-config libx11-dev libxmu-dev + +RUN cargo install --target x86_64-unknown-linux-musl --path . + +################second stage +FROM scratch + +EXPOSE 8081 + +#test +#COPY --from=build /usr/local/cargo/bin/core . + +#works +###COPY --from=build /environment/target/x86_64-unknown-linux-musl/release/core . + +###COPY --from=build /environment/storage ./storage + +###CMD ["./core node-name=\"alices node\" node-id=1 port=8081 peers=127.0.0.1:8082,127.0.0.1:8083 ip=127.0.0.1:8081"] diff --git a/core/DockerfileAlice b/core/DockerfileAlice new file mode 100644 index 0000000..681233b --- /dev/null +++ b/core/DockerfileAlice @@ -0,0 +1,21 @@ +FROM aos:latest AS build + +EXPOSE 8081 + +#test +#COPY --from=build /usr/local/cargo/bin/core . + +#works +####COPY --from=build /environment/target/x86_64-unknown-linux-musl/release/core . + +#RUN mkdir ./storage + +####COPY --from=build /environment/storage ./storage + + +#USER 1 +#CMD ["./core node-name='alice node' node-id=1 port=8081 peers=127.0.0.1:8082,127.0.0.1:8083 ip=127.0.0.1:8081"] +####CMD ["./core", "node-name='alice node'", "node-id=1", "port=8081", "peers=127.0.0.1:8082,127.0.0.1:8083", "ip=127.0.0.1:8081"] +#CMD ["./core", "node-name='alice node'", "node-id=1", "port=8081", "peers=localhost:8082,localhost:8083", "ip=localhost:8081"] + +CMD ["make", "alice"] diff --git a/core/DockerfileBob b/core/DockerfileBob new file mode 100644 index 0000000..b7c1276 --- /dev/null +++ b/core/DockerfileBob @@ -0,0 +1,21 @@ +FROM aos:latest AS build + +EXPOSE 8082 + +#test +#COPY --from=build /usr/local/cargo/bin/core . + +#works +####COPY --from=build /environment/target/x86_64-unknown-linux-musl/release/core . + +#RUN mkdir ./storage + +####COPY --from=build /environment/storage ./storage + + +#USER 1 +#CMD ["./core node-name=\"bob's node\" node-id=2 port=8082 peers=127.0.0.1:8081,127.0.0.1:8083 ip=127.0.0.1:8082"] +####CMD ["./core", "node-name='bobs node'", "node-id=2", "port=8082", "peers=127.0.0.1:8081,127.0.0.1:8083", "ip=127.0.0.1:8082"] +#CMD ["./core", "node-name='bobs node'", "node-id=2", "port=8082", "peers=localhost:8081,localhost:8083", "ip=localhost:8082"] + +CMD ["make", "bob"] diff --git a/core/DockerfileCici b/core/DockerfileCici new file mode 100644 index 0000000..fa6ff3b --- /dev/null +++ b/core/DockerfileCici @@ -0,0 +1,22 @@ +FROM aos:latest AS build + +EXPOSE 8083 + + +#test +#COPY --from=build /usr/local/cargo/bin/core . + +#works +####COPY --from=build /environment/target/x86_64-unknown-linux-musl/release/core . 
+ +#RUN mkdir ./storage + +####COPY --from=build /environment/storage ./storage + + +#USER 1 +#CMD ["./core node-name=\"cici's node\" node-id=3 port=8083 peers=127.0.0.1:8082,127.0.0.1:8081 ip=127.0.0.1:8083"] +####CMD ["./core", "node-name='cicis node'", "node-id=3", "port=8083", "peers=127.0.0.1:8082,127.0.0.1:8081", "ip=127.0.0.1:8083"] +#CMD ["./core", "node-name='cicis node'", "node-id=3", "port=8083", "peers=localhost:8082,localhost:8081", "ip=localhost:8083"] + +CMD ["make", "cici"] diff --git a/core/Makefile b/core/Makefile index b6a8538..c33f862 100644 --- a/core/Makefile +++ b/core/Makefile @@ -1,17 +1,24 @@ compile: RUST_BACKTRACE=1 cargo check --verbose; alice: clean_alice - RUST_BACKTRACE=1 cargo run node-name="alice's node" node-id=1 port=8081 peers=127.0.0.1:8082,127.0.0.1:8083 ip=127.0.0.1:8081 ; > run.log; + #RUST_BACKTRACE=1 cargo run node-name="alice's node" port=8081 peers=192.168.0.8:8082,192.168.0.8:8083 ip=192.168.0.2:8081; + #RUST_BACKTRACE=1 cargo run node-name="alice's node" port=8081 peers=192.168.0.8:8082 ip=192.168.0.2:8081; + #RUST_BACKTRACE=1 cargo run node-name="alice's node" port=8081 peers=127.0.0.1:8081 ip=127.0.0.1:8081; + RUST_BACKTRACE=1 cargo run node-name="alice's node" node-id=1 port=8081 peers=127.0.0.1:8082,127.0.0.1:8083 ip=127.0.0.1:8081 #> run.log; bob: clean_bob - RUST_BACKTRACE=1 cargo run node-name="bob's node" node-id=2 port=8082 peers=127.0.0.1:8081,127.0.0.1:8083 ip=127.0.0.1:8082 ; > run.log; + #RUST_BACKTRACE=1 cargo run node-name="bob's node" port=8082 peers=192.168.0.2:8081,192.168.0.8:8083 ip=192.168.0.8:8082; + #RUST_BACKTRACE=1 cargo run node-name="bob's node" port=8082 peers=192.168.0.2:8081 ip=192.168.0.8:8082; + RUST_BACKTRACE=1 cargo run node-name="bob's node" node-id=2 port=8082 peers=127.0.0.1:8081,127.0.0.1:8083 ip=127.0.0.1:8082 #> run.log; cici: clean_cici - RUST_BACKTRACE=1 cargo run node-name="cici's node" node-id=3 port=8083 peers=127.0.0.1:8082,127.0.0.1:8081 ip=127.0.0.1:8083 ; > run.log; + #RUST_BACKTRACE=1 cargo run node-name="cici's node" port=8083 peers=192.168.0.8:8082,192.168.0.2:8081 ip=192.168.0.8:8083; + #RUST_BACKTRACE=1 cargo run node-name="cici's node" port=8083 peers=192.168.0.2:8081 ip=192.168.0.8:8083; + RUST_BACKTRACE=1 cargo run node-name="cici's node" node-id=3 port=8083 peers=127.0.0.1:8082,127.0.0.1:8081 ip=127.0.0.1:8083 #> run.log; reset_alice: - rm storage/.DS_Store ; make clean_alice ; make get ; make alice + rm storage/.DS_Store ; rm -r .DS_Store ; make clean_alice ; make get ; make alice reset_bob: - rm storage/.DS_Store ; make clean_bob ; make get ; make bob + rm storage/.DS_Store ; rm -r .DS_Store ; make clean_bob ; make get ; make bob reset_cici: - rm storage/.DS_Store ; make clean_cici ; make get ; make cici + rm storage/.DS_Store ; rm -r .DS_Store ; make clean_cici ; make get ; make cici clean_alice: rm -f ./storage/proposals.db ; rm -rf ./storage/proposal/* ; @@ -33,6 +40,7 @@ clean_bob: rm -rf ./storage/state/* ; make create_directories ; #cargo clean ; + clean_cici: rm -f ./storage/proposals.db ; rm -rf ./storage/proposal/* ; @@ -52,25 +60,35 @@ create_directories: build: cargo build --verbose; save: - git add * ; git commit -am "saved..." 
; git push origin master + git add * ; git commit -am "saved from core" ; git push origin master:development -v get: git fetch ; git pull origin master; rms: rm ./storage/proposal/* stress: - while true; do make stress_a; sleep 30 ; make stress_b; sleep 30 ; make stress_c; sleep 30 ; done + #while true; do make txo; make sap; sleep 60; done + #while true; do make txo; make create_proposal; sleep 60; done + #while true; do make sat; make sap; sleep 30; done + while true; do make stress_a; sleep 15 ; make stress_b; sleep 15 ; make stress_c; sleep 15 ; done + #while true; do make stress_a; sleep 60 ; done p_alice: - curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8081" 127.0.0.1:8081/proposal/create/ + #curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8081" 127.0.0.1:8081/proposal/create/ + curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8081" localhost:8081/proposal/create/ p_bob: - curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8082" 127.0.0.1:8082/proposal/create/ + #curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8082" 127.0.0.1:8082/proposal/create/ + curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8082" localhost:8082/proposal/create/ p_cici: - curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8083" 127.0.0.1:8083/proposal/create/ + #curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8083" 127.0.0.1:8083/proposal/create/ + curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8083" localhost:8083/proposal/create/ stress_a: curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8081" --header "User-Agent: 100,200,test_string" 127.0.0.1:8081/transaction/submit/output ; + #curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8081" 127.0.0.1:8081/proposal/create/ stress_b: curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8082" --header "User-Agent: 100,200,test_string" 192.168.0.8:8082/transaction/submit/output ; + #curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8082" 192.168.0.8:8082/proposal/create/ ; stress_c: curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8083" --header "User-Agent: 100,200,test_string" 192.168.0.8:8083/transaction/submit/output ; + #curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8083" 192.168.0.8:8083/proposal/create/ ; sat: for i in {1..2}; do curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8081" --header "User-Agent: 100,200,test_string" 127.0.0.1:8081/transaction/submit/output ; done for i in {1..2}; do curl -d 'TESTSTRING' --header "Origin: 127.0.0.1:8082" --header "User-Agent: 100,200,test_string" 192.168.0.8:8082/transaction/submit/output ; done @@ -108,3 +126,27 @@ test_hash: cd hash; RUST_BACKTRACE=1 cargo test --verbose; test_signature: cd signature; RUST_BACKTRACE=1 cargo test --verbose; +dbm: #dbmain + time docker build --file "./DockerfileAOS" --build-arg nodeType=$1 -t aos . +dball: + make dba ; + make dbb ; + make dbc ; +dba: + time docker build --force-rm --rm --network="host" --file "./DockerfileAlice" --build-arg nodeType=alice -t alice_node . +dbb: + time docker build --force-rm --rm --network="host" --file "./DockerfileBob" --build-arg nodeType=bob -t bob_node . +dbc: + time docker build --force-rm --rm --network="host" --file "./DockerfileCici" --build-arg nodeType=cici -t cici_node . 
+rac: + docker run -d -p 8081:8081 --dns=192.168.0.1 alice_node ; + docker run -d -p 8082:8082 --dns=192.168.0.1 bob_node ; + docker run -d -p 8083:8083 --dns=192.168.0.1 cici_node ; + + #docker run -d --network="host" alice_node ; + #docker run -d --network="host" bob_node ; + #docker run -d --network="host" cici_node ; + + #docker run -d -t -p 8081:8081 --net=host --dns=192.168.0.1 alice_node ; + #docker run -d -t -p 8082:8082 --net=host --dns=192.168.0.1 bob_node ; + #docker run -d -t -p 8083:8083 --net=host --dns=192.168.0.1 cici_node ; diff --git a/core/README.md b/core/README.md index da8887d..81362d2 100644 --- a/core/README.md +++ b/core/README.md @@ -1 +1,105 @@ -# TODOs +# Development work + +## Technological goals +Can we develop a system that: +- all heads of state have an uncompromised computer, they own +- each computer have the means of interacting with the other +- each computer serves as a decision making node to invoke state transitions in the System +- can we make a circular queue, of inputs to a state transition function +- each state that transitions the state transition function has to have certain aspects that are used to be computed + + +## Proposals +A proposal submits a motion to further "time" within the network. This is sent, and must be responded to. +- Pending: for proposals just made +- Created: for proposals made, and broadcasted to the network +- Accepted: proposals accepted by peers +- Rejected: proposals rejected by peers +- Committed: proposals agreed upon by peers +- NotValid: proposals that do not match any of the above enum values + +### Proposal Index +Each replicate maintains a Proposal Index (a database of proposals) +```javascript +{proposals: + {"proposal_id_as_key": Proposal (see below)} +} +``` + +### Proposal Structure +a Proposal contains the following information +```javascript +{ + proposal_id: Integer, + proposal_status: String, + proposal_hash: String, + proposal_time: Timestamp, + proposal_sender: String, + proposal_block: Block (see below) +} +``` + +### Calculating Proposal Hash +To calculate the hash of the proposal, hash the JSON string representation of the *proposal_id*, and the *proposal_block* + +## Block +A block will hold the contents (hash) of the proposal, combined with a timestamp. A new block can only be authored once a proposal has been sent, responded to, and resolved -- A three-way handshake. + +A proposer proposes, receives an acceptance, and then notifies the network of it now committing the proposal, whereby each other replica can take the commitment notification, and verify it: + +### Block structure +A block contains the following structure +```javascript +{ + block_id: i32, + block_hash: String, + block_parent_hash: String, + block_time: Timestamp, + proposal_hash: String + block_data: String +} +``` + +## Accepting a proposal +Once a proposal is broadcasted from Alice to Bob, (A -> B), Bob performs the following (latest proposal_id = 0, and alice's proposal state is *created*): +- Bob checks if he already has a Proposal with that *proposal_id* (proposal_id = 1) + - if bob has proposal_id = 1, he checks the status of it + - if the *proposal_status* is *accepted*, or *rejected*, and the submitter is NOT bob, do nothing because bob already added it to the *proposal index* +- if bob doesn't have proposal_id = 1, bob verifies the proposal using the following criteria: + - What is the current *block_id* bob has? 
+ - a valid proposal will ONLY be the current *block_id* + 1 + - calculate the hash of the proposal (see below) + - validate the *proposal_hash* provided by alice against the *proposal_hash* bob just calculated + - if the hashes are different, bob rejects the proposal, and sets the proposal to *NotValid* in the proposal index + - What is the current *block_hash* of our highest block? + - If the *block_hash* of bob's highest block is NOT equal to the *block_parent_hash* of the submitted proposal's *proposal_block*, bob rejects the block, and sets the proposal to *NotValid* in the proposal index + - if all of the above does not reject the proposal, bob accepts alice's submitted proposal, responds to alice with "acceptance", and updates the proposal_index to *accepted* for the *proposal_id* +- If the proposal is rejected, update proposal index, and set proposal to *NotValid* +- If the proposal is valid, (bob doesn't add it yet), but bob responds to alice with "acceptance", and updates the proposal index to mark the proposal as *accepted* +- Every time alice receives a *accept* or a *reject*, alice checks how many responses the respective proposal requires + - If alice receives enough *accept* responses, alice broadcasts the corresponding proposal to bob with a *committed* state +- When bob receives a committed proposal: + - bob checks if he already has the same proposal with the submitter != bob, and with a *accepted* state + - if bob already has the proposal with a *accepted* state: + - bob verified the proposal (using the hash) + - if the proposal is valid, bob commits the proposal's block to his block history + + +### Calculating Block Hash +Calculate the block hash by hashing the JSON string representation of the *block_id*, *block_parent_hash*, *block_time*, and the *block_time* + +### State Transitioning +Upon each transition step, check the proposal index for: +- Proposals marked as *pending*, and the submitter is "me" + - for each of these *pending* proposals + - broadcast them to the network + - mark the proposal as *created* +- Proposals marked as *accepted* + - for each of these *accepted* proposals + - verify the proposal's block + - if valid + - commit the proposal's block to "my" block history + - mark the proposal as committed + - if not valid, mark proposal as *NotValid* + +# Chain Directory diff --git a/core/block/.DS_Store b/core/block/.DS_Store index c8db29fcc9b0d574564bc7f0a7cb79afdb6b35f7..9a051e3f67a1ada063d328557793db66e034f85c 100644 GIT binary patch delta 74 zcmZoMXfc=|#>CJ*u~2NHo+3XB0|Nsi1A_pAVQ_MOZo$MtT}GD8FPP+6HzzR1Fl}b% d;O77;-Ws3|cI>1*8%7 wNZTxzV|l?X^Z!kbKTZA|;(l(IvuaJMaTlETDPAR|D{~b%G5F>wGDCTYFIc!cI{*Lx diff --git a/core/block/src/lib.rs b/core/block/src/lib.rs index 09b309c..814a36d 100644 --- a/core/block/src/lib.rs +++ b/core/block/src/lib.rs @@ -97,11 +97,13 @@ impl ReadBlockFromDB for DB { fn get_block_index_as_json() -> Option { let block_index: String = match DB::read_block_index() { Some(i) => { + //TODO: parse/verify proposal index i }, None => String::from("NO INDEX") }; println!("Block index: {}", block_index); + //TODO: convert DB json string to json let parsed_result: Result = json::parse( &format!(r#"{}"#, block_index) ); match parsed_result { Ok(parsed) => { @@ -116,6 +118,51 @@ impl ReadBlockFromDB for DB { } } + /* + @name get_latest_block_id + @desc get the block id + */ + // fn get_latest_block_id() -> Option { + // let block_index_parsed_option: Option = Self::get_block_index_as_json(); + // + // match block_index_parsed_option { + // 
Some(block_index_parsed) => { + // let all_blocks = &block_index_parsed["blocks"]; + // + // if all_blocks.is_empty() { + // //None + // Some(-1) + // } else { + // let mut highest_block_id: i64 = -1; + // // // iterate over each proposal entry + // let blocks_iter = all_blocks.entries(); + // for (id, block_iter) in blocks_iter { + // + // println!("get_latest_block_id(), block: iter {}:{}", id, block_iter); + // let block_from_json: Result = Block::from_json( (*block_iter).clone() ); + // match block_from_json { + // Ok(block) => { + // if block.block_id > highest_block_id { + // highest_block_id = block.block_id; + // } else { + // println!("get_latest_block_id(), block id not higher than highest_block_id: {}", block.block_id); + // } + // }, + // Err(_) => { + // println!("Couldn't convert JSON block to Block type"); + // } + // } + // } + // /////////////////////////////// + // Some(highest_block_id) + // } + // }, + // None => None + // } + // + // + // } + /* @name get_latest_block_id @desc get the block id @@ -123,19 +170,25 @@ impl ReadBlockFromDB for DB { */ fn get_latest_block_id() -> Option { let block_index_parsed_option: Option = Self::get_block_index_as_json(); + match block_index_parsed_option { Some(block_index_parsed) => { let all_blocks = &block_index_parsed["blocks"]; if all_blocks.is_empty() { + //None Some(-1) } else { + // if the length is 1, that means the next block number should be 1 + // if the length is 2, that means the next block number should be 2 let mut amount_of_blocks: i64 = all_blocks.len() as i64; Some(amount_of_blocks - 1) } }, None => None } + + } /* @@ -143,15 +196,19 @@ impl ReadBlockFromDB for DB { @desc get all proposals from the proposals directory */ fn get_all_blocks() -> Option> { + //TODO: read proposal index let parsed: Option = Self::get_block_index_as_json(); match parsed { Some(parsed) => { let mut all_blocks_vector: Vec = Vec::new(); let blocks_iter = parsed["blocks"].entries(); + //let blocks_iter = parsed.members(); for (id, block) in blocks_iter { + //println!("get_all_blocks(), block: {}:{}", id, block); let parsed_block: Result = Block::from_json(block.clone()); match parsed_block { Ok(block) => { + //TODO: DB::index_block_sync_check(block) all_blocks_vector.push(block); }, Err(err) => { @@ -167,6 +224,10 @@ impl ReadBlockFromDB for DB { } } + /* + @name get_block_by_block_id + @desc + */ fn get_block_by_block_id(block_id: i64) -> Option { let block_string_result: Option = Self::read_block(block_id); match block_string_result { @@ -186,6 +247,7 @@ impl ReadBlockFromDB for DB { } + /* @name WriteProposalToDB @desc trait to write a proposal to the DB @@ -242,11 +304,9 @@ impl CreateNewBlock for Block { } } }; - + //TODO: Retrieve Transactions from TX pool let transactons_from_pool: Vec = DB::get_all_transactions(); - println!("CreateNewBlock, transactons_from_pool, tx count: {}", transactons_from_pool.len()); - match new_block_time { Some(ts) => { Ok(Block { @@ -393,6 +453,7 @@ impl VerifyBlockAnscestry for Block { false } } + } } @@ -410,6 +471,7 @@ impl ValidateAcceptedProposalBlock for Block { match current_block_id { Some(block_id) => { println!("validate_block, after current_block_id, block_id: {}", block_id); + //TODO: CHECK IF GENESIS Block if block_id == 0 { Self::process_genesis_block(block) } else if block_id > 0 { @@ -419,6 +481,7 @@ impl ValidateAcceptedProposalBlock for Block { } } None => { + false } } @@ -437,14 +500,22 @@ trait ProcessBlock { impl ProcessBlock for Block { fn process_genesis_block(submitted_block: 
Block) -> bool { println!("PROCESSING GENESIS BLOCK, submitted_block_id: {}", submitted_block.block_id); + //TODO: COMPARE WITH BLOCKCHAIN CONFIG + //TODO: CALL CHAIN LOGIC BLOCKVALIDATION Transaction::execute_block_transactions(submitted_block.transactions); true } fn process_nongenesis_block(submitted_block: Block) -> bool { println!("PROCESSING NONGENESIS BLOCK, submitted_block_id: {}", submitted_block.block_id); + //TODO: PROBLEM + //if we get the block with block id - 1, we just get the block "before" the submitted block + // might want to just get the latest block....? + //let current_block_id: Option = DB::get_latest_block_id(); + //let current_block_by_id: Option = DB::get_block_by_block_id( current_block_id.unwrap() ); let previous_block_by_id: Option = DB::get_block_by_block_id(submitted_block.block_id - 1); if previous_block_by_id.clone().is_some() { + //TODO: verify parent hash match Self::verify_block_anscestry(previous_block_by_id.clone().unwrap(), submitted_block.clone()) { true => { @@ -456,6 +527,7 @@ impl ProcessBlock for Block { } } + //TODO: Self.verify_block_id(submitted_block, current_block_by_id); match submitted_block.clone().block_id == (previous_block_by_id @@ -466,22 +538,51 @@ impl ProcessBlock for Block { }, false => { println!("process_nongenesis_block, SUBMITTED_BLOCK ID IS [NOT] EQUAL TO MY BLOCK ID + 1, ERROR"); - let current_block_id_option: Option = DB::get_latest_block_id(); + //TODO modularize out into a is_valid_current_block(submitted_block) + let current_block_id_option: Option = DB::get_latest_block_id(); // Get my latest block match current_block_id_option { Some(current_block_id) => { let current_block_by_id_option: Option = DB::get_block_by_block_id(current_block_id); match current_block_by_id_option { Some(current_block_by_id) => { + // if block ids are the same + // match ( submitted_block.clone().block_id == (current_block_by_id.clone().block_id) ) + // && + // // if the parent hashes are the same + // ( submitted_block.clone().block_parent_hash == current_block_by_id.clone().block_parent_hash ) + // && + // // different block hashes + // ( submitted_block.clone().block_hash != current_block_by_id.clone().block_hash ) { + // true => { + // println!("process_nongenesis_block, BLOCK ID MATCH MY TOP BLOCK, AND PARENT HASHES MATCH, and BLOCK HASHES ARE NOT THE SAME - SUCCESS"); + // }, + // false => { + // println!("process_nongenesis_block ERROR!, BLOCK ID MATCH MY TOP, BLOCK AND PARENT HASHES DONT MATCH, and BLOCK HASHES ARE NOT THE SAME - ERROR"); + // return false + // } + // } + + /* + match current_block { + _ if current_block.block_hash == proposed_block.block_parent_hash => { + true + }, + _ => { + false + } + } + */ + //return false + }, None => { println!("process_nongenesis_block, current_block_by_id_option is NONE"); return false } } - }, - + //current_block_id_option is NONE None => { println!("process_nongenesis_block, current_block_id_option is NONE"); return false @@ -490,9 +591,16 @@ impl ProcessBlock for Block { } } } + + //TODO: MACRO: CUSTOM_BLOCK_VALIDATION!() + // AFTER CHECKING IF BLOCK ID IS RIGHT SEQUENCE + //TODO: verify block hash + //TODO: CALL CHAIN LOGIC BLOCKVALIDATION + //TODO: error handling Transaction::execute_block_transactions(submitted_block.transactions); true } else { + //current block by block id is NOT SOME false } } @@ -543,6 +651,9 @@ impl CommitBlock for Block { } fn commit_block(block: Block) -> Result<(), ()> { + //TODO: get higher block id + //TODO: verify the ledger history (within window) is valid 
+ //TODO: if it is valid, actually write block to blocks directory, and alter block_index let mut block_index_option: Option = DB::get_block_index_as_json(); match block_index_option { Some(mut block_index) => { @@ -566,6 +677,7 @@ impl CommitBlock for Block { } }, Err(_) => { + //couldnt insert new block into index Err(()) } } @@ -575,6 +687,7 @@ impl CommitBlock for Block { } } +//TODO: change access to DB directly for modules calling ReadBlockFromDB - later pub trait ReadBlock { fn get_latest_block_id() -> Option; } @@ -609,7 +722,6 @@ mod tests { \"proposal_hash\": \"test proposal hash\", \"block_data\": \"test block data\" }"; - let actual_block: Result = Block::from_string( String::from(stringed_block) ); assert_eq!(actual_block.unwrap(), expected_block); } @@ -624,7 +736,6 @@ mod tests { "proposal_hash" => "hash", "block_data" => "data", }; - let expected_block: Block = Block { block_id: 0, block_hash: String::from("hash"), @@ -633,9 +744,7 @@ mod tests { proposal_hash: String::from("hash"), block_data: String::from("data"), }; - let actual_block: Result = Block::from_json(data); assert_eq!(expected_block, actual_block.unwrap()); - } } diff --git a/core/cryptoutil/src/lib.rs b/core/cryptoutil/src/lib.rs index 783cdf3..78ec8ee 100644 --- a/core/cryptoutil/src/lib.rs +++ b/core/cryptoutil/src/lib.rs @@ -21,8 +21,5 @@ struct CryptoUtil { #[cfg(test)] mod tests { - #[test] - fn it_works() { - assert_eq!(2 + 2, 4); - } + } diff --git a/core/db/Cargo.lock b/core/db/Cargo.lock new file mode 100644 index 0000000..5b63081 --- /dev/null +++ b/core/db/Cargo.lock @@ -0,0 +1,62 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "db" +version = "0.1.0" +dependencies = [ + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lock 0.1.0", +] + +[[package]] +name = "fs2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "libc" +version = "0.2.66" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "lock" +version = "0.1.0" +dependencies = [ + "fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "winapi" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" +"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +"checksum libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)" = 
"d515b1f41455adea1313a4a2ac8a8a477634fbae63cc6100e3aebb207ce61558" +"checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6" +"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/core/db/Cargo.toml b/core/db/Cargo.toml index 7ca4c3b..4c55930 100644 --- a/core/db/Cargo.toml +++ b/core/db/Cargo.toml @@ -6,3 +6,4 @@ edition = "2018" [dependencies] lazy_static = "1.4.0" +lock = { path = "../lock" } diff --git a/core/db/src/lib.rs b/core/db/src/lib.rs index cc6391f..0e39f34 100644 --- a/core/db/src/lib.rs +++ b/core/db/src/lib.rs @@ -17,7 +17,8 @@ along with the AfricaOS Platform. If not, see . extern crate lazy_static; use std::fs; -use std::io::{Write, Error}; +use std::io::{Write, Error, ErrorKind}; +use lock::{Locker, FileLockWrite}; pub struct DB { @@ -30,17 +31,30 @@ pub trait NodeNameSetter { impl NodeNameSetter for DB { fn set_node_name(name: String) -> () { lazy_static! { + // TODO: variable node name //static ref NODENAME: String = name; } } } +/* + TODO: target node's names DB directory ONLY + TODO: PROPOSALS_LOC needs to be relatively safe. + Test Proposal by itsef throws an erorr since the entry point isnt the root +*/ + /* @desc PROPOSALS_LOC stores all proposals the network submits */ const PROPOSALS_LOC: &str = "storage/proposal/";//&(format!("{}", "storage/proposal/").as_str()); const PROPOSALS_DB_LOC: &str = "storage/proposals.db"; +/* +@desc PROPOSALS_LOC stores all peer statuses on proposals +*/ +const PROPOSALS_PEER_STATUS_LOC: &str = "storage/proposal/peer_status/"; +const PROPOSALS_PEER_STATUS_DB_LOC: &str = "storage/peer_status.db"; + /* @desc TRANSACTIONS_LOC stores all transactions the network submits */ @@ -59,13 +73,15 @@ const STATES_DB_LOC: &str = "storage/states.db"; const BLOCKS_LOC: &str = "storage/chain/"; const BLOCKS_DB_LOC: &str = "storage/chain.db"; - pub trait DBInit{ fn create_sql_databases() -> Result<(), std::io::Error>; } impl DBInit for DB { fn create_sql_databases() -> Result<(), std::io::Error>{ + //TODO: create new proposals database + //if not exists + Ok(()) } } @@ -75,12 +91,27 @@ pub trait DBWrite { } impl DBWrite for DB { + /* + TODO: should take a type to store + */ fn write(content: String, location: String) -> Result { println!("DB write, Writing to DB"); let file_location: String = format!("{}", location); - let mut file = fs::File::create(file_location.to_string())?; - file.write( content.as_bytes() )?; - Ok(content) + //TODO:: invoke Lock::write(content, location) + //let file_lock_write_result: Result<(), std::io::Error> = Locker::write(test_content,file_location); + let file_lock_write_result: Result<(), std::io::Error> = Locker::write(content.clone(), location); + //OLD + //let mut file = fs::File::create(file_location.to_string())?; + //file.write( content.as_bytes() )?; + match file_lock_write_result { + Ok(_) => { + Ok(content) + }, + Err(e) => { + let db_lock_write_error = Error::new(ErrorKind::Other, "DBWrite ERROR, write(), could write with lock!"); + Err(db_lock_write_error) + } + } } } @@ -91,10 +122,11 @@ pub trait DBRead { impl DBRead for DB { fn read(file: String) -> Option { println!("DB Read File: {}", file); - let contents: Result = 
fs::read_to_string(file); + //TODO: read with lock as well + let contents: Result = fs::read_to_string(file); //.expect("[DB Error reading file]"); match contents { Ok(r) => { - println!("DBRead Text:\n{}", r); + //println!("DBRead Text:\n{}", r); Some(r) }, Err(e) => { @@ -104,17 +136,80 @@ impl DBRead for DB { } } +/* +@name DBReadProposalPeerStatus +@desc +*/ + +pub trait DBReadProposalPeerStatus{ + /* + @name read_proposal_peer_status + @desc read and return JSON proposal_status + {"proposal_id": {"ip": "status", }} + */ + fn read_proposal_peer_status(proposal_id: i32) -> Option; +} + +impl DBReadProposalPeerStatus for DB { + fn read_proposal_peer_status(pid: i32) -> Option{ + let file_location: String = format!("{}{}", + PROPOSALS_PEER_STATUS_LOC, + format!("proposal_{}.dat", pid)); + //let file_location: String = format!("{}",PROPOSALS_PEER_STATUS_DB_LOC); + match Self::read(file_location) { + Some(p) => Some(p), + None => None + } + } +} + + +pub trait DBWriteProposalPeerStatus { + /* + @name write_proposal_peer_status + @desc read and return JSON proposal_status + */ + fn write_proposal_peer_status(pid: i32, proposal_string: String) -> Result; +} + +impl DBWriteProposalPeerStatus for DB { + fn write_proposal_peer_status(pid: i32, proposal_string: String) -> Result{ + let file_location: String = format!("{}{}", + PROPOSALS_PEER_STATUS_LOC, + format!("proposal_{}.prop", pid)); + //let file_location: String = format!("{}",PROPOSALS_PEER_STATUS_DB_LOC); + Self::write(proposal_string, file_location); + Ok(String::from("write_proposal_peer_status, Ok, Successfully wrote DB JSON index")) + } +} + /* @name DBReadProposal @desc */ pub trait DBReadProposal { + fn read_proposal_file_by_id(pid: i32) -> Option; fn read_proposal_index() -> Option; fn write_proposal_index(db_json_string: String) -> Result; } impl DBReadProposal for DB { + /* + @name read_proposal_file_by_id + @desc read and return JSON DB PROPOSAL FILE + */ + fn read_proposal_file_by_id(pid: i32) -> Option{ + let file_location: String = format!("{}{}", + PROPOSALS_LOC, + format!("proposal_{}.prop", pid)); + //let file_location: String = format!("{}",PROPOSALS_DB_LOC); + match Self::read(file_location) { + Some(p) => Some(p), + None => None + } + } + /* @name read_proposal_index @desc read and return JSON DB map @@ -134,6 +229,8 @@ impl DBReadProposal for DB { fn write_proposal_index(db_json_string: String) -> Result { println!("DB, write_proposal_index: Attempting to Write DB JSON INDEX"); let file_location: String = format!("{}",PROPOSALS_DB_LOC); + //let mut file = fs::File::create(file_location.to_string())?; + //file.write( proposal_string.as_bytes() )?; Self::write(db_json_string, file_location); Ok(String::from("Ok, Successfully wrote DB JSON index")) } @@ -158,6 +255,7 @@ impl DBWriteProposal for DB { PROPOSALS_LOC, format!("proposal_{}.prop", pid)); let mut file = fs::File::create(file_location.to_string())?; + //TODO: will fail if directory doesn't exist, but will fail gracefully file.write( proposal_string.as_bytes() )?; println!("Wrote Proposal"); Ok(proposal_string) @@ -216,7 +314,6 @@ pub trait DBReadTransaction { } impl DBReadTransaction for DB { - /* @name read_transaction_index @desc read and return JSON DB map @@ -236,6 +333,8 @@ impl DBReadTransaction for DB { fn write_transaction_index(db_json_string: String) -> Result { println!("DB, write_transaction_index: Attempting to Write DB JSON INDEX for tx"); let file_location: String = format!("{}",TRANSACTIONS_DB_LOC); + //let mut file = 
fs::File::create(file_location.to_string())?; + //file.write( proposal_string.as_bytes() )?; Self::write(db_json_string, file_location); Ok(String::from("Ok, Successfully wrote DB JSON index FOR TRANSACTION")) } @@ -263,6 +362,7 @@ impl DBWriteTransaction for DB { pass string to write into db */ fn write_transaction_to_sql(tid: i32, transaction_string: String) -> Result{ + //write to transactions file println!("Writing TRANSACTION to DB"); let file_location: String = format!("{}{}", TRANSACTIONS_LOC, @@ -305,6 +405,8 @@ impl DBReadBlock for DB { fn write_block_index(db_json_string: String) -> Result { println!("DB, write_block_index: Attempting to Write DB JSON INDEX FOR BLOCK"); let file_location: String = format!("{}",BLOCKS_DB_LOC); + //let mut file = fs::File::create(file_location.to_string())?; + //file.write( proposal_string.as_bytes() )?; Self::write(db_json_string, file_location); Ok(String::from("Ok, Successfully wrote DB JSON index FOR BLOCK")) } @@ -401,8 +503,5 @@ impl FileDirectoryReader for DB { #[cfg(test)] mod tests { - #[test] - fn it_works() { - assert_eq!(2 + 2, 4); - } + } diff --git a/core/encode/src/lib.rs b/core/encode/src/lib.rs index 39f02d2..b9f46c8 100644 --- a/core/encode/src/lib.rs +++ b/core/encode/src/lib.rs @@ -18,15 +18,11 @@ extern crate base64; use std::str; use base64::{encode, decode}; -pub struct Encoder { - -} - +pub struct Encoder {} pub trait RawBytesEncode { fn encode_rawbytes(string_to_encode: String) -> Vec; } - impl RawBytesEncode for Encoder { fn encode_rawbytes(mut string_to_encode: String) -> Vec { string_to_encode.as_bytes().to_vec() @@ -37,10 +33,9 @@ pub trait RawBytesDecode { fn decode_rawbytes(bytes: Vec) -> Result; } - impl RawBytesDecode for Encoder { fn decode_rawbytes(bytes: Vec) -> Result { - let converted_to_string: Result = String::from_utf8(bytes); + let converted_to_string: Result = String::from_utf8(bytes); match converted_to_string { Ok(result) => { Ok(result) @@ -54,7 +49,7 @@ impl RawBytesDecode for Encoder { pub trait Base64Encode { /* - @name encode + @name encode_base64 @desc encode a string to its base64 representation */ fn encode_base64(bytes_as_string: String) -> Result; @@ -70,14 +65,13 @@ impl Base64Encode for Encoder { pub trait Base64Decode { /* - @name decode + @name decode_base64 @desc convert a base64 encoded string back to its origin */ fn decode_base64(encoded: String) -> Result; } impl Base64Decode for Encoder { - //fn decode_base64(encoded: String) -> Result { fn decode_base64(encoded: String) -> Result { println!("decode_base64(), base64 encoded: {}", encoded); let decoded_result: Result, base64::DecodeError> = decode(&encoded); @@ -87,6 +81,7 @@ impl Base64Decode for Encoder { match decode_from_utf8 { Ok(result) => { let decoded_result_w_quotes: String = String::from(result); + //TODO: replace only the first and last quotes, not all, since its A json object let decoded_result_wo_quotes: &str = decoded_result_w_quotes .as_str() .trim_matches('\"'); @@ -103,7 +98,6 @@ impl Base64Decode for Encoder { Err(String::from("Base64encode, encode(), decoded_result is ERR")) } } - } } diff --git a/core/storage/proposal/.DS_Store b/core/executor/.DS_Store similarity index 100% rename from core/storage/proposal/.DS_Store rename to core/executor/.DS_Store diff --git a/core/executor/Cargo.lock b/core/executor/Cargo.lock new file mode 100644 index 0000000..94f34e2 --- /dev/null +++ b/core/executor/Cargo.lock @@ -0,0 +1,24 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+[[package]] +name = "executor" +version = "0.1.0" +dependencies = [ + "json 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", + "macros 0.1.0", +] + +[[package]] +name = "json" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "macros" +version = "0.1.0" +dependencies = [ + "json 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[metadata] +"checksum json 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9a38661a28126f8621fb246611288ae28935ddf180f5e21f2d0fbfe5e4131dbe" diff --git a/core/executor/Cargo.toml b/core/executor/Cargo.toml new file mode 100644 index 0000000..83e2328 --- /dev/null +++ b/core/executor/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "executor" +version = "0.1.0" +authors = ["Kunta Labs"] +edition = "2018" + +[dependencies] +json = "*" +macros = { path = "../macros" } diff --git a/core/executor/src/lib.rs b/core/executor/src/lib.rs new file mode 100644 index 0000000..cffba26 --- /dev/null +++ b/core/executor/src/lib.rs @@ -0,0 +1,65 @@ +/* +Copyright 2018-Present The AfricaOS Authors +This file is part of the AfricaOS library. +The AfricaOS Platform is free software: you can redistribute it and/or modify +it under the terms of the GNU Lesser General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. +The AfricaOS Platform is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Lesser General Public License for more details. +You should have received a copy of the GNU Lesser General Public License +along with the AfricaOS Platform. If not, see . +*/ + +#[macro_use] +extern crate json; +#[macro_use] +extern crate macros; + +use json::{JsonValue}; +use macros::{transaction_output_logic,proposal_creator_election}; + +pub struct Executor {} +pub trait ExecuteMacro { + /* + @name execute_transaction_output_logic + @desc macro for tx output + */ + fn execute_transaction_output_logic(state: JsonValue, + transaction_hash: String, + transaction_data: String) -> JsonValue; + + /* + @name execute_proposal_creator_election + @desc macro for proposal creator election + */ + fn execute_proposal_creator_election(peer_length: usize, + latest_block_id: i64) -> i64; +} + +impl ExecuteMacro for Executor { + + fn execute_transaction_output_logic(state: JsonValue, + transaction_hash: String, + transaction_data: String) -> JsonValue { + transaction_output_logic!(state.clone(), + transaction_hash, + transaction_data) + } + + fn execute_proposal_creator_election(peer_length: usize, + latest_block_id: i64) -> i64 { + proposal_creator_election!(peer_length, + latest_block_id) + } +} + +#[cfg(test)] +mod tests { + #[test] + fn it_works() { + assert_eq!(2 + 2, 4); + } +} diff --git a/core/lock/.DS_Store b/core/lock/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..5008ddfcf53c02e82d7eee2e57c38e5672ef89f6 GIT binary patch literal 6148 zcmeH~Jr2S!425mzP>H1@V-^m;4Wg<&0T*E43hX&L&p$$qDprKhvt+--jT7}7np#A3 zem<@ulZcFPQ@L2!n>{z**++&mCkOWA81W14cNZlEfg7;MkzE(HCqgga^y>{tEnwC%0;vJ&^%eQ zLs35+`xjp>T0. 
+*/ + +extern crate fs2; + +use fs2::FileExt; +use std::io::Result; +use std::env::args; +use std::fs::File; +use std::io::{Write, Error}; +use std::time::Duration; +use std::thread::sleep; + +pub struct Locker {} + +pub trait FileLockWrite { + fn write(content: String, location: String) -> Result<()>; +} + +impl FileLockWrite for Locker { + fn write(content: String, location: String) -> Result<()> { + let sleep_seconds = 0; + let sleep_duration = Duration::from_secs(sleep_seconds); + let file_open: Result = File::open(location.to_string()); + match file_open { + Ok(_) => { + let mut file_o: File = File::open(location.to_string())?; + //let mut file: File = File::open(location.to_string())?; + println!("{}: Preparing to lock file.", sleep_seconds); + file_o.lock_exclusive()?; // block until this process can lock the file + let mut file = File::create(location.to_string())?; + println!("{}: Obtained lock.", sleep_seconds); + //sleep(sleep_duration); + file.write( content.as_bytes() )?; + println!("{}: Sleep completed", sleep_seconds); + file_o.unlock()?; + println!("{}: Released lock, returning", sleep_seconds); + Ok(()) + }, + Err(_) => { + //let file_open: File = File::open(location.to_string())?; + let mut file = File::create(location.to_string())?; + println!("{}: Preparing to lock file.", sleep_seconds); + file.lock_exclusive()?; // block until this process can lock the file + //let mut file = File::create(location.to_string())?; + println!("{}: Obtained lock.", sleep_seconds); + //sleep(sleep_duration); + file.write( content.as_bytes() )?; + println!("{}: Sleep completed", sleep_seconds); + file.unlock()?; + println!("{}: Released lock, returning", sleep_seconds); + Ok(()) + } + } + } +} + + +#[cfg(test)] +mod tests { + use super::{Locker, FileLockWrite}; + + #[test] + fn test_file_write(){ + let test_location: String = String::from("./LOCKTEST"); + let test_content: String = String::from("TEST CONTENT"); + let file_location: String = format!("{}", test_location); + //let mut file = fs::File::create(file_location.to_string())?; + //Locker::write(test_content, file_location), Ok(()) + //use std::io::Error; + let file_lock_write_result: Result<(), std::io::Error> = Locker::write(test_content,file_location); + assert!( file_lock_write_result.is_ok() ); + } +} diff --git a/core/macros/.DS_Store b/core/macros/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..5008ddfcf53c02e82d7eee2e57c38e5672ef89f6 GIT binary patch literal 6148 zcmeH~Jr2S!425mzP>H1@V-^m;4Wg<&0T*E43hX&L&p$$qDprKhvt+--jT7}7np#A3 zem<@ulZcFPQ@L2!n>{z**++&mCkOWA81W14cNZlEfg7;MkzE(HCqgga^y>{tEnwC%0;vJ&^%eQ zLs35+`xjp>T0. +*/ + +#[macro_use] +extern crate json; + +use json::{JsonValue}; + +macro_rules! transaction_input_logic { + () => { + + } +} + +macro_rules! block_validation { + () => { + } +} + +macro_rules! proposal_validation { + () => { + } +} + +#[macro_export] +macro_rules! proposal_creator_election { + // TODO: place PCE code in here + ($peer_length: expr, $latest_block_id: expr) => { + { + ( ($latest_block_id + 1) % ( ($peer_length + 1) as i64) ) + 1 + } + } +} + +// @input +#[macro_export] +macro_rules! 
transaction_output_logic { + /* + @pattern StateJson, self + */ + ($state: expr, $tx_hash: expr, $tx_data: expr) => { + { + println!("TX execute TX Output BEFORE: {} : ", $state.clone() ); + let mut state_as_json: JsonValue = $state; + // insert a new account into the state db + match &state_as_json.insert( &( format!("{}", $tx_hash).to_string() ), + format!("{}", $tx_data) ) { + Ok(_) => { + //TODO: after we insert the initial state for the sender + //current_state_buffer + println!("TX execute TX Output AFTER: {} : ", state_as_json.clone() ); + state_as_json + }, + Err(_) => { + // error on inserting, return current state + println!("TX execute ERROR: State::to_json is NOT okay: {} ", $state.clone() ); + $state + } + } + } + } +} + +#[cfg(test)] +mod tests { + + //use transaction::{State}; + //#[macro_use] + //extern crate json; + + use json::{JsonValue}; + + #[test] + fn test_transaction_output_logic() -> (){ + let test_json: JsonValue = object!{ + "test" => "test" + }; + + let test_json_2: JsonValue = object!{ + "test" => "test", + "test2" => "test2" + }; + let test_string: String = String::from("test2"); + let result: JsonValue = transaction_output_logic!( test_json.clone(), + test_string, + test_string ); + assert_eq!(test_json_2, result); + } +} diff --git a/core/network/src/lib.rs b/core/network/src/lib.rs index 4825234..8027a6d 100644 --- a/core/network/src/lib.rs +++ b/core/network/src/lib.rs @@ -18,7 +18,6 @@ use std::thread; use std::io::{Write, Read}; use http::{Request, Response, StatusCode}; use std::collections::HashMap; - use proposal::{Proposal, NewProposal, JsonConverter, @@ -33,11 +32,9 @@ use proposal::{Proposal, CompareWithoutStatus, ValidateProposalBlock, ProposalResolutionAccepted}; - use url::Url; use reqwest::header::{USER_AGENT, CONTENT_TYPE, ORIGIN}; use encode::{Encoder, Base64Encode, Base64Decode}; - use transaction::{Transaction, CreateNewOuputTransaction}; pub trait PayloadParser { @@ -70,14 +67,14 @@ impl PayloadParser for Server { } fn get_key_from_header(section: &str, header_sections: Vec<&str>) -> Result { - if header_sections.len() == 2 { //if there is just a key, and a value in the header + if header_sections.len() == 2 { let data_second_element: &str = header_sections[1]; println!("Data data_second_element: {}", data_second_element); let string_to_trunc: String = String::from(data_second_element); let mut header_data: &str = string_to_trunc.trim(); let final_header_data: String = String::from(header_data); Ok(final_header_data) - } else if header_sections.len() == 3 { // if the header has a colon in the value, such as IPAddress:Port + } else if header_sections.len() == 3 { let data_second_element: &str = header_sections[1]; let data_third_element: &str = header_sections[2]; let string_to_trunc: String = format!("{}:{}", data_second_element, data_third_element); @@ -150,7 +147,6 @@ pub struct Server { pub port: i32, } - /* @name Transmitter @desc this trait enables our server to transmit data over the network @@ -163,7 +159,7 @@ pub trait Transmitter{ create to the network */ fn broadcast_proposal_created(proposal: Proposal, peer_location: String, ip: String) -> Result<(), String>; - //fn broadcast_proposal_created(proposal: Proposal) -> Result; + /* @name broadcast_proposal_response @desc broadcast the proposal response to @@ -248,6 +244,7 @@ impl Transmitter for Server { let client = reqwest::Client::new(); let proposal_to_json: String = Proposal::to_json(proposal.clone()).to_string(); let b64_stringed_proposal: Result = 
Encoder::encode_base64(proposal_to_json); + //TODO: alter a meaningful header, not user agent... if b64_stringed_proposal.is_ok() { let resp = client.get(peer_location_url) .header(ORIGIN, ip.as_str()) @@ -284,9 +281,9 @@ impl Transmitter for Server { let client = reqwest::Client::new(); let proposal_to_json: String = Proposal::to_json(proposal.clone()).to_string(); let b64_stringed_proposal: Result = Encoder::encode_base64(proposal_to_json); + //TODO: alter a meaningful header, not user agent... if b64_stringed_proposal.is_ok() { let resp = client.get(peer_location_url) - //.header(ORIGIN, "MYIPADDRESS") .header(ORIGIN, ip.as_str()) .header(USER_AGENT, b64_stringed_proposal.unwrap()) .send(); @@ -301,7 +298,6 @@ impl Transmitter for Server { } } } else { - //Err(String::from("Error: broadcast_proposal_createm, Encoder::encode could not encode for some reason")) Ok(()) } } @@ -329,10 +325,8 @@ impl Transmitter for Server { Ok(()) } } - } - /* @name broadcast_block_query_response */ @@ -346,10 +340,10 @@ impl Transmitter for Server { let client = reqwest::Client::new(); let proposal_to_json: String = Proposal::to_json(proposal.clone()).to_string(); let b64_stringed_proposal: Result = Encoder::encode_base64(proposal_to_json); + //TODO: alter a meaningful header, not user agent... if b64_stringed_proposal.is_ok() { println!( "broadcast_block_query_response(), b64_stringed_proposal{}", b64_stringed_proposal.clone().unwrap() ); let resp = client.get(peer_location_url) - //.header(ORIGIN, "MYIPADDRESS") .header(ORIGIN, "127.0.0.1") .header(USER_AGENT, b64_stringed_proposal.unwrap()) .send(); @@ -367,7 +361,6 @@ impl Transmitter for Server { Ok(()) } } - } @@ -392,9 +385,6 @@ impl Receiver for Server { @desc this starts the TCP server */ fn start(&self) -> Result { - //loopback address - //let server_prefix = String::from("127.0.0.1:"); - //all interfaces available on the system let server_prefix = String::from("0.0.0.0:"); let port = self.port; let server_complete_address = format!("{}{}", server_prefix, port); @@ -434,6 +424,7 @@ impl Receiver for Server { Err(String::from("ERROR: read_result failed...")) } }; + if read_result.is_ok() { let write_result = match Self::handle_write(stream, read_result.unwrap() ) { Ok(write_result) => { @@ -468,11 +459,13 @@ impl Receiver for Server { println!("handle_read, Query: {}", query); let split_payload_for_data: Vec<&str> = req_str.split("\n").collect(); let data: Result = Self::get_header_from_payload(split_payload_for_data.clone(), "user-agent"); + //TODO: state which header to return if data.is_ok() { //TODO: get node IP address to send to receiver to pass to invoke action let request_origin: Result = Self::get_header_from_payload(split_payload_for_data, "origin"); if request_origin.is_ok() { println!("request_origin success: {}", request_origin.clone().unwrap()); + //TODO: pass request_origin to invoke_action() let invoked_action_result: Result = Self::invoke_action( query, &(data.unwrap().to_string()), request_origin.clone().unwrap() ); match invoked_action_result { Ok(r) => { @@ -503,12 +496,14 @@ impl Receiver for Server { writing of data back to the requestor */ fn handle_write(mut stream: TcpStream, result: String) -> Result { + let response = b"HTTP/1.1 200 OK \r\nContent-Length: 10 \r\nContent-Type: application/json; charset=UTF-8 \r\n\r\nRESPONSE FROM NODE \r\n"; - + //TODO: WRITE BACK THE RESULT PASSED + //let data_to_write: String = format!("{}"); match stream.write(response) { Ok(_) => { println!("handle_write, Stream Write Success"); @@ 
-535,6 +530,7 @@ pub trait API { fn invoke_action(command: &str, data: &str, request_origin: String) -> Result; } + /* @name API for Server @desc invoked actions by implementing this trait @@ -547,8 +543,8 @@ impl API for Server { fn invoke_action(command: &str, data: &str, request_origin: String) -> Result { match command { /* - @endpoint /transaction/submit/output/ - @desc for an external submission of a transaction + @endpoint /transaction/submit/output/ + @desc for an external submission of a transaction */ "/transaction/submit/output" => { println!("Transaction Submit: {}, {}, {}", command, data, request_origin); @@ -567,14 +563,13 @@ impl API for Server { }, /* - @endpoint /proposal/create/ - @desc create a proposal - peerid, proposalid - "INVOKES" A -> B + @endpoint /proposal/create/ + @desc create a proposal, NOTE: should we expose this externally? + peerid, proposalid + "INVOKES" A -> B */ "/proposal/create/" => { println!("Invocation to create new proposal: {}", data); - //create proposal let proposal_created: Option = Proposal::create(request_origin); match proposal_created { Some(proposal) => { @@ -586,10 +581,10 @@ impl API for Server { }, /* - @endpoint /proposal/created/ - @desc receive a proposal created by someone else - peerid, proposalid? - A -> B, AS B + @endpoint /proposal/created/ + @desc receive a proposal created by someone else + peerid, proposalid? + A -> B, AS B */ "/proposal/created/" => { let decoded_proposal_string: Result = Encoder::decode_base64(String::from(data)); @@ -600,8 +595,10 @@ impl API for Server { match decoded_proposal.clone() { Ok(proposal) => { println!("invoke_action, proposal_created: successful proposal decoding, proposal_id: {}", decoded_proposal.unwrap().proposal_id); + //TODO: Check current block ID against the proposal block_id to see if network's chain is ahead of the node's chain //proposal verdict match Proposal::validate_proposal(proposal.clone()) { + //NOTE: ONLY DO SOMETHING IF YOU CAN SAFELY PARSE THE PROPOSAL, OTHERWISE ERROR Ok(verdict) => { match verdict { ProposalValidationResult::Valid => { @@ -641,9 +638,9 @@ impl API for Server { }, /* - @endpoint /proposal/response/ - @desc get responses to a proposal request from peers - A <- B, B back to A + @endpoint /proposal/response/ + @desc get responses to a proposal request from peers + A <- B, B back to A */ "/proposal/response/" => { println!("Proposal response received: {}", data); @@ -652,15 +649,22 @@ impl API for Server { println!("invoke_action(), proposal_response - Success: Received a proposal RESPONDED by another node: {}::{}", data, decoded_proposal_string.clone().unwrap()); println!("Decoded Proposal String: {:?}", decoded_proposal_string); let decoded_proposal: Result = Proposal::from_json_string(decoded_proposal_string.unwrap()); + //TODO: check if we have a proposal with that id let all_proposals: Option> = Proposal::get_last_n_proposals(); if decoded_proposal.is_ok() { + //SYNC CHECK + + //TODO: search for proposal + //TODO: Breakout into Proposal::find_proposal let found_proposal: Option = match all_proposals { Some(proposals) => { let mut same_proposal: Option = None; for proposal in proposals { if Proposal::compare_without_status(proposal.clone(), decoded_proposal.clone().unwrap() ) { println!("/proposal/response/, proposals are equal"); + //if we have the proposal, check its status same_proposal = Some(proposal); + // TODO: can safely break for proposal in proposals iteration } else { println!("/proposal/response/, proposals are NOT equal"); } @@ -671,19 
+675,31 @@ impl API for Server { None } }; + + //TODO: IF WE FOUND THE SUBMITTED PROPOSAL IN OUR LOCAL SET if found_proposal.is_some() { match found_proposal.clone().unwrap().proposal_status { + //TODO: IF WE CREATED THE PROPOSAL + //TODO: CHECK IF THIS NODE CREATED THE PROPOSALS ProposalStatus::Created => { + //TODO: update how many votes the proposal has + //TODO CHECK IF THE AMOUNT OF VOTES IS ENOUGH TO SAY "ACCEPTED" match decoded_proposal.clone().unwrap().proposal_status { - ProposalStatus::Accepted => { + ProposalStatus::Accepted | ProposalStatus::AcceptedBroadcasted => { + // TODO: set the proposal db Proposal::add_peer_status_to_proposal(found_proposal.clone().unwrap(), ProposalStatus::Accepted, request_origin); + //TODO: WE CREATED IT AND WE JUST RECEIVED AN ACCEPTANCE + //TODO: DO NOT SET TO ACCEPTED BY NETWORK HERE }, - ProposalStatus::Rejected => { + ProposalStatus::Rejected | ProposalStatus::RejectedBroadcasted => { Proposal::add_peer_status_to_proposal(found_proposal.clone().unwrap(), ProposalStatus::Rejected, request_origin); + //TODO: WE CREATED IT AND WE JUST RECEIVED A REJECTION + //TODO: DO NOT SET TO ACCEPTED BY NETWORK HERE + //Proposal::update_proposal(found_proposal.clone().unwrap(), "rejected_by_network"); }, _ => { @@ -691,18 +707,16 @@ impl API for Server { } Ok(String::from("Proposal response: Successfully parsed")) }, - + // TODO ProposalStatus::AcceptedBroadcasted ProposalStatus::AcceptedByNetwork => { + //TODO: update how many votes the proposal has + //TODO CHECK IF THE AMOUNT OF VOTES IS ENOUGH TO SAY "ACCEPTED" match decoded_proposal.clone().unwrap().proposal_status { ProposalStatus::Accepted => { - //TODO: WE CREATED IT AND WE JUST RECEIVED AN ACCEPTANCE - }, ProposalStatus::Rejected => { - //TODO: WE CREATED IT AND WE JUST RECEIVED A REJECTION - }, _ => { @@ -711,15 +725,14 @@ impl API for Server { Ok(String::from("Proposal response: Successfully parsed")) }, ProposalStatus::Committed => { + //TODO: update how many votes the proposal has + //TODO CHECK IF THE AMOUNT OF VOTES IS ENOUGH TO SAY "ACCEPTED" match decoded_proposal.clone().unwrap().proposal_status { ProposalStatus::Accepted => { - //TODO: WE CREATED IT AND WE JUST RECEIVED AN ACCEPTANCE - }, ProposalStatus::Rejected => { //TODO: WE CREATED IT AND WE JUST RECEIVED A REJECTION - }, _ => { @@ -737,25 +750,34 @@ impl API for Server { } else { Err(String::from("Proposal response: Error: decoded_proposal is NOT OK")) } + } else { println!("invoke_action() - Error: could not decode proposal in proposal_response: {}", data); Err(String::from("")) } + //TODO 1: check DB for proposal ID, and status + //TODO 2: store response in DB + //TODO 3: if responses proposal is valid + //verify if the proposal's response received completes "round" }, /* - @endpoint /proposal/resolution/ - @desc notify peers that you have commited + @endpoint /proposal/resolution/ + @desc notify peers that you have commited to a resolution. 
*/ "/proposal/resolution/" => { println!("Resolution received: {}", data); + //TODO: resolve only if our consensus goal is met let decoded_proposal_string: Result = Encoder::decode_base64(String::from(data)); if decoded_proposal_string.clone().is_ok() { println!("invoke_action(), proposal_resolution - Success: Received a proposal RESOLUTION by another node: {}::{}", data, decoded_proposal_string.clone().unwrap()); + //TODO: check if we have a proposal with that id let decoded_proposal: Result = Proposal::from_json_string(decoded_proposal_string.unwrap()); let all_proposals: Option> = Proposal::get_last_n_proposals(); if decoded_proposal.is_ok() { + //TODO: search for proposal + //TODO: Breakout into Proposal::find_proposal let found_proposal: Option = match all_proposals { Some(proposals) => { let mut same_proposal: Option = None; @@ -763,6 +785,7 @@ impl API for Server { if Proposal::compare_without_status(proposal.clone(), decoded_proposal.clone().unwrap() ) { println!("/proposal/resolution/, proposals are equal"); same_proposal = Some(proposal); + // TODO: can safely break for proposal in proposals iteration } else { println!("/proposal/resolution/, proposals are NOT equal"); } @@ -773,14 +796,17 @@ impl API for Server { None } }; - + //TODO: IF WE FOUND THE SUBMITTED PROPOSAL IN OUR LOCAL SET + //TODO: maybe change to same_proposal.is_some()? if found_proposal.is_some() { println!("invoke_action(), proposal_resolution - WE FOUND A LOCAL PROPOSAL MATCH"); match found_proposal.clone().unwrap().proposal_status { + //TODO: CHECK IF THIS NODE ACCEPTED THE PROPOSAL AND BROADCASTED ALREADY ProposalStatus::AcceptedBroadcasted => { println!("invoke_action(), proposal_resolution - FOUND PROPOSAL STATUS IS ACCEPTEDBROADCASTED"); match Proposal::validate_proposal_resolution(found_proposal.clone().unwrap(), decoded_proposal.clone().unwrap()){ Ok(_) => { + //TODO CHECK IF THE AMOUNT OF VOTES IS ENOUGH TO SAY "COMMITTED" Proposal::update_proposal(found_proposal.clone().unwrap(), "committed"); Ok(String::from("Proposal resolution: Successfully parsed")) @@ -810,7 +836,6 @@ impl API for Server { } }, - /* @endpoint /state/get/ @desc get the state from the DB @@ -821,28 +846,31 @@ impl API for Server { }, /* - @endpoint /block/query/ - @desc when a node requests a specific block + @endpoint /block/query/ + @desc when a node requests a specific block */ "/block/query/" => { + // TODO: another node asked for a block by its ID, respond with proposal with block id, and commited println!("block query received: {} | {} | {}", command, data, request_origin); let all_proposals: Option> = Proposal::get_last_n_proposals(); + //TODO: Breakout into Proposal::find_proposal let found_proposal: Option = match all_proposals { Some(proposals) => { let mut same_proposal: Option = None; for proposal in proposals { + //TODO: data parse here could be broken out of line to be checked against if proposal.clone().proposal_block.block_id == data.parse::().unwrap() { println!("/block/query/, proposal.block_id matches requested block_id"); if proposal.clone().proposal_status == ProposalStatus::Committed { println!("/block/query/, proposal status IS INDEED COMMITED, RESPOND WITH IT!"); same_proposal = Some(proposal); + // TODO: can safely break for proposal in proposals iteration } else { println!("/block/query/, ERROR proposal STATUS IS NOT COMMITED, DON'T RESPOND WITH IT"); } } else { println!("/block/query/, proposal.block_id does not match requested block_id"); } - } same_proposal }, @@ -859,14 +887,17 @@ impl API for Server { 
}, /* - @endpoint /block/response/ - @desc after a node requests a block, the block is sent to this endpoint in response + @endpoint /block/response/ + @desc after a node requests a block, the block is sent to this endpoint in response + */ "/block/response/" => { + // TODO: the response is a proposal, containing a block println!("Received Block from a peer AFTER QUERYING FOR IT"); let decoded_proposal_string: Result = Encoder::decode_base64(String::from(data)); if decoded_proposal_string.clone().is_ok() { println!("invoke_action(), block received AFTER QUERING FOR IT - Success: queryied for block: {}", data); + //TODO: check if we have a proposal with that id let decoded_proposal: Result = Proposal::from_json_string(decoded_proposal_string.unwrap()); match decoded_proposal { Ok(mut proposal) => { @@ -884,9 +915,8 @@ impl API for Server { } else { Err(String::from("Block response, proposal.decode_base64() FAILED")) } - }, - + // default case _ => Err(String::from("API endpoint not correct")) } } @@ -906,13 +936,12 @@ mod tests { \r\nUser-Agent: example \r\n\r\nRESPONSE FROM NODE \r\n"); - - let payload_split: Vec<&str> = payload.split("\n").collect(); let data: Result = Server::get_header_from_payload(payload_split, "user-agent"); assert_eq!(data, Ok(String::from("example"))); } + #[test] fn test_parse_origin_for_header() { let payload: String = String::from("HTTP/1.1 200 OK @@ -922,10 +951,8 @@ mod tests { \r\nOrigin: 127.0.0.1 \r\n\r\nRESPONSE FROM NODE \r\n"); - let payload_split: Vec<&str> = payload.split("\n").collect(); let data: Result = Server::get_header_from_payload(payload_split, "origin"); assert_eq!(data, Ok(String::from("127.0.0.1"))); } - } diff --git a/core/node/src/lib.rs b/core/node/src/lib.rs index 9728297..6a11b87 100644 --- a/core/node/src/lib.rs +++ b/core/node/src/lib.rs @@ -16,11 +16,9 @@ along with the AfricaOS Platform. If not, see . 
#[macro_use] extern crate json; use json::{JsonValue}; - use network::{Server,Receiver,Transmitter}; use transaction::{Transaction}; use db::{DB, NodeNameSetter}; - use proposal::{Proposal, NewProposal, ReadProposalFromDB, @@ -74,6 +72,7 @@ pub trait PeerManager { fn peers_to_location_vector(&mut self) -> Vec; } + impl PeerManager for Node { fn set_peers(&mut self, peer_list_string: String) -> () { let peer_vec: Vec<&str> = peer_list_string.split(",").collect(); @@ -134,13 +133,22 @@ pub trait Initiate { impl Initiate for Node { fn new(node_name: String, node_id: i32, port: i32, ip: String) -> Node { + + //create new server let server = Server{ port: port }; + //create proposal database Proposal::create_proposal_index(); + + //create block database Block::create_block_index(); + + //create tx database Transaction::create_transaction_index(); + + //create state database State::create_state_db(); Node { @@ -168,7 +176,6 @@ impl Initiate for Node { println!("Issue starting server"); } } - } fn set_node_name(&mut self, name: String) -> (){ @@ -176,6 +183,7 @@ impl Initiate for Node { let name_set_handler = || { DB::set_node_name(name.clone()); }; + self.node_name = name; } @@ -206,6 +214,7 @@ pub trait StateTransition { impl StateTransition for Node { fn transition(&mut self) -> (){ + //TODO: read DB for new transactions let proposals: Result, ()> = match Proposal::get_last_n_proposals(){ Some(proposals) => { println!("proposals length: {}", proposals.len()); @@ -217,43 +226,49 @@ impl StateTransition for Node { } }; - let delay: u32 = 10000; - + let delay: u32 = 5000; //10000 match proposals { Ok(p) => { + // PROBLEM: AT THE END OF THIS, REFRESH JSON for proposal_iterator in p.into_iter() { let proposal_index_option: Option = DB::get_proposal_index_as_json(); match proposal_index_option { Some(proposal_index) => { + //TODO: fetch the same proposal from the disk store index again, just so changes, + // from the last iterated upon proposal take effect let proposal_id_string: &str = &format!("{}", proposal_iterator.proposal_id); let proposal_index_version_of_proposal: JsonValue = proposal_index["proposals"][ proposal_id_string ].clone(); let proposal_from_json: Result = Proposal::from_json(proposal_index_version_of_proposal); match proposal_from_json { Ok(proposal) => { println!("Proposal ID: {}", proposal.proposal_id); + //TODO: check on proposal.status, change to only pass proposal, not the status as a long parameter let proposal_status: ProposalStatus = proposal.clone().proposal_status; let local_block_id_option: Option = DB::get_latest_block_id(); match local_block_id_option { - Some(local_block_id) => { + Some(local_block_id) => { // successfuly fetch block id println!("[transition] local_block_id: {}", local_block_id); let current_block_by_id_option: Option = DB::get_block_by_block_id(local_block_id); match current_block_by_id_option { Some(current_block_by_id) => { let proposal_window: i64 = current_block_by_id.block_id - 5; if proposal.proposal_block.block_id > proposal_window { + //TODO: Condition on proposal's block_id, here we can limit how many proposals self.determine_transition_step(proposal.clone(), proposal_index); + //delay to allow buffer? thread::sleep_ms(delay); } else { // DO NOT TRANSITION on proposals from a "lomg time ago" } + //self.determine_transition_step(proposal_status, proposal.clone()); }, None => { if local_block_id == -1 { self.determine_transition_step(proposal.clone(), proposal_index); + //delay to allow buffer? 
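The block-id guard in the middle of transition() is what keeps very old proposals from being replayed: a proposal is acted on only when its block sits above the local tip minus five, or when the chain is still empty (a tip of -1). A minimal restatement of that predicate, combining the empty-chain branch and the window branch; names are illustrative.

const PROPOSAL_WINDOW: i64 = 5;

/// True when a proposal's block is recent enough to transition on.
/// A local tip of -1 means no block has been committed yet, so nothing is stale.
fn in_proposal_window(local_tip: i64, proposal_block_id: i64) -> bool {
    local_tip == -1 || proposal_block_id > local_tip - PROPOSAL_WINDOW
}

fn main() {
    assert!(in_proposal_window(-1, 1));     // empty chain: always transition
    assert!(in_proposal_window(100, 97));   // 97 > 95: inside the window
    assert!(!in_proposal_window(100, 90));  // 90 <= 95: from "a long time ago", skip
}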
thread::sleep_ms(delay); } else {} - } } }, @@ -279,6 +294,7 @@ impl StateTransition for Node { } } + //TODO: fetch the most previous proposal let latest_proposal_option: Option = Proposal::get_latest_proposal(); match latest_proposal_option { Some(latest_proposal) => { @@ -286,20 +302,19 @@ impl StateTransition for Node { match latest_proposal.proposal_status { ProposalStatus::Committed | ProposalStatus::RejectedByNetwork => { - // get latest block id let latest_block_id_option: Option = DB::get_latest_block_id(); match latest_block_id_option { Some(block_id) => { let calculated_proposal_creator_id: i32 = Proposal::calculate_next_proposal_creator_id(self.peers.peer_set.len(), block_id) as i32; println!("calculated_proposal_creator_id: {} latest block_id: {}", calculated_proposal_creator_id, block_id); if calculated_proposal_creator_id == self.node_id { + thread::sleep_ms(5000); Proposal::create( self.clone().ip ); } else { } }, None => { - //SHOULD NEVER HAPPEN, AT WORST IS -1 println!("[transition] ERROR, NO LATEST BLOCK ID"); } } @@ -313,105 +328,176 @@ impl StateTransition for Node { println!("[transition] ERROR - latest_proposal_option is NONE") } } - println!("[Done with state transition]") + //TODO: if we find a proposal to be broadcasted, + //sign it, broadcast it, and then + //upon broadcast success, change to "submitted" + //TODO: broadcast with Vec of peer locations + //Network::broadcast_proposal_created(Proposal, self.peers_to_string); } fn determine_transition_step(&mut self, proposal: Proposal, proposal_index: JsonValue) -> (){ println!("Performing Transition for proposal_id: {}", proposal.clone().proposal_id); let node_ip: String = self.ip.to_string(); + // TODO: for each proposal, read the DB file and replace the proposal "sync checked" upon + // Proposal::get_proposl_from_disk_by_id() + // TODO: Sync Check, check for sync + // TODO: Break out into its own check_is_need_to_sync() Self::sync_check(&mut self.clone(), proposal.clone(), node_ip.clone()); match proposal.clone().proposal_status { ProposalStatus::Pending => { + //TODO: broadcast proposal to network, println!("[determine_transition_step], pending..."); for peer in self.peers.clone().peer_set { + //TODO: decide who we should broadcast to if Server::broadcast_proposal_created(proposal.clone(), peer.clone().location, node_ip.clone()).is_ok() { println!("[determine_transition_step], broadcast_proposal_created SUCCESS..."); + //TODO: update proposal to created status if DB::update_proposal(proposal.clone(), "created"); } else { println!("[determine_transition_step], broadcast_proposal_created FAILED..."); + //TODO: could update to NotValid of FailedCreate? + //TODO check for enough responses to even update it. 
+ //update it on one successul, or all + //DB::update_proposal(proposal.proposal_id, "created"); } + //TODO: and change proposal_status to Created after sending to all peers } + DB::update_proposal(proposal.clone(), "created"); }, - ProposalStatus::Created => { + //TODO: do nothing, because proposal is already broadcasted let mut missing_peer_vote: bool = false; let mut at_least_one_peer_rejected: bool = false; let all_proposals: &JsonValue = &proposal_index["proposals"]; println!("[determine_transition_step], ProposalStatus::Created, all_proposals.dump(): {}", all_proposals.dump()); for peer in self.peers.clone().peer_set { + + // check proposal index for the proposal match all_proposals.has_key( proposal.clone().proposal_id.to_string().as_str() ) { + true => { + + //TODO: unuse let proposal_object: &JsonValue = &all_proposals[ proposal.clone().proposal_id.to_string().as_str() ]; - if proposal_object.has_key( peer.clone().location.as_str() ) { - let string_value: Option<&str> = proposal_object[ peer.clone().location.as_str() ].as_str(); - match string_value { - Some(value) => { - match Proposal::status_from_string( value ) { - ProposalStatus::Accepted => { + // TODO: peer_status, check for peer status from peer_status proposal + // TODO: read peer status from file + let proposal_object_from_disk: Option = Proposal::read_proposal_file_by_id(proposal.clone().proposal_id); + + match proposal_object_from_disk { + Some(proposal_json) => { + + //if proposal_object.has_key( peer.clone().location.as_str() ) { + if proposal_json.has_key( peer.clone().location.as_str() ) { + + // TODO: change to NOT changing proposal_peer_status + + //let string_value: Option<&str> = proposal_object[ peer.clone().location.as_str() ].as_str(); + let string_value: Option<&str> = proposal_json[ peer.clone().location.as_str() ].as_str(); + + match string_value { + Some(value) => { + match Proposal::status_from_string( value ) { + ProposalStatus::Accepted => { + + }, + ProposalStatus::Rejected => { + at_least_one_peer_rejected = true; + }, + _ => { + //TODO change determine_transition_step to return a Result... + println!("[determine_transition_step], NEITHER ACCEPTED OR REJECTED STORED IN PROPOSAL PEER STATUS"); + at_least_one_peer_rejected = true; + } + } }, - ProposalStatus::Rejected => { - at_least_one_peer_rejected = true; - }, - _ => { - println!("[determine_transition_step], NEITHER ACCEPTED OR REJECTED STORED IN PROPOSAL PEER STATUS"); - at_least_one_peer_rejected = true; + None => { + println!("[determine_transition_step], CREATED, STRING VALUE FOR PEER KEY IS NONE"); } } - }, - None => { - println!("[determine_transition_step], CREATED, STRING VALUE FOR PEER KEY IS NONE"); + + } else { + println!("[determine_transition_step], CREATED, PEER KEY DOESN'T EXIST"); + // TODO: could be we just have to wait for the other person + //at_least_one_peer_rejected = true; + missing_peer_vote = true; } + }, + None => { + } - } else { - println!("[determine_transition_step], CREATED, PEER KEY DOESN'T EXIST"); - at_least_one_peer_rejected = true; } + + }, false => { println!("[determine_transition_step], proposal doesnt exist in proposal index"); - missing_peer_vote = true; + //TODO: should nOT be setting this flag if proposal doesnt exist. 
duh + //missing_peer_vote = true; } } - } + + } // end for each peer in peer_set if missing_peer_vote { + //todo: missing vote, do nothing println!("[determine_transition_step], Created, missing vote"); } else if at_least_one_peer_rejected { + //todo: at least one peer rejected the proposal println!("[determine_transition_step], Created, a peer rejected it"); + Proposal::update_proposal(proposal.clone(), "rejected_by_network"); } else { + // all good, update println!("[determine_transition_step], Created, all good to update proposal"); Proposal::update_proposal(proposal.clone(), "accepted_by_network"); } - }, + }, ProposalStatus::Accepted => { + //TODO: check to see if we have enough responses, + //if so move to commited, + //if not wait for more responses + //TODO: broadcast proposal to network, for peer in self.peers.clone().peer_set { //TODO: decide who we should broadcast to if Server::broadcast_proposal_response(proposal.clone(), peer.clone().location, node_ip.clone(), ProposalStatus::Accepted).is_ok() { - println!("[determine_transition_step], broadcast_proposal_accepted SUCCESS..."); - DB::update_proposal(proposal.clone(), "accepted_broadcasted"); + //TODO: update proposal to created status if + //TODO: this will redunantly write accepted_broadcasted per each peer...? + /////////////DB::update_proposal(proposal.clone(), "accepted_broadcasted"); } else { println!("[determine_transition_step], broadcast_proposal_accepted FAILED..."); + //TODO: could update to NotValid of FailedAccepted? + //TODO check for enough responses to even update it. + //update it on one successul, or all + //DB::update_proposal(proposal.proposal_id, "accepted_broadcasted"); } + //TODO: and change proposal_status to Accepted_Broadcasted after sending to all peers } + + DB::update_proposal(proposal.clone(), "accepted_broadcasted"); + }, ProposalStatus::AcceptedBroadcasted => { + + //we already Accepted it and told the network + //TODO: COULD CHANGE THIS TO BROADCAST RESPONSE? let local_block_id_option: Option = DB::get_latest_block_id(); match local_block_id_option { - Some(local_block_id) => { + Some(local_block_id) => { // successfuly fetch block id + //Test because some nodes broadcast acceptance, but don't get the for peer in self.peers.clone().peer_set { if Server::broadcast_block_query( ( local_block_id ), peer.clone().location, node_ip.clone()).is_ok() { println!("[determine_transition_step], broadcast_block_query SUCCESS 1..."); + //Ok(String::from("BLOCK BEHIND, QUERING TO SYNC")) } else { println!("[determine_transition_step], broadcast_block_query FAILED 1..."); } @@ -421,19 +507,48 @@ impl StateTransition for Node { } } + + for peer in self.peers.clone().peer_set { + //TODO: decide who we should broadcast to + if Server::broadcast_proposal_response(proposal.clone(), + peer.clone().location, + node_ip.clone(), + ProposalStatus::Accepted).is_ok() { + println!("[determine_transition_step], broadcast_proposal_accepted SUCCESS..."); + // broadcast just so the receiver can receive the response until they process it + //fire once, and forget doesnt work well + } else { + println!("[determine_transition_step], broadcast_proposal_accepted FAILED..."); + //TODO: could update to NotValid of FailedAccepted? + //TODO check for enough responses to even update it. 
+ //update it on one successul, or all + //DB::update_proposal(proposal.proposal_id, "accepted_broadcasted"); + } + //TODO: and change proposal_status to Accepted_Broadcasted after sending to all peers + } + }, ProposalStatus::AcceptedByNetwork => { + //TODO: MY PROPOSAL WAS ACCEPTED BY NETWORK + //TODO: BROACAST TO ALL PEERS THAT WE ARE NOW println!("[determine_transition_step], accepted_by_network..."); let block_commit_result: Result<(),String> = Block::commit_if_valid(proposal.clone().proposal_block); if block_commit_result.is_ok() { for peer in self.peers.clone().peer_set { + //TODO: decide who we should broadcast to if Server::broadcast_proposal_resolution(proposal.clone(), peer.clone().location, node_ip.clone()).is_ok() { println!("[determine_transition_step], broadcast_proposal_resolution SUCCESS..."); + //TODO: update proposal to committed status if } else { println!("[determine_transition_step], broadcast_proposal_resolution FAILED..."); + //TODO: could update to NotValid of FailedAccepted? + //TODO check for enough responses to even update it. + //update it on one successul, or all + //DB::update_proposal(proposal.proposal_id, "accepted_broadcasted"); } + //TODO: and change proposal_status to Accepted_Broadcasted after sending to all peers } DB::update_proposal(proposal.clone(), "committed"); } else { @@ -441,31 +556,49 @@ impl StateTransition for Node { } }, ProposalStatus::Rejected => { + //TODO: check to see if we have enough responses + //if so move to commited, if not wait for more responses for peer in self.peers.clone().peer_set { + //TODO: decide who we should broadcast to if Server::broadcast_proposal_response(proposal.clone(), peer.clone().location, node_ip.clone(), ProposalStatus::Rejected).is_ok() { println!("[determine_transition_step], broadcast_proposal_rejected SUCCESS..."); - DB::update_proposal(proposal.clone(), "rejected_broadcasted"); + //TODO: update proposal to created status if + //TODO: this will redunantly write accepted_broadcasted per each peer...? + //DB::update_proposal(proposal.clone(), "rejected_broadcasted"); } else { println!("[determine_transition_step], broadcast_proposal_rejected FAILED..."); + //TODO: could update to NotValid of FailedAccepted? + //TODO check for enough responses to even update it. + //update it on one successul, or all + //DB::update_proposal(proposal.proposal_id, "accepted_broadcasted"); } + //TODO: and change proposal_status to Accepted_Broadcasted after sending to all peers + //DB::update_proposal(proposal.clone(), "rejected_broadcasted"); } + DB::update_proposal(proposal.clone(), "rejected_broadcasted"); }, ProposalStatus::RejectedBroadcasted => { + //we already Rejected it and told the network + //TODO: enable this so rejections can continue just as well as acceptances }, ProposalStatus::RejectedByNetwork => { + //TODO: do nothing, because proposal is already broadcasted }, ProposalStatus::Committed => { - println!("[determine_transition_step], committed, only broadcast so others waiting can get it..."); + //TODO: remove this! + //TODO: received enough responses from network. Nothing further to be done + }, ProposalStatus::NotValid => { //TODO: throw error }, ProposalStatus::NotValidIncorrectNextBlockIndex => { + //TODO: retry to commit block? 
match Proposal::validate_proposal(proposal.clone()) { //NOTE: ONLY DO SOMETHING IF YOU CAN SAFELY PARSE THE PROPOSAL, OTHERWISE ERROR Ok(verdict) => { @@ -495,19 +628,23 @@ impl StateTransition for Node { peer.clone().location, node_ip.clone(), ProposalStatus::Rejected).is_ok() { - println!("[determine_transition_step], broadcast_proposal_rejected SUCCESS..."); - DB::update_proposal(proposal.clone(), "rejected_broadcasted"); + //TODO: update proposal to created status if + //TODO: this will redunantly write accepted_broadcasted per each peer...? } else { println!("[determine_transition_step], broadcast_proposal_rejected FAILED..."); + //TODO: could update to NotValid of FailedAccepted? + //TODO check for enough responses to even update it. //DB::update_proposal(proposal.proposal_id, "accepted_broadcasted"); } + //DB::update_proposal(proposal.clone(), "rejected_broadcasted"); + //TODO: and change proposal_status to Accepted_Broadcasted after sending to all peers } - + DB::update_proposal(proposal.clone(), "rejected_broadcasted"); }, ProposalStatus::NotValidIncorrectProposalHash => { - + //TODO: }, ProposalStatus::ProposalStatusError => { //TODO: throw error @@ -516,6 +653,7 @@ impl StateTransition for Node { } } + trait SyncCheck { fn sync_check(&mut self, proposal: Proposal, node_ip: String) -> (); } @@ -528,7 +666,7 @@ impl SyncCheck for Node { let current_block_by_id_option: Option = DB::get_block_by_block_id(local_block_id); match current_block_by_id_option { Some(current_block_by_id) => { - + // TODO: sync check }, None => { @@ -544,8 +682,5 @@ impl SyncCheck for Node { #[cfg(test)] mod tests { - #[test] - fn it_works() { - assert_eq!(2 + 2, 4); - } + } diff --git a/core/proposal/Cargo.lock b/core/proposal/Cargo.lock index 7acf9d6..74b7d2d 100644 --- a/core/proposal/Cargo.lock +++ b/core/proposal/Cargo.lock @@ -5,6 +5,11 @@ name = "autocfg" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "base64" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "block" version = "0.1.0" @@ -32,11 +37,49 @@ name = "db" version = "0.1.0" dependencies = [ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lock 0.1.0", +] + +[[package]] +name = "encode" +version = "0.1.0" +dependencies = [ + "base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "executor" +version = "0.1.0" +dependencies = [ + "json 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "macros 0.1.0", +] + +[[package]] +name = "fs2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "gcc" +version = "0.3.55" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "hash" version = "0.1.0" +dependencies = [ + "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", +] [[package]] name = "json" @@ -53,6 +96,20 @@ name = "libc" version = "0.2.65" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "lock" +version = "0.1.0" +dependencies = [ + "fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + 
+[[package]] +name = "macros" +version = "0.1.0" +dependencies = [ + "json 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "num-integer" version = "0.1.41" @@ -78,15 +135,70 @@ dependencies = [ "db 0.1.0", "hash 0.1.0", "json 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "macros 0.1.0", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", "timestamp 0.1.0", ] +[[package]] +name = "rand" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "redox_syscall" version = "0.1.56" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "rust-crypto" +version = "0.2.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rustc-serialize" version = "0.3.24" @@ -114,6 +226,8 @@ name = "transaction" version = "0.1.0" dependencies = [ "db 0.1.0", + "encode 0.1.0", + "executor 0.1.0", "hash 0.1.0", "json 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", "timestamp 0.1.0", @@ -140,13 +254,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] "checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" +"checksum base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" "checksum chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e8493056968583b0193c1bb04d6f7684586f3726992d6c573261941a895dbd68" +"checksum fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" +"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" +"checksum gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)" = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" "checksum json 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b3ca41abbeb7615d56322a984e63be5e5d0a117dfaca86c14393e32a762ccac1" "checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" "checksum libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)" = "1a31a0627fdf1f6a39ec0dd577e101440b7db22672c0901fe00a9a6fbb5c24e8" "checksum num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)" = "b85e541ef8255f6cf42bbfe4ef361305c6c135d10919ecc26126c4e5ae94bc09" "checksum num-traits 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "443c53b3c3531dfcbfa499d8893944db78474ad7a1d87fa2d94d1a2231693ac6" +"checksum rand 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)" = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" +"checksum rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +"checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" +"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" "checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" +"checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f" "checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6" diff --git a/core/proposal/Cargo.toml b/core/proposal/Cargo.toml index 76c9071..4268e29 100644 --- a/core/proposal/Cargo.toml +++ b/core/proposal/Cargo.toml @@ -9,5 +9,6 @@ db = { path = "../db" } block = { path = "../block" } timestamp = { path = "../timestamp" } hash = { path = "../hash" } +executor = { path = "../executor" } rustc-serialize = "0.3" json = "*" diff --git a/core/proposal/src/lib.rs b/core/proposal/src/lib.rs index c4c3b66..f0d61d4 100644 --- a/core/proposal/src/lib.rs +++ b/core/proposal/src/lib.rs @@ -17,11 +17,12 @@ along with the AfricaOS Platform. If not, see . 
extern crate json; use json::{JsonValue}; use std::io::{Error, ErrorKind}; - use db::{DB, DBWriteProposal, DBReadProposal, - FileDirectoryReader}; + FileDirectoryReader, + DBReadProposalPeerStatus, + DBWriteProposalPeerStatus}; use block::{Block, CreateNewBlock, @@ -33,6 +34,7 @@ use block::{Block, use timestamp::{Timestamp, NewTimestamp, StringToTimestamp}; use hash::{Hasher, CalculateSHA256Hash}; +use executor::{Executor, ExecuteMacro}; /* @name Proposal @@ -54,19 +56,19 @@ pub struct Proposal { */ #[derive(Clone,Debug,PartialEq)] pub enum ProposalStatus { - Pending, - Created, - Accepted, - AcceptedBroadcasted, - AcceptedByNetwork, - Rejected, - RejectedBroadcasted, - RejectedByNetwork, - Committed, - NotValid, - NotValidIncorrectNextBlockIndex, - NotValidIncorrectProposalHash, - ProposalStatusError + Pending, //for proposals just made + Created, //for proposals made, and broadcasted to the network + Accepted, //proposals accepted by peers + AcceptedBroadcasted, //proposals accepted by a node, AND BROADCASTED + AcceptedByNetwork, //proposal accepted by peers, AND BROADCASTED + Rejected, //proposal rejected by peers + RejectedBroadcasted, //proposal rejected by a node AND BROADCASTED + RejectedByNetwork, //proposal rejected by peervalidate_proposals, AND BROADCASTED + Committed, //proposal agreed upon by peers + NotValid, //proposals that do not match any of the above enum values + NotValidIncorrectNextBlockIndex, //proposals that do not have the correct next block index + NotValidIncorrectProposalHash, //proposals that do not hash to be correct + ProposalStatusError //DEFAULT ENUM ERROR } pub trait StatusToString { @@ -102,7 +104,7 @@ pub trait StringToStatus { } /* -@name status_from_string +@name @desc */ impl StringToStatus for Proposal { @@ -168,7 +170,8 @@ impl JsonConverter for Proposal { } fn from_json(payload: JsonValue) -> Result { - println!("From JSON: {}", payload); + //TODO: return a Result + println!("Proposal, JsonConverter, From JSON: {}", payload); let proposal_id_from_json_option: Option = payload["proposal_id"].as_i32(); match proposal_id_from_json_option { Some(proposal_id_from_json) => { @@ -203,10 +206,10 @@ impl JsonConverter for Proposal { Err(String::from("Proposal, ERROR: from_json, proposal_id could not be parsed as i32!")) } } - } fn from_json_string(json_string: String) -> Result { + // TODO: conditionally unwrap instead of just unwrapping let parsed = json::parse( &format!(r#"{}"#, json_string) ).unwrap(); Self::from_json(parsed) } @@ -228,9 +231,11 @@ pub trait CreateProposalIndex { */ impl CreateProposalIndex for Proposal { fn create_proposal_index() -> (){ + let new_proposal_index = object!{ "proposals" => object!{} }; + let index_to_write: String = json::stringify(new_proposal_index); match DB::write_proposal_index(index_to_write) { Ok(_) => { @@ -272,20 +277,32 @@ impl UpdateProposalInDB for DB { peer: String) -> Result { println!("Inside add_node_status_to_proposal_json"); - let mut proposal_index_option: Option = Self::get_proposal_index_as_json(); - match proposal_index_option { - Some(mut proposal_index) => { - let all_proposals = &mut proposal_index["proposals"]; + + //TODO: get proposal peer statuses + let mut proposal_object_option: Option = Proposal::read_proposal_file_by_id(proposal.proposal_id); + + match proposal_object_option { + Some(mut proposal_loaded) => { + let proposal_root = &mut proposal_loaded; + //TODO: add peer name key to proposal let stringed_status = Proposal::string_from_status(status); - 
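Read against determine_transition_step in node/src/lib.rs, the commented variants above form a small per-proposal state machine: the proposer moves Pending -> Created -> AcceptedByNetwork -> Committed, a validator moves Accepted -> AcceptedBroadcasted -> Committed, and the Rejected* variants mirror the accept path. A compressed, purely illustrative sketch of those happy-path hops; it is not part of the crate.

#[derive(Clone, Debug, PartialEq)]
enum Status { Pending, Created, Accepted, AcceptedBroadcasted, AcceptedByNetwork, Committed }

// Happy-path successor for each local status; the real node also walks the
// Rejected* mirror states and the NotValid* error states.
fn next_happy_step(current: &Status) -> Option<Status> {
    match current {
        Status::Pending => Some(Status::Created),               // proposal broadcast to peers
        Status::Created => Some(Status::AcceptedByNetwork),     // every peer voted accept
        Status::Accepted => Some(Status::AcceptedBroadcasted),  // our acceptance broadcast
        Status::AcceptedByNetwork => Some(Status::Committed),   // block committed, resolution sent
        Status::AcceptedBroadcasted => Some(Status::Committed), // valid resolution received
        Status::Committed => None,                              // terminal for this proposal
    }
}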
all_proposals[proposal.proposal_id.to_string()][peer] = JsonValue::from(stringed_status); - let proposal_write_result: Result = match Self::write_proposal_index(proposal_index.dump()) { + + // TODO: dont alter proposal index, only alter proposal file object + //proposal_root[proposal.proposal_id.to_string()][peer] = JsonValue::from(stringed_status); + proposal_root[peer] = JsonValue::from(stringed_status); + + // TODO: write proposal peer status + let proposal_write_result: Result = match Self::write_proposal_to_sql(proposal.proposal_id, proposal_root.dump()) { Ok(result) => { Ok(result) }, Err(err) => { - Err( String::from("add_peer_status_to_proposal ERROR: Writing proposal index failed") ) + //let proposal_db_write_peer_status_error = Error::new(ErrorKind::Other, "Couldn't write Proposal peer status to DB"); + Err( String::from("add_peer_status_to_proposal ERROR: Writing proposal peer status failed") ) } }; + + proposal_write_result }, None => { @@ -296,9 +313,11 @@ impl UpdateProposalInDB for DB { fn update_proposal(proposal: Proposal, status: &str) -> Result { println!("Inside update proposal"); + //TODO: get proposal index let mut proposal_index_option: Option = Self::get_proposal_index_as_json(); match proposal_index_option { Some(mut proposal_index) => { + //TODO: change the entry let all_proposals = &proposal_index["proposals"]; let new_proposal_status: ProposalStatus = Proposal::status_from_string( status.clone() ); let altered_proposal_block: Result = Block::from_json(all_proposals[ proposal.proposal_id.to_string() ]["proposal_block"].clone()); @@ -314,20 +333,18 @@ impl UpdateProposalInDB for DB { }; let parsed = json::parse( &format!(r#"{}"#, Proposal::to_json(altered_proposal.clone()) ) ); if parsed.is_ok() { + //TODO: overwrite not whole proposal, but only status, so we conserve the node/peer statuses proposal_index ["proposals"] [proposal.proposal_id.to_string()] ["proposal_status"] = JsonValue::from(status); let proposal_write_result: Result = match Self::write_proposal_index(proposal_index.dump()) { Ok(result) => { - match Self::write_proposal(altered_proposal.clone(), new_proposal_status.clone()){ - Ok(result) => { - Ok( String::from("update_proposal SUCCESS: Successful write of proposal") ) - }, - Err(_) => { - Err( String::from("update_proposal ERROR: Writing to disk failed") ) - } - } + /* + //TODO: overwrites actual proposal file... 
+ */ + //TODO: TEST TO SEE IF STOP OVERWRITING PROPOSAL File + Ok( String::from("update_proposal SUCCESS: Successful write of proposal") ) }, Err(err) => { Err( String::from("update_proposal ERROR: Writing proposal index failed") ) @@ -357,6 +374,7 @@ impl UpdateProposalInDB for DB { */ pub trait ReadProposalFromDB { fn get_proposal_index_as_json() -> Option; + fn get_proposal_peer_status_as_json(proposal_id: i32) -> Option; fn get_latest_proposal() -> Option; fn get_all_proposals() -> Option>; fn get_last_n_proposals() -> Option>; @@ -367,6 +385,7 @@ pub trait ReadProposalFromDB { @desc */ impl ReadProposalFromDB for DB { + /* @name get_proposal_index_as_json @desc return the proposal index as a json object @@ -374,11 +393,13 @@ impl ReadProposalFromDB for DB { fn get_proposal_index_as_json() -> Option { let proposal_index: String = match DB::read_proposal_index() { Some(i) => { + //TODO: parse/verify proposal index i }, - None => String::from("NO INDEX") + None => String::from("get_proposal_index_as_json, NO INDEX") }; println!("Proposal index: {}", proposal_index); + //TODO: convert DB json string to json let parsed_result: Result = json::parse( &format!(r#"{}"#, proposal_index) ); match parsed_result { Ok(parsed) => { @@ -392,6 +413,34 @@ impl ReadProposalFromDB for DB { } } + /* + @name get_proposal_peer_status_as_json + @desc return the proposal index as a json object + */ + fn get_proposal_peer_status_as_json(proposal_id: i32) -> Option { + //let proposal_index: String = match DB::read_proposal_index() { + let proposal_index: String = match DB::read_proposal_peer_status(proposal_id) { + Some(i) => { + //TODO: parse/verify proposal index + i + }, + None => String::from("NO INDEX") + }; + println!("Proposal index: {}", proposal_index); + //TODO: convert DB json string to json + let parsed_result: Result = json::parse( &format!(r#"{}"#, proposal_index) ); + match parsed_result { + Ok(parsed) => { + println!("get_proposal_peer_status_as_json, proposal index parsed: {}", parsed["proposals"]); + println!("get_proposal_peer_status_as_json, PI parse example 0 {}", parsed["proposals"]["0"]); + Some(parsed) + }, + Err(_) => { + None + } + } + } + /* @name get_latest_proposal @desc get the latest proposal @@ -413,6 +462,7 @@ impl ReadProposalFromDB for DB { @desc get all proposals from the proposals directory */ fn get_all_proposals() -> Option> { + //TODO: read proposal index let parsed_option: Option = Self::get_proposal_index_as_json(); match parsed_option { Some(parsed) => { @@ -450,7 +500,7 @@ impl ReadProposalFromDB for DB { Some(next_proposal_id) => { let mut all_proposals_vector: Vec = Vec::new(); let mut highest_proposal_to_fetch: i32 = ( format!("{}", proposal_index["proposals"].clone().len() ).parse::().unwrap() );//next_proposal_id + 5; - let mut furthest_proposal_to_fetch: i32 = highest_proposal_to_fetch - 1;//next_proposal_id; + let mut furthest_proposal_to_fetch: i32 = highest_proposal_to_fetch - 1;// was 5 //next_proposal_id; if furthest_proposal_to_fetch < 0 { furthest_proposal_to_fetch = 0; } else {} @@ -498,36 +548,37 @@ impl WriteProposalToDB for DB { */ fn write_proposal(mut proposal: Proposal, new_status: ProposalStatus) -> Result { println!("inside write_proposal new_status: ProposalStatusn Proposal, DB trait"); + //TODO: Read proposal index JSON + //TODO: pass Node Peer name let parsed_option: Option = Self::get_proposal_index_as_json(); match parsed_option { Some(mut parsed) => { proposal.proposal_status = new_status.clone(); + //TODO: convert from Proposal to JSON let 
proposal_string: String = Proposal::to_json(proposal.clone()); if parsed.has_key( &(format!("{}", proposal.proposal_id).to_string()) ) { + //the proposal index has the key already, so update the status ONLY + //THIS PRESERVES THE DATA IN IT ALREADY! let stringed_status = Proposal::string_from_status(new_status); + //overwrite the proposal status ONLY parsed["proposals"] [&(format!("{}", proposal.proposal_id).to_string())] ["proposal_status"] = JsonValue::from(stringed_status); println!("write_proposal, UPDATE Proposal JSON: {}", parsed.dump()); + //write index first! let db_index_write_result: Result = Self::write_proposal_index(parsed.dump()); match db_index_write_result { Ok(result) => { let proposal_string: String = Proposal::to_json(proposal.clone()); - let db_write_result: Result = Self::write_proposal_to_sql(proposal.proposal_id, proposal_string.clone()); - match db_write_result { - Ok(r) => { - Ok(r) - }, - Err(err) => { - Err(err) - } - } + //TODO: dont overwrite peer status proposal files + Ok(String::from("successul, write_proposal, db_index_write_result")) }, Err(err) => { Err(err) } } } else { + //TODO: alter proposal index json object let new_proposal_entry = object!{ "proposal_id" => proposal.proposal_id, "proposal_status" => Proposal::string_from_status(new_status), @@ -543,8 +594,14 @@ impl WriteProposalToDB for DB { println!("New Proposal JSON: {}", parsed.dump()); let db_index_write_result = Self::write_proposal_index(parsed.dump()); if db_index_write_result.is_ok() { + //TODO: commit proposal index to DB + //TODO: commit proposal to DB + + //TODO: dont overwrite peer status proposal files + //Ok(String::from("successul, write_proposal, db_index_write_result")) let db_write_result: Result = Self::write_proposal_to_sql(proposal.proposal_id, proposal_string.clone()); db_write_result + } else { let proposal_db_write_error = Error::new(ErrorKind::Other, "Couldn't write Proposal to DB"); Err(proposal_db_write_error) @@ -555,6 +612,7 @@ impl WriteProposalToDB for DB { let proposal_index_insert_error = Error::new(ErrorKind::Other, "Could not add proposal to proposal_index"); Err(proposal_index_insert_error) } + }; pindex_insert_result } @@ -581,7 +639,8 @@ trait WriteNewProposalToDB { */ impl WriteNewProposalToDB for Proposal { fn write_new_proposal(proposal: Proposal) -> Result { - DB::write_proposal(proposal, ProposalStatus::Pending) + //TODO: Pass "Node Name" to DB functions so it knows where to write? + DB::write_proposal(proposal, ProposalStatus::Pending) //write proposal } } @@ -599,6 +658,7 @@ pub trait StoreProposal { */ impl StoreProposal for Proposal { fn store_proposal(proposal: Proposal, proposal_status: ProposalStatus) -> Result { + //TODO: "Node Name" to DB functions so it knows where to write? 
DB::write_proposal(proposal, proposal_status) } } @@ -611,6 +671,7 @@ pub trait GetProposals { fn get_all_proposals() -> Option>; fn get_last_n_proposals() -> Option>; fn get_latest_proposal() -> Option; + fn read_proposal_file_by_id(proposal_id: i32) -> Option; } impl GetProposals for Proposal { @@ -626,6 +687,27 @@ impl GetProposals for Proposal { DB::get_latest_proposal() } + fn read_proposal_file_by_id(proposal_id: i32) -> Option{ + let proposal_index: String = match DB::read_proposal_file_by_id(proposal_id) { + Some(i) => { + //TODO: parse/verify proposal index + i + }, + None => String::from("proposal, read_proposal_file_by_index, NO PROPOSAL FOUND") + }; + println!("proposal, read_proposal_file_by_index, Proposal: {}", proposal_index); + //TODO: convert DB json string to json + let parsed_result: Result = json::parse( &format!(r#"{}"#, proposal_index) ); + match parsed_result { + Ok(parsed) => { + println!("proposal, read_proposal_file_by_index, proposal parsed: {}", parsed); + Some(parsed) + }, + Err(_) => { + None + } + } + } } /* @@ -638,7 +720,6 @@ pub trait UpdateProposal { @desc add a key to the proposal DB */ fn add_peer_status_to_proposal(proposal: Proposal, status: ProposalStatus, peer: String) -> Result; - fn update_proposal(proposal: Proposal, status: &str) -> Result ; } @@ -690,6 +771,7 @@ impl NewProposal for Proposal { */ fn create(request_origin: String) -> Option { println!("Creating New Proposal..."); + //TODO: determine proposal ID let new_proposal_id:i32 = match Self::get_next_proposal_id(){ Some(pid) => pid, None => -1 @@ -702,9 +784,11 @@ impl NewProposal for Proposal { Some(ts) => { let new_proposal_sender: String = request_origin; let new_proposal_hash: String = Self::hash_proposal(calculated_proposal_id.clone(), new_proposal_sender.clone(), ts.clone()); + //TODO: CREATE NEW BLOCK let new_proposal_block: Result = Block::new(new_proposal_hash.clone()); match new_proposal_block { Ok(block) => { + //Increment the local proposal id let new_proposal: Proposal = Proposal { proposal_id: calculated_proposal_id, proposal_status: new_proposal_status, @@ -713,6 +797,7 @@ impl NewProposal for Proposal { proposal_sender: new_proposal_sender, proposal_block: block }; + //TODO: create proposal attempt in DB Self::write_new_proposal(new_proposal.clone()).unwrap(); Some(new_proposal) }, @@ -766,16 +851,22 @@ impl ProposalIDGenerator for Proposal { @desc generate the next proposal_id from all proposals on disk */ fn get_next_proposal_id() -> Option { + //read all directories let files:Vec = DB::read_proposals_directory(); let mut iter = (&files).into_iter(); let mut highest_proposal_index: i32 = -1; + //iterate over all proposal files while let Some(v) = iter.next(){ println!("Filename Iter: {}", v); + //parse file name for proposal id let filename_split_vector = v.split("/").collect::>(); let last_split_section: &str = filename_split_vector[filename_split_vector.len() - 1]; let parsed_proposal_id: Option = Self::parse_filename_for_proposal_id(last_split_section); + //TODO: parse to a proposal type, and check status + // should not count a none commited block in the highest_block_id calculation match parsed_proposal_id { Some(pid) => { + //Could keep this in memory globally? 
if pid > highest_proposal_index { highest_proposal_index = pid; } @@ -809,12 +900,15 @@ impl ProposalValidator for Proposal { fn is_accepted_broadcasted_already(submitted_proposal: Proposal) -> bool { let all_proposals: Option> = Proposal::get_last_n_proposals(); + //TODO: Breakout into Proposal::find_proposal let already_commited: Option = match all_proposals { Some(proposals) => { let mut commited_proposal: Option = None; for proposal in proposals { + // here we are saying, if we already have proposal, that matches the block id of the submitted proposal if proposal.clone().proposal_block.block_id == submitted_proposal.proposal_block.block_id { println!("is_commited_already, proposal.block_id matches submitted block_id"); + //if we have the proposal, check its status, if we already AcceptedBroadcasted, AcceptedByNetwork it, return it if proposal.clone().proposal_status == ProposalStatus::AcceptedBroadcasted || @@ -826,6 +920,7 @@ impl ProposalValidator for Proposal { { println!("is_commited_already, proposal status IS INDEED AcceptedBroadcasted, DO NOT VALIDATE ANOTHER VERSION"); commited_proposal = Some(proposal); + // TODO: can safely break for proposal in proposals iteration } else { println!("is_commited_already, ERROR proposal STATUS IS NOT COMMITED, DON'T RESPOND WITH IT"); } @@ -851,6 +946,7 @@ impl ProposalValidator for Proposal { // this should only be invoked from one node for a given block fn validate_proposal(submitted_proposal: Proposal) -> Result { println!("validate_proposal(), Submitted Proposal: {}", submitted_proposal.proposal_id); + //security - if i already agreed and broadcasted, I should not validate another proposal if Self::is_accepted_broadcasted_already(submitted_proposal.clone()) { let is_commited_already_error = Error::new(ErrorKind::Other, "validate_proposal() [ERROR] WE COMMITED THE BLOCK ALREADY, PROBABLY AWAITING RESOLUTION"); return Err(is_commited_already_error) @@ -863,10 +959,15 @@ impl ProposalValidator for Proposal { return Err(get_proposal_index_as_json_error) } }; + //TODO: check to see if submitted_proposal.proposal_id is higher than my highest proposal_id, like sequence numbers + //TODO: Bob checks if he already has a Proposal with that proposal_id (proposal_id = 1) + //TODO: if bob doesn't have proposal_id = 1, bob verifies the proposal using the following criteria let all_proposals = &proposal_index_parsed["proposals"]; let proposal_id_string: String = format!("{}", submitted_proposal.proposal_id); let proposal_id_check = &all_proposals[proposal_id_string.clone()]; if !proposal_id_check.has_key( proposal_id_string.clone().as_str() ) { + //TODO: if bob has proposal_id = 1, he checks the status of it + //TODO: What is the current block_id bob has? 
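Two of the checks that follow in validate_proposal are purely structural and easy to show in isolation: the proposed block must sit exactly one past the local tip, and the proposal hash must equal SHA-256 over the concatenation of proposal_id, sender and timestamp. A standalone sketch of both, using the sha2 crate in place of this repository's Hasher so the snippet compiles on its own; whether the hex formatting matches Hasher::calculate_sha256 byte for byte is an assumption.

use sha2::{Digest, Sha256};

/// A valid proposal extends the local chain by exactly one block.
fn is_next_block(local_tip: i64, proposed_block_id: i64) -> bool {
    local_tip + 1 == proposed_block_id
}

/// Recompute the hash the proposer claims and compare it to the one submitted.
fn proposal_hash_matches(proposal_id: i32, sender: &str, timestamp: &str, expected_hex: &str) -> bool {
    let preimage = format!("{}{}{}", proposal_id, sender, timestamp);
    format!("{:x}", Sha256::digest(preimage.as_bytes())) == expected_hex
}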
             let current_block_id: Option<i64> = Block::get_latest_block_id();
             let current_block_id_result: i64 = match current_block_id {
                 Some(block_id) => {
@@ -874,33 +975,43 @@ impl ProposalValidator for Proposal {
                     block_id
                 },
                 None => {
+                    //NO PREVIOUS BLOCK
                     -1
                 }
             };
+            //TODO: a valid proposal will ONLY be the current block_id + 1
             if ( (current_block_id_result + 1) == submitted_proposal.proposal_block.block_id ) {
             } else {
                 return Ok(ProposalValidationResult::NotValidIncorrectNextBlockIndex)
             }
+            //TODO: breakout into modular, verify_proposal_hash
+            //TODO: calculate the hash of the proposal (see below)
             let string_to_hash: String = String::from( format!("{}{}{}", submitted_proposal.proposal_id, submitted_proposal.proposal_sender, submitted_proposal.proposal_time.timestamp).as_str() ) ;
             let expected_hash: String = submitted_proposal.proposal_hash;
             let submitted_proposal_hash: String = Hasher::calculate_sha256( string_to_hash );
-
+            //TODO: validate the proposal_hash provided by alice against the proposal_hash bob just calculated
             match submitted_proposal_hash {
                 _ if submitted_proposal_hash == expected_hash => {
                     println!("HASH SUCCESS: proposal hash IS CORRECT: {}{}", expected_hash, submitted_proposal_hash);
                 },
                 _ => {
+                    //TODO: if the hashes are different, bob rejects the proposal, and sets the proposal to NotValid in the proposal index
                     println!("ERROR: proposal hash not valid: {}{}", expected_hash, submitted_proposal_hash);
                     return Ok(ProposalValidationResult::NotValidIncorrectProposalHash)
                 }
             }
+            //TODO: What is the current block_hash of our highest block?
+            //TODO: If the block_hash of bob's highest block is NOT equal to the block_parent_hash of the submitted proposal's proposal_block, bob rejects the block, and sets the proposal to NotValid in the proposal index
+            //TODO: if all of the above does not reject the proposal, bob accepts alice's submitted proposal, responds to alice with "acceptance", and updates the proposal_index to accepted for the proposal_id
         } else {
-
+            //TODO: if the proposal_status is accepted, or rejected, and the submitter is NOT bob, do nothing because bob already added it to the proposal index
+            // proposal_id_check.has_key IS FALSE
+            // return Ok(ProposalValidationResult::NotValid)
         }
         let proposal_validation_error = Error::new(ErrorKind::Other, "Couldn't validate proposal");
@@ -920,6 +1031,8 @@ pub trait CompareWithoutStatus {
 impl CompareWithoutStatus for Proposal {
     fn compare_without_status(proposal_left: Proposal, proposal_right: Proposal) -> bool {
+
+        //test proposal_id
         match proposal_left.proposal_id == proposal_right.proposal_id {
             true => {
@@ -929,6 +1042,7 @@ impl CompareWithoutStatus for Proposal {
             }
         }
+        //test proposal_hash
         match proposal_left.proposal_hash == proposal_right.proposal_hash {
             true => {
@@ -938,6 +1052,7 @@ impl CompareWithoutStatus for Proposal {
             }
         }
+        //test proposal_time
         match proposal_left.proposal_time == proposal_right.proposal_time {
             true => {
@@ -947,6 +1062,7 @@ impl CompareWithoutStatus for Proposal {
             }
         }
+        //test proposal_sender
         match proposal_left.proposal_sender == proposal_right.proposal_sender {
             true => {
@@ -956,6 +1072,7 @@ impl CompareWithoutStatus for Proposal {
             }
         }
+        //test proposal_block
         match proposal_left.proposal_block == proposal_right.proposal_block {
             true => {
@@ -966,7 +1083,6 @@ impl CompareWithoutStatus for Proposal {
             }
         }
         true
-
     }
 }
@@ -980,13 +1096,13 @@ pub trait ValidateProposalBlock {
 impl ValidateProposalBlock for Proposal {
     fn validate_proposal_block(&mut self) -> Result<(), String> {
+        //////////// TODO: check if we already commited a proposal
         println!("validate_proposal_block: check if we commited already");
+        //TODO SECURITY:
         Block::commit_if_valid(self.clone().proposal_block)
     }
 }
-
-
 /*
 @desc upon a resolution proposal received, provided it is accepted-broadcast,
       attempt to finalize the proposal, this includes commiting it
@@ -1002,8 +1118,11 @@ pub trait ProposalResolutionAccepted {
 impl ProposalResolutionAccepted for Proposal {
     fn validate_proposal_resolution(local_proposal: Proposal, received_proposal: Proposal) -> Result<(), ()> {
         match received_proposal.proposal_status {
+            //was it accepted by the network, according to the submitter, not us
             ProposalStatus::AcceptedByNetwork => {
                 println!("invoke_action(), proposal_resolution - received_proposal STATUS IS AcceptedByNetwork");
+                //TODO: WE ACCEPTED IT, BROADCASTED IT AND WE JUST RECEIVED A RESOLUTION
+                //Proposal::update_proposal(found_proposal.clone().unwrap(),"accepted_by_network");
                 if received_proposal
                     .clone()
                     .validate_proposal_block()
@@ -1015,15 +1134,15 @@ impl ProposalResolutionAccepted for Proposal {
                     Err(())
                 }
             },
-
             ProposalStatus::RejectedByNetwork => {
                 println!("invoke_action(), proposal_resolution - decoded_proposal STATUS IS RejectedByNetwork");
+                //TODO: WE CREATED IT AND WE JUST RECEIVED A REJECTION
+                //Proposal::update_proposal(found_proposal.clone().unwrap(),"rejected_by_network");
                 Err(())
             },
-
-
             ProposalStatus::Committed => {
                 println!("invoke_action(), proposal_resolution - received_proposal STATUS IS Commited");
+                //TODO: WE ACCEPTED IT, BROADCASTED IT AND WE JUST RECEIVED A RESOLUTION
                 if received_proposal
                     .clone()
                     .validate_proposal_block()
@@ -1053,11 +1172,11 @@ pub trait CalculateProposalCreatorID {
 impl CalculateProposalCreatorID for Proposal {
     fn calculate_next_proposal_creator_id(peer_length: usize, latest_block_id: i64) -> i64 {
+        //TODO: invoke PCE macro. macro use
         println!("calculate_next_proposal_creator_id: peer_length: {} latest_block_id: {}", peer_length, latest_block_id);
-        //todo: add one to include myself -- ASSUMING we are connected to everybody
-        return ( (latest_block_id + 1) % ( (peer_length + 1) as i64) ) + 1;
+        Executor::execute_proposal_creator_election(peer_length, latest_block_id)
     }
 }
@@ -1069,7 +1188,6 @@ mod tests {
                     ProposalValidator,
                     ProposalValidationResult,
                     CalculateProposalCreatorID};
-
     use block::{Block, CreateNewBlock};
     use timestamp::{Timestamp, NewTimestamp};
@@ -1115,4 +1233,5 @@ mod tests {
                                                                next_block_id),
                    3);
     }
+
 }
diff --git a/core/signature/src/lib.rs b/core/signature/src/lib.rs
index 8bb0cce..fed1ec6 100644
--- a/core/signature/src/lib.rs
+++ b/core/signature/src/lib.rs
@@ -1,22 +1,4 @@
-/*
-Copyright 2018-Present The AfricaOS Authors
-This file is part of the AfricaOS library.
-The AfricaOS Platform is free software: you can redistribute it and/or modify
-it under the terms of the GNU Lesser General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-The AfricaOS Platform is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU Lesser General Public License for more details.
-You should have received a copy of the GNU Lesser General Public License
-along with the AfricaOS Platform. If not, see .
-*/ - #[cfg(test)] mod tests { - #[test] - fn it_works() { - assert_eq!(2 + 2, 4); - } + } diff --git a/core/src/Cargo.toml b/core/src/Cargo.toml index 5fe1ca0..68317f8 100644 --- a/core/src/Cargo.toml +++ b/core/src/Cargo.toml @@ -4,10 +4,12 @@ version = "0.1.0" authors = ["Kunta Labs"] edition = "2018" + [dependencies] [[bin]] name = "src" path = "main.rs" + #may not need, only cargo test throws this error for node import node = { path = "../node"} diff --git a/core/src/main.rs b/core/src/main.rs index 290e7f6..16338af 100644 --- a/core/src/main.rs +++ b/core/src/main.rs @@ -64,7 +64,7 @@ fn main() { loop { node.transition(); - thread::sleep_ms(1000); //delay between every global state transition + thread::sleep_ms(5000); //delay between every global state transition } } @@ -79,13 +79,13 @@ mod tests { use timestamp::{Timestamp, NewTimestamp}; use network::{Server, PayloadParser, API}; - #[test] fn test_validate_proposal_isok() { let successful_msg: &str = "Successful Proposal Validation"; let successful_result_stub: Result = Ok(String::from(successful_msg)); let test_timestamp: Option = Timestamp::new(); let test_block: Result = Block::new(); + println!("test_timestamp: {}", test_timestamp.clone().unwrap().timestamp); let test_proposal: Proposal = Proposal { @@ -100,7 +100,6 @@ mod tests { assert!(proposal_validated.is_ok()); } - #[test] fn test_validate_proposal() { let successful_msg: &str = "Successful Proposal Validation"; @@ -108,7 +107,6 @@ mod tests { let test_timestamp: Option = Timestamp::new(); let test_block: Result = Block::new(); println!("test_timestamp: {}", test_timestamp.clone().unwrap().timestamp); - let test_proposal: Proposal = Proposal { proposal_id: 0, proposal_status: ProposalStatus::Created, @@ -117,7 +115,6 @@ mod tests { proposal_sender: String::from("test proposal sender"), proposal_block: test_block.unwrap() }; - let proposal_validated: Result = Proposal::validate_proposal(test_proposal); assert_eq!(ProposalValidationResult::Valid, proposal_validated.unwrap()); } diff --git a/core/storage/.DS_Store b/core/storage/.DS_Store index a666d2856029fe9c125b27efee3bfdccf76a89b4..b88491bbb303805213843c5dae75138ff08199e0 100644 GIT binary patch delta 129 zcmZn(XfcprU|?W$DortDU=RQ@Ie-{MGqg=C6q~50D9Q+A12Ir6pP`r`ks&9gI5}rx zp*kbuWJM{NjfL^-i`h9i1erm4fXcXmgewrYZ7lrGJeglbmlI^b0U&0WY{MfxIYEV; N6=WsD=6IeN%m8Va79;=w literal 10244 zcmeHMdu$X%7@zNGWtT2=i!FWNFGpf#;F5fUbZ2>4JcOPKvlqH_8arGUq&Lkeh5h`C{FZ)4hV?IK;D z;d{XMfbRj{1HK1*51bnwz;`xHLK&xi?t8%ZfbW5<2k8C~#)s0VN2fStTL)(B3V^f( z*(_lh?*Ysv?9r%4r#NM%*e1U{fM*Kc6$7|A%EMG0Y1E@roN{vl+?;^dGk7-?V7;SV zn5h$pa_Z;42Ye3{dw|~EOG$!^5{2k__xBRA8F@*fdkJo-%aDZEQRN zfjK3m7t9redBVK;^KOWZTH`TSannw#Vh_?g#r8QHQD->z%pLvm%CWx9%P zYBn?0RSi{k?a)ot8gI4Irs}YEVq72yq96{*izX&kH8+H>Y@BQePh8Qsx*@!3&DzOH zQK(+lbZy6;;kcWG~rI4v@p-De@*cPR@`Y$?q^1YGEm?fM&QB*1<;TfKKRzEwGha2Qo3fDANU} zFo-lKn8rv`EMX;6TzcqCfz%mm;}8#=%!K+Bco1iS4t92RZx zxgwz$iL{}(T8m*&KOwI$LOFOS5cThLEZfDXV`D?IoOZOW8n&g6Y3o=u!cb^;)gz1HHMY*&j7!l=Hgk35Qr`u9#Pb!lHmjvqtVUY6M zYz=eHmM#r8h_WK|Ip_-B#AQ;GC_7@`UR?Ryv{_m$$f~%FL65W6N>>YVr?_K`BbGC- zlcGWeG>H8e*d)%=;q12HdOG@M4}#8 zvry$YjYJzcWbm{SB&$d}=^}fGO(w|w#hk9s)Cb%47+ya{r!d=i03dBJL4Q>PnTo{9KxCQQlyWt+V7aoLz@F+Y6 zhu}$g7M_C_;U#zz-i7zzeK-y$;B)u_zJ%}L2lxg43B$^qPKkybf<5mfu1we}qUr4RkZcltpeguV-@j8cR9I zO=&k9_@hOQD8(7LC+s&g)nW#)v6yWdmN|la`g##a3Rk>^xkVg2 zAsPSA6H(DLbI(OZ!u%I1^I(wp6NH=!9Xmanj(S#|ylAmziepj`vr>$di?MQv zRxna}jm(kej`6iB)_9(DW^~2U{BgzXVJ_ZkLR;l zHD|{5&zkw;4AXJ?$@hTof%DA+{7px~^!xw%+5i8aZ*KAD>wDn5@&J^y$J$$Q%9*bx 
zg|nxJq%b~v@S%z7O>xRhFrzAz&pyXvg?^4_cWC5g?#xsk_4KAVMGmI_{AWP+tAv05 U$I~f(_ci?cKl}K3{@nln0+Gn+OaK4? diff --git a/core/storage/chain/.DS_Store b/core/storage/chain/.DS_Store index 284386c9ce2ed46f7d9b13dec511e03087fa4112..06c63c0e78a96a9760dabb74a657b4663f8fdf3e 100644 GIT binary patch delta 106 zcmZp1XfcprU|?W$DortDU=RQ@Ie-{MGqg`E6q~50$jCG?zxq*Z$5VvnE{LVa?U&fG;VX_U6G!GNRAduM%o8x)rFarR-VG)4< literal 8196 zcmeI1zfQw25XSFHhe~BYV(I9S2~qyDMO8s!r8|GpZdB?55@Q~NhvETv8~E;QRZZ$# z#e%3fOZJyK{`Hd2N!&|`h)1upA<=+{dZ?m%eKeUO`=zdxPU_i*GI*kAdY~gZq!~S} zjrIUT@t2ci>+-AkT+@Dq1A1qUxgqjivz5F1oc~ z8+m{-A!(7cimE8SGiDE*P);cZM;!Zu42Kp;tEdqtjyQ3O%qc@*jSep8aN;7Wx9)&D zuO?88M@TCP?O=_63{wDR&pmuAqhRe4FO%Bnx zw-T+%8SzZ+IDedcw;|q13-`H!JI!;Q`wiS#o|9SNH^Z0n77p`Zau<0{eX-(Qws7Lh zDp$Y8j9DuphW7?#IB!r_`T3ITu|~-qE=z4lS|qKa z>MD#zS7E5H|M10&JFra-?8Ji|NdAA_{{DZPPRWne9dHMJ=m2-elko_UuId?BR{4PX qh$=wTF diff --git a/core/storage/states.db b/core/storage/states.db index 658ba98..0615fd1 100644 --- a/core/storage/states.db +++ b/core/storage/states.db @@ -1 +1 @@ -{"afcb7a4d1c9aaf5466544c60e8421803f82beae4dc19ef9b60e958c8064ace31":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","039fd18f3f74f89c03dce042b7c8ddce98ffd2a319ea9bc3965ce03388f6c25e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e656f3a900e313c3efeceb8c5df833816f511cb0bebf48ffa14eb7eb98c8182a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","79feda8150fd9ff28824bcff50ad81b1842ecf4a8451bcd3e1fcf165d5cf3c47":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0b84fdd263407877975a978b0f185a0b4c8774f467bf7488412624cd130b1d89":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","593f9ec0542e9eae2dea204533eab9972448ebebd4cea0f70984952f74843aaf":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e86523ec53ae24c49c3b22f934c0624aeee730c756abfdfd0b580e5a096c7325":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","4e681209c43c98559d53e90c7d7892f174b3727410eb25e7fb4c3721d3f688b5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f306fa13098abca6577baefc72994116b098bc42f5494b0d5aaee2fbc91dfaa6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b766a7f7f4923ba7073bb20215e3d28685ccf2f303c596b28cf0d057153ce7eb":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","9f970726ce457ac6d83d679d65d1464dc941ff58d80243eee6c7a7b086edcb01":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f56fa46ff11354db35af27ccab1b00a212d0920a8bc227918324c9d3692d3ba7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0921d937232406e4afc541a0e961123e7459ac0fd51eb04b30d3da13152a89c3":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5a3ac870856cc100f29b0fd56e77985fe65b6755b076a073d5691050a90662a5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","37e4490e17823b67f426606dd0b129c8b2a43ee2ec11a9ba6261820e1a3230f6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","fc4654aef7326655ce181c2b5eb917a5ac1b62a426d08de20d14b7de0e7c17dd":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7ef69ac815c28e1fe057b06bd769cfa739dc2409fed2b6d46955d2bb287cdd8a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","50d3c92f41acbd5378c33537049bb2d282577651722c8f451a30fe4df2dcd999":"MTAwLDIwMCx0ZXN0X3N0cmluZw=="} \ No newline at end of file 
+{"81555e6b267e7f678411c02f0e744f3c2e93f21887639b775fb88a090f0efab9":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ba117e4b4f6153ddd1b52aca2fd16ad79be7191addcad5b49cd5e65ce291b2bd":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","83778575fcdf54e9d48229f83065bf099fa3bf025b244b6aed89ad68d8d71ae9":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","9abb8541fb1baac4e419486655827c2165647a4e23c20923b8ea6846509aa0a7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","4b46e5318eeb1fcafa3a4e2d3a89ce310f518d8f86702e1da5d5cb2b61df572a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","3ca775d8b4e09cba37a2479c16553c53af374d2895373024c6301f67cddfdf98":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","3e6179253d03e08033defca55c3132fe5487ba09642fe31d27f6b992e04f8822":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","65d94a34c5e53b249d9d3c081989591f95bdaf4178d142dbb422aae17a2cf53d":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","513e3fe2e637e09451e20c68d52cd4cf3528bde20068a9f923f4953be16bc716":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","87e86d64a5c935688407ada5eb9a55bb6487e4bf89a01e024c2bd3acba6a764a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","4fb02ee58bba6fdf2a4fcb83745dac5e5db1b8279e7eaf70f81ff5f2673cbc73":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","1b1891901071058987926d2647814d9bcceb6932861f4127ee40fddd8ac6f3b6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","4c6b19275fc864b122ff30fc230d4250c1da44c602bfebb1478cc784d7bb1627":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","8787e2ee72741bbd90044093d6c7d0289350c41d8182239523b279cf56f88eaa":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7e1e23d0e941c4f141210b91693239d4d6d138560cb3853ca86fc9214a37e91c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5560b8ae901197f108be7fb426b8c0b41411517976f20d2e41b95e96b39b56c6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6c6e86cd5fe2b54aebdeac72e6ba18e8067fbf5318b193e4d47500534092afc6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","18eabafc2d0470754a8011dc91f77839b207f7a6ee33538ebf7b6df38ef3d52d":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","bf00949ec9432295b0168db44a444fcd7a4ad1f2e32c3e67cf99b68d688aa8a2":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","be895dd8d640c88786b10740693b62b91c26f6a878d68413d0912c5f9a2c97ee":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","169006d0b846327087bfdb499d2db7a298f778db8aa09301e984726a99129120":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","843dcd055b5a04871aaca58b3d598235a8ff454a86b0d4d8d722fbab279375b6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0df23fc268478c868ad54da84cfa57293409a2ae90d03e6a0966d41e72ab6f11":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","07089385fc3517b512a627fde50da61a9e79597e4717633537e8a5e3db042f0e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","68880a9f1ad01c08c7a456124eeea516a03a1ff05a63879b9bb9c6d70b85960e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","3001f8c434506060ce888d42cd054005bac07903aecf4ab996726ac14995032b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","162539cbe6ea0128d58ce59fc816c299e53ec4e477e95fd51c8eba71e7d4f40b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","74975bb9e085b7bcb7807d54ebccad0c41e9dcd077db4c204130e1fba0ddbdae":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","cc7caa8f1161a16368009b2b1d3bc0a2f75cf2f15071a9cbf873bf07b2f41258":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5f5f1f13fc22c886fdb14a02b31a41f955e78170e86cfffbd8eeb7d83e185eba":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","925870837bf7d273b0840a3ef3b242f7bf3cfc9ad808e265811c84d1c1c455ba":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","28a79fd70af601b02353ddd77bd4186aa69e106678ebf814da69605c1d6d7467":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","88e212499f6e5eb53e38a1628faba50440d23a7d2adad73292bccb30f714fd56":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0275bc9c366a022c98782bce6bc626bb643ea1f772b17756cc955afcc9e319ff":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","2743426b1c3522724933ed000478bae0ef49d07d549ff9126e9738c2c1bf497b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e3464b128cba24291e4684350cd24afb5cb5b600b105645842a4610a5be1c75c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","12ff75f35e04ba6339127dca
534e6a5b6b991ff8e1efab8d10deb4702bce16e7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e0426c0634c5ea66d21346dc6a3339cad7d8439b47238a6ca3b3ab1780e61502":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a7ccb3577ab13e22bba8ee2bad60934735fc14c3a1caf54d6dda0711314f3e44":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","43281031d2fb1b7032b71dfa04a8e41c5ea44a512e99882c792899544a12de4a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f47e47d8ef4e9dbc0a9b2f02de33dd919f4dc9a6766a4019291fba7b6536236b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ad4ae2c2b8cd7a42bde5378e41a8de03b648f1bdba00af47844eef0979471131":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","707793645c0af232d747a92b06423472c80ff48e19a0bb9a136dcc9f7f6ccf0a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","3db933e0a5cedcb49edd2242167c90c6cf2e374586414f3f468441c2bacf5fc5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7a2817c26c25f6ba395289682178ba2659b6b65ff9653685d05c71ef4ac4a6d3":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f617477bc8fd3016477e0e403cbf729584df073107eff7b9acd261484f4a90d5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","617a860bab25b474a549375d800597f994a8433f3111adf5674ef5ab30641464":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","12ee73cf84d55bb89591faec6939cf786fb7be868a2486f8a5982a43a5188ac8":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ae4323902cf3682627e0759c794a4ff2a4107f463edecbc6492b49ab72c77a65":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","18f49e35ce566fcaa1d9b3e973bab97d1cf5164f100d766f5203ca4545d17e94":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","25ae0cb3cb42db85b1a310a0366c85f0f7c3707d7dc2e7deda4d2f2cb3b1a921":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b9df528ffb15eb3c9893442d5cfdc18a6f6ff1c015b8c5ba1f827436771d2056":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","08b043d84f356ce5a2f1506993dce47e77e57307651e06bb93bce86052d772f6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7696c32604109ab792dc11b07527e7e3c5cb507331f7e31a0f40c9847a46da13":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6e8715888f556df8dde4f980ae2db9146ef1cdb3234403ee8bc88ea36fa98d4f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","dd9c5435d663eaa62e4bf49855e2fc3ff122aea3f54ded214be3b863637dc439":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","cf249f00b287a74f31ac9ca54bfba8fe1e9a3a0af66a2f218e617c92bc24d797":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","cab28ba9ddc851d7adb10b5311277411c29c06d3ca970a5d5990f0d8e84b73fa":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","8a4fb1c66cbb65d7af5551e8b3f7dc476bfaac16e192541c178d1cdae49929c8":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a3dfd1149948053df92405c40c0deb2aa26a4c119032cd8d0e8852a43633f81c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","84bf446af4dc8096d8cff2127cf50a50c0893aa907fd5b44922fc4596c13d45e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","3e94d196f46d5acd48cb65521bb312aa4740099390d0a7906553a5cba37384b6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","dda0b4b66058ff73d77758db15d7a7e40ed229b2744d849817e9ea5d0d0db751":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c11dfde2441f978999016126e387753354f60e35a692f8e10770fc1263801819":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","555e7f88984b4cd9f919cece58971a0785111b45e369090098abc18b8f446397":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","bd08428fa2229be7ddb962f8788c098db687bf1e23baa7f04d7924f99b7e64b9":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","56b2547782eeedc4c6782c26e730edfa2a8063db55c05269b35e35f757c5b935":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ddd5431b1decaf3b4106f95f29aa7c6a115f92cc2244bd121b3e7dce8f76d8ce":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","24fd66702f1b915993225b4e6cab631b6c726103094a3b4c8aeef6248190781f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d7e1bbcdddd7f6b87ceba11336ae71eebe95775b1fb50b80c22eb1f4b2d5ebd6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7ad2c233326d9f421ce8fcb3bfae229b53673ce91640c57244a7a1b9c8f85585":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","1bb4444733d4f52bb42b9e31fadf68e15cfe1cb96fb6a46c733f15fa9c1d1329":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","829ea9a17e2a21cf00b76828863ea8f7e116ae76855fa273a22
eba66ddc576cf":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b4b1168273925f8770831a71aad79c98a1eef944ad9e7093f77339982e8c6992":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d557fc7ef97dfded083fef8aec64df5396ac50183d31eca58db569fd98e176a1":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d84b7ed05cdfaf6b6757aae254f436d9861ad75284e4a52e654f1cadc6bcdc73":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b78d2bec53f0302728b5febad068c55fb9f499c1cd2b558acd53f043342cc9a5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b374d333c6114c44ef4533087806abe278f3550292b5a6bf48d36e9ed83c96a8":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6c70114f1aee28e536440f0d5eaf83810ce6065b8ede1405f9a797400b98dedb":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","99396fa57dacc04d24c0d0fe9515c3e62a25fdd64f167fa5c3c3b22a5532f076":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","63492c18860906d15deed085ecf9c029e0dc8aca8bcff53b621bb2b80b4df08f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","8e5b86d84734429735357230c2b4a9c64f400150bf8eef2d51328c87e825a1e0":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6cac040f3d35c17cdb22b3fcd60fcfa2491d96b0e6893897970f47c612f7341f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b21645b06011e8a28ac9f6a77967b2c2ac30271f370284e79c6cdc70cf6c1953":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","297900786a013bea7ae1ae2697ca2f59bbadd27370319a14abc706d41b231e8a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","678a4277b101e048cef3b56c83be4c5eb9ea445f404b6aeeef09a49862d2a093":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","624838c99db771fde34a0efb802f40a54e3da3c169f075c7b78c1a68e93df9e5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","cbe467f5404799d1dad75b146edfe886de492b273a6ddde4f9367b746372db07":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d524bcc0153da26a6e3d42b9f38f8760ba56ff60a4b39a98f5305e5b588af8c2":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6e2a310590004a473e071e1c50f8cdef95ec8291544f2b020ae4018d03152f7e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6306a4c831a28372ce5544c1c6d06c8e53eb2fff9709c73152d0a761a64199c7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","8851a6a6e3a419b92f2539d7d1abe35e44440f1827e865faaa11ba5e1b662630":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","62c2262f64afcaea1cbdac371cdd4d79620c3dda88d3534aea7ceeeaeccf6fe0":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","07e78a93d23c5a605cc16001661898e56a60aba6070b9a867b45ccdf5b7199fe":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6632ba640d2d36e2840b06bd69ef9d4e723ea88f869a7c087e8331c82cf8b5a6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b65b281e1938c8743c3281418a01cf084556b0a28d1384d5419fbb686d9ea401":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c04d7bbe24b9926f94e91913872569cc2e758ae3092ec4f143d83ee9d0771ec2":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","bf16beac8f79289b18df7be4a746e0e60df8c756a3e62c6788a1f1271eb8ee5c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","dc52997cb972eb2784373b89cdcf455ff07b7745c548d8ee36ccccceb5233e35":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","2f5760609f4337fb58fedb424c1d7acad6830bac58f2ffbae15575a1d3d29aad":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ab07e34ad6a9270858595807e546f34e8bc497e17bcee4dd95fda3ed8fa4517d":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c13e9bf9df68fbf9ac0d1a8709299c0039f8ed88e29261131d4ca3afbc49fafc":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ee3c47385e778701d3d8287ef686067e6f14726899d2128a6d9751d59db5ddaf":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","abc348f35d2847ec369f15147f2beec4b58952612e9ad2936e76ed7a0a66b347":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","542ccf8d91e9cfaa835c50ad401c209b12dc377e48a963de346d552544361bae":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","3c378057ca958dcb1253df1cedd36795bb6630d1ed4cfcbd4b0b86639a1ab714":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b8d7010f9875a0f46ccc6e993bd7888b4b6685ca5b15aa35ec58319b67882205":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","868014545dcc6f6d477f2360d0e044deee0ff7ab585bd3d4c27476e9f1ea3fcf":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f41b73f776fcbe2c851afc117171bd05daadb8bc72553b98ce5060ab1433e045":"MTAwLDIwMCx
0ZXN0X3N0cmluZw==","e7e2d33c437d36e6b19a6e933088f386b14e491216535d7369eafabeb60c7f90":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7c59627f7d2ef72e5bca3a24f769693f91ae1bf604795f025e58b53d9b1caaf0":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c2bfe8c54d757d650ce39d92fce3643e55cc270090abf95666576b7ae07b9aaa":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","259b9ccb401a579964baa6fdd8713e6b686245a193619126d4c85ef5773dcb1b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","33e208d6dd46e15bdf16794755baf0411dceb3de68007fd069b449593c0b8912":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a6543b75ca099cb5c96542e39944139f3411685c728f97bf0738097dca329981":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f17e5c0af346a8cc2c0c59f612d68883d97eddde47c7d410140504ccc8e1f182":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f76cbca5cfbd4a477ebbbe757e53a0f36985a598492a03747791c356699b0cae":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","de648b2669bcd1eb109d414fc9539a05bd84d36fa90ac0f14e3b12b44a4a7e7c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","04230e5d358af11d04e2944ebadc966813b3e6ffd94a324cb3c8e73e4690e525":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","489bc792fdd1b4d6ba1f5f9996965ca67670c090750ebb3b40f6104c0b86f5fb":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","cc22ee8e406166025723bd0c3f56fdcbaef4485ae4fd2a62fa1f2539cd433acb":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5239abfa1a2534e4510af94f8bc72ce6797b5b1d2b57c6aacb71ff047b56a6f0":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","8b0342de8e5d441f275ad251f256335f807a697cdfa8904cffd0bb530e207b64":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f07e2ba2ae55ebaa23d5fbe4b955a6c004a02f57e41b43befbb7c6f5615f120e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7062f865f53f4ef35f78cd9905b58a40c404485237ef16eaee5e4466602258b5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f5309c1ea4a08a194a5ec78a4611cff85e0414e6067f4b4eb6e7057d1f1594a5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d46271fb9f94a018998af7c6b0ec0ab1c5a4202bd46ce051ee3eaf32419174c4":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","13623e1bad8dbbc05b9cd633d39d094515ce4351fa0a29a35b39c6c7cf44d582":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","37c00fbdd878da146c8be22df279eeed17e4be5d223f325d897445ffaeeaad41":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","207f0f37e61a5033933ff01c157c5672439af4513596aba602a40b723f3c7e74":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","db472f660720a7641e319bd52290139d6e84d4409fc77520fca86802a60bb198":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b86103891ccd6102795dea6cd6271808dd31bd36cf3e0c21f66d1ccf9617f8e9":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f549cd737ab7b61a061091a8e14fdec61c52111a79c4c3478f18c23927ca2373":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e23b808d3c52840f9276208e1f80abfa56a2fce91f81e188d1b15a225ea92d64":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","fd4d9ad05e063c45da66a5c80e2a7481780e7859fb7473a01e8d3fc5f98311df":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","11fd49d28a5bac365af8b2dd12310a0e6e2b75a8bf4f9f90aec5a2e887b14a11":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","25ca3c400d81ae857aea6eb5dfa7edee862e05885b537030fd5582cfe0dc5401":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e28273b04bb99f7535552aeb22f324e5f2e93fb49535180055e70e4e5d9d61b7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ebcc7954e3aacee401e0045f81765ca15f1b69772a8170eca634f83037ef9c31":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","3538f97f61187de4db11f9f82e5f3aa0bdfa26d4955e7bd1ffb8a4166dd0251b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","61132cbc474db44975dc162bc82713315e498daff4cae19750def5c7692a21c5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ed39c4537281d501ed0c4527364cc839c6fb30860f18cf0b99958550da119ba7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","8e217d37a674b64a3691da2b1415a7cbe570044dbcfc4433d644d84ade1f51c5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7beed0706b79aa3cd42f749f6b35cb1109f7f77bbaac0c0e42626624ce4063cd":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e07459b45ecf542cbd52ef184991f6d31a02bc6c850817ae6fbc324794bf368c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","9cc2812
fa0342c9c61627c0f7ca09798b0ab540806259b2bb9f3e1983f3d5a31":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a7314148d96c8cc8f15a669746b10f3799e4dfc2f0c94a90b05f01b05c19a57d":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","50d8e9992818b157018cc3c2cfdf9ccc0ca04c1d8f65250b26f0e3e6c436c562":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d9e488d3acf97c7fc3a2df91b858c8dab0f18200781ba442da80220ad5099a9a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","80434db216be83de08d2666c9227986d975ec49faaa671eb401371e49fadd209":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","74eb7e145d48b63d6d044aa0cef5f3cb879c47b0a46b213d05f398563ba6a7b7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","1f0756e1f7a7d85691412705914f588d301a9c0956a1cc1c4e78e9b5869af06d":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f305a44ddbd1109cf4a043e718293ac2283dc534ac3b76e882be39ae63dc755f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c3ce75473e9ac90a389a48aa47db3e92434dd3ac269f8df0e95fb20c30917da4":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","66364ce7a3a133d12797ed81d5fea6bf1224c1bb9523e0e35997c8f6fffddcb2":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","45106b6554ef5641383efccdc8fdf509d875dcc8192a1948fa55a96750b54337":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c0e9001e793d70708844cae87f275b4fccef5280c4cc915000f99aa46d962972":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","629e4a160e6a9e34ab67fd79f0937269757f0a84d1aaf99fe5f9d91c86d3e6e5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d98865fde07f00d77fe11f48be84ae6497d6b836d711a594879df4116bc50eaf":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a621c56676d54ae764b0cc9fd8af806f4c252b69b25b51be0867d743f4e6af95":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5559ae20190c668e43313c549ab3eeffb12135d4a535814216f177187738a411":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","2c13733867d9bd5dbd8e75f9dfdc558a9e23aea2c8007f2ede2ebe93c12c7c96":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","84be074b66ac485dd06f819056f281c54a135607a1f36d4480b91e138181d09d":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","720e4871c136329a5c90d553106f5e2209bb662b858518f749fb1adea3e997b0":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0eb0620d63b875a6648d9ed3ab221aa9e86d5bd83d0bfc12ec4dfa962ea8163e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6a05eca2c1c8e314e1fc79d05d88b2a29fe8608a360658b4ddbab7c8b0e477e3":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","53efb9a08508d287882b5a0d5df29f2bc9b4307107167ced35b414c98fa53efa":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","4d870dd7ddd402060fd7bea52852d86d175e2872182793849c58ceda364da5c8":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","8024c37b37ee3ed4e23b81405f52cd124d96ea6538957a1b0c892a2d28d08a61":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","04559e99943c52f8cd2834e5ee692c9a6b59c2329cc7aeab197636d4a9fc4359":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a93de512aa0d8e2be428b88f33f842446e6ae69328485f6e9e292f638f3ce317":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f4aa6ac223b2c6be8d5f30898dc6f019e90892e6fbb1ac00b8a86165176ab90c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ca4849c7ecec3746c103215af8cac065da14fe9cda833bf19d12c46322ba47c0":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","bc06b5b7b3946ba9b7e2d04cb5893e816258778176d884a0841c2765f2ae4c53":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a1ed7c14438b29f1d38ce6a2027d5dd06a6b7e069e04f68cfddd4d1c8d899dec":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","9dce6b2a528fccb66116cf4934f5fad6648a02b67713f45d42b26d0cf9231730":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","014bdf580df9e75efae8ba5c155a2978f85bd8fac6b5121643f3e0ba5a8caf95":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b6d9e8d8d1be22c3c886fbe3680a69754b2b2934fc1c1379822a123a3c9505aa":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7e9dd18256ea022b2323adad7e586ae3cf7fd1fa9eee6c1d956082843a19dc80":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b605f536cbee221ec6165531c7e2fdfbafdd1111a1c9976eda52e772c8584934":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","11d97bf09a84b9650fd51d4a7454b8eb971c068b5a50870388763ca6c9f9ec89":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","810876dca7113bc8be9c86adf26fa35832
57241590c1ebc04778b2b03392b6f3":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","73412f10899dd4db0d6efde2f39c4ea2cb5c197ff68125494d1da43611ab9929":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","9259dcfb80f4d646b9efc28281a91c0459ba108095ba91d87d1f764bc2376e92":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c634bcb719425231449ba58e3c8c64460709e786e9377c359b869d4b29a04ef2":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","2e7a1e210153cac763eb4fc8f8200c53b70d414c9cfd0bd6e32a4bbb4a1c0a57":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","1a7ee06e7ca459180a0eeb09e68f5865fa3feb40c7e51a2aa3ae32df8a657dcc":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f8c6913579a8512f148ea0422a907fb55c9a44a2529daca1fa5361372b5ee3f0":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e7cd6c7861da77ec4be85567dba6621ea729580a4fbcb4c03abe16d66c52245c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7a87685085148c64c26d67381ae281027203c7a0b3417beb68be42574eada2ee":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6a5713073e9718304415cd4646439ca7ede8209a3deeba67474bf34c0f81857f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d0d14c29e739a8c13ef3afab2e8e2f253d69c1b035e73b987cc45489354e1e9c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6be3e7a7e1a0656dec3e7a0a2351299ad6acc7ca57e4636200e3ccf5949167ca":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b8b844a8b2d82426b49b1d257e63669a62b1e2724103e82a6bf23df7a0eda9dc":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","65bcdd63ad2204258115d899afc6740a0255a11703dc335351a9159fcdce4261":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c5b81462556284cbf2e2286b35b9aa84fd1af6c83f890a5dec058f7b5c653b5e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","507c95f0f5bd9c95c718c6b3ef2c20c83f51d54288a54803c28df2218f5f2615":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","748ad0c62de2d9c8ce6a62ca64d5ae4eee750199935741431750c4df0d776845":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ce2825476f0361ba6da46fccccb2bae61ef4d524eaec7263f7a31f3bfde605ce":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e046982c84456691e60a37a70f369ba39e0122a7c69b34d1a0b94ee38713fd20":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","817c506c878b3b903151ac8224136179e1db57205818f2ea4eecda10ccdee287":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0ecc194e4e4c3ca94bedffc6610623830949e3483ddb29fa010c01786370d870":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e7ab8a0475d639a83264815f06f9eea077bdc81da714fd804a8e61dd0c6bd340":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6b24e6e66c43bb849b8b3d1712e9e86caa5f85c450d0ff6d855a5a3010e0f9d2":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","212e813a82ed31a218a7e9573501b85425fcc645506c2fdff167319aaf1bd981":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","00d621c1688c08877413776adadbeb87df8d39334c0fec725b3b5b93f85db0db":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","9b42d157e0000c4e060537cc54af76ad9d4bb3e2bec51c8e33171ddbd4bd940a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5738b515df140e8a70f31aa9921fd39825ac269c99f4164d7dc6027414fffe89":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","1a1ffa9dcfeb3ffb894823acd02122d5e2c7eb1b1140b506f73e1eeb4a71ba05":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","020588ecd63f418758652d4addd210af4ba31f4819531287b7c9fb660c58c19a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","eae5833af6cd6cb0fc43ca62b008b7d31b316be6f3360d50aceab1aacf9a1b6d":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","437538346fd1bf5ae8437c1ce81980227ba043c34e243e5e9e8d6e608f4b45f1":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c73446bc6f36bfe33debcf2609191d8263c4d9205b5c37f7f03dcb85e3c3c031":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0c26f62e0d5aac2137df45040f678a7cee1f208be527272bb18d3e9b687dc64b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7cd0666a8ed1a57afaa47353e8189fce542994da7e2b689875d49ff9eb3ed079":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","fd63dbd6369d0f92b3de89860530431d56b40b73f58fe94972b849fb0c004e9f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c7038e047b5306b75abc109e325bccd2852319157ddbca8621e86bdffdd3a7e5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","87f7c7b91eff0ae77c6bff5c5b6e58f58af8a6ba6057489709373c8941535
681":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b041fa5ee227f2f5ab643dfe153a81019d7100d2bd012d7973a74725aeddcb54":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","53c7dd5602998bd289443f581ea79ec0aa80a010bfad0ab0b43fc11c0d10a55f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","156496838aeeed3ed5c0b40335403611b0ba7fcb462ae49f6979f7cdbf8178e7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b55b483678e13b908139a27f41d342fe531e291b24e78844b25f5d517b976cc9":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a82b41c6a3b363190a24a676a4f7105edbc9dfa337ee0d6e4853a461624db161":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","80659ff6036b6f0134a1a9eebf20e07f70721fa0e6cda19dce6f30813c966ef1":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c8329329880f44da6ed76f91d619daa667796d91680c6df1f76b8772db7c1133":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","93665815b88cd08dbb1b1e103cfbafc6a238b9cf3cbfc2382ceb9263cbfc91e9":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","03aeea1d667362fe4811ee4fe959b33e855bacc85c5895cf159c7fd86d4a21e8":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d0ac21c0949f67873f6e0c8dcbdfbdf8e5a451add588ea4fde81a5312792b291":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","dbfdf8b7c1485650c9d30d3699f33d362e62bbeb4b511c8208fa15773e303006":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b02c4a7943ac0541c6e5e0b4353e3251113c5351c680f642b67b3a87d98535b0":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","8087c16f0ab3ece47026a3223c3d531f1eddf5bd8bf303e73da22dfc23bd9044":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b8748ac50b40b74797f0f74fbbd9227f288ecdc1f2a8cab5ed355ce6ea06a0d2":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","2839e7407d62e7f80fde1d6c2186cf58199c4d62df232eaa13368d8f9686feb5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","be45bc57ee0d660bcebd55481bc6346e9f616e39ace99bb188a3c127ce47dc27":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","aed1d5f1d51b94e4d12cd9eac299292919a128cc838be763e55744c6b077ec18":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a591fd1b34b681205ae6295d0dd50cc6cdf4f67190f6d3197a50a09c1a413ec9":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0b259fdf558b9ae77ab6bf2921e7e42693c578ced65526f13d24115d4eaec239":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","4fc9ed034c2229a21bf86efbf7e9404b5012fc992320979838abe93bf074fc0e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f8c0dd2a4fabcaac95752c125237214ab2fa5f9cfd82070fb2429e56c7f26240":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7a00b46d5171e4646e0bf991cd8a184dacc0328e108b38fd51b44be888a09f10":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f3908384a1c9284a15af97d34757f60ef005c2e1f3b93b02859da02cdae2c4ec":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0624f072896e07ef89d8e9f8f7e2e2116f93898cd573b63c1335ec4ef1fb6efe":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5c4ba32cb3b6515b44464a13a0cc9dbfb57cd478e6ddaf8343d01c8293d974c4":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6902dea8c7e3bbd402c5ae9b38fa8adfe8dbcfcead2d4ee84685875f9607b770":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d42558e6276adc49da29ae6f3d69bfbdb7c9c402f0c3c7ecb6ac23a099c56fb8":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","38def90382a1698be1d9b06e569d3c0316d1961674de46fb856827cb2fee2f1a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","8ce769792775203ff84813fb605f0d9d6e386151d362943322c02a3763b673ab":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","bde29b236e16b8e9ff0580d490f58947757371f088e52b157e9ee30fa094b0c5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e4980da13333ab54dd50ea5111c3a7af53180290703eac3890bccddd2474b59f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","4c9b20cff6c64ea85a2852d868d242c9e8f71c3cb4ae2255058469ca85d8ef7b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","92c043c75687d1103174712e17d9a63fa3dda7cda076987dbea3b949fae0914f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","82af1602f4446eca24689ceb7d39c0dc77d92c0fe901c173a0897ea0275bd002":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","df2c23a4fd95a83c014ee0c43e7630f733509033b8eefb08e98935ca060ce505":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","57c1504c9af0bc78780a64071e0e8958675c5e800fbd1664dc0c839f71a4ca37":"MTAwLDIwMCx0ZXN0X3N0c
mluZw==","73af0dddbd65a4de6aad8e70981c0ee919937629fc43ee77f8b09843aeaa7d2c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a70527797f2aa8a65de5c94fda638d36163426a5b34028db07ba805f22fa1dc4":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","88246b509ec7f9dec0f630b8645760c3c759f3021e00c684191eaeeefbda70cc":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0897c9db573e8579d7d937bdb5041ae62a60e6246149be88dc7ff739fd0d390a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7177ec700132075e742a373a4b9ccfb2a03bcc6fd681a268dbcbfcb4c550a9c8":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","3a1f293cfb232082febc6206c8e59ec22a647433dc8c426a4784df2112a686ab":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","60387d6f8cb7faba7df099f823eec56c5317729e17674d31c826b6409e187f57":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","23f847fa326dd7e06535b28e3bf5abc26e6bbafebf2cd88f66472027fb8b163c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","58c6dafdf9eee21278ca3052667018b9595292bc2d312f58d5d6ac2bf109a005":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6fb38fb4a31fd9c45aa6463c8ed8ca7171eecc5266e89712085ef461452e72c3":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","dcfc54d428a39f83caff462c9a6a7ede429fe7c7741c77937b206e075723fe2f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","51d3635c9da0ca2488983567438d32f8abfe3c7c123c6f007155ed853482c5b5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","554c8790097b6374394e5aed5b9643e18753ee6ed58d77b3e4bafb934211ebbe":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","9556be93a0d4c9693933f68675a7bfb0dec898a699dfbc9cf29a1a63d75487b1":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","8128382bcca517b3ddd02cb61b832e7db9396e046a25160a33f5e20f7126980f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","07b57f2e265df6b0497f04ac6827d7a18bfd88f53f3a4f8068fee77279ac2164":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b2130bfdf076e7811b3e540486efbd56f043b238c6fe8ce6cea856afec9c8b6f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","93a7c6de3fe75db2cf91d7d215e2280baa1998969bc8e9d89c0696a73c3e7e24":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","885a5b4a009af92a76d3fa602b3a97c627e943bcd34fcafaaf53caad2debe7f5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ae12c1d6f4bfe318dd06fde9d1d63956a2b4a2d741ea1a53b49981caee3fb929":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","2ebe352f089c8ec25bdfd80204801dc38f6343381506bd1ba051c8c201fb8792":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","36a70e46df00d73dc579bfa0c2df4feb2c0d37bb394fedce2b2c325b7b52505e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","97cb1a206cd9e8ebd11d85dd9d84d6b4814a4b7ec86b069060b75da4aa920015":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","59d5a78df764850d40fbdaa4b5b5a039948f3fee7e820468edef1012954d0b1d":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f7d1bd5d4c6ae1a48f1633ac7d4529da3d16e524f2bdf37f84ca7b3c9107c281":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","70dfc4c45b869abd22bcd2e3c25c09a82658b4c42c485b40c37321ed54516d02":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","4e0e6552d319edde0d14c8bca2d95617d7c16813c7d2ef86e11a3f0316e2745b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5e7922f362e231af42d7dda00f60ae53f370a405f521bc9204b0c9760613feb1":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","3a424eda93548849fdb56ab7479a52a8c67d13f96da539ece0fa6f582571dfef":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e1d0500bc089fb41738d2637ac4c53a309e8e8d08564e9bd814b7bbb90f61acb":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","9ab239d6fc601e190206051adca425ee83b546217e697d0ee0086059b100cb87":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5c8d0823720b26db57274ea51b295308ccc04dea82d00dc32ba70c37e9b8ad5b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0ec1720c387e60f4117d499114468d2ce1bde2cb671a2ce50e8022199f59cfce":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","30ae0259ee0cd27118ffffff8603ee72938f01b9930ef844d411003a9ef7e26c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","4f9cc75ed674ccceaa394305daf425b4a0c88b5ebc57118c0c1491467f56f5af":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c7baa6370ae3b01815f13619b72714dc559bfe2361b5993648d7d6bafcb70d12":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","9cc12c0d1ad80584f
a79ded58989e145473185abd4dae76147e82a6c86eb7a5d":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","51f50b980f0aeb712744272d52f2752c1a8184574da967db96479ce8b4002fec":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d2c3379896b18c34fcf15561de153b742daf30d58eb7a2cf70f26cad65bb9dd4":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","264665530d56493a52ff466e07f020bb99a4fc3ada0a2c47d6cfa8c21e4ab011":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","1c8c21aa2952b69fe51667a814da8a59ff0bdad47c16e72eef7c2401b7a6b4f3":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","51a84a2445908c7156d74b1f09d252dc0b8a51645915fa6db261d4db14e964a7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5b78530062357b91ac804dbbcc60a023f58b73a30cfba39dcfa6d60401b75a54":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ab355bc0de3f996ab56e5622d75aef0cbb41500f271bccd41475bf375cc4816a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","89d530108a2a36ec0e7c191c0650baa361b5a5f9895b5a5ea32656baa3c1cc58":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","58764e179041a1dc49ab011b3a7c246a027544a68a3f6beddb60596744a15448":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","088f43b0b31d8af0016af46bbd2a3e0343aeb59ea5b5c1b9f110ef9c582d2a8c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6e39a71d914dcc0a3e0e602fcb3481c82a8e97dacd0a99ef02075de481c93ccc":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","882ea8870356b8cc4f3cc99a35f2ba2455b051fa2fc26b84d50233fe8d526314":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","fddfd0277dfa895e7a87a552b97917668fa4b858c455ac1011c27cb660699dd5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5d789249ac4578a34dc9bf44151073c9a706eba609203136894292d2f44d8c86":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","62be9b328bfc05db76dcd60dd1618e862881fbcdabe55986bd758fe8ee8b233a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","52a8c60a96561f8c4332c217ca9a972accac22ec927559a8a89a0c97942e12a8":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6ae170a602e56acb782501ebb7a72c9bb19878a1f64334e995d544354840ccf4":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e96725993c105a2979cd4f150ebeb7a112285c36ac1a0b57e33a9547b894b584":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","2dc7d35d4dd5f873f489ad7d0b6f21fe74d501d6e2c7ab03a7ceee34c6f8ac24":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","3bf3da7fd94e82ba81dc8cc790628832bf0def355583aea5906aaf10cba70f77":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","cfa7ae3fe8376be7eb9eadfa3b4ac847e3414dbfb078e9197d7fa709bda2218a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","36a9c4b5cee69346d7e36f5ccec7b220a6c88dd29429bf4f3dbcadc30d6a26e3":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","888a9bca300b655525b4a41e46b861d257eea65102b73ea8f3601810a121c01e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","893c7da585d3c59ceb8559401749a93d8a65d5484d1afff430719b4dc4739bd9":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","2fbddefda846ccaaeeea501dde75fae230d6034f369a59dce7f2b0f89ed3a8ac":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","cb56e10b128c6985f5367dcd0739f861c2fc2a8e5900eea25befe5478f2a87bb":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a7ffe1464bf73ed0f878a6ac74cb1380e311de88b5e79886f958aa0eb3cfd777":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","45abe6ef1548ce0f4a6c689db5bfda2d7816c9c0fb5a072548b9bfe7ad656b7d":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ead20d66706a4723a32d023fc2009d1844e3481b911488d6d4d2cc583069e1b7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","2282f2b3215edeee78b653aed982845db637192bb7b8d956ff3256056c862cc8":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","9c79948da726335523d0f11d8675200042d9b7b54ce21cd8f868772aeaf37468":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","fa981f3f0494a23578152b68abf0493c9bcb952886b6ce893a72b8df55560ea1":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e5061f04ac45fb0238e1d52560e59f0a4bd4ca3c037795febfbc5b328aa4194a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c93fb1cae0cb918abfdb0a28f8bbd83f96da18c49e59106af9900dd298602385":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a8883055393e43948d2563c235433a81e31ac19688548e8477febe2ff68494c6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","16decdb8b09307e9a4fefb2a4228eda5d907c866fac6
fdaf847974339ae7c592":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","470ddba7e4cf62f8bba17088ccbb6578eb8b5d3740425205d32828d19dd3596a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a875b996fc21221cf328c85658dca94e43359942965058a2e0ea8d484f494771":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","2b81984e47373b632202d6954f904b510acb6dccd859d703d68426e216f41848":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5e571650d050b656d52a88dfeafcd0eb81d0d08f87a993963026d2582793eb5b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6f9f1277f6fa37249381be472c2c524b78d0a9a57233f44ea29f6f5e7f559f3b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","dea3dccb3e9c067887355a733a925ef09d1880b1aafb11d347d624f34a0d4ee9":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e33dd7f07174d0dc2a11f8de85f923ed34bc0080015dde56dcc284d7e859aaf7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","4b8d81c901a31b8a554c2207087c747af583b46fcad6137e588ce259c5893e31":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0cf67d4b4e87de38be7d37d5025b1676dbf120213da59ff239eb60c9b68f2986":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","4138fd61dd24e0f04a365a088b48f12138746905ea42ee6e865f7f3d34876d66":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b69f3c345a21e713dca5d3b3841c99fbdb7076a8d75f1d66724d10fd799ed324":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","bac4bc781e33bc449c1653d421dcd5d60097a49dacadb8f067f82eb7b1b9484b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","955baebb4abe5eb643876a4dc838f06234b660244b1f670a378425bf1af5e0af":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","be6b685bbe557814f22de574e6bd133e169755d3381cde16159b5b5b68a33b82":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7526130032cbcaca4084d5ce40b313a1b102feafe87967b88f5e54449e425713":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7e9102d9401c7d8d58979da6a2493570794518c58df04273b1cf22d7cdc09bfe":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","1e790c3119dfd60437c1ab82293f57195051a573ed68edfc659fed74bae62abc":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","9a8041a7f1684c84f59c43b42c77004e06be9c1e0cdb43a9693f82b6ebe866fe":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b39eb41d00e49738973d58b5e9c9b0290cff2919f7ec0d4651b085f011f1d143":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a3d9f28d524149d37064eab37410c018e1ea7dec2c1b3340f40fd84e5b7d86fb":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f2d284b9ca26f68f1824fbf952d433874a7020ab870448512ac8e2b481bf3d34":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c99445bc5f0ecf612e3b275b8a9cec51bac840b85bdf175746893c7d422dad11":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f69f89efe5979c00d591a7d7e9a2e2b16afb3797d5148fac3f73d729ceba9e46":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5dbf9be519a57acd8e537c990201684acae8add67e255b31735415edde380c68":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f62e45b94c80fe96eb15c75d27fd925b693206532de7f2501863438565698775":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","746f62b9b2e01b544a7bf6d8833df9ebd5ea06161f2da3ea18cea273d68e8003":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","1375048986328c5edd8e8aaed6433fb40ba22692e16c88a6bcbd1a013909defe":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","fed422a43443778abd319ecba20d2a049d01c130a7f8ac3caa581329879f2226":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","feee4f59117bb83d2d4b8c6a068e51e297b5a79d85ee3f298c48d9ad52f3345f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","54b55faf1c309ba58fb58ae7f243b50e37395e2827572c86410b305520aaf38c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c6734090a6e27d4dbe7c5f16d7e02379310094382ff20550189088d01f31a947":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e34c597a92ea735ed12e73b6d96a899ecc3c07bcbb18eb4e7e22f9ffe36e15c5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","9ff284232d84f199c26aa64cbe1567cb437ea3c81966f08acf61ff2cb4f940e7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d4c84dfd4b774496ce9c5932740937816fb7bc37d244f720522820021a0e3615":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","857f0619de9b70a578d4aad4a7cfcb973bf01156c8c1d70218038781f1159f58":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e2db1339b4a665ff34e86eb8b5c5d4dbc9653be3e549e4d7b3a3a13a7c544eab":"MTAw
LDIwMCx0ZXN0X3N0cmluZw==","015defa910ae686845b87a7f3cd239c328a10b397e8b957c0113db3a1369c6b0":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","09b2b7c41cde741f5f4bd1af75420edc7b33c6a172f96e7052cff710030df059":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","02e077634e58577a2caf7dfb51c292e810c49cc593a99591183f40dc2322b359":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","37f4c48b5acd1630c05d7dbad6634151e33424b292e4e7073952fe9558eaee15":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","0d97e9b459869c647d97e8a05d2cfd856f190afc1838d2bb26ffb15254814c93":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","174de21653d3c757f7bb453e3d34b8e54f8ee3e5a0e80398cc0f5395e6cd1cdb":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c1e84cb1a98cefac5276fadef891b8473253e9d7ef8e05f7f858747bc9e69dca":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","7b04a88d7b851f6d0db8b60187304afdb09d165ff65c2a2ec9fbc217708e6950":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","177bb06ba41fb50124dcacf8986b340f29aa5e4e8b85a3848d3075b432ed6835":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f477a38f4d439192ba9d05846390577e66b9012cc86c106d0ef5aa41c2134145":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b7dfc8c7105d09d5f51df617319dcdbe8b093cee89a3c20f657a6fb6afec38b3":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","f40e85070d7d96dc6118e20d05e0b719a8f2d2fc9d008e83b1c52001cd764e81":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a1fac62c3e29408fa7dbea40366d98d681866be860245b2050eac53f17e66d63":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","168c175860cbb344ea7c214bd456b278ca7aabf616c5622fccc9eb9a6d212a5c":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","aa8890b7a24322e9525e9c8055444508920ee8ec07b17f46609b4465e5c27194":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","65c23995ca10753f7a4062e2cc93081eb2bed6ede9e77f4e811659151fa9d032":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","40477b61d300235597d9fabc4b5ec05223be15c2e88949f7c88d760bd7ca96a8":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","42ff8e825dc00dc53667a08259d83999df0983a88603896e294bf288a3c8116a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5c0e0cba39f83b2fbb1c5851631c550d2a15deab5306046362677ca63fdab884":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","04bd268424b49d3d9250d26363c04b55fcb4606a61e9801934ca2e57b4583434":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","65db0b7de0db059bf668e3976a9519a00cbd13b55f6fbf30a4757e2d6a16127a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","638d10667fb4f776870e6c48e9e2cfed849e83722aa6f12bca56ca1666710db7":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","067a0a8a4bc784111f366a75c2a53b09a8bcf9c344538ae357bf911090291a4e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","dd2ed670bf8c253312498c7ca3a8261fc6e95f136e0b17d09f719f1c374b8fb5":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","4e40b0e102faf87da15e1a8aa749ed98eff40e28798662382752babf537e7e38":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","249133729389004579576aa3c00e73461f9627e32013f93196cf535145da6153":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","59707fdfaa3d6c2a8976f3119b88c85cd972bffd24e128d001016da2af01102b":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c2170a56321a8e3dc68c8aab082b0465ad680916f829e92e3c6853e98e96db67":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ce07ad7e3f65a43af24b1831580daec11477d0c4077cd3a0e1bb9d87d85e8e4a":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","2f7b2eb79b7efcb0ab30623427c35263fce1f698f33cf2f748d76048f34af053":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","5bace4b5ed2abf37bda40b0621cf67f6e06e9fe125cf259cdb89e53cbd57e92f":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c9ee71520fafc0ee0680c20158df24f045d6247cb0165a67d7a544f153c29b55":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","674dd70592e8471caccd9d57dd0f51980fa6f2776ac735ba5d627e15f868644e":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","ef14b8c89a9c5c5e824fdabcbc6b4890c00d7bd63b35dccd31e8fe3d03241bc6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","999528a481bcb37bee8de09ea300e24f71948e34679644b2cf0a4908f6d3fddc":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","becb1aa6b4bd1749c4fd9b8f36febf8fe4c280db51b68e1a1061f4eaff324d99":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","
1521d8e65c84bdfee9bc4659928a0dc34d09aa3d1a2a4c57b521643a29b52710":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","1e409018e1a3e2a09018f6073b03a08797ef51e0980b1941f11cb2c373586b97":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","8e5d0c517a9ca3080806e9681d1d426d759a4b908d3ad694153a6638f7da19df":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","802ec8c10bf5abcc747b97d0bc8a1540495f9d225a4e165d4366deae2f6797cb":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d15660473274f4d51409e5cf456e73ac9933f5ec069a43e3fa52a433ceec9b75":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","b6dbe98911b0aa3ccb2a0f5831d27e0cd10349abbdd1591b615360bdf5184a29":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","87c861561380e41138c77e74e3ebd6c387e63780fe5b11c48e13ebd063d8b3c0":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d72eb62f5995598bd94628ccff198367c78d395b67453fd8dce5fdaccc91cdf1":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","34f988394909c7f8620f6a7680b7f3d011007e202da5684e6eea373b8bf76679":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","e37fa7769f8c59968a85fd0d83207578c9fc321558e7daa2dd4f601725706950":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","1c2072afc581a69b9740abfbc5b4340cfa44203abe1a63c5e08b963219aa1426":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","8a23138e4c6c305adbcc9eb1e52a183a86499aae7bb24508a9b65b9a52846095":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","a9c7d5744920a1d78fbbea5a2981a66fce202400a5c1e637874d58fb65d66fbe":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","dabd62aaf7904ef8173557419dad570ab1fea729b4d433163fe9c1219e1e37b4":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","6dca1feeb2f0740a14de19f19a2980960c62a6a1858550f61ee0c323650eb5c4":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","c728e5232cfcc8b369a64ec9a8bc7e288dc3c377397a75773bcc2893fdc063d6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","402b3c7bba71b02e3fa4c50f8e8d39eefe20559606e87be582c613490cabe3eb":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","538da8c825a10c2d11949ccd3c47a65824b91987852c4155ffd5809b45882cb2":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","375d2c9c5423e96a6e481d82549e3cda4e5660282d9ff7b217d877192a083ee6":"MTAwLDIwMCx0ZXN0X3N0cmluZw==","d9f7f8004293faa8876b756438f7b553f105a34d568d5eb265743c4e21e89e8f":"MTAwLDIwMCx0ZXN0X3N0cmluZw=="} \ No newline at end of file diff --git a/core/timestamp/src/lib.rs b/core/timestamp/src/lib.rs index a954d27..e8df4c2 100644 --- a/core/timestamp/src/lib.rs +++ b/core/timestamp/src/lib.rs @@ -22,7 +22,6 @@ pub struct Timestamp { pub timestamp: String } - /* @name NewTimestamp @desc create a new timestamp right now diff --git a/core/transaction/Cargo.toml b/core/transaction/Cargo.toml index 98efecd..0f858f2 100644 --- a/core/transaction/Cargo.toml +++ b/core/transaction/Cargo.toml @@ -10,3 +10,4 @@ db = { path = "../db" } timestamp = { path = "../timestamp" } hash = { path = "../hash" } encode = { path = "../encode" } +executor = { path = "../executor" } diff --git a/core/transaction/src/lib.rs b/core/transaction/src/lib.rs index 84683db..1888cb5 100644 --- a/core/transaction/src/lib.rs +++ b/core/transaction/src/lib.rs @@ -15,17 +15,18 @@ along with the AfricaOS Platform. If not, see . 
#[macro_use] extern crate json; + use json::{JsonValue}; use db::{DB, DBReadTransaction, FileDirectoryReader, DBWriteTransaction, DBStateManager}; - use std::io::{Error, ErrorKind}; use timestamp::{Timestamp, NewTimestamp, StringToTimestamp}; use hash::{Hasher, CalculateSHA256Hash}; use encode::{Encoder, Base64Encode, Base64Decode}; +use executor::{Executor, ExecuteMacro}; /* @name Transaction @@ -72,6 +73,7 @@ trait StringToTransactionType { impl TransactionTypeToString for Transaction {} impl StringToTransactionType for Transaction {} + /* @name HashTransaction @desc hash the contents of a transaction @@ -117,6 +119,9 @@ pub trait JsonConverter { */ fn tx_vec_from_json(payload: JsonValue) -> Result, String>; + /* + @name tx_vec_from_json + */ fn json_from_tx_vec(transactions: Vec) -> Result; } @@ -169,7 +174,6 @@ impl JsonConverter for Transaction { } - fn from_json_string(json_string: String) -> Result { let json_parsed_tx = json::parse( &format!(r#"{}"#, json_string) ).unwrap(); Self::from_json(json_parsed_tx) @@ -210,8 +214,6 @@ impl JsonConverter for Transaction { } } -///// END JSON TX - pub trait ReadTransactionFromDB { fn get_all_transactions() -> Vec; fn get_latest_transaction_id() -> Option; @@ -271,7 +273,6 @@ impl ReadTransactionFromDB for DB { } } - trait TransactionIndexReader { fn get_transaction_index_as_json() -> JsonValue; } @@ -280,18 +281,19 @@ impl TransactionIndexReader for DB { fn get_transaction_index_as_json() -> JsonValue { let transaction_index: String = match Self::read_transaction_index() { Some(i) => { + //TODO: parse/verify proposal index i }, None => String::from("NO TRANSACTION INDEX") }; println!("Transaction index: {}", transaction_index); + //TODO: convert DB json string to json let parsed = json::parse( &format!(r#"{}"#, transaction_index) ).unwrap(); println!("get_transaction_index_as_json(), transaction index parsed: {}", parsed["transactions"]); parsed } } - /* @name CreateTransactionIndex @desc @@ -321,7 +323,6 @@ impl CreateTransactionIndex for Transaction { } } - /* @name ClearTransactionIndex @desc make the transaction index empty again after block commitment @@ -339,12 +340,6 @@ impl ClearTransactionIndex for Transaction { } } - - - -/////////////////////// TX DB WRITE work - - /* @name WriteTransactionToDB @desc trait to write a transaction to the DB @@ -364,8 +359,12 @@ impl WriteTransactionToDB for DB { */ fn write_transaction(transaction: Transaction) -> Result { println!("inside write_transaction, DB trait"); + //TODO: convert from Proposal to JSON let transaction_string: String = Transaction::to_json(transaction.clone()); + //TODO: Read transaction index JSON + //TODO: pass Node Peer name let mut parsed: JsonValue = Self::get_transaction_index_as_json(); + //TODO: alter proposal index json object let new_transaction_entry = object!{ "transaction_id" => transaction.transaction_id, "transaction_timestamp" => transaction.transaction_timestamp.timestamp, @@ -375,29 +374,28 @@ impl WriteTransactionToDB for DB { "transaction_data" => transaction.transaction_data, "transaction_hash" => transaction.transaction_hash }; + let tindex_insert_result: Result = match parsed["transactions"] .insert( &(format!("{}", transaction.transaction_id).to_string() ), new_transaction_entry) { Ok(_) => { println!("New Transaction JSON: {}", parsed.dump()); - let db_write_result: Result = Self::write_transaction_to_sql(transaction.transaction_id, - transaction_string.clone()); + //TODO: commit proposal to DB + let db_write_result: Result = 
Self::write_transaction_to_sql(transaction.transaction_id, transaction_string.clone()); if db_write_result.is_ok() { + //TODO: commit proposal index to DB let db_index_write_result = Self::write_transaction_index(parsed.dump()); db_index_write_result } else { let transaction_db_write_error = Error::new(ErrorKind::Other, "Couldn't write Transaction to DB"); Err(transaction_db_write_error) } - }, - Err(r) => { println!("Failed adding new Transaction to Transaction_index: {}", parsed.dump()); let transaction_index_insert_error = Error::new(ErrorKind::Other, "Could not add transaction to transaction_index"); Err(transaction_index_insert_error) } - }; tindex_insert_result } @@ -437,9 +435,7 @@ impl StateToJson for State { let state_sections: Vec<&str> = sub_state.split(":").collect::>(); let first_section: String = String::from(state_sections[0]); let second_section: String = String::from(state_sections[1]); - let state_insert_result: Result = match state_object - .insert( &( format!("{}", first_section).to_string() ), - format!("{}", second_section).as_str() ) { + let state_insert_result: Result = match state_object.insert( &( format!("{}", first_section).to_string() ), format!("{}", second_section).as_str() ) { Ok(_) => { Ok(String::from("")) }, @@ -447,7 +443,6 @@ impl StateToJson for State { Ok(String::from("")) }, }; - } state_object } @@ -464,6 +459,7 @@ impl JsonToState for State { for (address, state) in states_iter { state_vec.push(format!("{}:{}", address.to_string(), state.to_string())); } + //TODO: change from a single string to a pair State{ tree: state_vec } @@ -486,6 +482,7 @@ impl CreateStateDB for State { fn create_state_db() -> () { let new_state_index = object!{}; let index_to_write: String = json::stringify(new_state_index); + // TODO: SPECIFY WHICH STATE INDEX TO WRITE match DB::write_state(index_to_write) { Ok(_) => { println!("Successfully wrote/created state DB"); @@ -507,6 +504,7 @@ impl WriteState for State { fn write(state: State) -> Result { let new_state_index: JsonValue = State::to_json(state.clone()); let index_to_write: String = json::stringify(new_state_index); + // TODO: SPECIFY WHICH STATE INDEX TO WRITE match DB::write_state(index_to_write) { Ok(_) => { println!("Successfully wrote/created state DB"); @@ -526,10 +524,13 @@ trait ReadState { impl ReadState for State{ fn read() -> Option{ + //TODO: read json string for state let current_state_string: Option = DB::read_state(); match current_state_string { Some(state) => { + //TODO: parse as JSONValue let parsed = json::parse( &format!(r#"{}"#, state) ).unwrap(); + //TODO: STATE::from_json let state_from_json: State = State::to_state(parsed); Some(state_from_json) }, @@ -548,16 +549,16 @@ pub trait CreateNewOuputTransaction { } impl CreateNewOuputTransaction for Transaction { + //TODO: convert to return an Option instead of only Transaction fn new_output(sender: String, data: String) -> Option { let latest_transaction_id: Option = DB::get_latest_transaction_id(); + //TODO: condition on successful latest_transaction_id let new_transaction_id: i32 = latest_transaction_id.unwrap() + 1; let new_timestamp: Timestamp = Timestamp::new().unwrap(); let b64_encoded_data: Result = Encoder::encode_base64(data); match b64_encoded_data { Ok(data) => { - let new_transaction_hash: String = Self::hash_transaction(new_transaction_id.clone(), - new_timestamp.clone(), - data.clone()); + let new_transaction_hash: String = Self::hash_transaction(new_transaction_id.clone(), new_timestamp.clone(), data.clone()); let new_tx = 
Transaction { transaction_id: new_transaction_id, transaction_timestamp: new_timestamp, @@ -582,8 +583,6 @@ impl CreateNewOuputTransaction for Transaction { None } } - - } } @@ -606,6 +605,7 @@ pub trait ExecuteTransactions { */ impl ExecuteTransactions for Transaction { fn execute_block_transactions(mut transactions: Vec) -> () { + //TODO: READ CURRENT STATE let current_state: Option = State::read(); match current_state { Some(state) => { @@ -617,6 +617,7 @@ impl ExecuteTransactions for Transaction { println!("execute_block_transactions(), AFTER json_state_buffer OVERWRITE: {}", json_state_buffer.clone() ); }); let state_to_write: String = json::stringify( json_state_buffer.clone() ); + // TODO: SPECIFY WHICH STATE INDEX TO WRITE match DB::write_state(state_to_write) { Ok(_) => { println!("execute_block_transactions(), Successfully wrote/created state DB AFTER TX EXECUTION"); @@ -633,8 +634,6 @@ impl ExecuteTransactions for Transaction { } } - - /* @name Executable @desc trait for Executable behavior on transactions @@ -648,23 +647,19 @@ impl Executable for Transaction { format!("Executing Transaction {}", self.transaction_id); match &self.transaction_type { TransactionType::Output => { + // TODO MACRO USE!!!! CUSTOM_TRANSACTION_OUTPUT_LOGIC!() println!("TX execute TX Output BEFORE: {} : ", State::to_json( current_state_buffer.clone().unwrap() ) ); + + // TODO: create new address in state let mut state_as_json: JsonValue = State::to_json(current_state_buffer.clone().unwrap()); - match &state_as_json.insert( &( format!("{}", self.transaction_hash).to_string() ), format!("{}", self.transaction_data) ) { - Ok(_) => { - println!("TX execute TX Output AFTER: {} : ", state_as_json.clone() ); - state_as_json - }, - Err(_) => { - println!("TX execute ERROR: State::to_json is NOT okay: {} ", State::to_json( current_state_buffer.clone().unwrap() ) ); - State::to_json(current_state_buffer.clone().unwrap()) - } - } + Executor::execute_transaction_output_logic(state_as_json, + self.transaction_hash.clone(), + self.transaction_data.clone()) }, TransactionType::Input => { + // TODO MACRO USE!!!! CUSTOM_TRANSACTION_INPUT_LOGIC!() println!("TX execute TX Input"); State::to_json(current_state_buffer.clone().unwrap()) - //TODO: male tx }, TransactionType::TxTypeError => { println!("TX execute ERROR: TxTypeError"); @@ -674,7 +669,6 @@ impl Executable for Transaction { } } - #[cfg(test)] mod tests { use super::{Transaction, @@ -684,6 +678,7 @@ mod tests { ExecuteTransactions, State}; use timestamp::{Timestamp, NewTimestamp}; + #[test] fn test_create_tx_output_execution() { let new_timestamp: Timestamp = Timestamp::new().unwrap();
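Note on the executor refactor in this patch: both new call sites above (Executor::execute_proposal_creator_election in proposal/src/lib.rs and Executor::execute_transaction_output_logic in transaction/src/lib.rs, imported via use executor::{Executor, ExecuteMacro}) delegate to the new executor crate, whose source is not quoted in this excerpt. What follows is only a minimal, hypothetical sketch of those two entry points, reconstructed from the inline logic the diff removes (the round-robin creator-election formula and the state-JSON insert); the struct, trait, and method names come from the call sites, while the signatures and bodies are assumptions, not the committed core/executor/src/lib.rs.

// Hypothetical sketch only -- NOT the committed executor crate.
// Bodies mirror the inline logic removed elsewhere in this patch.
extern crate json;

use json::JsonValue;

pub struct Executor {}

pub trait ExecuteMacro {
    fn execute_proposal_creator_election(peer_length: usize, latest_block_id: i64) -> i64;
    fn execute_transaction_output_logic(state: JsonValue,
                                        transaction_hash: String,
                                        transaction_data: String) -> JsonValue;
}

impl ExecuteMacro for Executor {
    // Round-robin creator election: add one to the peer count to include ourselves
    // (assuming we are connected to everybody), exactly like the formula removed
    // from calculate_next_proposal_creator_id.
    fn execute_proposal_creator_election(peer_length: usize, latest_block_id: i64) -> i64 {
        ( (latest_block_id + 1) % ( (peer_length + 1) as i64) ) + 1
    }

    // Output-transaction state transition: insert transaction_hash -> transaction_data
    // into the state JSON, mirroring the insert removed from the TransactionType::Output
    // arm of Transaction::execute.
    fn execute_transaction_output_logic(mut state: JsonValue,
                                        transaction_hash: String,
                                        transaction_data: String) -> JsonValue {
        match state.insert(&transaction_hash, transaction_data) {
            Ok(_) => {
                println!("execute_transaction_output_logic, state AFTER: {}", state);
                state
            },
            Err(_) => {
                println!("execute_transaction_output_logic ERROR: could not insert into state");
                state
            }
        }
    }
}

Read this way, calculate_next_proposal_creator_id and the Output branch of Transaction::execute keep their previous behavior; the patch simply moves that logic behind the executor crate, and the TODO comments (invoke PCE macro, CUSTOM_TRANSACTION_OUTPUT_LOGIC!) suggest both paths are eventually meant to be macro-driven.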