Skip to content
This repository has been archived by the owner on Jun 22, 2024. It is now read-only.

Commit

Permalink
fix big number for matrix
Browse files Browse the repository at this point in the history
  • Loading branch information
sondt1337 committed Mar 12, 2024
1 parent d279699 commit 2cb1807
Show file tree
Hide file tree
Showing 4 changed files with 81 additions and 20 deletions.
34 changes: 22 additions & 12 deletions server.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@ def write_total(correct_count, start_time):
file.write(f"elapsed time: {time.time() - start_time} seconds\n")

# SEND worker sequence number, F (json) & G (json)
def worker(i, F_json, G_json):
subprocess.run(["python", "worker.py", str(i), F_json, G_json])
def worker(i, F_json, G_json, M_json, m_json):
subprocess.run(["python", "worker.py", str(i), F_json, G_json, M_json, m_json])

# WRITE file function
def write_to_file(file, content):
Expand All @@ -42,7 +42,7 @@ def get_number_of_incorrect():

# CREATE random matrix (X, Y)
def create_matrix(rows, cols):
matrix = np.random.randint(100, size=(rows, cols))
matrix = np.random.randint(2, size=(rows, cols))
return matrix

# DIVIDE sub matrix 1
Expand Down Expand Up @@ -145,12 +145,12 @@ def recovery_threshold(m, n, p, delta_pc, Pc):


# M, N, P, m, n, p, Pc = map(int, input("Enter M, N, P, m, n, p, Pc: ").split())
M = 2
N = 4
P = 6
M = 10
N = 10
P = 10
m = 2
n = 1
p = 3
n = 2
p = 2
Pc = 2

delta_pc = math.ceil(Pc / n) # COMPUTE delta_pc
Expand Down Expand Up @@ -183,8 +183,10 @@ def recovery_threshold(m, n, p, delta_pc, Pc):
FxG_key = np.dot(FxG, key)
write_to_file("result.txt", f"worker {i+1} (FxG_key):\n{FxG_key}")
F_json = json.dumps(F.tolist())
M_json = json.dumps(M)
m_json = json.dumps(m)
G_json = json.dumps(G.tolist())
r = multiprocessing.Process(target=worker, args=(i, F_json, G_json))
r = multiprocessing.Process(target=worker, args=(i, F_json, G_json, M_json, m_json))
r.start()
r.join()
correct_count = get_number_of_correct()
Expand All @@ -202,11 +204,14 @@ def recovery_threshold(m, n, p, delta_pc, Pc):
break

# CHECK process (server when receive data from workers)
if len(sys.argv) == 4 and sys.argv[1] == "check":
if len(sys.argv) == 6 and sys.argv[1] == "check":
# receive worker sequence number, F mul G (json)
i = int(sys.argv[2])
FmulG_json = sys.argv[3]
FmulG = np.array(json.loads(FmulG_json))
M = json.loads(sys.argv[4])
m = json.loads(sys.argv[5])
x = int(M/m)

# GET key gen from result.txt
with open("result.txt", "r") as file:
Expand All @@ -225,10 +230,15 @@ def recovery_threshold(m, n, p, delta_pc, Pc):
# GET F mul G
with open('result.txt', 'r') as file:
lines = file.readlines()
last_line = lines[-1].strip()
last_two_lines = lines[-M:-x]
last_two_lines_str = ("".join(last_two_lines))

# with open("result.txt", "a") as file:
# file.write(f"{FmulG_check}\n")
# file.write(f"{last_two_lines_str[:-1]}")

# CHECK server value & workers value
if np.array_equal(FmulG_check_str, last_line):
if np.array_equal(FmulG_check_str, last_two_lines_str[:-1]):
with open("result.txt", "a") as file:
file.write("Correct Array\n")
with open("total.txt", "r+") as total_file:
Expand Down
22 changes: 16 additions & 6 deletions server_docker.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@ def write_total(correct_count, start_time):
file.write(f"elapsed time: {time.time() - start_time} seconds\n")

# SEND worker sequence number, F (json) & G (json)
def worker(i, F_json, G_json):
subprocess.run(["python", "worker.py", str(i), F_json, G_json])
def worker(i, F_json, G_json, M_json, m_json):
subprocess.run(["python", "worker.py", str(i), F_json, G_json, M_json, m_json])

# WRITE file function
def write_to_file(file, content):
Expand Down Expand Up @@ -174,7 +174,9 @@ def recovery_threshold(m, n, p, delta_pc, Pc):
write_to_file("result.txt", f"worker {i+1} (FxG_key):\n{FxG_key}")
F_json = json.dumps(F.tolist())
G_json = json.dumps(G.tolist())
r = multiprocessing.Process(target=worker, args=(i, F_json, G_json)) # parallel send from server to workers
M_json = json.dumps(M)
m_json = json.dumps(m)
r = multiprocessing.Process(target=worker, args=(i, F_json, G_json, M_json, m_json)) # parallel send from server to workers
r.start()
r.join()
correct_count = get_number_of_correct()
Expand Down Expand Up @@ -206,11 +208,14 @@ def recovery_threshold(m, n, p, delta_pc, Pc):
break

# CHECK process (server when receive data from workers)
if len(sys.argv) == 4 and sys.argv[1] == "check":
if len(sys.argv) == 6 and sys.argv[1] == "check":
# receive worker sequence number, F mul G (json)
i = int(sys.argv[2])
FmulG_json = sys.argv[3]
FmulG = np.array(json.loads(FmulG_json))
M = json.loads(sys.argv[4])
m = json.loads(sys.argv[5])
x = int(M/m)

# GET key gen from result.txt
with open("result.txt", "r") as file:
Expand All @@ -229,10 +234,15 @@ def recovery_threshold(m, n, p, delta_pc, Pc):
# GET F mul G
with open('result.txt', 'r') as file:
lines = file.readlines()
last_line = lines[-1].strip()
last_two_lines = lines[-M:-x]
last_two_lines_str = ("".join(last_two_lines))

# with open("result.txt", "a") as file:
# file.write(f"{FmulG_check}\n")
# file.write(f"{last_two_lines_str[:-1]}")

# CHECK server value & workers value
if np.array_equal(FmulG_check_str, last_line):
if np.array_equal(FmulG_check_str, last_two_lines_str[:-1]):
with open("result.txt", "a") as file:
file.write("Correct Array\n")
with open("total.txt", "r+") as total_file:
Expand Down
10 changes: 8 additions & 2 deletions worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,17 +13,23 @@

G_json = sys.argv[3]
G = np.array(json.loads(G_json))
M_json = sys.argv[4]
m_json = sys.argv[5]

#create error rate
if (err == 1):
F+=F

def write_to_file(file, content):
    """Append *content* followed by a single newline to *file*."""
    with open(file, 'a') as f:
        f.write(content + '\n')

FmulG = np.dot(F, G) # F mul G
FmulG_json = json.dumps(FmulG.tolist()) # convert value -> json
write_to_file("result.txt", f"{FmulG}")

# send F mul G to server (from each worker)
def server(FmulG_json, i):
subprocess.run(["python", "server_docker.py", "check", str(i), FmulG_json])
subprocess.run(["python", "server.py", "check", str(i), FmulG_json, M_json, m_json])

if __name__ == "__main__":
server(FmulG_json, i)
35 changes: 35 additions & 0 deletions worker_docker.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import json
import sys
import numpy as np
import subprocess
import random

# Parse the worker's sequence number and the two JSON-encoded input
# matrices (F and G) from the command line, plus M and m which are
# forwarded untouched to the checking server.
i = int(sys.argv[1])
F_json = sys.argv[2]
F = np.array(json.loads(F_json))

err = random.randint(1, 6)  # 1-in-6 chance this worker is simulated as faulty

G_json = sys.argv[3]
G = np.array(json.loads(G_json))
M_json = sys.argv[4]  # presumably the full matrix dimension M (JSON int) — TODO confirm against server.py
m_json = sys.argv[5]  # presumably the partition factor m (JSON int) — TODO confirm against server.py

# Error injection: doubling F corrupts the product this worker reports,
# so the server's check should flag it as incorrect.
if (err == 1):
F+=F
# Append-only logging helper shared with the other scripts.
def write_to_file(file, content):
    """Append *content* followed by a single newline to *file*."""
    with open(file, 'a') as out:
        out.write(content + '\n')

FmulG = np.dot(F, G)  # this worker's share of the product: F x G
FmulG_json = json.dumps(FmulG.tolist())  # ndarray -> nested lists so it survives JSON transport
write_to_file("result.txt", f"{FmulG}")  # log the raw product; the server compares against this file

# send F mul G to server (from each worker)
def server(FmulG_json, i):
    """Send this worker's F x G product back to the checking server.

    Invokes server_docker.py in "check" mode with the worker index, the
    JSON-encoded product, and the JSON-encoded M and m (read from the
    module-level globals parsed out of argv above).
    """
    # Use sys.executable so the check runs under the same interpreter
    # that launched this worker, not whatever "python" is on PATH.
    subprocess.run([sys.executable, "server_docker.py", "check",
                    str(i), FmulG_json, M_json, m_json])

if __name__ == "__main__":
    server(FmulG_json, i)

0 comments on commit 2cb1807

Please sign in to comment.