This repository has been archived by the owner on Sep 5, 2024. It is now read-only.

Commit

separate resource folders

okedeji committed Apr 23, 2024
1 parent f73ca15 commit 176ed61
Showing 3 changed files with 61 additions and 55 deletions.
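The changes below move each generated node's resources out of the project root and into a subfolder named after the resource type. A rough sketch of the layout implied by the paths in this diff (the project name <name> is a placeholder, <type> is one of validator, worker, or reputer per the messages in this commit, and other generated files are omitted):

    <name>/scripts/start-validator.sh   ->  <name>/validator/scripts/start-validator.sh
    <name>/config.yaml                  ->  <name>/<type>/config.yaml
    <name>/data/head/key/identity       ->  <name>/<type>/data/head/key/identity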
6 changes: 3 additions & 3 deletions allocmd/cli.py
@@ -59,7 +59,7 @@ def validator(name=None, network=None):
if click.confirm(colored("\nWould you like to proceed?", 'white', attrs=['bold']), default=True):
cprint("\nProceeding with the creation of validator node directory...", 'green')

os.makedirs(f"{name}/scripts", exist_ok=True)
os.makedirs(f"{name}/validator/scripts", exist_ok=True)

file_configs = [
{
@@ -74,9 +74,9 @@ def validator(name=None, network=None):
},
]

- generate_all_files(env, file_configs, Command.INIT, name)
+ generate_all_files(env, file_configs, Command.INIT, "validator", name)

- subprocess.run(['chmod', '+x', f'{name}/scripts/start-validator.sh'], check=True)
+ subprocess.run(['chmod', '+x', f'{name}/validator/scripts/start-validator.sh'], check=True)
else:
cprint("\nOperation cancelled.", 'red')

2 changes: 1 addition & 1 deletion allocmd/utilities/constants.py
@@ -1 +1 @@
cliVersion = "0.3.12"
cliVersion = "0.3.13"
108 changes: 57 additions & 51 deletions allocmd/utilities/utils.py
@@ -100,7 +100,7 @@ def print_allora_banner():
"""
cprint(banner_text, 'blue', attrs=['bold'])

- def generate_all_files(env: Environment, file_configs, command: Command, name = ''):
+ def generate_all_files(env: Environment, file_configs, command: Command, type, name = ''):
if command == Command.INIT:
cprint(f"Bootstraping '{name}' directory...", 'cyan')
time.sleep(1)
@@ -109,9 +109,9 @@ def generate_all_files(env: Environment, file_configs, command: Command, name =
template = env.get_template(config["template_name"])

if command == Command.INIT:
- file_path = os.path.join(os.getcwd(), f'{name}/{config["file_name"]}')
+ file_path = os.path.join(os.getcwd(), f'{name}/{type}/{config["file_name"]}')
elif command == Command.DEPLOY:
- file_path = os.path.join(os.getcwd(), f'{config["file_name"]}')
+ file_path = os.path.join(os.getcwd(), f'{type}/{config["file_name"]}')

content = template.render(**config["context"])
with open(file_path, 'w') as f:
@@ -122,13 +122,13 @@ def generate_all_files(env: Environment, file_configs, command: Command, name =

def run_key_generate_command(worker_name, type):
command = (
- f'docker run -it --entrypoint=bash -v "$(pwd)/{worker_name}/data":/data '
+ f'docker run -it --entrypoint=bash -v "$(pwd)/{worker_name}/{type}/data":/data '
'alloranetwork/allora-inference-base:latest '
f'-c "mkdir -p /data/head/key /data/{type}/key && (cd /data/head/key && allora-keys) && (cd /data/{type}/key && allora-keys)"'
f'-c "mkdir -p /{type}/data/head/key /{type}/data/{type}/key && (cd /{type}/data/head/key && allora-keys) && (cd /{type}/data/{type}/key && allora-keys)"'
)
try:
subprocess.run(command, shell=True, check=True)
- peer_id_path = os.path.join(os.getcwd(), f'{worker_name}/data/head/key', 'identity')
+ peer_id_path = os.path.join(os.getcwd(), f'{worker_name}/{type}/data/head/key', 'identity')
with open(peer_id_path, 'r') as file:
cprint(f"local {type} identity generated successfully.", 'cyan')
head_peer_id = file.read().strip()
@@ -137,7 +137,7 @@ def run_key_generate_command(worker_name, type):
click.echo(f"error generating local {type} identity: {e}", err=True)

def generateWorkerAccount(worker_name, type):
- config_path = os.path.join(os.getcwd(), worker_name, 'config.yaml')
+ config_path = os.path.join(os.getcwd(), worker_name, type, 'config.yaml')
try:
with open(config_path, 'r') as file:
config = yaml.safe_load(file)
@@ -165,54 +165,60 @@ def generateWorkerAccount(worker_name, type):
def generateProdCompose(env: Environment, type):
"""Deploy resource production kubernetes cluster"""

subprocess.run("mkdir -p ./data/scripts", shell=True, check=True)
cprint(f"\nMake sure you are running this command in the appropriate directory [validator, reputer, worker]", 'cyan')
cprint(f"\nif not, please cd to the right directory", 'cyan')
if click.confirm(colored("\nif you are in the right folder, please proceed", 'white', attrs=['bold']), default=True):

try:
result = subprocess.run("chmod -R +rx ./data/scripts", shell=True, check=True, capture_output=True, text=True)
print(result)
except subprocess.CalledProcessError as e:
print(f"Command '{e.cmd}' returned non-zero exit status {e.returncode}.")
if e.stderr:
print(f"Stderr: {e.stderr}")
subprocess.run("mkdir -p ./data/scripts", shell=True, check=True)

config_path = os.path.join(os.getcwd(), 'config.yaml')
try:
with open(config_path, 'r') as file:
config = yaml.safe_load(file)
except yaml.YAMLError as e:
print(colored(f"Error reading config file: {e}", 'red', attrs=['bold']))
return
try:
result = subprocess.run("chmod -R +rx ./data/scripts", shell=True, check=True, capture_output=True, text=True)
print(result)
except subprocess.CalledProcessError as e:
print(f"Command '{e.cmd}' returned non-zero exit status {e.returncode}.")
if e.stderr:
print(f"Stderr: {e.stderr}")

worker_name = config['name']
hex_coded_pk = config[type]['hex_coded_pk']
boot_nodes = config[type]['boot_nodes']
chain_rpc_address = config[type]['chain_rpc_address']
chain_topic_id = config[type]['chain_topic_id']

file_configs = [
{
"template_name": "prod-docker-compose.yaml.j2",
"file_name": "prod-docker-compose.yaml",
"context": {
"worker_name": worker_name,
"boot_nodes": boot_nodes,
"chain_rpc_address": chain_rpc_address,
"topic_id": chain_topic_id,
}
},
{
"template_name": "init.sh.j2",
"file_name": "data/scripts/init.sh",
"context": {
"worker_name": worker_name,
"hex_coded_pk": hex_coded_pk
config_path = os.path.join(os.getcwd(), 'config.yaml')
try:
with open(config_path, 'r') as file:
config = yaml.safe_load(file)
except yaml.YAMLError as e:
print(colored(f"Error reading config file: {e}", 'red', attrs=['bold']))
return

worker_name = config['name']
hex_coded_pk = config[type]['hex_coded_pk']
boot_nodes = config[type]['boot_nodes']
chain_rpc_address = config[type]['chain_rpc_address']
chain_topic_id = config[type]['chain_topic_id']

file_configs = [
{
"template_name": "prod-docker-compose.yaml.j2",
"file_name": "prod-docker-compose.yaml",
"context": {
"worker_name": worker_name,
"boot_nodes": boot_nodes,
"chain_rpc_address": chain_rpc_address,
"topic_id": chain_topic_id,
}
},
{
"template_name": "init.sh.j2",
"file_name": "data/scripts/init.sh",
"context": {
"worker_name": worker_name,
"hex_coded_pk": hex_coded_pk
}
}
}
]
]

generate_all_files(env, file_configs, Command.DEPLOY)
cprint(f"production docker compose file generated to be deployed", 'green')
cprint(f"please run chmod -R +rx ./data/scripts to grant script access to the image", 'yellow')
generate_all_files(env, file_configs, Command.DEPLOY)
cprint(f"production docker compose file generated to be deployed", 'green')
# cprint(f"please run chmod -R +rx ./data/scripts to grant script access to the image", 'yellow')
else:
cprint("\nOperation cancelled.", 'red')


def blocklessNode(environment, env, type, name=None, topic=None):
Expand Down Expand Up @@ -284,7 +290,7 @@ def blocklessNode(environment, env, type, name=None, topic=None):
else:
cprint("\nOperation cancelled.", 'red')
elif environment == 'prod':
- devComposePath = os.path.join(os.getcwd(), 'dev-docker-compose.yaml')
+ devComposePath = os.path.join(os.getcwd(), type, 'dev-docker-compose.yaml')
if not os.path.exists(devComposePath):
cprint(f"You must initialize the {type} on dev please run allocmd generate {type} --env dev --name <{type} name> --topic <topic id> and then run the prod generate in the directory created", 'red')
else:
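As a concrete illustration of the message above, a typical dev-then-prod sequence for a worker might look like this (the node name and topic id are placeholders, and the exact prod flags may differ by CLI version):

    allocmd generate worker --env dev --name my-worker --topic 1
    # then, from inside the directory created by the dev step:
    allocmd generate worker --env prod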