Skip to content

Commit

Permalink
Afahs/scalein improve (#76)
Browse files Browse the repository at this point in the history
* adding the includes operator

* ONM-integration

* fixes for env-vars and getcluster and label nodes

* Alex's comments

* spotlessApply

* improving the scale in task of the cluster and adding filter by cloud id
  • Loading branch information
alijawadfahs authored Apr 27, 2024
1 parent 887b19b commit 82dfbaf
Show file tree
Hide file tree
Showing 9 changed files with 68 additions and 23 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -183,6 +183,10 @@ private static boolean satisfyAttributeRequirement(AttributeRequirement attribut
.compare(nodeCandidate.getCloud().getCloudType().name(),
attributeRequirement.getValue());
}
if (attributeRequirement.getRequirementAttribute().equals("id")) {
return attributeRequirement.getRequirementOperator().compare(nodeCandidate.getCloud().getId(),
attributeRequirement.getValue());
}
}
if (attributeRequirement.getRequirementClass().toLowerCase(Locale.ROOT).equals("environment")) {
if (attributeRequirement.getRequirementAttribute().equals("runtime")) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -274,15 +274,19 @@ public Cluster scaleInCluster(String sessionId, String clusterName, List<String>
throw new NotConnectedException();
}
LOGGER.info("scaleDownCluster endpoint is called for the cluster: " + clusterName);
Cluster toScaleCluster = getCluster(sessionId, clusterName);
Cluster toScaleCluster = ClusterUtils.getClusterByName(clusterName, repositoryService.listCluster());
repositoryService.deleteCluster(toScaleCluster);
repositoryService.flush();
String masterNodeToken = "";
if (toScaleCluster != null) {
masterNodeToken = toScaleCluster.getMasterNode() + "_" + clusterName;
}
List<ClusterNodeDefinition> clusterNodes = toScaleCluster.getNodes();
for (String nodeName : nodesToDelete) {
ClusterNodeDefinition node = getNodeFromCluster(toScaleCluster, nodeName);
if (node != null && !node.getName().equals(toScaleCluster.getMasterNode())) {
try {
if (deleteNode(sessionId, clusterName, node) != -1L) {
if (deleteNode(sessionId, clusterName, node, masterNodeToken, true) != -1L) {
clusterNodes = deleteNodeFromCluster(clusterNodes, nodeName);
}
} catch (NotConnectedException e) {
Expand Down Expand Up @@ -326,7 +330,8 @@ public Long labelNodes(String sessionId, String clusterName, List<Map<String, St
script,
masterNodeToken,
"label_nodes_" + clusterName,
"basic");
"basic",
"");
} catch (IOException e) {
throw new RuntimeException(e);
}
Expand Down Expand Up @@ -359,7 +364,8 @@ public Long deployApplication(String sessionId, String clusterName, ClusterAppli
script,
masterNodeToken,
"deploy_app_" + clusterName,
"basic");
"basic",
"");
} catch (IOException e) {
throw new RuntimeException(e);
}
Expand All @@ -373,7 +379,7 @@ public boolean deleteCluster(String sessionId, String clusterName) throws NotCon
Cluster toScaleCluster = getCluster(sessionId, clusterName);
for (ClusterNodeDefinition node : toScaleCluster.getNodes()) {
if (node != null) {
deleteNode(sessionId, clusterName, node);
deleteNode(sessionId, clusterName, node, "", false);
}
}
repositoryService.deleteCluster(toScaleCluster);
Expand Down Expand Up @@ -403,13 +409,27 @@ private String getNodeUrl(String sessionId, String clusterName, ClusterNodeDefin
return "";
}

private Long deleteNode(String sessionId, String clusterName, ClusterNodeDefinition node)
throws NotConnectedException {
private Long deleteNode(String sessionId, String clusterName, ClusterNodeDefinition node, String masterNodeToken,
boolean drain) throws NotConnectedException {
String nodeUrl = getNodeUrl(sessionId, clusterName, node);
Long jobId = -1L;
if (nodeUrl != null && !nodeUrl.isEmpty()) {
try {
jobId = jobService.submitOneTaskJob(sessionId, nodeUrl, "", "delete_node_" + node.getName(), "delete");
if (drain) {
jobId = jobService.submitOneTaskJob(sessionId,
nodeUrl,
masterNodeToken,
"delete_node_" + node.getName(),
"drain-delete",
node.getNodeJobName(clusterName));
} else {
jobId = jobService.submitOneTaskJob(sessionId,
nodeUrl,
masterNodeToken,
"delete_node_" + node.getName(),
"delete",
"");
}
} catch (IOException e) {
throw new RuntimeException(e);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -808,19 +808,27 @@ private void setAllReconfigurationMandatoryDependencies(TaskFlowJob paJob, Job j
setAllMandatoryDependencies(paJob, job);
}

public Long submitOneTaskJob(String sessionId, String script, String token, String jobName, String type)
throws NotConnectedException, IOException {
public Long submitOneTaskJob(String sessionId, String input, String token, String jobName, String type,
String nodeName) throws NotConnectedException, IOException {
if (!paGatewayService.isConnectionActive(sessionId)) {
throw new NotConnectedException();
}
TaskFlowJob paJob = new TaskFlowJob();
paJob.setName(jobName);
LOGGER.info("Job created: " + paJob.toString());
LOGGER.info("Job created: " + paJob);
try {
if (Objects.equals(type, "basic")) {
paJob.addTask(taskBuilder.createOneNodeTask(script, token, jobName));
paJob.addTask(taskBuilder.createOneNodeTask(input, token, jobName));
} else if (Objects.equals(type, "delete")) {
paJob.addTask(taskBuilder.createDeleteNodeTask(script));
paJob.addTask(taskBuilder.createDeleteNodeTask(input));
} else if (Objects.equals(type, "drain")) {
paJob.addTask(taskBuilder.createDrainNodeTask(input, token));
} else if (Objects.equals(type, "drain-delete")) {
ScriptTask parentTask = taskBuilder.createDrainNodeTask(nodeName, token);
ScriptTask childTask = taskBuilder.createDeleteNodeTask(input);
childTask.addDependence(parentTask);
paJob.addTask(parentTask);
paJob.addTask(childTask);
} else {
LOGGER.error("type of submitOneTaskJob \"{}\" is not supported!", type);
throw new IOException();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,7 @@ private void submitScalingOutJob(Job job, String scaledTaskName, List<Long> newN
jobService.addCleanChannelsTaskWithDependencies(paJob, job);

paJob.setMaxNumberOfExecution(2);
paJob.setProjectName("Morphemic");
paJob.setProjectName("NebulOuS");

long submittedJobId = schedulerGateway.submit(paJob).longValue();
job.setSubmittedJobId(submittedJobId);
Expand Down Expand Up @@ -374,7 +374,7 @@ private void submitScalingInJob(Job job, String scaledTaskName) {
jobService.addCleanChannelsTaskWithDependencies(paJob, job);

paJob.setMaxNumberOfExecution(2);
paJob.setProjectName("Morphemic");
paJob.setProjectName("NebulOuS");

long submittedJobId = schedulerGateway.submit(paJob).longValue();
job.setSubmittedJobId(submittedJobId);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,14 +40,14 @@
import org.ow2.proactive.sal.service.util.Utils;
import org.ow2.proactive.scheduler.common.task.ScriptTask;
import org.ow2.proactive.scheduler.common.task.TaskVariable;
import org.ow2.proactive.scripting.InvalidScriptException;
import org.ow2.proactive.scripting.SelectionScript;
import org.ow2.proactive.scripting.*;
import org.springframework.stereotype.Service;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;

import lombok.extern.log4j.Log4j2;
import net.bytebuddy.dynamic.scaffold.TypeInitializer;


@Log4j2
Expand Down Expand Up @@ -98,6 +98,8 @@ public class TaskBuilder {

private static final String WAIT_FOR_LOCK_SCRIPT = "wait_for_lock_script.sh";

private static final String DRIAN_NODE_SCRIPT = "drain_node_script.sh";

private static final String WAIT_FOR_MASTER_SCRIPT = "wait_for_master.groovy";

private static final String SET_TOKEN_SCRIPT = "set_token.groovy";
Expand Down Expand Up @@ -892,10 +894,19 @@ private ScriptTask createSetTokenTask(String masterNodeToken) {
}

/**
 * Builds a ProActive task that removes a node from the Resource Manager.
 *
 * The task runs the Groovy delete-node script ({@code DELETE_NODE_SCRIPT}); the
 * target node is passed to the script through the {@code nodeURL} task variable.
 *
 * @param nodeUrl the Resource Manager URL of the node to delete
 * @return the configured delete-node {@link ScriptTask}
 */
public ScriptTask createDeleteNodeTask(String nodeUrl) {
    ScriptTask deleteNodeTask = PAFactory.createGroovyScriptTaskFromFile("Delete-Node", DELETE_NODE_SCRIPT);
    Map<String, TaskVariable> variablesMap = new HashMap<>();
    variablesMap.put("nodeURL", new TaskVariable("nodeURL", nodeUrl));
    deleteNodeTask.setVariables(variablesMap);
    return deleteNodeTask;
}

/**
 * Builds a bash task that drains a Kubernetes node prior to its removal.
 *
 * The task executes the drain script referenced by {@code DRIAN_NODE_SCRIPT}
 * (NOTE(review): the constant's name is misspelled at its declaration — "DRIAN"
 * instead of "DRAIN"; it is kept as-is here so this method still compiles
 * against that declaration) and receives the target node through the
 * {@code nodeName} task variable.
 *
 * @param nodeName        name of the Kubernetes node to drain
 * @param masterNodeToken ProActive node-access token set as the task's
 *                        NODE_ACCESS_TOKEN generic information — presumably to
 *                        pin execution to the cluster's master node; confirm
 *                        against callers
 * @return the configured drain-node {@link ScriptTask}
 */
public ScriptTask createDrainNodeTask(String nodeName, String masterNodeToken) {
    ScriptTask task = PAFactory.createBashScriptTaskFromFile("Drain-Node", DRIAN_NODE_SCRIPT);

    Map<String, TaskVariable> taskVariables = new HashMap<>();
    taskVariables.put("nodeName", new TaskVariable("nodeName", nodeName));
    task.setVariables(taskVariables);

    task.addGenericInformation("NODE_ACCESS_TOKEN", masterNodeToken);
    return task;
}
}
2 changes: 1 addition & 1 deletion sal-service/src/main/resources/Define_NS_AWS.xml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<job
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="urn:proactive:jobdescriptor:3.13" xsi:schemaLocation="urn:proactive:jobdescriptor:3.13 http://www.activeeon.com/public_content/schemas/proactive/jobdescriptor/3.13/schedulerjob.xsd" name="Define_NS_AWS" projectName="Morphemic" priority="normal" onTaskError="continueJobExecution" maxNumberOfExecution="2" >
xmlns="urn:proactive:jobdescriptor:3.13" xsi:schemaLocation="urn:proactive:jobdescriptor:3.13 http://www.activeeon.com/public_content/schemas/proactive/jobdescriptor/3.13/schedulerjob.xsd" name="Define_NS_AWS" projectName="NebulOuS" priority="normal" onTaskError="continueJobExecution" maxNumberOfExecution="2" >
<variables>
<variable name="NS_name" value="" />
<variable name="NS_nVMs" value="0" model="PA:Integer" />
Expand Down
2 changes: 1 addition & 1 deletion sal-service/src/main/resources/Define_NS_AWS_AutoScale.xml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<job
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="urn:proactive:jobdescriptor:3.13" xsi:schemaLocation="urn:proactive:jobdescriptor:3.13 http://www.activeeon.com/public_content/schemas/proactive/jobdescriptor/3.13/schedulerjob.xsd" name="Define_NS_AWS_AS" projectName="Morphemic" priority="normal" onTaskError="continueJobExecution" maxNumberOfExecution="2" >
xmlns="urn:proactive:jobdescriptor:3.13" xsi:schemaLocation="urn:proactive:jobdescriptor:3.13 http://www.activeeon.com/public_content/schemas/proactive/jobdescriptor/3.13/schedulerjob.xsd" name="Define_NS_AWS_AS" projectName="NebulOuS" priority="normal" onTaskError="continueJobExecution" maxNumberOfExecution="2" >
<variables>
<variable name="NS_name" value="" />
<variable name="aws_username" value="" model="PA:HIDDEN"/>
Expand Down
2 changes: 1 addition & 1 deletion sal-service/src/main/resources/Define_NS_OS.xml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<job
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="urn:proactive:jobdescriptor:3.13" xsi:schemaLocation="urn:proactive:jobdescriptor:3.13 http://www.activeeon.com/public_content/schemas/proactive/jobdescriptor/3.13/schedulerjob.xsd" name="Define_NS_OS" projectName="Morphemic" priority="normal" onTaskError="continueJobExecution" maxNumberOfExecution="2" >
xmlns="urn:proactive:jobdescriptor:3.13" xsi:schemaLocation="urn:proactive:jobdescriptor:3.13 http://www.activeeon.com/public_content/schemas/proactive/jobdescriptor/3.13/schedulerjob.xsd" name="Define_NS_OS" projectName="NebulOuS" priority="normal" onTaskError="continueJobExecution" maxNumberOfExecution="2" >
<variables>
<variable name="NS_name" value="" />
<variable name="NS_nVMs" value="0" model="PA:Integer"/>
Expand Down
2 changes: 2 additions & 0 deletions sal-service/src/main/resources/drain_node_script.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Drain the target node (evict its pods; DaemonSet-managed pods are left in
# place via --ignore-daemonsets), then remove the node object from the
# Kubernetes cluster. $variables_nodeName is injected by the ProActive task
# variable "nodeName" (see TaskBuilder.createDrainNodeTask).
# NOTE(review): runs kubectl as the "ubuntu" user — assumes that user holds
# the kubeconfig with cluster-admin rights; confirm on the master-node image.
sudo -H -u ubuntu bash -c "kubectl drain $variables_nodeName --ignore-daemonsets"
sudo -H -u ubuntu bash -c "kubectl delete node $variables_nodeName"

0 comments on commit 82dfbaf

Please sign in to comment.