Skip to content

Commit

Permalink
Introduce normal Jet job options for ANALYZE statement [HZ-3668] (haz…
Browse files Browse the repository at this point in the history
  • Loading branch information
Fly-Style authored Nov 7, 2023
1 parent 7d739f4 commit bce4b7a
Show file tree
Hide file tree
Showing 13 changed files with 238 additions and 88 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

import com.hazelcast.core.HazelcastException;
import com.hazelcast.dataconnection.impl.InternalDataConnectionService;
import com.hazelcast.jet.config.JobConfig;
import com.hazelcast.jet.core.DAG;
import com.hazelcast.jet.datamodel.Tuple2;
import com.hazelcast.jet.sql.impl.SqlPlanImpl.AlterJobPlan;
Expand Down Expand Up @@ -166,7 +167,6 @@
import static com.hazelcast.jet.sql.impl.SqlPlanImpl.ExplainStatementPlan;
import static com.hazelcast.jet.sql.impl.opt.OptUtils.schema;
import static java.lang.String.format;
import static java.util.Collections.emptyMap;
import static java.util.stream.Collectors.toList;

/**
Expand Down Expand Up @@ -378,11 +378,10 @@ private SqlPlan createPlan(
} else {
// only Select and DML are currently eligible for ANALYZE
boolean analyze = false;
Map<String, String> analyzeOptions = emptyMap();
SqlAnalyzeStatement analyzeStatement = null;
if (node instanceof SqlAnalyzeStatement) {
analyze = true;
final SqlAnalyzeStatement analyzeStatement = (SqlAnalyzeStatement) node;
analyzeOptions = analyzeStatement.options();
analyzeStatement = (SqlAnalyzeStatement) node;
node = analyzeStatement.getQuery();
}

Expand All @@ -396,7 +395,7 @@ private SqlPlan createPlan(
false,
task.getSql(),
analyze,
analyzeOptions
analyze ? analyzeStatement.getJobConfig() : null
);
}
}
Expand Down Expand Up @@ -489,7 +488,7 @@ private SqlPlan toCreateJobPlan(PlanKey planKey, QueryParseResult parseResult, O
true,
query,
false,
emptyMap()
null
);
assert dmlPlan instanceof DmlPlan && ((DmlPlan) dmlPlan).getOperation() == Operation.INSERT;

Expand Down Expand Up @@ -606,7 +605,7 @@ private SqlPlanImpl toPlan(
boolean isCreateJob,
String query,
boolean analyze,
Map<String, String> analyzeOptions
@Nullable JobConfig analyzeJobConfig
) {
PhysicalRel physicalRel = optimize(parameterMetadata, rel, context, isCreateJob);

Expand Down Expand Up @@ -686,7 +685,7 @@ private SqlPlanImpl toPlan(
planExecutor,
permissions,
analyze,
analyzeOptions);
analyzeJobConfig);
} else if (physicalRel instanceof DeleteByKeyMapPhysicalRel) {
assert !isCreateJob;
DeleteByKeyMapPhysicalRel delete = (DeleteByKeyMapPhysicalRel) physicalRel;
Expand Down Expand Up @@ -718,7 +717,7 @@ private SqlPlanImpl toPlan(
planExecutor,
permissions,
analyze,
analyzeOptions);
analyzeJobConfig);
} else if (physicalRel instanceof DeletePhysicalRel) {
checkDmlOperationWithView(physicalRel);
Tuple2<DAG, Set<PlanObjectKey>> dagAndKeys = createDag(
Expand All @@ -737,7 +736,7 @@ private SqlPlanImpl toPlan(
planExecutor,
permissions,
analyze,
analyzeOptions
analyzeJobConfig
);
} else {
Tuple2<DAG, Set<PlanObjectKey>> dagAndKeys = createDag(
Expand All @@ -763,7 +762,7 @@ private SqlPlanImpl toPlan(
permissions,
partitionStrategyCandidates(physicalRel, parameterMetadata),
analyze,
analyzeOptions
analyzeJobConfig
);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -521,8 +521,8 @@ SqlResult execute(SelectPlan plan,
ssc
);

JobConfig jobConfig = new JobConfig()
.setArgument(SQL_ARGUMENTS_KEY_NAME, args)
JobConfig jobConfig = plan.isAnalyzed() ? plan.analyzeJobConfig() : new JobConfig();
jobConfig.setArgument(SQL_ARGUMENTS_KEY_NAME, args)
.setArgument(KEY_SQL_QUERY_TEXT, plan.getQuery())
.setArgument(KEY_SQL_UNBOUNDED, plan.isStreaming())
.setTimeoutMillis(timeout);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1068,7 +1068,7 @@ static class SelectPlan extends SqlPlanImpl {
// mapName -> { columnName -> RexLiteralOrDynamicParam }
private final Map<String, List<Map<String, Expression<?>>>> partitionStrategyCandidates;
private final boolean analyzed;
private final Map<String, String> analyzeOptions;
private final JobConfig analyzeJobConfig;

@SuppressWarnings("checkstyle:ParameterNumber")
SelectPlan(
Expand All @@ -1083,7 +1083,7 @@ static class SelectPlan extends SqlPlanImpl {
List<Permission> permissions,
Map<String, List<Map<String, Expression<?>>>> partitionStrategyCandidates,
final boolean analyzed,
final Map<String, String> analyzeOptions
final JobConfig analyzeJobConfig
) {
super(planKey);

Expand All @@ -1097,7 +1097,7 @@ static class SelectPlan extends SqlPlanImpl {
this.permissions = permissions;
this.partitionStrategyCandidates = partitionStrategyCandidates;
this.analyzed = analyzed;
this.analyzeOptions = analyzeOptions;
this.analyzeJobConfig = analyzeJobConfig;
}

QueryParameterMetadata getParameterMetadata() {
Expand Down Expand Up @@ -1138,8 +1138,8 @@ public boolean isAnalyzed() {
return analyzed;
}

public Map<String, String> getAnalyzeOptions() {
return analyzeOptions;
public JobConfig analyzeJobConfig() {
return analyzeJobConfig;
}

@Override
Expand Down Expand Up @@ -1169,7 +1169,7 @@ static class DmlPlan extends SqlPlanImpl {
private final PlanExecutor planExecutor;
private final List<Permission> permissions;
private final boolean analyzed;
private final Map<String, String> analyzeOptions;
private final JobConfig analyzeJobConfig;

@SuppressWarnings("checkstyle:ParameterNumber")
DmlPlan(
Expand All @@ -1183,7 +1183,7 @@ static class DmlPlan extends SqlPlanImpl {
PlanExecutor planExecutor,
List<Permission> permissions,
boolean analyzed,
Map<String, String> analyzeOptions) {
JobConfig analyzeJobConfig) {
super(planKey);

this.operation = operation;
Expand All @@ -1195,7 +1195,7 @@ static class DmlPlan extends SqlPlanImpl {
this.planExecutor = planExecutor;
this.permissions = permissions;
this.analyzed = analyzed;
this.analyzeOptions = analyzeOptions;
this.analyzeJobConfig = analyzeJobConfig;
}

Operation getOperation() {
Expand Down Expand Up @@ -1232,8 +1232,8 @@ public boolean isAnalyzed() {
return analyzed;
}

public Map<String, String> getAnalyzeOptions() {
return analyzeOptions;
public JobConfig analyzeJobConfig() {
return analyzeJobConfig;
}

@Override
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
/*
* Copyright 2023 Hazelcast Inc.
*
* Licensed under the Hazelcast Community License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://hazelcast.com/hazelcast-community-license
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hazelcast.jet.sql.impl.parse;

import com.hazelcast.jet.config.ProcessingGuarantee;
import org.apache.calcite.sql.validate.SqlValidator;

import static com.hazelcast.jet.config.ProcessingGuarantee.AT_LEAST_ONCE;
import static com.hazelcast.jet.config.ProcessingGuarantee.EXACTLY_ONCE;
import static com.hazelcast.jet.config.ProcessingGuarantee.NONE;
import static com.hazelcast.jet.sql.impl.parse.ParserResource.RESOURCE;

public class ParseUtils {

    private ParseUtils() {
        // Utility class — static helpers only, no instances.
    }

    /**
     * Converts the value of the given option into a {@link ProcessingGuarantee}.
     *
     * @param validator validator used to report a validation error
     * @param option    the SQL option whose value is parsed
     * @return the matching guarantee for {@code exactlyOnce}, {@code atLeastOnce} or {@code none}
     * @throws org.apache.calcite.runtime.CalciteContextException if the value is not one of the
     *         three accepted literals (reported via the validator)
     */
    static ProcessingGuarantee parseProcessingGuarantee(SqlValidator validator, SqlOption option) {
        String guarantee = option.valueString();
        if ("exactlyOnce".equals(guarantee)) {
            return EXACTLY_ONCE;
        }
        if ("atLeastOnce".equals(guarantee)) {
            return AT_LEAST_ONCE;
        }
        if ("none".equals(guarantee)) {
            return NONE;
        }
        // Unknown literal — surface it as a validation error anchored at the option value node.
        throw validator.newValidationError(option.value(),
                RESOURCE.processingGuaranteeBadValue(option.keyString(), option.valueString()));
    }

    /**
     * Parses the value of the given option as a {@code long}.
     *
     * @param validator validator used to report a validation error
     * @param option    the SQL option whose value is parsed
     * @return the parsed number
     * @throws org.apache.calcite.runtime.CalciteContextException if the value is not a valid long
     *         (reported via the validator)
     */
    static long parseLong(SqlValidator validator, SqlOption option) {
        try {
            return Long.parseLong(option.valueString());
        } catch (NumberFormatException ignored) {
            // Re-raise as a validation error so the user sees the offending option, not a stack trace.
            throw validator.newValidationError(option.value(),
                    RESOURCE.jobOptionIncorrectNumber(option.keyString(), option.valueString()));
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,9 @@ public interface ParserResource {
@BaseMessage("Unknown job option: {0}")
ExInst<SqlValidatorException> unknownJobOption(String key);

@BaseMessage("Job option is not supported for ANALYZE: {0}")
ExInst<SqlValidatorException> unsupportedAnalyzeJobOption(String key);

@BaseMessage("The OR REPLACE option is required for CREATE SNAPSHOT")
ExInst<SqlValidatorException> createSnapshotWithoutReplace();
}
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@
import java.util.Set;

import static com.hazelcast.jet.sql.impl.parse.ParserResource.RESOURCE;
import static com.hazelcast.jet.sql.impl.parse.SqlCreateJob.parseLong;
import static com.hazelcast.jet.sql.impl.parse.UnparseUtil.unparseOptions;
import static java.util.Objects.requireNonNull;

Expand Down Expand Up @@ -124,7 +123,7 @@ public void validate(SqlValidator validator, SqlValidatorScope scope) {

switch (key) {
case "snapshotIntervalMillis":
deltaConfig.setSnapshotIntervalMillis(parseLong(validator, option));
deltaConfig.setSnapshotIntervalMillis(ParseUtils.parseLong(validator, option));
break;
case "autoScaling":
deltaConfig.setAutoScaling(Boolean.parseBoolean(value));
Expand All @@ -139,7 +138,7 @@ public void validate(SqlValidator validator, SqlValidatorScope scope) {
deltaConfig.setStoreMetricsAfterJobCompletion(Boolean.parseBoolean(value));
break;
case "maxProcessorAccumulatedRecords":
deltaConfig.setMaxProcessorAccumulatedRecords(parseLong(validator, option));
deltaConfig.setMaxProcessorAccumulatedRecords(ParseUtils.parseLong(validator, option));
break;
case "suspendOnFailure":
deltaConfig.setSuspendOnFailure(Boolean.parseBoolean(value));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

package com.hazelcast.jet.sql.impl.parse;

import com.hazelcast.jet.config.JobConfig;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
Expand All @@ -24,18 +25,21 @@
import org.apache.calcite.sql.SqlSpecialOperator;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidator;

import java.util.LinkedHashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static com.hazelcast.jet.sql.impl.parse.ParserResource.RESOURCE;
import static java.util.Objects.requireNonNull;

public class SqlAnalyzeStatement extends SqlCall {
public static final SqlSpecialOperator OPERATOR = new SqlSpecialOperator("ANALYZE", SqlKind.OTHER);

private SqlNode query;
private final SqlNodeList options;
private final JobConfig jobConfig = new JobConfig();

public SqlAnalyzeStatement(SqlParserPos pos, SqlNode query, SqlNodeList options) {
super(pos);
Expand All @@ -51,16 +55,6 @@ public void setQuery(final SqlNode query) {
this.query = query;
}

public Map<String, String> options() {
return options.getList().stream()
.map(node -> (SqlOption) node)
.collect(
LinkedHashMap::new,
(map, option) -> map.putIfAbsent(option.keyString(), option.valueString()),
Map::putAll
);
}

@Override
public SqlOperator getOperator() {
return OPERATOR;
Expand All @@ -71,6 +65,10 @@ public List<SqlNode> getOperandList() {
return List.of(query);
}

/**
 * Returns the {@link JobConfig} for this ANALYZE statement. The instance is created eagerly
 * with the statement and is populated from the statement's options during {@code validate};
 * before validation it holds only default values.
 */
public JobConfig getJobConfig() {
    return jobConfig;
}

@Override
public void unparse(final SqlWriter writer, final int leftPrec, final int rightPrec) {
writer.keyword("ANALYZE");
Expand All @@ -80,4 +78,51 @@ public void unparse(final SqlWriter writer, final int leftPrec, final int rightP

query.unparse(writer, leftPrec, rightPrec);
}

/**
 * Validates the statement's options and applies them to {@link #getJobConfig()}.
 * <p>
 * Fixed ANALYZE defaults are applied first: metrics collection and post-completion metric
 * storage are forced on, while split-brain protection, auto-scaling and suspend-on-failure
 * are forced off. User options may override the metrics-related settings; the latter three
 * are rejected as unsupported for ANALYZE.
 *
 * @param validator validator used to report option errors
 * @throws org.apache.calcite.runtime.CalciteContextException on a duplicate, unknown or
 *         unsupported option, or a malformed option value (reported via the validator)
 */
public void validate(SqlValidator validator) {
    Set<String> seenKeys = new HashSet<>();

    // ANALYZE-specific defaults, applied before reading any user options.
    jobConfig.setMetricsEnabled(true);
    jobConfig.setStoreMetricsAfterJobCompletion(true);
    jobConfig.setSplitBrainProtection(false);
    jobConfig.setAutoScaling(false);
    jobConfig.setSuspendOnFailure(false);

    for (SqlNode rawOption : options) {
        SqlOption option = (SqlOption) rawOption;
        String key = option.keyString();
        String value = option.valueString();

        // Each option may appear at most once.
        if (!seenKeys.add(key)) {
            throw validator.newValidationError(option, RESOURCE.duplicateOption(key));
        }

        if ("processingGuarantee".equals(key)) {
            jobConfig.setProcessingGuarantee(ParseUtils.parseProcessingGuarantee(validator, option));
        } else if ("snapshotIntervalMillis".equals(key)) {
            jobConfig.setSnapshotIntervalMillis(ParseUtils.parseLong(validator, option));
        } else if ("initialSnapshotName".equals(key)) {
            jobConfig.setInitialSnapshotName(value);
        } else if ("maxProcessorAccumulatedRecords".equals(key)) {
            jobConfig.setMaxProcessorAccumulatedRecords(ParseUtils.parseLong(validator, option));
        } else if ("metricsEnabled".equals(key)) {
            jobConfig.setMetricsEnabled(Boolean.parseBoolean(value));
        } else if ("storeMetricsAfterJobCompletion".equals(key)) {
            jobConfig.setStoreMetricsAfterJobCompletion(Boolean.parseBoolean(value));
        } else if ("autoScaling".equals(key)
                || "splitBrainProtectionEnabled".equals(key)
                || "suspendOnFailure".equals(key)) {
            // Recognized job options that are deliberately disallowed for ANALYZE.
            throw validator.newValidationError(option.key(), RESOURCE.unsupportedAnalyzeJobOption(key));
        } else {
            throw validator.newValidationError(option.key(), RESOURCE.unknownJobOption(key));
        }
    }
}
}
Loading

0 comments on commit bce4b7a

Please sign in to comment.