Spark 3.5: Remove obsolete conf parsing logic (#10309)
aokolnychyi authored May 11, 2024
1 parent e484f0d commit 04792cf
Showing 3 changed files with 25 additions and 14 deletions.
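For context: this commit drops a Spark 2-era workaround in SparkConfParser that lower-cased option names before lookup (DataSourceOptions.asMap() in Spark 2 returned a lower-cased map) and instead stores the read/write options as Spark's CaseInsensitiveStringMap, which normalizes keys itself. A minimal sketch of that lookup behavior, not part of the commit and using only the public Spark API (the option key is made up for illustration):

import java.util.Map;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

public class CaseInsensitiveLookupSketch {
  public static void main(String[] args) {
    // Keys are lower-cased internally, so lookups succeed regardless of caller casing.
    CaseInsensitiveStringMap options =
        new CaseInsensitiveStringMap(Map.of("split-size", "134217728"));

    System.out.println(options.get("split-size"));  // 134217728
    System.out.println(options.get("Split-Size"));  // 134217728
    System.out.println(options.get("SPLIT-SIZE"));  // 134217728
  }
}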
SparkConfParser.java

@@ -20,7 +20,6 @@

 import java.time.Duration;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.function.Function;
 import org.apache.iceberg.Table;
@@ -31,23 +30,24 @@
 import org.apache.spark.sql.RuntimeConfig;
 import org.apache.spark.sql.SparkSession;
 import org.apache.spark.sql.internal.SQLConf;
+import org.apache.spark.sql.util.CaseInsensitiveStringMap;

 class SparkConfParser {

   private final Map<String, String> properties;
   private final RuntimeConfig sessionConf;
-  private final Map<String, String> options;
+  private final CaseInsensitiveStringMap options;

   SparkConfParser() {
     this.properties = ImmutableMap.of();
     this.sessionConf = new RuntimeConfig(SQLConf.get());
-    this.options = ImmutableMap.of();
+    this.options = CaseInsensitiveStringMap.empty();
   }

   SparkConfParser(SparkSession spark, Table table, Map<String, String> options) {
     this.properties = table.properties();
     this.sessionConf = spark.conf();
-    this.options = options;
+    this.options = asCaseInsensitiveStringMap(options);
   }

   public BooleanConfParser booleanConf() {
@@ -70,6 +70,14 @@ public DurationConfParser durationConf() {
     return new DurationConfParser();
   }

+  private static CaseInsensitiveStringMap asCaseInsensitiveStringMap(Map<String, String> map) {
+    if (map instanceof CaseInsensitiveStringMap) {
+      return (CaseInsensitiveStringMap) map;
+    } else {
+      return new CaseInsensitiveStringMap(map);
+    }
+  }
+
   class BooleanConfParser extends ConfParser<BooleanConfParser, Boolean> {
     private Boolean defaultValue;
     private boolean negate = false;
@@ -220,14 +228,10 @@ public ThisT tableProperty(String name) {
     }

     protected T parse(Function<String, T> conversion, T defaultValue) {
-      if (!optionNames.isEmpty()) {
-        for (String optionName : optionNames) {
-          // use lower case comparison as DataSourceOptions.asMap() in Spark 2 returns a lower case
-          // map
-          String optionValue = options.get(optionName.toLowerCase(Locale.ROOT));
-          if (optionValue != null) {
-            return conversion.apply(optionValue);
-          }
+      for (String optionName : optionNames) {
+        String optionValue = options.get(optionName);
+        if (optionValue != null) {
+          return conversion.apply(optionValue);
         }
       }

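The removed comment above explains the old behavior: option names were lower-cased with Locale.ROOT to match the lower-cased map that Spark 2's DataSourceOptions.asMap() produced. With the options field typed as CaseInsensitiveStringMap, a plain options.get(optionName) covers any casing, so both the Locale import and the isEmpty() guard become unnecessary. A standalone sketch of the simplified resolution follows; names, the option key, and the structure are illustrative, not the exact Iceberg API:

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

class OptionResolutionSketch {
  // Return the first matching option value, converted; otherwise the default.
  static <T> T resolve(
      List<String> optionNames,
      CaseInsensitiveStringMap options,
      Function<String, T> conversion,
      T defaultValue) {
    for (String optionName : optionNames) {
      String optionValue = options.get(optionName);
      if (optionValue != null) {
        return conversion.apply(optionValue);
      }
    }
    // The real parse() would also consult the session conf and table properties here.
    return defaultValue;
  }

  public static void main(String[] args) {
    CaseInsensitiveStringMap options =
        new CaseInsensitiveStringMap(Map.of("lookback-windows", "3"));
    int parsed = resolve(List.of("Lookback-Windows"), options, Integer::parseInt, 1);
    System.out.println(parsed);  // 3
  }
}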
SparkReadConf.java

@@ -57,7 +57,6 @@ public class SparkReadConf {
   private final SparkSession spark;
   private final Table table;
   private final String branch;
-  private final Map<String, String> readOptions;
   private final SparkConfParser confParser;

   public SparkReadConf(SparkSession spark, Table table, Map<String, String> readOptions) {
@@ -69,7 +68,6 @@ public SparkReadConf(
     this.spark = spark;
     this.table = table;
     this.branch = branch;
-    this.readOptions = readOptions;
     this.confParser = new SparkConfParser(spark, table, readOptions);
   }

Test file

@@ -79,6 +79,15 @@ public void after() {
     sql("DROP TABLE IF EXISTS %s", tableName);
   }

+  @TestTemplate
+  public void testOptionCaseInsensitive() {
+    Table table = validationCatalog.loadTable(tableIdent);
+    Map<String, String> options = ImmutableMap.of("option", "value");
+    SparkConfParser parser = new SparkConfParser(spark, table, options);
+    String parsedValue = parser.stringConf().option("oPtIoN").parseOptional();
+    assertThat(parsedValue).isEqualTo("value");
+  }
+
   @TestTemplate
   public void testDurationConf() {
     Table table = validationCatalog.loadTable(tableIdent);
