Commit

update branch
laguilarlyft committed Nov 16, 2023
1 parent cf294bd commit b8bdeeb
Showing 5 changed files with 0 additions and 108 deletions.
17 changes: 0 additions & 17 deletions .github/workflows/build_main.yml
@@ -17,27 +17,11 @@
# under the License.
#

<<<<<<<< HEAD:.github/workflows/build_and_test_ansi.yml
name: "Build and test (ANSI)"
========
name: "Build"
>>>>>>>> 17a8e67a6a03fd5a33f4ed078f8325665a0635aa:.github/workflows/build_main.yml

on:
push:
branches:
<<<<<<<< HEAD:.github/workflows/build_and_test_ansi.yml
- branch-3.3

jobs:
call-build-and-test:
name: Call main build
uses: ./.github/workflows/build_and_test.yml
if: github.repository == 'apache/spark'
with:
ansi_enabled: true

========
- '**'

jobs:
Expand All @@ -46,4 +30,3 @@ jobs:
packages: write
name: Run
uses: ./.github/workflows/build_and_test.yml
>>>>>>>> 17a8e67a6a03fd5a33f4ed078f8325665a0635aa:.github/workflows/build_main.yml
17 changes: 0 additions & 17 deletions core/src/main/scala/org/apache/spark/status/protobuf/Utils.scala
@@ -14,23 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.integrationtest

<<<<<<<< HEAD:resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/YuniKornSuite.scala
@YuniKornTag
class YuniKornSuite extends KubernetesSuite {

override protected def setUpTest(): Unit = {
super.setUpTest()
val namespace = sparkAppConf.get("spark.kubernetes.namespace")
sparkAppConf
.set("spark.kubernetes.scheduler.name", "yunikorn")
.set("spark.kubernetes.driver.label.queue", "root." + namespace)
.set("spark.kubernetes.executor.label.queue", "root." + namespace)
.set("spark.kubernetes.driver.annotation.yunikorn.apache.org/app-id", "{{APP_ID}}")
.set("spark.kubernetes.executor.annotation.yunikorn.apache.org/app-id", "{{APP_ID}}")
}
========
package org.apache.spark.status.protobuf

import java.util.{Map => JMap}
@@ -59,5 +43,4 @@ private[protobuf] object Utils {
putAllFunc(input)
}
}
>>>>>>>> 17a8e67a6a03fd5a33f4ed078f8325665a0635aa:core/src/main/scala/org/apache/spark/status/protobuf/Utils.scala
}
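
Note: the side kept by this commit is the protobuf Utils helper; the context lines above show a null-guarded putAllFunc call. Below is a minimal sketch of how a helper in that shape can be used, assuming a setJMapField-style signature and a hypothetical builder (neither is confirmed by this diff):

import java.util.{Map => JMap}

object ExampleUtils {
  // Null-guarded map setter in the shape visible in the diff: the callback
  // runs only when the input map is present and non-empty.
  def setJMapField[K, V](input: JMap[K, V], putAllFunc: JMap[K, V] => Any): Unit = {
    if (input != null && !input.isEmpty) {
      putAllFunc(input)
    }
  }
}

// Hypothetical usage with a protobuf-style builder exposing putAllAttributes:
//   ExampleUtils.setJMapField(attributes, builder.putAllAttributes)
// A null or empty `attributes` map is simply skipped instead of throwing.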
19 changes: 0 additions & 19 deletions sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/SupportsPushDownOffset.java
@@ -15,14 +15,6 @@
* limitations under the License.
*/

<<<<<<<< HEAD:sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/RowLevelOperationBuilder.java
package org.apache.spark.sql.connector.write;

import org.apache.spark.annotation.Experimental;

/**
* An interface for building a {@link RowLevelOperation}.
========
package org.apache.spark.sql.connector.read;

import org.apache.spark.annotation.Evolving;
Expand All @@ -31,25 +23,14 @@
* A mix-in interface for {@link ScanBuilder}. Data sources can implement this interface to
* push down OFFSET. We can push down OFFSET with many other operations if they follow the
* operator order we defined in {@link ScanBuilder}'s class doc.
>>>>>>>> 17a8e67a6a03fd5a33f4ed078f8325665a0635aa:sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/SupportsPushDownOffset.java
*
* @since 3.4.0
*/
<<<<<<<< HEAD:sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/RowLevelOperationBuilder.java
@Experimental
public interface RowLevelOperationBuilder {
/**
* Returns a {@link RowLevelOperation} that controls how Spark rewrites data
* for DELETE, UPDATE, MERGE commands.
*/
RowLevelOperation build();
========
@Evolving
public interface SupportsPushDownOffset extends ScanBuilder {

/**
* Pushes down OFFSET to the data source.
*/
boolean pushOffset(int offset);
>>>>>>>> 17a8e67a6a03fd5a33f4ed078f8325665a0635aa:sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/SupportsPushDownOffset.java
}
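
Note: the side kept here is SupportsPushDownOffset, which lets a ScanBuilder accept an OFFSET from Spark. A minimal sketch of a source implementing it; the class name and the stub Scan are illustrative assumptions:

import org.apache.spark.sql.connector.read.{Scan, ScanBuilder, SupportsPushDownOffset}
import org.apache.spark.sql.types.StructType

class ExampleScanBuilder(schema: StructType) extends ScanBuilder with SupportsPushDownOffset {
  private var pushedOffset = 0

  // Returning true tells Spark the source will skip the first `offset` rows
  // itself, so Spark can drop its own OFFSET operator from the plan.
  override def pushOffset(offset: Int): Boolean = {
    pushedOffset = offset
    true
  }

  override def build(): Scan = new Scan {
    override def readSchema(): StructType = schema
    // A real scan would begin reading `pushedOffset` rows into the data.
  }
}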
25 changes: 0 additions & 25 deletions sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/colstats/HistogramBin.java
@@ -15,39 +15,15 @@
* limitations under the License.
*/

<<<<<<<< HEAD:sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/RowLevelOperationInfo.java
package org.apache.spark.sql.connector.write;

import org.apache.spark.annotation.Experimental;
import org.apache.spark.sql.connector.write.RowLevelOperation.Command;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

/**
* An interface with logical information for a row-level operation such as DELETE, UPDATE, MERGE.
========
package org.apache.spark.sql.connector.read.colstats;

import org.apache.spark.annotation.Evolving;

/**
* An interface to represent a bin in an equi-height histogram.
>>>>>>>> 17a8e67a6a03fd5a33f4ed078f8325665a0635aa:sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/colstats/HistogramBin.java
*
* @since 3.4.0
*/
<<<<<<<< HEAD:sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/RowLevelOperationInfo.java
@Experimental
public interface RowLevelOperationInfo {
/**
* Returns options that the user specified when performing the row-level operation.
*/
CaseInsensitiveStringMap options();

/**
* Returns the row-level SQL command (e.g. DELETE, UPDATE, MERGE).
*/
Command command();
========
@Evolving
public interface HistogramBin {
/**
Expand All @@ -64,5 +40,4 @@ public interface HistogramBin {
* @return approximate number of distinct values in this bin
*/
long ndv();
>>>>>>>> 17a8e67a6a03fd5a33f4ed078f8325665a0635aa:sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/colstats/HistogramBin.java
}
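
Note: the side kept here is HistogramBin, one bin of an equi-height column histogram. A minimal sketch of an implementation, assuming the interface's other accessors are lo() and hi() for the bin's value range (only ndv() is visible in this diff):

import org.apache.spark.sql.connector.read.colstats.HistogramBin

// Illustrative bin: values fall in [lower, upper] with roughly `distinct`
// distinct values, as reported to Spark's cost-based optimizer.
class ExampleBin(lower: Double, upper: Double, distinct: Long) extends HistogramBin {
  override def lo(): Double = lower
  override def hi(): Double = upper
  override def ndv(): Long = distinct
}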
30 changes: 0 additions & 30 deletions sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsDelta.java
@@ -15,17 +15,6 @@
* limitations under the License.
*/

<<<<<<<< HEAD:sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Cast.java
package org.apache.spark.sql.connector.expressions;

import java.io.Serializable;

import org.apache.spark.annotation.Evolving;
import org.apache.spark.sql.types.DataType;

/**
* Represents a cast expression in the public logical expression API.
========
package org.apache.spark.sql.connector.write;

import org.apache.spark.annotation.Experimental;
Expand All @@ -34,27 +23,9 @@
/**
* A mix-in interface for {@link RowLevelOperation}. Data sources can implement this interface
* to indicate they support handling deltas of rows.
>>>>>>>> 17a8e67a6a03fd5a33f4ed078f8325665a0635aa:sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsDelta.java
*
* @since 3.4.0
*/
<<<<<<<< HEAD:sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Cast.java
@Evolving
public class Cast implements Expression, Serializable {
private Expression expression;
private DataType dataType;

public Cast(Expression expression, DataType dataType) {
this.expression = expression;
this.dataType = dataType;
}

public Expression expression() { return expression; }
public DataType dataType() { return dataType; }

@Override
public Expression[] children() { return new Expression[]{ expression() }; }
========
@Experimental
public interface SupportsDelta extends RowLevelOperation {
@Override
Expand All @@ -64,5 +35,4 @@ public interface SupportsDelta extends RowLevelOperation {
* Returns the row ID column references that should be used for row equality.
*/
NamedReference[] rowId();
>>>>>>>> 17a8e67a6a03fd5a33f4ed078f8325665a0635aa:sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/SupportsDelta.java
}
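
Note: the side kept here is SupportsDelta, which lets a RowLevelOperation receive per-row deltas (inserts, updates, deletes) keyed by a row ID instead of rewriting whole data files. A skeletal sketch; the `id` column is hypothetical and the scan/write plumbing is elided with ???:

import org.apache.spark.sql.connector.expressions.{Expressions, NamedReference}
import org.apache.spark.sql.connector.read.ScanBuilder
import org.apache.spark.sql.connector.write.{DeltaWriteBuilder, LogicalWriteInfo, SupportsDelta}
import org.apache.spark.sql.connector.write.RowLevelOperation.Command
import org.apache.spark.sql.util.CaseInsensitiveStringMap

// Rows are matched by the `id` column, so DELETE/UPDATE/MERGE can be encoded
// as a stream of keyed changes rather than rewritten data files.
class ExampleDeltaOperation(cmd: Command) extends SupportsDelta {
  override def command(): Command = cmd
  override def rowId(): Array[NamedReference] = Array(Expressions.column("id"))
  override def newScanBuilder(options: CaseInsensitiveStringMap): ScanBuilder = ???
  override def newWriteBuilder(info: LogicalWriteInfo): DeltaWriteBuilder = ???
}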
