Commit 5cace96

Merge branch 'master' into python-init-state-impl

jingz-db authored Oct 2, 2024
2 parents 6fb43b5 + d97acc1 commit 5cace96
Showing 613 changed files with 27,117 additions and 20,887 deletions.
2 changes: 2 additions & 0 deletions .asf.yaml
@@ -31,6 +31,8 @@ github:
merge: false
squash: true
rebase: true
ghp_branch: master
ghp_path: /docs

notifications:
pullrequests: [email protected]
@@ -17,7 +17,7 @@
# under the License.
#

name: "Build / Maven (master, Scala 2.13, Hadoop 3, JDK 21, macos-14)"
name: "Build / Maven (master, Scala 2.13, Hadoop 3, JDK 21, MacOS-15)"

on:
schedule:
@@ -32,7 +32,7 @@ jobs:
if: github.repository == 'apache/spark'
with:
java: 21
os: macos-14
os: macos-15
envs: >-
{
"OBJC_DISABLE_INITIALIZE_FORK_SAFETY": "YES"
2 changes: 1 addition & 1 deletion .github/workflows/build_python_connect.yml
@@ -71,7 +71,7 @@ jobs:
python packaging/connect/setup.py sdist
cd dist
pip install pyspark*connect-*.tar.gz
pip install 'six==1.16.0' 'pandas<=2.2.2' scipy 'plotly>=4.8' 'mlflow>=2.8.1' coverage matplotlib openpyxl 'memory-profiler>=0.61.0' 'scikit-learn>=1.3.2' 'graphviz==0.20.3' torch torchvision torcheval deepspeed unittest-xml-reporting
pip install 'six==1.16.0' 'pandas<=2.2.2' scipy 'plotly>=4.8' 'mlflow>=2.8.1' coverage matplotlib openpyxl 'memory-profiler>=0.61.0' 'scikit-learn>=1.3.2' 'graphviz==0.20.3' torch torchvision torcheval deepspeed unittest-xml-reporting 'plotly>=4.8'
- name: Run tests
env:
SPARK_TESTING: 1
2 changes: 1 addition & 1 deletion .github/workflows/maven_test.yml
@@ -40,7 +40,7 @@ on:
description: OS to run this build.
required: false
type: string
default: ubuntu-22.04
default: ubuntu-latest
envs:
description: Additional environment variables to set when running the tests. Should be in JSON format.
required: false
97 changes: 97 additions & 0 deletions .github/workflows/pages.yml
@@ -0,0 +1,97 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

name: GitHub Pages deployment

on:
push:
branches:
- master

concurrency:
group: 'docs preview'
cancel-in-progress: false

jobs:
docs:
name: Build and deploy documentation
runs-on: ubuntu-latest
permissions:
id-token: write
pages: write
environment:
name: github-pages # https://github.com/actions/deploy-pages/issues/271
env:
SPARK_TESTING: 1 # Reduce some noise in the logs
RELEASE_VERSION: 'In-Progress'
steps:
- name: Checkout Spark repository
uses: actions/checkout@v4
with:
repository: apache/spark
ref: 'master'
- name: Install Java 17
uses: actions/setup-java@v4
with:
distribution: zulu
java-version: 17
- name: Install Python 3.9
uses: actions/setup-python@v5
with:
python-version: '3.9'
architecture: x64
cache: 'pip'
- name: Install Python dependencies
run: |
pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
ipython ipython_genutils sphinx_plotly_directive 'numpy>=1.20.0' pyarrow 'pandas==2.2.3' 'plotly>=4.8' 'docutils<0.18.0' \
'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.9.1' \
'pandas-stubs==1.2.0.53' 'grpcio==1.62.0' 'grpcio-status==1.62.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5'
- name: Install Ruby for documentation generation
uses: ruby/setup-ruby@v1
with:
ruby-version: '3.3'
bundler-cache: true
- name: Install Pandoc
run: |
sudo apt-get update -y
sudo apt-get install pandoc
- name: Install dependencies for documentation generation
run: |
cd docs
gem install bundler -v 2.4.22 -n /usr/local/bin
bundle install --retry=100
- name: Run documentation build
run: |
sed -i".tmp1" 's/SPARK_VERSION:.*$/SPARK_VERSION: '"$RELEASE_VERSION"'/g' docs/_config.yml
sed -i".tmp2" 's/SPARK_VERSION_SHORT:.*$/SPARK_VERSION_SHORT: '"$RELEASE_VERSION"'/g' docs/_config.yml
sed -i".tmp3" "s/'facetFilters':.*$/'facetFilters': [\"version:$RELEASE_VERSION\"]/g" docs/_config.yml
sed -i".tmp4" 's/__version__: str = .*$/__version__: str = "'"$RELEASE_VERSION"'"/' python/pyspark/version.py
cd docs
SKIP_RDOC=1 bundle exec jekyll build
- name: Setup Pages
uses: actions/configure-pages@v5
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: 'docs/_site'
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4
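This new workflow publishes the documentation on every push to master through the OIDC-based Pages flow (the id-token: write and pages: write permissions are what actions/configure-pages, actions/upload-pages-artifact, and actions/deploy-pages require). The sed calls pass an explicit backup suffix (-i".tmp1" and so on), presumably to keep the in-place edits portable across GNU and BSD sed.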
Empty file added .nojekyll
4 changes: 2 additions & 2 deletions assembly/pom.xml
@@ -123,7 +123,7 @@
<!--
Because we don't shade dependencies anymore, we need to restore Guava to compile scope so
that the libraries Spark depend on have it available. We'll package the version that Spark
uses (14.0.1) which is not the same as Hadoop dependencies, but works.
uses which is not the same as Hadoop dependencies, but works.
-->
<dependency>
<groupId>com.google.guava</groupId>
@@ -200,7 +200,7 @@
<configuration>
<executable>cp</executable>
<arguments>
<argument>${basedir}/../connector/connect/client/jvm/target/spark-connect-client-jvm_${scala.binary.version}-${version}.jar</argument>
<argument>${basedir}/../connector/connect/client/jvm/target/spark-connect-client-jvm_${scala.binary.version}-${project.version}.jar</argument>
<argument>${basedir}/target/scala-${scala.binary.version}/jars/connect-repl</argument>
</arguments>
</configuration>
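The change from ${version} to ${project.version} above swaps Maven's deprecated bare ${version} shorthand for the fully qualified project property; both resolve to the same value, but the prefixed form is unambiguous and avoids Maven's deprecation warning.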
@@ -255,7 +255,8 @@ public Iterator<T> iterator() {
iteratorTracker.add(new WeakReference<>(it));
return it;
} catch (Exception e) {
throw Throwables.propagate(e);
Throwables.throwIfUnchecked(e);
throw new RuntimeException(e);
}
}
};
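The change above is the first of many in this commit that replace Guava's deprecated Throwables.propagate(e) with its documented two-line substitute. A minimal, self-contained sketch of the pattern (the class and helper below are illustrative, not from the Spark sources):

import com.google.common.base.Throwables;
import java.util.concurrent.Callable;

public class PropagateExample {
    // Equivalent of the old `throw Throwables.propagate(e);`
    static <T> T callUnchecked(Callable<T> body) {
        try {
            return body.call();
        } catch (Exception e) {
            Throwables.throwIfUnchecked(e); // rethrows RuntimeException (and Error) unchanged
            throw new RuntimeException(e);  // wraps checked exceptions
        }
    }

    public static void main(String[] args) {
        System.out.println(callUnchecked(() -> "ok"));
    }
}

The net behavior matches propagate: unchecked exceptions pass through untouched, and checked ones are wrapped in a RuntimeException.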
@@ -127,7 +127,7 @@ public boolean hasNext() {
try {
close();
} catch (IOException ioe) {
throw Throwables.propagate(ioe);
throw new RuntimeException(ioe);
}
}
return next != null;
@@ -151,7 +151,8 @@ public T next() {
next = null;
return ret;
} catch (Exception e) {
throw Throwables.propagate(e);
Throwables.throwIfUnchecked(e);
throw new RuntimeException(e);
}
}

@@ -287,7 +287,8 @@ public Iterator<T> iterator() {
iteratorTracker.add(new WeakReference<>(it));
return it;
} catch (Exception e) {
throw Throwables.propagate(e);
Throwables.throwIfUnchecked(e);
throw new RuntimeException(e);
}
}
};
@@ -113,7 +113,7 @@ public boolean hasNext() {
try {
close();
} catch (IOException ioe) {
throw Throwables.propagate(ioe);
throw new RuntimeException(ioe);
}
}
return next != null;
@@ -137,7 +137,8 @@ public T next() {
next = null;
return ret;
} catch (Exception e) {
throw Throwables.propagate(e);
Throwables.throwIfUnchecked(e);
throw new RuntimeException(e);
}
}

@@ -290,9 +290,11 @@ public void onFailure(Throwable e) {
try {
return result.get(timeoutMs, TimeUnit.MILLISECONDS);
} catch (ExecutionException e) {
throw Throwables.propagate(e.getCause());
Throwables.throwIfUnchecked(e.getCause());
throw new RuntimeException(e.getCause());
} catch (Exception e) {
throw Throwables.propagate(e);
Throwables.throwIfUnchecked(e);
throw new RuntimeException(e);
}
}
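Note that the ExecutionException branch above unwraps e.getCause() before applying the same pattern, preserving the old propagate(e.getCause()) behavior: the failure worth surfacing is the one raised inside the future, not the ExecutionException wrapper.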

@@ -342,7 +342,8 @@ public void operationComplete(final Future<Channel> handshakeFuture) {
logger.error("Exception while bootstrapping client after {} ms", e,
MDC.of(LogKeys.BOOTSTRAP_TIME$.MODULE$, bootstrapTimeMs));
client.close();
throw Throwables.propagate(e);
Throwables.throwIfUnchecked(e);
throw new RuntimeException(e);
}
long postBootstrap = System.nanoTime();

@@ -22,7 +22,6 @@
import java.security.GeneralSecurityException;
import java.util.concurrent.TimeoutException;

import com.google.common.base.Throwables;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
@@ -80,7 +79,7 @@ public void doBootstrap(TransportClient client, Channel channel) {
doSparkAuth(client, channel);
client.setClientId(appId);
} catch (GeneralSecurityException | IOException e) {
throw Throwables.propagate(e);
throw new RuntimeException(e);
} catch (RuntimeException e) {
// There isn't a good exception that can be caught here to know whether it's really
// OK to switch back to SASL (because the server doesn't speak the new protocol). So
@@ -132,7 +132,8 @@ protected boolean doAuthChallenge(
try {
engine.close();
} catch (Exception e) {
throw Throwables.propagate(e);
Throwables.throwIfUnchecked(e);
throw new RuntimeException(e);
}
}
}
@@ -29,7 +29,6 @@
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslException;

import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;

import org.apache.spark.internal.SparkLogger;
@@ -62,7 +61,7 @@ public SparkSaslClient(String secretKeyId, SecretKeyHolder secretKeyHolder, bool
this.saslClient = Sasl.createSaslClient(new String[] { DIGEST }, null, null, DEFAULT_REALM,
saslProps, new ClientCallbackHandler());
} catch (SaslException e) {
throw Throwables.propagate(e);
throw new RuntimeException(e);
}
}

@@ -72,7 +71,7 @@ public synchronized byte[] firstToken() {
try {
return saslClient.evaluateChallenge(new byte[0]);
} catch (SaslException e) {
throw Throwables.propagate(e);
throw new RuntimeException(e);
}
} else {
return new byte[0];
@@ -98,7 +97,7 @@ public synchronized byte[] response(byte[] token) {
try {
return saslClient != null ? saslClient.evaluateChallenge(token) : new byte[0];
} catch (SaslException e) {
throw Throwables.propagate(e);
throw new RuntimeException(e);
}
}

@@ -31,7 +31,6 @@
import java.util.Map;

import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
@@ -94,7 +93,7 @@ public SparkSaslServer(
this.saslServer = Sasl.createSaslServer(DIGEST, null, DEFAULT_REALM, saslProps,
new DigestCallbackHandler());
} catch (SaslException e) {
throw Throwables.propagate(e);
throw new RuntimeException(e);
}
}

@@ -119,7 +118,7 @@ public synchronized byte[] response(byte[] token) {
try {
return saslServer != null ? saslServer.evaluateResponse(token) : new byte[0];
} catch (SaslException e) {
throw Throwables.propagate(e);
throw new RuntimeException(e);
}
}

@@ -17,8 +17,6 @@

package org.apache.spark.network.shuffledb;

import com.google.common.base.Throwables;

import java.io.IOException;
import java.util.Map;
import java.util.NoSuchElementException;
@@ -47,7 +45,7 @@ public boolean hasNext() {
try {
close();
} catch (IOException ioe) {
throw Throwables.propagate(ioe);
throw new RuntimeException(ioe);
}
}
return next != null;
@@ -19,7 +19,6 @@

import java.io.IOException;

import com.google.common.base.Throwables;
import org.rocksdb.RocksDBException;

/**
@@ -37,7 +36,7 @@ public void put(byte[] key, byte[] value) {
try {
db.put(key, value);
} catch (RocksDBException e) {
throw Throwables.propagate(e);
throw new RuntimeException(e);
}
}

@@ -46,7 +45,7 @@ public byte[] get(byte[] key) {
try {
return db.get(key);
} catch (RocksDBException e) {
throw Throwables.propagate(e);
throw new RuntimeException(e);
}
}

@@ -55,7 +54,7 @@ public void delete(byte[] key) {
try {
db.delete(key);
} catch (RocksDBException e) {
throw Throwables.propagate(e);
throw new RuntimeException(e);
}
}

