diff --git a/lakesoul-common/pom.xml b/lakesoul-common/pom.xml
index d97ba86fd..46cf9a599 100644
--- a/lakesoul-common/pom.xml
+++ b/lakesoul-common/pom.xml
@@ -12,12 +12,12 @@ SPDX-License-Identifier: Apache-2.0
     <artifactId>lakesoul-parent</artifactId>
     <groupId>com.dmetasoul</groupId>
-    <version>2.4.0</version>
+    <version>2.4.1</version>
     <modelVersion>4.0.0</modelVersion>
     <artifactId>lakesoul-common</artifactId>
-    <version>2.4.0</version>
+    <version>2.4.1</version>
diff --git a/lakesoul-flink/pom.xml b/lakesoul-flink/pom.xml
index ce7be0ddf..eb825c230 100644
--- a/lakesoul-flink/pom.xml
+++ b/lakesoul-flink/pom.xml
@@ -12,12 +12,12 @@ SPDX-License-Identifier: Apache-2.0
     <artifactId>lakesoul-parent</artifactId>
     <groupId>com.dmetasoul</groupId>
-    <version>2.4.0</version>
+    <version>2.4.1</version>
     <modelVersion>4.0.0</modelVersion>
     <artifactId>lakesoul-flink</artifactId>
-    <version>2.4.0-flink-1.17</version>
+    <version>2.4.1-flink-1.17</version>
1.17.1
2.12
@@ -30,7 +30,7 @@ SPDX-License-Identifier: Apache-2.0
         <groupId>com.dmetasoul</groupId>
         <artifactId>lakesoul-common</artifactId>
-        <version>2.4.0</version>
+        <version>2.4.1</version>
         <groupId>org.slf4j</groupId>
@@ -41,7 +41,7 @@ SPDX-License-Identifier: Apache-2.0
         <groupId>com.dmetasoul</groupId>
         <artifactId>lakesoul-io-java</artifactId>
-        <version>2.4.0</version>
+        <version>2.4.1</version>
         <groupId>org.slf4j</groupId>
diff --git a/lakesoul-presto/pom.xml b/lakesoul-presto/pom.xml
index c3c3c5c07..6dd1fea18 100644
--- a/lakesoul-presto/pom.xml
+++ b/lakesoul-presto/pom.xml
@@ -12,11 +12,11 @@
     <groupId>com.dmetasoul</groupId>
     <artifactId>lakesoul-parent</artifactId>
-    <version>2.4.0</version>
+    <version>2.4.1</version>
     <artifactId>lakesoul-presto</artifactId>
-    <version>2.4.0-presto-0.28</version>
+    <version>2.4.1-presto-0.28</version>
8
@@ -33,12 +33,12 @@
         <groupId>com.dmetasoul</groupId>
         <artifactId>lakesoul-common</artifactId>
-        <version>2.4.0</version>
+        <version>2.4.1</version>
         <groupId>com.dmetasoul</groupId>
         <artifactId>lakesoul-io-java</artifactId>
-        <version>2.4.0</version>
+        <version>2.4.1</version>
         <groupId>org.slf4j</groupId>
diff --git a/lakesoul-spark/pom.xml b/lakesoul-spark/pom.xml
index a0d267f86..53cdb898b 100644
--- a/lakesoul-spark/pom.xml
+++ b/lakesoul-spark/pom.xml
@@ -12,12 +12,12 @@ SPDX-License-Identifier: Apache-2.0
     <artifactId>lakesoul-parent</artifactId>
     <groupId>com.dmetasoul</groupId>
-    <version>2.4.0</version>
+    <version>2.4.1</version>
     <modelVersion>4.0.0</modelVersion>
     <artifactId>lakesoul-spark</artifactId>
-    <version>2.4.0-spark-3.3</version>
+    <version>2.4.1-spark-3.3</version>
2.12.10
@@ -37,12 +37,12 @@ SPDX-License-Identifier: Apache-2.0
         <groupId>com.dmetasoul</groupId>
         <artifactId>lakesoul-common</artifactId>
-        <version>2.4.0</version>
+        <version>2.4.1</version>
         <groupId>com.dmetasoul</groupId>
         <artifactId>lakesoul-io-java</artifactId>
-        <version>2.4.0</version>
+        <version>2.4.1</version>
         <groupId>org.slf4j</groupId>
diff --git a/native-io/lakesoul-io-java/pom.xml b/native-io/lakesoul-io-java/pom.xml
index 82a94bdea..b646d643a 100644
--- a/native-io/lakesoul-io-java/pom.xml
+++ b/native-io/lakesoul-io-java/pom.xml
@@ -12,13 +12,13 @@ SPDX-License-Identifier: Apache-2.0
     <artifactId>lakesoul-parent</artifactId>
     <groupId>com.dmetasoul</groupId>
-    <version>2.4.0</version>
+    <version>2.4.1</version>
     <relativePath>../../pom.xml</relativePath>
     <modelVersion>4.0.0</modelVersion>
     <artifactId>lakesoul-io-java</artifactId>
-    <version>2.4.0</version>
+    <version>2.4.1</version>
     <packaging>jar</packaging>
diff --git a/pom.xml b/pom.xml
index 65b0984ea..ee1d44b68 100644
--- a/pom.xml
+++ b/pom.xml
@@ -9,7 +9,7 @@ SPDX-License-Identifier: Apache-2.0
     <modelVersion>4.0.0</modelVersion>
     <groupId>com.dmetasoul</groupId>
     <artifactId>lakesoul-parent</artifactId>
-    <version>2.4.0</version>
+    <version>2.4.1</version>
     2022
     <module>lakesoul-common</module>
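A bump like this touches every module POM plus every doc page, so it is easy to leave a stale reference behind. A quick sanity check (a sketch, assuming GNU grep run from the repository root; any remaining matches should only be historical release notes):

```bash
# Look for leftover 2.4.0 references in POMs and docs after the bump.
grep -rn --include=pom.xml '2\.4\.0' .
grep -rn '2\.4\.0' website/docs website/i18n
```

For future releases, the Maven Versions Plugin (`mvn versions:set -DnewVersion=2.4.1`) can rewrite the parent version and the modules inheriting it in one pass, though the engine-suffixed module versions such as `2.4.1-spark-3.3` would still need a separate update.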
diff --git a/website/docs/01-Getting Started/01-setup-local-env.md b/website/docs/01-Getting Started/01-setup-local-env.md
index 541226a19..8805c68a7 100644
--- a/website/docs/01-Getting Started/01-setup-local-env.md
+++ b/website/docs/01-Getting Started/01-setup-local-env.md
@@ -47,7 +47,7 @@ After unpacking spark package, you could find LakeSoul distribution jar from htt
wget https://dmetasoul-bucket.obs.cn-southwest-2.myhuaweicloud.com/releases/spark/spark-3.3.2-bin-hadoop-3.tgz
tar xf spark-3.3.2-bin-hadoop-3.tgz
export SPARK_HOME=${PWD}/spark-3.3.2-bin-hadoop3
-wget https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-spark-2.4.0-spark-3.3.jar -P $SPARK_HOME/jars
+wget https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-spark-2.4.1-spark-3.3.jar -P $SPARK_HOME/jars
```
:::tip
diff --git a/website/docs/01-Getting Started/02-docker-compose.mdx b/website/docs/01-Getting Started/02-docker-compose.mdx
index 61449e395..cc677d3c5 100644
--- a/website/docs/01-Getting Started/02-docker-compose.mdx
+++ b/website/docs/01-Getting Started/02-docker-compose.mdx
@@ -40,7 +40,7 @@ docker run --net lakesoul-docker-compose-env_default --rm -ti \
-v $(pwd)/lakesoul.properties:/opt/spark/work-dir/lakesoul.properties \
--env lakesoul_home=/opt/spark/work-dir/lakesoul.properties bitnami/spark:3.3.1 \
spark-shell \
- --packages com.dmetasoul:lakesoul-spark:2.4.0-spark-3.3 \
+ --packages com.dmetasoul:lakesoul-spark:2.4.1-spark-3.3 \
--conf spark.sql.extensions=com.dmetasoul.lakesoul.sql.LakeSoulSparkSessionExtension \
--conf spark.sql.catalog.lakesoul=org.apache.spark.sql.lakesoul.catalog.LakeSoulCatalog \
--conf spark.sql.defaultCatalog=lakesoul \
diff --git a/website/docs/03-Usage Docs/02-setup-spark.md b/website/docs/03-Usage Docs/02-setup-spark.md
index 9d23c0dd1..e6d939dd5 100644
--- a/website/docs/03-Usage Docs/02-setup-spark.md
+++ b/website/docs/03-Usage Docs/02-setup-spark.md
@@ -16,14 +16,14 @@ To use `spark-shell`, `pyspark` or `spark-sql` shells, you should include LakeSo
#### Use Maven Coordinates via --packages
```bash
-spark-shell --packages com.dmetasoul:lakesoul-spark:2.4.0-spark-3.3
+spark-shell --packages com.dmetasoul:lakesoul-spark:2.4.1-spark-3.3
```
#### Use Local Packages
You can find the LakeSoul packages on our release page: [Releases](https://github.com/lakesoul-io/LakeSoul/releases).
Download the jar file and pass it to `spark-submit`.
```bash
-spark-submit --jars "lakesoul-spark-2.4.0-spark-3.3.jar"
+spark-submit --jars "lakesoul-spark-2.4.1-spark-3.3.jar"
```
Or you could put the jar directly into `$SPARK_HOME/jars`.
@@ -34,7 +34,7 @@ Include maven dependencies in your project:
     <groupId>com.dmetasoul</groupId>
     <artifactId>lakesoul-spark</artifactId>
-    <version>2.4.0-spark-3.3</version>
+    <version>2.4.1-spark-3.3</version>
```
@@ -93,7 +93,7 @@ spark.sql.sources.default lakesoul
## Setup Flink Project or Job
### Required Flink Version
Since 2.4.0, Flink version 1.17 is supported.
### Setup Metadata Database Connection for Flink
@@ -144,7 +144,7 @@ taskmanager.memory.task.off-heap.size: 3000m
:::
### Add LakeSoul Jar to Flink's directory
-Download LakeSoul Flink Jar from: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-flink-2.4.0-flink-1.17.jar
+Download LakeSoul Flink Jar from: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-flink-2.4.1-flink-1.17.jar
And put the jar file under `$FLINK_HOME/lib`. After this, you can start a Flink session cluster or application as usual.
@@ -155,6 +155,6 @@ Add the following to your project's pom.xml
     <groupId>com.dmetasoul</groupId>
     <artifactId>lakesoul-flink</artifactId>
-    <version>2.4.0-flink-1.17</version>
+    <version>2.4.1-flink-1.17</version>
```
\ No newline at end of file
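Taken together, the deployment paths this page describes boil down to two downloads (a sketch, assuming `wget` is available and `SPARK_HOME`/`FLINK_HOME` point at Spark 3.3 and Flink 1.17 installations; URLs as in the hunks above):

```bash
# Spark: drop the release jar straight onto Spark's classpath.
wget https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-spark-2.4.1-spark-3.3.jar -P $SPARK_HOME/jars
# Flink: same idea, into Flink's lib directory.
wget https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-flink-2.4.1-flink-1.17.jar -P $FLINK_HOME/lib
```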
diff --git a/website/docs/03-Usage Docs/05-flink-cdc-sync.md b/website/docs/03-Usage Docs/05-flink-cdc-sync.md
index 25cea4f38..c4a3403ac 100644
--- a/website/docs/03-Usage Docs/05-flink-cdc-sync.md
+++ b/website/docs/03-Usage Docs/05-flink-cdc-sync.md
@@ -21,7 +21,7 @@ In the Stream API, the main functions of LakeSoul Sink are:
## How to use the command line
### 1. Download LakeSoul Flink Jar
-It can be downloaded from the LakeSoul Release page: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-flink-2.4.0-flink-1.17.jar.
+It can be downloaded from the LakeSoul Release page: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-flink-2.4.1-flink-1.17.jar.
The currently supported Flink version is 1.17.
@@ -60,7 +60,7 @@ export LAKESOUL_PG_PASSWORD=root
#### 2.2 Start sync job
```bash
bin/flink run -c org.apache.flink.lakesoul.entry.MysqlCdc \
- lakesoul-flink-2.4.0-flink-1.17.jar \
+ lakesoul-flink-2.4.1-flink-1.17.jar \
--source_db.host localhost \
--source_db.port 3306 \
--source_db.db_name default \
@@ -79,7 +79,7 @@ Description of required parameters:
| Parameter | Meaning | Value Description |
|----------------|------------------------------------|-------------------------------------------- |
| -c | The task runs the main function entry class | org.apache.flink.lakesoul.entry.MysqlCdc |
-| Main package | Task running jar | lakesoul-flink-2.4.0-flink-1.17.jar |
+| Main package | Task running jar | lakesoul-flink-2.4.1-flink-1.17.jar |
| --source_db.host | The address of the MySQL database | |
| --source_db.port | MySQL database port | |
| --source_db.user | MySQL database username | |
diff --git a/website/docs/03-Usage Docs/06-flink-lakesoul-connector.md b/website/docs/03-Usage Docs/06-flink-lakesoul-connector.md
index 17dfb0a90..61fa77c08 100644
--- a/website/docs/03-Usage Docs/06-flink-lakesoul-connector.md
+++ b/website/docs/03-Usage Docs/06-flink-lakesoul-connector.md
@@ -9,7 +9,7 @@ SPDX-License-Identifier: Apache-2.0
:::tip
Since 2.3.0
-LakeSoul with version 2.3.0 is targeting Flink 1.14 while 2.4.0 is targeting Flink 1.17。
+LakeSoul 2.3.0 targets Flink 1.14, while 2.4.0 and later target Flink 1.17.
:::
LakeSoul provides a Flink Connector implementing the Dynamic Table interface, through which developers can use Flink's DataStream API, Table API or SQL to read and write LakeSoul data; both streaming and batch modes are supported for reads and writes. Streaming reads and writes in Flink both support Flink Changelog Stream semantics.
@@ -18,14 +18,14 @@ LakeSoul provides Flink Connector which implements the Dynamic Table interface,
To set up the Flink environment, please refer to [Setup Spark/Flink Job/Project](../03-Usage%20Docs/02-setup-spark.md)
-Introduce LakeSoul dependency: package and compile the lakesoul-flink folder to get lakesoul-flink-2.4.0-flink-1.17.jar.
+Introduce LakeSoul dependency: package and compile the lakesoul-flink folder to get lakesoul-flink-2.4.1-flink-1.17.jar.
To create LakeSoul tables with Flink, it is recommended to use the Flink SQL Client, which supports operating on LakeSoul tables directly with Flink SQL commands. In this document, Flink SQL statements are entered directly in the Flink SQL Client CLI, whereas the Table API must be used from a Java project.
Switch to the Flink folder and run the following command to start the SQL Client.
```bash
# Start Flink SQL Client
-bin/sql-client.sh embedded -j lakesoul-flink-2.4.0-flink-1.14.jar
+bin/sql-client.sh embedded -j lakesoul-flink-2.4.1-flink-1.14.jar
```
## 2. DDL
diff --git a/website/docs/03-Usage Docs/09-clean-redundant-data.md b/website/docs/03-Usage Docs/09-clean-redundant-data.md
index 1fc007dfb..bd67ebb77 100644
--- a/website/docs/03-Usage Docs/09-clean-redundant-data.md
+++ b/website/docs/03-Usage Docs/09-clean-redundant-data.md
@@ -88,7 +88,7 @@ Start the Spark cleanup command locally:
--executor-cores 1 \
--num-executors 20 \
--class com.dmetasoul.lakesoul.spark.clean.CleanExpiredData \
- jars/lakesoul-spark-2.4.0-spark-3.3.jar
+ jars/lakesoul-spark-2.4.1-spark-3.3.jar
```
:::tip
diff --git a/website/docs/03-Usage Docs/10-setup-presto.md b/website/docs/03-Usage Docs/10-setup-presto.md
index e257ae0c3..3a1b92648 100644
--- a/website/docs/03-Usage Docs/10-setup-presto.md
+++ b/website/docs/03-Usage Docs/10-setup-presto.md
@@ -7,7 +7,7 @@ Available since version 2.4.
LakeSoul implements a Presto Connector that currently supports reading tables, both without primary keys and with primary keys (including [CDC format tables](04-cdc-ingestion-table.mdx)). Merge on Read is executed automatically during reads to return the latest data.
## Download Jar package
-You can download the Presto package from the Github Release page: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-presto-2.4.0-presto-0.28.jar
+You can download the Presto package from the Github Release page: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-presto-2.4.1-presto-0.28.jar
## Configure Presto
Create a new `lakesoul` subdirectory under the `plugin` directory of your Presto installation, and place the jar downloaded above into it.
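A minimal sketch of that layout (assuming a Presto installation rooted at `$PRESTO_HOME`; directory names as described above):

```bash
# Create the plugin directory and place the LakeSoul connector jar in it.
mkdir -p $PRESTO_HOME/plugin/lakesoul
wget https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-presto-2.4.1-presto-0.28.jar \
  -P $PRESTO_HOME/plugin/lakesoul
```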
diff --git a/website/docs/03-Usage Docs/13-setup-kyuubi.md b/website/docs/03-Usage Docs/13-setup-kyuubi.md
index ca710dfd2..5ca22d734 100644
--- a/website/docs/03-Usage Docs/13-setup-kyuubi.md
+++ b/website/docs/03-Usage Docs/13-setup-kyuubi.md
@@ -20,7 +20,7 @@ LakeSoul implements Flink/Spark Connector.We could use Spark/Flink SQL queries t
| Kyuubi | 1.8 |
| Spark | 3.3 |
| Flink | 1.17 |
-| LakeSoul | 2.4.0 |
+| LakeSoul | 2.4.1 |
| Java | 1.8 |
The operating environment is Linux with Spark, Flink, and Kyuubi already installed. Running the Kyuubi Engine on Hadoop Yarn is recommended, but you can also start a local Spark/Flink cluster.
@@ -31,7 +31,7 @@ The operating environment is Linux, and Spark, Flink, and Kyuubi have been insta
### 1. Dependencies
-Download LakeSoul Flink Jar from: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-flink-2.4.0-flink-1.17.jar
+Download LakeSoul Flink Jar from: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-flink-2.4.1-flink-1.17.jar
And put the jar file under `$FLINK_HOME/lib`.
@@ -74,7 +74,7 @@ More details about Flink SQL with LakeSoul refer to : [Flink Lakesoul Connector]
### 1. Dependencies
-Download LakeSoul Spark Jar from: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-spark-2.4.0-spark-3.3.jar
+Download LakeSoul Spark Jar from: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-spark-2.4.1-spark-3.3.jar
And put the jar file under `$SPARK_HOME/jars`.
diff --git a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/01-Getting Started/01-setup-local-env.md b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/01-Getting Started/01-setup-local-env.md
index 3e2745880..0155a7ec7 100644
--- a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/01-Getting Started/01-setup-local-env.md
+++ b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/01-Getting Started/01-setup-local-env.md
@@ -37,10 +37,10 @@ https://dlcdn.apache.org/spark/spark-3.3.2/spark-3.3.2-bin-without-hadoop.tgz
LakeSoul 发布 jar 包可以从 GitHub Releases 页面下载:https://github.com/lakesoul-io/LakeSoul/releases 。下载后请将 Jar 包放到 Spark 安装目录下的 jars 目录中:
```bash
-wget https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-spark-2.4.0-spark-3.3.jar -P $SPARK_HOME/jars
+wget https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-spark-2.4.1-spark-3.3.jar -P $SPARK_HOME/jars
```
-如果访问 Github 有问题,也可以从如下链接下载:https://dmetasoul-bucket.obs.cn-southwest-2.myhuaweicloud.com/releases/lakesoul/lakesoul-spark-2.4.0-spark-3.3.jar
+如果访问 Github 有问题,也可以从如下链接下载:https://dmetasoul-bucket.obs.cn-southwest-2.myhuaweicloud.com/releases/lakesoul/lakesoul-spark-2.4.1-spark-3.3.jar
:::tip
从 2.1.0 版本起,LakeSoul 自身的依赖已经通过 shade 方式打包到一个 jar 包中。之前的版本是多个 jar 包以 tar.gz 压缩包的形式发布。
diff --git a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/01-Getting Started/02-docker-compose.mdx b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/01-Getting Started/02-docker-compose.mdx
index 2419142e3..a4710ff75 100644
--- a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/01-Getting Started/02-docker-compose.mdx
+++ b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/01-Getting Started/02-docker-compose.mdx
@@ -40,7 +40,7 @@ docker run --net lakesoul-docker-compose-env_default --rm -ti \
-v $(pwd)/lakesoul.properties:/opt/spark/work-dir/lakesoul.properties \
--env lakesoul_home=/opt/spark/work-dir/lakesoul.properties bitnami/spark:3.3.1 \
spark-shell \
- --packages com.dmetasoul:lakesoul-spark:2.4.0-spark-3.3 \
+ --packages com.dmetasoul:lakesoul-spark:2.4.1-spark-3.3 \
--conf spark.sql.extensions=com.dmetasoul.lakesoul.sql.LakeSoulSparkSessionExtension \
--conf spark.sql.catalog.lakesoul=org.apache.spark.sql.lakesoul.catalog.LakeSoulCatalog \
--conf spark.sql.defaultCatalog=lakesoul \
diff --git a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/02-Tutorials/02-flink-cdc-sink/index.md b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/02-Tutorials/02-flink-cdc-sink/index.md
index 71bf9b315..dd905256a 100644
--- a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/02-Tutorials/02-flink-cdc-sink/index.md
+++ b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/02-Tutorials/02-flink-cdc-sink/index.md
@@ -90,7 +90,7 @@ $FLINK_HOME/bin/start-cluster.sh
```bash
./bin/flink run -ys 1 -yjm 1G -ytm 2G \
-c org.apache.flink.lakesoul.entry.MysqlCdc \
- lakesoul-flink-2.4.0-flink-1.17.jar \
+ lakesoul-flink-2.4.1-flink-1.17.jar \
--source_db.host localhost \
--source_db.port 3306 \
--source_db.db_name test_cdc \
@@ -105,7 +105,7 @@ $FLINK_HOME/bin/start-cluster.sh
--server_time_zone UTC
```
-其中 lakesoul-flink 的 jar 包可以从 [Github Release](https://github.com/lakesoul-io/LakeSoul/releases/) 页面下载。如果访问 Github 有问题,也可以通过这个链接下载:https://dmetasoul-bucket.obs.cn-southwest-2.myhuaweicloud.com/releases/lakesoul/lakesoul-flink-2.4.0-flink-1.17.jar
+其中 lakesoul-flink 的 jar 包可以从 [Github Release](https://github.com/lakesoul-io/LakeSoul/releases/) 页面下载。如果访问 Github 有问题,也可以通过这个链接下载:https://dmetasoul-bucket.obs.cn-southwest-2.myhuaweicloud.com/releases/lakesoul/lakesoul-flink-2.4.1-flink-1.17.jar
在 http://localhost:8081 Flink 作业页面中,点击 Running Job,进入查看 LakeSoul 作业是否已经处于 `Running` 状态。
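Besides the web UI at http://localhost:8081, the job status can also be checked from the command line (a sketch; `flink list -r` lists running jobs):

```bash
# The LakeSoul CDC sync job should be listed with status RUNNING.
$FLINK_HOME/bin/flink list -r
```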
diff --git a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/02-setup-spark.md b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/02-setup-spark.md
index 450e47a9c..5546241b3 100644
--- a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/02-setup-spark.md
+++ b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/02-setup-spark.md
@@ -15,22 +15,22 @@ LakeSoul 目前支持 Spark 3.3 + Scala 2.12.
#### 使用 `--packages` 传 Maven 仓库和包名
```bash
-spark-shell --packages com.dmetasoul:lakesoul-spark:2.4.0-spark-3.3
+spark-shell --packages com.dmetasoul:lakesoul-spark:2.4.1-spark-3.3
```
#### 使用打包好的 LakeSoul 包
可以从 [Releases](https://github.com/lakesoul-io/LakeSoul/releases) 页面下载已经打包好的 LakeSoul Jar 包。
下载 jar 并传给 `spark-submit` 命令:
```bash
-spark-submit --jars "lakesoul-spark-2.4.0-spark-3.3.jar"
+spark-submit --jars "lakesoul-spark-2.4.1-spark-3.3.jar"
```
#### 直接将 Jar 包放在 Spark 环境中
可以将 Jar 包下载后,放在 $SPARK_HOME/jars 中。
-Jar 包可以从 Github Release 页面下载:https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-spark-2.4.0-spark-3.3.jar
+Jar 包可以从 Github Release 页面下载:https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-spark-2.4.1-spark-3.3.jar
-或者从国内地址下载:https://dmetasoul-bucket.obs.cn-southwest-2.myhuaweicloud.com/releases/lakesoul/lakesoul-spark-2.4.0-spark-3.3.jar
+或者从国内地址下载:https://dmetasoul-bucket.obs.cn-southwest-2.myhuaweicloud.com/releases/lakesoul/lakesoul-spark-2.4.1-spark-3.3.jar
### 设置 Java/Scala 项目
增加以下 Maven 依赖项:
@@ -38,7 +38,7 @@ Jar 包可以从 Github Release 页面下载:https://github.com/lakesoul-io/La
     <groupId>com.dmetasoul</groupId>
     <artifactId>lakesoul-spark</artifactId>
-    <version>2.4.0-spark-3.3</version>
+    <version>2.4.1-spark-3.3</version>
```
@@ -139,7 +139,7 @@ taskmanager.memory.task.off-heap.size: 3000m
### 添加 LakeSoul Jar 到 Flink 部署的目录
-从以下地址下载 LakeSoul Flink Jar:https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-flink-2.4.0-flink-1.17.jar
+从以下地址下载 LakeSoul Flink Jar:https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-flink-2.4.1-flink-1.17.jar
并将 jar 文件放在 `$FLINK_HOME/lib` 下。在此之后,您可以像往常一样启动 flink 会话集群或应用程序。
@@ -160,6 +160,6 @@ export HADOOP_CLASSPATH=`$HADOOP_HOME/bin/hadoop classpath`
     <groupId>com.dmetasoul</groupId>
     <artifactId>lakesoul-flink</artifactId>
-    <version>2.4.0-flink-1.17</version>
+    <version>2.4.1-flink-1.17</version>
```
\ No newline at end of file
diff --git a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/05-flink-cdc-sync.md b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/05-flink-cdc-sync.md
index 0eb57f161..255d648c2 100644
--- a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/05-flink-cdc-sync.md
+++ b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/05-flink-cdc-sync.md
@@ -19,9 +19,9 @@ LakeSoul 自 2.1.0 版本起,实现了 Flink CDC Sink,能够支持 Table API
## 命令行使用方法
### 1. 下载 LakeSoul Flink Jar
-可以在 LakeSoul Release 页面下载:https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-flink-2.4.0-flink-1.17.jar。
+可以在 LakeSoul Release 页面下载:https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-flink-2.4.1-flink-1.17.jar。
-如果访问 Github 有问题,也可以通过这个链接下载:https://dmetasoul-bucket.obs.cn-southwest-2.myhuaweicloud.com/releases/lakesoul/lakesoul-flink-2.4.0-flink-1.17.jar。
+如果访问 Github 有问题,也可以通过这个链接下载:https://dmetasoul-bucket.obs.cn-southwest-2.myhuaweicloud.com/releases/lakesoul/lakesoul-flink-2.4.1-flink-1.17.jar。
目前支持的 Flink 版本为 1.17。
@@ -58,7 +58,7 @@ export LAKESOUL_PG_PASSWORD=root
#### 2.2 启动同步作业
```bash
bin/flink run -c org.apache.flink.lakesoul.entry.MysqlCdc \
- lakesoul-flink-2.4.0-flink-1.17.jar \
+ lakesoul-flink-2.4.1-flink-1.17.jar \
--source_db.host localhost \
--source_db.port 3306 \
--source_db.db_name default \
@@ -77,7 +77,7 @@ bin/flink run -c org.apache.flink.lakesoul.entry.MysqlCdc \
| 参数 | 含义 | 取值说明 |
|----------------|--------------------------------------------------------------------------------------|---------------------------------------------|
| -c | 任务运行main函数入口类 | org.apache.flink.lakesoul.entry.MysqlCdc |
-| 主程序包 | 任务运行jar包 | lakesoul-flink-2.4.0-flink-1.17.jar |
+| 主程序包 | 任务运行jar包 | lakesoul-flink-2.4.1-flink-1.17.jar |
| --source_db.host | MySQL 数据库的地址 | |
| --source_db.port | MySQL 数据库的端口 | |
| --source_db.user | MySQL 数据库的用户名 | |
diff --git a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/06-flink-lakesoul-connector.md b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/06-flink-lakesoul-connector.md
index 6fc6ff008..a56a64a56 100644
--- a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/06-flink-lakesoul-connector.md
+++ b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/06-flink-lakesoul-connector.md
@@ -9,7 +9,7 @@ SPDX-License-Identifier: Apache-2.0
:::tip
该功能于 2.3.0 版本起提供。
2.3.0 版本适配的是 Flink 1.14,2.4.0 版本起升级到了 Flink 1.17。
:::
LakeSoul 提供了 Flink Connector,实现了 Flink Dynamic Table 接口,可以使用 Flink 的 DataStream API, Table API 或 SQL 来执行对 LakeSoul 数据的读写,读和写均支持流式和批式两种模式。在 Flink 流式读、写时均支持 Flink Changelog Stream 语义。
@@ -18,14 +18,14 @@ LakeSoul 提供了 Flink Connector,实现了 Flink Dynamic Table 接口,可
设置 LakeSoul 元数据,请参考 [设置 Spark/Flink 工程/作业](../03-Usage%20Docs/02-setup-spark.md)
-Flink 引入 LakeSoul 依赖的方法:下載 lakesoul-flink-2.4.0-flink-1.17.jar,放入 `$FLINK_HOME/lib` ,或在启动时指定 jar 的路径。
+Flink 引入 LakeSoul 依赖的方法:下载 lakesoul-flink-2.4.1-flink-1.17.jar,放入 `$FLINK_HOME/lib`,或在启动时指定 jar 的路径。
为了使用 Flink 创建 LakeSoul 表,推荐使用 Flink SQL Client,支持直接使用 Flink SQL 命令操作 LakeSoul 表,本文档中 Flink SQL 是在 Flink SQL Client 界面直接输入语句;Table API 需要在 Java 项目中编写使用。
切换到 Flink 文件夹下,执行命令开启 SQL Client 客户端。
```bash
# 启动 Flink SQL Client
-bin/sql-client.sh embedded -j lakesoul-flink-2.3.0-flink-1.17.jar
+bin/sql-client.sh embedded -j lakesoul-flink-2.4.1-flink-1.17.jar
```
## 2. DDL
diff --git a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/09-clean-redundant-data.md b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/09-clean-redundant-data.md
index 90361b0e2..14fe52d7d 100644
--- a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/09-clean-redundant-data.md
+++ b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/09-clean-redundant-data.md
@@ -88,7 +88,7 @@ LakeSoul 提供了一个清理过期数据的 Spark 作业实现,会扫描元
--executor-cores 1 \
--num-executors 20 \
--class com.dmetasoul.lakesoul.spark.clean.CleanExpiredData \
- jars/lakesoul-spark-2.4.0-spark-3.3.jar
+ jars/lakesoul-spark-2.4.1-spark-3.3.jar
```
:::tip
diff --git a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/10-setup-presto.md b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/10-setup-presto.md
index 8dd3a924a..ea0bc3ca0 100644
--- a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/10-setup-presto.md
+++ b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/10-setup-presto.md
@@ -7,7 +7,7 @@
LakeSoul 实现了 Presto Connector,目前支持读取湖仓表,能够读取无主键表、有主键表(包括 [CDC 格式表](04-cdc-ingestion-table.mdx)),读取时会自动执行 Merge on Read 获取最新的数据。
## 下载 Jar 包
-可以从 Github Release 页面下载 Presto 的包:https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-presto-2.4.0-presto-0.28.jar
+可以从 Github Release 页面下载 Presto 的包:https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-presto-2.4.1-presto-0.28.jar
## 配置 Presto
在 Presto 目录下的 plugin 子目录下,新建 lakesoul 子目录。将上面下载好的 jar 放入该子目录。
diff --git a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/13-setup-kyuubi.md b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/13-setup-kyuubi.md
index 986236dfb..139e17c2a 100644
--- a/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/13-setup-kyuubi.md
+++ b/website/i18n/zh-Hans/docusaurus-plugin-content-docs/current/03-Usage Docs/13-setup-kyuubi.md
@@ -20,7 +20,7 @@ LakeSoul实现了Flink/Spark Connector。我们可以通过Kyuubi使用Spark/Fli
| Kyuubi | 1.8 |
| Spark | 3.3 |
| Flink | 1.17 |
-| LakeSoul | 2.4.0 |
+| LakeSoul | 2.4.1 |
| Java | 1.8 |
运行环境为Linux环境,并已安装Spark, Flink, Kyuubi,推荐Kyuubi Engine以Hadoop Yarn作为执行环境,当然也可以本地启动Spark/Flink Local集群。
@@ -32,7 +32,7 @@ LakeSoul实现了Flink/Spark Connector。我们可以通过Kyuubi使用Spark/Fli
### 1. 依赖
-下载LakeSoul Flink Jar: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-flink-2.4.0-flink-1.17.jar
+下载LakeSoul Flink Jar: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-flink-2.4.1-flink-1.17.jar
将该jar拷贝至 `$FLINK_HOME/lib`.
@@ -76,7 +76,7 @@ drop table `lakesoul`.`default`.test_lakesoul_table_v1;
### 1. 依赖
-下载LakeSoul Spark Jar: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.0/lakesoul-spark-2.4.0-spark-3.3.jar
+下载LakeSoul Spark Jar: https://github.com/lakesoul-io/LakeSoul/releases/download/v2.4.1/lakesoul-spark-2.4.1-spark-3.3.jar
将该jar拷贝至 `$SPARK_HOME/jars`.
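With both jars staged, newly launched Kyuubi engines pick them up automatically; a hedged sketch of the final step (assuming a standard Kyuubi layout under `$KYUUBI_HOME`, whose control script accepts `restart`):

```bash
cp lakesoul-flink-2.4.1-flink-1.17.jar $FLINK_HOME/lib/
cp lakesoul-spark-2.4.1-spark-3.3.jar $SPARK_HOME/jars/
# Restart Kyuubi so subsequently launched Spark/Flink engines see the jars.
$KYUUBI_HOME/bin/kyuubi restart
```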