[SPARK-50810][BUILD] Enable SBT CI for profiler module
pan3793 committed Jan 14, 2025
1 parent 1fd8362 commit 3a4f5bc
Showing 4 changed files with 23 additions and 7 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build_and_test.yml
@@ -242,7 +242,7 @@ jobs:
       - >-
         api, catalyst, hive-thriftserver
       - >-
-        mllib-local, mllib, graphx
+        mllib-local, mllib, graphx, profiler
       - >-
         streaming, sql-kafka-0-10, streaming-kafka-0-10, streaming-kinesis-asl,
         kubernetes, hadoop-cloud, spark-ganglia-lgpl, protobuf, connect
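For context, each `>-` entry in the matrix above is one CI job's module list. A minimal sketch of how such an entry reaches the test runner, paraphrasing the surrounding workflow rather than quoting this diff (exact flags may differ):

```
# Sketch: the matrix value becomes MODULES_TO_TEST and is handed to
# dev/run-tests; the invocation is paraphrased, not taken from this diff.
MODULES_TO_TEST="mllib-local, mllib, graphx, profiler"
./dev/run-tests --parallelism 1 --modules "$MODULES_TO_TEST"
```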
9 changes: 8 additions & 1 deletion connector/profiler/README.md
@@ -3,8 +3,15 @@
 ## Build
 
 To build
+
+```
+./build/mvn clean package -DskipTests -Pjvm-profiler -pl :spark-profiler_2.13 -am
+```
+
+or
+
 ```
-./build/mvn clean package -DskipTests -Pjvm-profiler
+./build/sbt -Pjvm-profiler clean "profiler/package"
 ```
 
 ## Executor Code Profiling
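The README addition documents packaging only; the point of the commit is that CI now also builds and tests the module under sbt. A minimal local equivalent, assuming the standard sbt per-project `test` task (not shown in this diff):

```
# Sketch, assuming stock sbt tasks; -Pjvm-profiler is required because
# the profiler project is only loaded when that profile is active.
./build/sbt -Pjvm-profiler "profiler/test"
```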
9 changes: 9 additions & 0 deletions dev/sparktestsupport/modules.py
@@ -309,6 +309,15 @@ def __hash__(self):
     ],
 )
 
+profiler = Module(
+    name="profiler",
+    dependencies=[],
+    build_profile_flags=["-Pjvm-profiler"],
+    source_file_regexes=[
+        "connector/profiler",
+    ],
+)
+
 protobuf = Module(
     name="protobuf",
     dependencies=[sql],
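Registering the Module makes the name `profiler` addressable by the test harness: `source_file_regexes` maps changes under `connector/profiler` back to the module, and `build_profile_flags` injects `-Pjvm-profiler` into the underlying build. A sketch of selecting it by name, assuming `--modules` works for it as it does for the existing modules:

```
# Sketch: run only the profiler module's tests via the harness.
./dev/run-tests --modules profiler
```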
10 changes: 5 additions & 5 deletions project/SparkBuild.scala
@@ -65,10 +65,10 @@ object BuildCommons {
 ).map(ProjectRef(buildLocation, _)) ++ sqlProjects ++ streamingProjects ++ connectProjects
 
 val optionallyEnabledProjects@Seq(kubernetes, yarn,
-  sparkGangliaLgpl, streamingKinesisAsl,
+  sparkGangliaLgpl, streamingKinesisAsl, profiler,
   dockerIntegrationTests, hadoopCloud, kubernetesIntegrationTests) =
   Seq("kubernetes", "yarn",
-    "ganglia-lgpl", "streaming-kinesis-asl",
+    "ganglia-lgpl", "streaming-kinesis-asl", "profiler",
     "docker-integration-tests", "hadoop-cloud", "kubernetes-integration-tests").map(ProjectRef(buildLocation, _))
 
 val assemblyProjects@Seq(networkYarn, streamingKafka010Assembly, streamingKinesisAslAssembly) =
@@ -371,7 +371,7 @@ object SparkBuild extends PomBuild {
     Seq(
       spark, hive, hiveThriftServer, repl, networkCommon, networkShuffle, networkYarn,
       unsafe, tags, tokenProviderKafka010, sqlKafka010, connectCommon, connect, connectClient,
-      variant, connectShims
+      variant, connectShims, profiler
     ).contains(x)
   }
 
@@ -1469,11 +1469,11 @@ object SparkUnidoc extends SharedUnidocSettings {
     (ScalaUnidoc / unidoc / unidocProjectFilter) :=
       inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, kubernetes,
         yarn, tags, streamingKafka010, sqlKafka010, connectCommon, connect, connectClient,
-        connectShims, protobuf),
+        connectShims, protobuf, profiler),
     (JavaUnidoc / unidoc / unidocProjectFilter) :=
       inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, kubernetes,
         yarn, tags, streamingKafka010, sqlKafka010, connectCommon, connect, connectClient,
-        connectShims, protobuf),
+        connectShims, protobuf, profiler),
   )
 }
 
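Two effects of the SparkBuild.scala changes are worth noting: placing `profiler` in `optionallyEnabledProjects` means sbt loads the project only when the matching Maven profile is active, and the unidoc exclusions keep it out of the generated API docs. A quick way to check the gating, assuming stock sbt commands (not part of this diff):

```
# Without the profile, sbt does not load the profiler project:
./build/sbt projects                  # no "profiler" in the listing
# With the profile enabled, it appears:
./build/sbt -Pjvm-profiler projects
```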
