Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Port imports and dependencies of CSV Samples to Pekko #6

Closed
wants to merge 4 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 15 additions & 15 deletions docs/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -47,22 +47,22 @@ HttpCsvToKafka / paradoxProperties ++= Map(
"canonical.base_url" -> s"${homepage.value.get}/${HttpCsvToKafka.name}",
"snip.build.base_dir" -> s"${baseDirectory.value}/../pekko-connectors-sample-${HttpCsvToKafka.name}",
"github.root.base_dir" -> s"${baseDirectory.value}/..",
// Alpakka
"scaladoc.akka.stream.alpakka.base_url" -> s"https://doc.akka.io/api/alpakka/${Dependencies.HttpCsvToKafka.AlpakkaVersion}",
"javadoc.akka.base_url" -> "",
"extref.alpakka.base_url" -> s"https://doc.akka.io/docs/alpakka/${Dependencies.HttpCsvToKafka.AlpakkaVersion}/%s",
// Pekko Connectors
"scaladoc.pekko.connectors.base_url" -> s"https://pekko.apache.org/api/pekko-connectors/${Dependencies.HttpCsvToKafka.PekkoConnectorsVersion}/org/apache", // TODO: TBC
"javadoc.pekko.connectors.base_url" -> s"https://pekko.apache.org/japi/pekko-connectors/${Dependencies.HttpCsvToKafka.PekkoConnectorsVersion}/org/apache", // TODO: TBC
"extref.pekko-connectors.base_url" -> s"https://pekko.apache.org/docs/pekko-connectors/${Dependencies.HttpCsvToKafka.PekkoConnectorsVersion}/%s",
// Alpakka Kafka
"scaladoc.akka.kafka.base_url" -> s"https://doc.akka.io/api/alpakka-kafka/${Dependencies.HttpCsvToKafka.AlpakkaKafkaVersion}",
"javadoc.akka.kafka.base_url" -> "",
"extref.alpakka-kafka.base_url" -> s"https://doc.akka.io/docs/alpakka-kafka/${Dependencies.HttpCsvToKafka.AlpakkaKafkaVersion}/%s",
// Akka
"scaladoc.akka.base_url" -> s"https://doc.akka.io/api/akka/${Dependencies.HttpCsvToKafka.AkkaVersion}",
"javadoc.akka.base_url" -> s"https://doc.akka.io/japi/akka/${Dependencies.HttpCsvToKafka.AkkaVersion}",
"extref.akka.base_url" -> s"https://doc.akka.io/docs/akka/${Dependencies.HttpCsvToKafka.AkkaVersion}/%s",
// Akka HTTP
"scaladoc.akka.http.base_url" -> s"https://doc.akka.io/api/akka-http/${Dependencies.HttpCsvToKafka.AkkaHttpVersion}",
"javadoc.akka.http.base_url" -> s"https://doc.akka.io/japi/akka-http/${Dependencies.HttpCsvToKafka.AkkaHttpVersion}",
"extref.akka-http.base_url" -> s"https://doc.akka.io/docs/akka-http/${Dependencies.HttpCsvToKafka.AkkaHttpVersion}/%s",
"scaladoc.pekko.kafka.base_url" -> s"https://pekko.apache.org/api/pekko-connectors-kafka/${Dependencies.HttpCsvToKafka.PekkoConnectorsKafkaVersion}/org/apache", // TODO: TBC
"javadoc.pekko.kafka.base_url" -> s"https://pekko.apache.org/japi/pekko-connectors-kafka/${Dependencies.HttpCsvToKafka.PekkoConnectorsKafkaVersion}/org/apache", // TODO: TBC
"extref.pekko-connectors-kafka.base_url" -> s"https://pekko.apache.org/docs/pekko-connectors-kafka/${Dependencies.HttpCsvToKafka.PekkoConnectorsKafkaVersion}/%s",
// Pekko
"scaladoc.pekko.base_url" -> s"https://pekko.apache.org/api/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}/org/apache",
"javadoc.pekko.base_url" -> s"https://pekko.apache.org/japi/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}/org/apache",
"extref.pekko.base_url" -> s"https://pekko.apache.org/docs/pekko/${Dependencies.HttpCsvToKafka.PekkoVersion}/%s",
// Pekko HTTP
"scaladoc.pekko.http.base_url" -> s"https://pekko.apache.org/api/pekko-http/${Dependencies.HttpCsvToKafka.PekkoHttpVersion}/org/apache", // TODO: TBC
"javadoc.pekko.http.base_url" -> s"https://pekko.apache.org/japi/pekko-http/${Dependencies.HttpCsvToKafka.PekkoHttpVersion}/org/apache", // TODO: TBC
"extref.pekko-http.base_url" -> s"https://pekko.apache.org/docs/pekko-http/${Dependencies.HttpCsvToKafka.PekkoHttpVersion}/%s",
)
HttpCsvToKafka / paradoxGroups := Map("Language" -> Seq("Java", "Scala"))

Expand Down
8 changes: 4 additions & 4 deletions docs/project/Dependencies.scala
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,10 @@ object Dependencies {

val ScalaVersion = versions("scalaVer")
val ScalaTestVersion = versions("ScalaTestVersion")
val AkkaVersion = versions("AkkaVersion")
val AkkaHttpVersion = versions("AkkaHttpVersion")
val AlpakkaVersion = versions("AlpakkaVersion")
val AlpakkaKafkaVersion = versions("AlpakkaKafkaVersion")
val PekkoVersion = versions("PekkoVersion")
val PekkoHttpVersion = versions("PekkoHttpVersion")
val PekkoConnectorsVersion = versions("PekkoConnectorsVersion")
val PekkoConnectorsKafkaVersion = versions("PekkoConnectorsKafkaVersion")
}

object JdbcToElasticsearch {
Expand Down
2 changes: 1 addition & 1 deletion pekko-connectors-sample-http-csv-to-kafka/.courseName
Original file line number Diff line number Diff line change
@@ -1 +1 @@
Alpakka: HTTP CSV to Kafka
Pekko Connectors: HTTP CSV to Kafka
10 changes: 5 additions & 5 deletions pekko-connectors-sample-http-csv-to-kafka/README.md
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
# Alpakka sample
# Pekko Connectors sample

## Fetch CSV via Akka HTTP and publish the data as JSON to Kafka
## Fetch CSV via Pekko HTTP and publish the data as JSON to Kafka

This example uses @extref[Akka HTTP to send the HTTP request](akka-http:client-side/connection-level.html#opening-http-connections) and Akka HTTPs primary JSON support via @extref[Spray JSON](akka-http:common/json-support.html#spray-json-support) (for Scala) or Jackson JSON (for Java) to convert the map into a JSON structure which gets published to a Kafka topic.
This example uses @extref[Pekko HTTP to send the HTTP request](pekko-http:client-side/connection-level.html#opening-http-connections) and Pekko HTTP's primary JSON support via @extref[Spray JSON](pekko-http:common/json-support.html#spray-json-support) (for Scala) or Jackson JSON (for Java) to convert the map into a JSON structure which gets published to a Kafka topic.

Browse the sources at @link:[Github](https://github.com/akka/alpakka-samples/tree/master/alpakka-sample-http-csv-to-kafka) { open=new }.
Browse the sources at @link:[Github](https://github.com/apache/incubator-pekko-connectors-samples/tree/main/pekko-connectors-sample-http-csv-to-kafka) { open=new }.

To try out this project clone @link:[the Alpakka Samples repository](https://github.com/akka/alpakka-samples) { open=new } and find it in the `alpakka-sample-http-csv-to-kafka` directory.
To try out this project clone @link:[the Pekko Connectors Samples repository](https://github.com/apache/incubator-pekko-connectors-samples) { open=new } and find it in the `pekko-connectors-sample-http-csv-to-kafka` directory.
2 changes: 1 addition & 1 deletion pekko-connectors-sample-http-csv-to-kafka/build.sbt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@

lazy val alpakka_sample_master = project
lazy val pekko_connectors_sample_master = project
.in(file("."))
.aggregate(
common,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
akka {
loggers = ["akka.event.slf4j.Slf4jLogger"]
logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
pekko {
loggers = ["org.apache.pekko.event.slf4j.Slf4jLogger"]
logging-filter = "org.apache.pekko.event.slf4j.Slf4jLoggingFilter"
loglevel = "DEBUG"
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,7 @@

<logger name="org.apache" level="WARN"/>
<logger name="kafka" level="WARN"/>
<logger name="akka" level="WARN"/>
<logger name="akka.kafka.benchmarks" level="INFO"/>
<logger name="org.apache.pekko" level="WARN"/>
<logger name="org.apache.kafka.common.utils.AppInfoParser" level="ERROR"/>
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

could you add back <logger name="org.apache.pekko.kafka.benchmarks" level="INFO"/> ?

<logger name="org.apache.kafka.clients.NetworkClient" level="ERROR"/>
<logger name="org.I0Itec.zkclient" level="WARN"/>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@
Dependencies
: @@snip [snip](/project/Dependencies.scala) { #dependencies }

### All Alpakka samples
### All Pekko Connectors samples

Show [Alpakka samples listing](../index.html).
Show [Pekko Connectors samples listing](../index.html).

@@toc

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@ import sbtstudent.AdditionalSettings

object CommonSettings {
lazy val commonSettings = Seq(
organization := "com.lightbend.training",
version := "1.3.0",
organization := "org.apache.pekko",
version := "1.0.0",
scalaVersion := Dependencies.scalaVer,
scalacOptions ++= CompileOptions.compileOptions,
Compile / unmanagedSourceDirectories := List((Compile / scalaSource).value, (Compile / javaSource).value),
Expand All @@ -16,9 +16,13 @@ object CommonSettings {
ThisBuild / parallelExecution := false,
GlobalScope / parallelExecution := false,
Test / fork := true,
libraryDependencies ++= Dependencies.dependencies
libraryDependencies ++= Dependencies.dependencies,

// #TODO: Remove these lines once Pekko Connectors has a 1.0.0 release
resolvers += "Apache Snapshots" at "https://repository.apache.org/content/repositories/snapshots/",
) ++
AdditionalSettings.initialCmdsConsole ++
AdditionalSettings.initialCmdsTestConsole ++
AdditionalSettings.cmdAliases
}

Original file line number Diff line number Diff line change
Expand Up @@ -4,26 +4,26 @@ object Dependencies {
val scalaVer = "2.13.7"
// #dependencies
val ScalaTestVersion = "3.1.4"
val AkkaVersion = "2.6.19"
val AkkaHttpVersion = "10.1.12"
val AlpakkaVersion = "4.0.0"
val AlpakkaKafkaVersion = "3.0.1"
val PekkoVersion = "1.0.0"
val PekkoHttpVersion = "0.0.0+4469-fb6a5426-SNAPSHOT" // #TODO: Change to release version
val PekkoConnectorsVersion = "0.0.0+131-79ec6fa6-SNAPSHOT" // #TODO: Change to release version
val PekkoConnectorsKafkaVersion = "0.0.0+1761-2291eac2-SNAPSHOT" // #TODO: Change to release version

val dependencies = List(
"com.lightbend.akka" %% "akka-stream-alpakka-csv" % AlpakkaVersion,
"com.typesafe.akka" %% "akka-stream-kafka" % AlpakkaKafkaVersion,
"com.typesafe.akka" %% "akka-actor-typed" % AkkaVersion,
"com.typesafe.akka" %% "akka-stream" % AkkaVersion,
"com.typesafe.akka" %% "akka-http" % AkkaHttpVersion,
"org.apache.pekko" %% "pekko-connectors-csv" % PekkoConnectorsVersion,
"org.apache.pekko" %% "pekko-connectors-kafka" % PekkoConnectorsKafkaVersion,
"org.apache.pekko" %% "pekko-actor-typed" % PekkoVersion,
"org.apache.pekko" %% "pekko-stream" % PekkoVersion,
"org.apache.pekko" %% "pekko-http" % PekkoHttpVersion,
// Used from Scala
"com.typesafe.akka" %% "akka-http-spray-json" % AkkaHttpVersion,
"org.apache.pekko" %% "pekko-http-spray-json" % PekkoHttpVersion,
// Used from Java
"com.fasterxml.jackson.datatype" % "jackson-datatype-jdk8" % "2.11.4",
"com.fasterxml.jackson.datatype" % "jackson-datatype-jsr310" % "2.11.4",
"com.fasterxml.jackson.datatype" % "jackson-datatype-jdk8" % "2.14.3",
"com.fasterxml.jackson.datatype" % "jackson-datatype-jsr310" % "2.14.3",

"org.testcontainers" % "kafka" % "1.14.3",
"org.testcontainers" % "kafka" % "1.18.3",

"com.typesafe.akka" %% "akka-slf4j" % AkkaVersion,
"org.apache.pekko" %% "pekko-slf4j" % PekkoVersion,
"ch.qos.logback" % "logback-classic" % "1.2.3"
)
// #dependencies
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,12 @@

### Description

This code uses Akka HTTP to request a file containing listed companies from the NASDAQ web site.
This code uses Pekko HTTP to request a file containing listed companies from the NASDAQ web site.

It starts the Actor System, imports the Actor System's dispatcher as `ExecutionContext`, and gets a stream materializer from the Actor System.

The HTTP request is created as value (it will be sent multiple times in later steps) and sets a specific HTTP request header.

The request is run in an Akka Stream from the single value, issuing the request by Akka HTTP, and printing out the HTTP response.
The request is run in a Pekko Stream from the single value, issuing the request via Pekko HTTP, and printing out the HTTP response.

Once the stream completes, the Actor System is terminated and the program exits.
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,14 @@

package samples.javadsl;

import akka.Done;
import akka.actor.ActorSystem;
import akka.http.javadsl.Http;
import akka.http.javadsl.model.HttpRequest;
import akka.http.javadsl.model.MediaRanges;
import akka.http.javadsl.model.headers.Accept;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;
import org.apache.pekko.Done;
import org.apache.pekko.actor.ActorSystem;
import org.apache.pekko.http.javadsl.Http;
import org.apache.pekko.http.javadsl.model.HttpRequest;
import org.apache.pekko.http.javadsl.model.MediaRanges;
import org.apache.pekko.http.javadsl.model.headers.Accept;
import org.apache.pekko.stream.javadsl.Sink;
import org.apache.pekko.stream.javadsl.Source;

import java.util.Collections;
import java.util.concurrent.CompletionStage;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,21 +4,21 @@

package samples

import akka.Done
import akka.actor._
import akka.http.scaladsl._
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.headers.Accept
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, MediaRanges }
import akka.stream._
import akka.stream.scaladsl.{ Sink, Source }
import akka.util.ByteString
import org.apache.pekko.Done

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@reypader Pekko uses a root nested imports style, that is

import org.apache.pekko
import pekko.Done
import pekko.actor._
etc etc

Would you be able to update the PR to this style for all .scala source files?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I've been trying to play around with my formatters and scalafmt. I copied over scalafmt from pekko-connectors project even. No luck. I can't seem to find formatter config that would automatically do this for me. Could you show me how?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It looks like you have a similar thread on this specific topic apache/pekko#414

Since this project is separate from incubator-pekko maybe it would be easier to enable scalafix here?

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Regarding scalafmt, this can be done later. For now I would just manually do the change I suggested

import org.apache.pekko.actor._
import org.apache.pekko.http.scaladsl._
import org.apache.pekko.http.scaladsl.model.StatusCodes._
import org.apache.pekko.http.scaladsl.model.headers.Accept
import org.apache.pekko.http.scaladsl.model.{ HttpRequest, HttpResponse, MediaRanges }
import org.apache.pekko.stream._
import org.apache.pekko.stream.scaladsl.{ Sink, Source }
import org.apache.pekko.util.ByteString

import scala.concurrent.Future

object Main extends App {

implicit val actorSystem = ActorSystem("alpakka-samples")
implicit val actorSystem = ActorSystem("pekko-connectors-samples")

import actorSystem.dispatcher

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@

### Description

The HTTP response with status OK is expected and the contained HTTP entity is extracted. Instead of the HTTP response, the contained entity (page content) continues in the stream in the form of @scaladoc:[ByteString](akka.util.ByteString) elements.
The HTTP response with status OK is expected and the contained HTTP entity is extracted. Instead of the HTTP response, the contained entity (page content) continues in the stream in the form of @scaladoc:[ByteString](pekko.util.ByteString) elements.
Original file line number Diff line number Diff line change
Expand Up @@ -4,23 +4,23 @@

package samples.javadsl;

import akka.Done;
import akka.actor.typed.ActorSystem;
import akka.actor.typed.javadsl.Behaviors;
import akka.http.javadsl.Http;
import akka.http.javadsl.model.HttpRequest;
import akka.http.javadsl.model.HttpResponse;
import akka.http.javadsl.model.MediaRanges;
import akka.http.javadsl.model.StatusCodes;
import akka.http.javadsl.model.headers.Accept;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;
import akka.util.ByteString;
import org.apache.pekko.Done;
import org.apache.pekko.actor.typed.ActorSystem;
import org.apache.pekko.actor.typed.javadsl.Behaviors;
import org.apache.pekko.http.javadsl.Http;
import org.apache.pekko.http.javadsl.model.HttpRequest;
import org.apache.pekko.http.javadsl.model.HttpResponse;
import org.apache.pekko.http.javadsl.model.MediaRanges;
import org.apache.pekko.http.javadsl.model.StatusCodes;
import org.apache.pekko.http.javadsl.model.headers.Accept;
import org.apache.pekko.stream.javadsl.Sink;
import org.apache.pekko.stream.javadsl.Source;
import org.apache.pekko.util.ByteString;

import java.util.Collections;
import java.util.concurrent.CompletionStage;

import static akka.actor.typed.javadsl.Adapter.toClassic;
import static org.apache.pekko.actor.typed.javadsl.Adapter.toClassic;

public class Main {

Expand All @@ -43,7 +43,7 @@ public static void main(String[] args) throws Exception {
}

private void run() throws Exception {
ActorSystem<Void> system = ActorSystem.create(Behaviors.empty(), "alpakka-samples");
ActorSystem<Void> system = ActorSystem.create(Behaviors.empty(), "pekko-connectors-samples");
Http http = Http.get(toClassic(system));

CompletionStage<Done> completion =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,22 +4,22 @@

package samples

import akka.Done
import akka.actor.typed.ActorSystem
import akka.actor.typed.scaladsl.Behaviors
import akka.actor.typed.scaladsl.adapter._
import akka.http.scaladsl._
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.headers.Accept
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, MediaRanges }
import akka.stream.scaladsl.{ Sink, Source }
import akka.util.ByteString
import org.apache.pekko.Done
import org.apache.pekko.actor.typed.ActorSystem
import org.apache.pekko.actor.typed.scaladsl.Behaviors
import org.apache.pekko.actor.typed.scaladsl.adapter._
import org.apache.pekko.http.scaladsl._
import org.apache.pekko.http.scaladsl.model.StatusCodes._
import org.apache.pekko.http.scaladsl.model.headers.Accept
import org.apache.pekko.http.scaladsl.model.{ HttpRequest, HttpResponse, MediaRanges }
import org.apache.pekko.stream.scaladsl.{ Sink, Source }
import org.apache.pekko.util.ByteString

import scala.concurrent.Future

object Main extends App {

implicit val actorSystem: ActorSystem[Nothing] = ActorSystem[Nothing](Behaviors.empty, "alpakka-samples")
implicit val actorSystem: ActorSystem[Nothing] = ActorSystem[Nothing](Behaviors.empty, "pekko-connectors-samples")

import actorSystem.executionContext

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@

### Description

The binary data in @scaladoc:[ByteString](akka.util.ByteString)s is passed into @extref:[Alpakka CSV](alpakka:data-transformations/csv.html) to be parsed and converted per line into a Map. The stream elements becomes a @scala[`Map[String, ByteString]`]@java[`Map<String, ByteString>`], one entry per column using the column headers as keys.
The binary data in @scaladoc:[ByteString](pekko.util.ByteString)s is passed into @extref:[Pekko Connectors CSV](pekko-connectors:data-transformations/csv.html) to be parsed and converted per line into a Map. The stream elements becomes a @scala[`Map[String, ByteString]`]@java[`Map<String, ByteString>`], one entry per column using the column headers as keys.
Original file line number Diff line number Diff line change
Expand Up @@ -4,26 +4,26 @@

package samples.javadsl;

import akka.Done;
import akka.actor.typed.ActorSystem;
import akka.actor.typed.javadsl.Behaviors;
import akka.http.javadsl.Http;
import akka.http.javadsl.model.HttpRequest;
import akka.http.javadsl.model.HttpResponse;
import akka.http.javadsl.model.MediaRanges;
import akka.http.javadsl.model.StatusCodes;
import akka.http.javadsl.model.headers.Accept;
import akka.stream.alpakka.csv.javadsl.CsvParsing;
import akka.stream.alpakka.csv.javadsl.CsvToMap;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;
import akka.util.ByteString;
import org.apache.pekko.Done;
import org.apache.pekko.actor.typed.ActorSystem;
import org.apache.pekko.actor.typed.javadsl.Behaviors;
import org.apache.pekko.http.javadsl.Http;
import org.apache.pekko.http.javadsl.model.HttpRequest;
import org.apache.pekko.http.javadsl.model.HttpResponse;
import org.apache.pekko.http.javadsl.model.MediaRanges;
import org.apache.pekko.http.javadsl.model.StatusCodes;
import org.apache.pekko.http.javadsl.model.headers.Accept;
import org.apache.pekko.stream.connectors.csv.javadsl.CsvParsing;
import org.apache.pekko.stream.connectors.csv.javadsl.CsvToMap;
import org.apache.pekko.stream.javadsl.Sink;
import org.apache.pekko.stream.javadsl.Source;
import org.apache.pekko.util.ByteString;

import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.concurrent.CompletionStage;

import static akka.actor.typed.javadsl.Adapter.toClassic;
import static org.apache.pekko.actor.typed.javadsl.Adapter.toClassic;

public class Main {

Expand All @@ -46,7 +46,7 @@ public static void main(String[] args) throws Exception {
}

private void run() throws Exception {
ActorSystem<Void> system = ActorSystem.create(Behaviors.empty(), "alpakka-samples");
ActorSystem<Void> system = ActorSystem.create(Behaviors.empty(), "pekko-connectors-samples");
Http http = Http.get(toClassic(system));

CompletionStage<Done> completion =
Expand Down
Loading