diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..4b87f3d9 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,21 @@ +[*] +charset = utf-8 +end_of_line = lf +indent_size = 4 +indent_style = space +max_line_length = 120 +tab_width = 4 +ij_continuation_indent_size = 8 +ij_smart_tabs = false + +[*.java] +ij_java_imports_layout = *,|,javax.**,java.**,|,$* +ij_java_class_count_to_use_import_on_demand = 9999 +ij_java_names_count_to_use_import_on_demand = 9999 +ij_java_packages_to_use_import_on_demand = java.awt.*,javax.swing.* + +[{*.kt,*.kts}] +ij_kotlin_imports_layout = *,java.**,javax.**,kotlin.**,^ +ij_kotlin_name_count_to_use_star_import = 9999 +ij_kotlin_name_count_to_use_star_import_for_members = 9999 +ij_kotlin_packages_to_use_import_on_demand = java.util.*,kotlinx.android.synthetic.*,io.ktor.* diff --git a/apps/kafka-key-generator/README.md b/apps/kafka-key-generator/README.md index 7aadad23..a0040abf 100644 --- a/apps/kafka-key-generator/README.md +++ b/apps/kafka-key-generator/README.md @@ -10,88 +10,76 @@ https://paw-kafka-key-generator.intern.dev.nav.no/docs Øvrige teknologier, rammeverk og biblioteker som er blitt tatt i bruk: -- [**Kotlin**](https://kotlinlang.org/) -- [**Ktor**](https://ktor.io/) -- [**PostgreSQL**](https://www.postgresql.org/) -- [**Flyway**](https://flywaydb.org/) -- [**Gradle**](https://gradle.org/) +- [**Kotlin**](https://kotlinlang.org) +- [**Ktor**](https://ktor.io) +- [**PostgreSQL**](https://www.postgresql.org) +- [**Flyway**](https://flywaydb.org) +- [**Kafka**](https://kafka.apache.org) +- [**Gradle**](https://gradle.org) ## Dev oppsett -Eksempel: +### JDK 21 -```sh -$ curl -XPOST https://paw-kafka-key-generator.intern.dev.nav.no/api/v1/hentEllerOpprett -H 'Authorization: Bearer ' -d '{"ident": "2072234860133"}' -``` - -## Lokalt oppsett -Authentisering fungerer ikke lokalt, så det er ikke mulig å teste lokalt på nåværende tidspunkt. -Lokalt kjører løsning mot statisk PDL data som innehlder 2 personer. Data ligger under src/test i no.nav.paw.kafkakeygenerator.testdata.kt. - -Under er det satt opp et par ting som må på plass for at applikasjonen og databasen skal fungere. - - -### JDK 17 - -JDK 17 må være installert. Enkleste måten å installere riktig versjon av Java er ved å +JDK 21 må være installert. Enkleste måten å installere riktig versjon av Java er ved å bruke [sdkman](https://sdkman.io/install). ### Docker -`docker` og `docker-compose` må være installert. For å -installere disse kan du følge oppskriften på [Dockers](https://www.docker.com/) offisielle side. For installering på Mac -trykk [her](https://docs.docker.com/desktop/mac/install/) eller -trykk [her](https://docs.docker.com/engine/install/ubuntu/) for Ubuntu. - -Man må også installere `docker-compose` som en separat greie -for [Ubuntu](https://docs.docker.com/compose/install/#install-compose-on-linux-systems). For Mac følger dette med når -man installerer Docker Desktop. - -Kjør opp docker containerne med +[Docker](https://docs.docker.com) og [Docker Compose](https://docs.docker.com/compose) må være installert. -```sh -docker-compose up -d +#### Start PostgreSQL database +```shell +docker compose -f ../../docker/postgres/docker-compose.yaml up -d ``` -Se at alle kjører med - -```sh -docker ps +#### Start Kafka broker +```shell +docker compose -f ../../docker/kafka/docker-compose.yaml up -d ``` -To containere skal kjøre: postgres og mock-oauth2-server. 
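+#### Lokal databasekonfigurasjon
+Skissen under er kun et illustrasjonseksempel (ikke kode fra appen): den viser hvordan `DatabaseConfig` fra denne endringen kan peke på den lokale PostgreSQL-instansen. Verdiene er antakelser hentet fra den gamle docker-compose-filen.
+```kotlin
+import no.nav.paw.kafkakeygenerator.config.DatabaseConfig
+
+// Antatte lokale verdier (bruker/passord/database fra gammel docker-compose)
+val lokalDatabaseConfig = DatabaseConfig(
+    host = "localhost",
+    port = 5432,
+    database = "pawkafkakeys",
+    username = "admin",
+    password = "admin",
+    driverClassName = "org.postgresql.Driver",
+    autoCommit = false
+)
+
+fun main() {
+    // jdbcUrl bygges opp av feltene over, jf. DatabaseConfig i denne diffen
+    println(lokalDatabaseConfig.jdbcUrl)
+}
+```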
+#### Start mocks
+Benytter mock [OAuth2 server](https://github.com/navikt/mock-oauth2-server) fra NAV Security og mock PDL vha [Wiremock](https://wiremock.org).
+```shell
+docker compose -f ../../docker/mocks/docker-compose.yaml up -d
+```

### App

-Start app med `./gradlew runTestApp` eller kjør main metoden i 'src/test/kotlin/no/nav/paw/kafkakeygenerator/run_test_app.kt' via Intellij.
-
-### Autentisering
+#### Gradle
+Start appen vha Gradle sin [application plugin](https://docs.gradle.org/current/userguide/application_plugin.html).
+```shell
+../../gradlew :apps:kafka-key-generator:run
+```

-For å kalle APIet lokalt må man være autentisert med et Bearer token.
+Alternativt kan test-oppsettet startes slik:
+```shell
+../../gradlew :apps:kafka-key-generator:runTestApp
+```

-Vi benytter mock-ouath2-server til å utstede tokens på lokal maskin. Følgende steg kan benyttes til å generere opp et token:
+#### IntelliJ
+Start appen ved å kjøre `main`-funksjonen i `./src/main/kotlin/no/nav/paw/kafkakeygenerator/Application.kt`.

-1. Sørg for at containeren for mock-oauth2-server kjører lokalt (docker-compose up -d)
-2. Naviger til [mock-oauth2-server sin side for debugging av tokens](http://localhost:8081/default/debugger)
-3. Generer et token
-4. Trykk på knappen Get a token
-5. Skriv inn noe random i "Enter any user/subject" og pid i optional claims, f.eks.
+Alternativt test-oppsett i `./src/test/kotlin/no/nav/paw/kafkakeygenerator/TestApplication.kt`.

-```json
-{ "acr": "Level4", "pid": "18908396568" }
+### Autentisering
+Applikasjonen er sikret som en OAuth2 Resource Server. For å kalle APIet må man sende med et OAuth2 Bearer Token.
+
+For å hente token fra `mock-oauth2-server`, kjør følgende request med `curl`:
+```shell
+ACCESS_TOKEN="$(curl -X POST http://localhost:8081/azure/token \
+  -H "Content-Type: application/x-www-form-urlencoded" \
+  -d "grant_type=client_credentials&client_id=paw-kafka-key-generator&client_secret=abcd1234&scope=openid%20pid" \
+| jq -r .access_token)"
```
-6. Trykk Sign in
-7. Kopier verdien for access_token og benytt denne som Bearer i Authorization-header
-
-8. Eksempel:
+### Gjøre kall

```sh
-$ curl -XPOST http://localhost:8080/api/v1/hentEllerOpprett -H 'Authorization: Bearer access_token' -d '{"ident": "2072234860133"}'
+$ curl -X POST http://localhost:8080/api/v2/hentEllerOpprett -H "Authorization: Bearer ${ACCESS_TOKEN}" -d '{"ident": "01017012345"}'
```

-eller benytt en REST-klient (f.eks. [insomnia](https://insomnia.rest/) eller [Postman](https://www.postman.com/product/rest-client/))
+Man kan også benytte en grafisk REST-klient (f.eks. [insomnia](https://insomnia.rest/) eller [Postman](https://www.postman.com/product/rest-client/)). Se også Kotlin-skissen nedenfor.

## Deploye kun til dev

@@ -110,12 +98,8 @@ git branch -m dev/

# Henvendelser

-Spørsmål knyttet til koden eller prosjektet kan stilles via issues her på github.
+Spørsmål knyttet til koden eller prosjektet kan stilles via issues her på GitHub.
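+#### Kall med Ktor-klient
+Som et alternativ til curl-eksempelet over viser skissen under samme kall med Ktor sin HTTP-klient (CIO-motoren er allerede en avhengighet i prosjektet). Dette er kun en skisse: endepunkt og payload er hentet fra eksemplene over, resten er antakelser.
+```kotlin
+import io.ktor.client.HttpClient
+import io.ktor.client.engine.cio.CIO
+import io.ktor.client.request.bearerAuth
+import io.ktor.client.request.post
+import io.ktor.client.request.setBody
+import io.ktor.client.statement.bodyAsText
+import io.ktor.http.ContentType
+import io.ktor.http.contentType
+import kotlinx.coroutines.runBlocking
+
+fun main() = runBlocking {
+    // Token hentes på forhånd, f.eks. med curl-kommandoen over
+    val accessToken = System.getenv("ACCESS_TOKEN") ?: error("Sett ACCESS_TOKEN først")
+    HttpClient(CIO).use { client ->
+        val response = client.post("http://localhost:8080/api/v2/hentEllerOpprett") {
+            bearerAuth(accessToken)
+            contentType(ContentType.Application.Json)
+            setBody("""{"ident": "01017012345"}""")
+        }
+        println(response.bodyAsText())
+    }
+}
+```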
## For NAV-ansatte Interne henvendelser kan sendes via Slack i kanalen [#team-paw-dev](https://nav-it.slack.com/archives/CLTFAEW75) - -# Lisens - -[MIT](LICENSE) diff --git a/apps/kafka-key-generator/build.gradle.kts b/apps/kafka-key-generator/build.gradle.kts index 311d3362..71b9a1ef 100644 --- a/apps/kafka-key-generator/build.gradle.kts +++ b/apps/kafka-key-generator/build.gradle.kts @@ -13,7 +13,11 @@ val jvmMajorVersion: String by project dependencies { // PAW implementation(project(":lib:hoplite-config")) + implementation(project(":lib:error-handling")) + implementation(project(":lib:http-client-utils")) implementation(project(":lib:pdl-client")) + implementation(project(":lib:kafka")) + implementation(project(":domain:interne-hendelser")) // NAV implementation(libs.nav.common.log) @@ -39,7 +43,6 @@ dependencies { implementation(libs.ktor.client.contentNegotiation) implementation(libs.ktor.client.core) implementation(libs.ktor.client.cio) - implementation(libs.ktor.client.okhttp) implementation(libs.ktor.client.logging) // Micrometer & OTEL @@ -81,7 +84,7 @@ java { } application { - mainClass.set("no.nav.paw.kafkakeygenerator.AppStarterKt") + mainClass.set("no.nav.paw.kafkakeygenerator.ApplicationKt") } tasks.withType(Jar::class) { @@ -113,6 +116,6 @@ tasks.named("test") { tasks.create("runTestApp", JavaExec::class) { classpath = sourceSets["test"].runtimeClasspath + sourceSets["main"].runtimeClasspath - mainClass = "no.nav.paw.kafkakeygenerator.Run_test_appKt" + mainClass = "no.nav.paw.kafkakeygenerator.TestApplicationKt" args = listOf() } diff --git a/apps/kafka-key-generator/docker-compose.yaml b/apps/kafka-key-generator/docker-compose.yaml deleted file mode 100644 index ff3a05c7..00000000 --- a/apps/kafka-key-generator/docker-compose.yaml +++ /dev/null @@ -1,46 +0,0 @@ -version: '3.9' - -services: - database: - image: postgres:14 - ports: - - "5432:5432" - environment: - - POSTGRES_USER=admin - - POSTGRES_PASSWORD=admin - - POSTGRES_DB=pawkafkakeys - - mock-oauth2-server: - image: ghcr.io/navikt/mock-oauth2-server:0.4.6 - ports: - - "8081:8081" - environment: - SERVER_PORT: 8081 - JSON_CONFIG: > - { - "interactiveLogin": true, - "httpServer": "NettyWrapper", - "tokenCallbacks": [ - { - "issuerId": "default", - "tokenExpiry": 15780000, - "requestMappings": [ - { - "requestParam": "scope", - "match": "openid somescope", - "claims": { - "sub": "admin@paw-kafka-key-generator", - "aud": ["paw-kafka-key-generator"] - } - }, - { - "requestParam": "scope", - "match": "default", - "claims": { - "aud": ["paw-kafka-key-generator"] - } - } - ] - } - ] - } diff --git a/apps/kafka-key-generator/nais/nais-dev.yaml b/apps/kafka-key-generator/nais/nais-dev.yaml index 22a517e1..c2714c07 100644 --- a/apps/kafka-key-generator/nais/nais-dev.yaml +++ b/apps/kafka-key-generator/nais/nais-dev.yaml @@ -8,6 +8,17 @@ metadata: spec: image: {{ image }} port: 8080 + env: + - name: PDL_URL + value: "https://pdl-api.dev-fss-pub.nais.io/graphql" + - name: PDL_SCOPE + value: "api://dev-fss.pdl.pdl-api/.default" + - name: PDL_TEMA + value: "OPP" + - name: KAFKA_CONSUMER_GROUP_ID + value: "paw-kafka-key-generator-v1" + - name: KAFKA_TOPIC_PAW_ARBEIDSSOKER_HENDELSESLOGG + value: "paw.arbeidssoker-hendelseslogg-v1" ingresses: - https://record-key-arbeidssoekerregisteret.intern.dev.nav.no/api/v1/record-key - https://record-key-arbeidssoekerregisteret.intern.dev.nav.no/docs/record-key @@ -33,6 +44,8 @@ spec: autoInstrumentation: enabled: true runtime: java + kafka: + pool: nav-dev gcp: sqlInstances: - type: POSTGRES_14 
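Env-variablene i manifestet over leses i appen via Hoplite (`loadNaisOrLocalConfiguration`). Skissen under er kun en illustrasjon (en antakelse, ikke kode fra repoet) av hvordan de samme variablene mapper til `KafkaTopologyConfig` som innføres lenger ned i denne diffen:

```kotlin
import no.nav.paw.kafkakeygenerator.config.KafkaTopologyConfig

// Skisse: manuell mapping av env-variablene fra nais-dev.yaml.
// Fallback-verdiene er de samme som settes i manifestet.
fun kafkaTopologyConfigFraEnv() = KafkaTopologyConfig(
    consumerGroupId = System.getenv("KAFKA_CONSUMER_GROUP_ID")
        ?: "paw-kafka-key-generator-v1",
    hendelseloggTopic = System.getenv("KAFKA_TOPIC_PAW_ARBEIDSSOKER_HENDELSESLOGG")
        ?: "paw.arbeidssoker-hendelseslogg-v1"
)
```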
diff --git a/apps/kafka-key-generator/nais/nais-prod.yaml b/apps/kafka-key-generator/nais/nais-prod.yaml index dfcbc338..e1278bf7 100644 --- a/apps/kafka-key-generator/nais/nais-prod.yaml +++ b/apps/kafka-key-generator/nais/nais-prod.yaml @@ -8,6 +8,17 @@ metadata: spec: image: {{ image }} port: 8080 + env: + - name: PDL_URL + value: "https://pdl-api.prod-fss-pub.nais.io/graphql" + - name: PDL_SCOPE + value: "api://prod-fss.pdl.pdl-api/.default" + - name: PDL_TEMA + value: "OPP" + - name: KAFKA_CONSUMER_GROUP_ID + value: "paw-kafka-key-generator-v1" + - name: KAFKA_TOPIC_PAW_ARBEIDSSOKER_HENDELSESLOGG + value: "paw.arbeidssoker-hendelseslogg-v1" replicas: min: 2 max: 2 @@ -30,6 +41,8 @@ spec: autoInstrumentation: enabled: true runtime: java + kafka: + pool: nav-prod gcp: sqlInstances: - type: POSTGRES_14 diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/AppStarter.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/AppStarter.kt deleted file mode 100644 index 679b6f17..00000000 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/AppStarter.kt +++ /dev/null @@ -1,62 +0,0 @@ -package no.nav.paw.kafkakeygenerator - -import io.micrometer.prometheusmetrics.PrometheusConfig -import io.micrometer.prometheusmetrics.PrometheusMeterRegistry -import no.nav.paw.kafkakeygenerator.config.Autentiseringskonfigurasjon -import no.nav.paw.kafkakeygenerator.config.DatabaseKonfigurasjon -import no.nav.paw.kafkakeygenerator.config.dataSource -import no.nav.paw.kafkakeygenerator.config.lastKonfigurasjon -import no.nav.paw.kafkakeygenerator.database.flywayMigrate -import no.nav.paw.kafkakeygenerator.ktor.initKtorServer -import no.nav.paw.kafkakeygenerator.merge.MergeDetector -import no.nav.paw.kafkakeygenerator.pdl.PdlIdentitesTjeneste -import no.nav.paw.kafkakeygenerator.pdl.opprettPdlKlient -import no.nav.paw.pdl.PdlClient -import org.jetbrains.exposed.sql.Database -import javax.sql.DataSource - -const val serverAuthentiseringKonfigFil = "ktor_server_autentisering.toml" -const val postgresKonfigFil = "postgres.toml" -const val pdlKlientKonfigFil = "pdl_klient.toml" -const val azureTokenKlientKonfigFil = "azure_token_klient.toml" - - -fun main() { - val dataSource = lastKonfigurasjon(postgresKonfigFil) - .dataSource() - val pdlKlient = opprettPdlKlient( - lastKonfigurasjon(pdlKlientKonfigFil), - lastKonfigurasjon(azureTokenKlientKonfigFil) - ) - startApplikasjon( - lastKonfigurasjon(serverAuthentiseringKonfigFil), - dataSource, - pdlKlient - ) -} - -fun startApplikasjon( - autentiseringKonfig: Autentiseringskonfigurasjon, - dataSource: DataSource, - pdlKlient: PdlClient -) { - val database = Database.connect(dataSource) - val prometheusMeterRegistry = PrometheusMeterRegistry(PrometheusConfig.DEFAULT) - flywayMigrate(dataSource) - val kafkaKeysDbTjeneste = KafkaKeys(database) - val pdlIdTjeneste = PdlIdentitesTjeneste(pdlKlient) - val applikasjon = Applikasjon( - kafkaKeysDbTjeneste, - pdlIdTjeneste - ) - val mergeDetector = MergeDetector( - pdlIdTjeneste, - kafkaKeysDbTjeneste - ) - initKtorServer( - autentiseringKonfig, - prometheusMeterRegistry, - applikasjon, - mergeDetector - ).start(wait = true) -} diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Application.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Application.kt new file mode 100644 index 00000000..a2797694 --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Application.kt 
@@ -0,0 +1,128 @@
+package no.nav.paw.kafkakeygenerator
+
+import io.ktor.server.engine.embeddedServer
+import io.ktor.server.netty.Netty
+import io.micrometer.core.instrument.binder.kafka.KafkaClientMetrics
+import io.micrometer.prometheusmetrics.PrometheusConfig
+import io.micrometer.prometheusmetrics.PrometheusMeterRegistry
+import no.nav.paw.arbeidssokerregisteret.intern.v1.HendelseDeserializer
+import no.nav.paw.client.config.AZURE_M2M_CONFIG
+import no.nav.paw.client.config.AzureAdM2MConfig
+import no.nav.paw.config.hoplite.loadNaisOrLocalConfiguration
+import no.nav.paw.config.kafka.KAFKA_CONFIG
+import no.nav.paw.config.kafka.KafkaConfig
+import no.nav.paw.config.kafka.KafkaFactory
+import no.nav.paw.health.repository.HealthIndicatorRepository
+import no.nav.paw.kafkakeygenerator.config.AUTHENTICATION_CONFIG
+import no.nav.paw.kafkakeygenerator.config.AuthenticationConfig
+import no.nav.paw.kafkakeygenerator.config.DATABASE_CONFIG
+import no.nav.paw.kafkakeygenerator.config.DatabaseConfig
+import no.nav.paw.kafkakeygenerator.config.KAFKA_TOPOLOGY_CONFIG
+import no.nav.paw.kafkakeygenerator.config.KafkaTopologyConfig
+import no.nav.paw.kafkakeygenerator.config.PDL_CLIENT_CONFIG
+import no.nav.paw.kafkakeygenerator.config.PdlClientConfig
+import no.nav.paw.kafkakeygenerator.merge.MergeDetector
+import no.nav.paw.kafkakeygenerator.plugin.configSerialization
+import no.nav.paw.kafkakeygenerator.plugin.configureAuthentication
+import no.nav.paw.kafkakeygenerator.plugin.configureDatabase
+import no.nav.paw.kafkakeygenerator.plugin.configureErrorHandling
+import no.nav.paw.kafkakeygenerator.plugin.configureKafka
+import no.nav.paw.kafkakeygenerator.plugin.configureLogging
+import no.nav.paw.kafkakeygenerator.plugin.configureMetrics
+import no.nav.paw.kafkakeygenerator.plugin.configureRouting
+import no.nav.paw.kafkakeygenerator.repository.IdentitetRepository
+import no.nav.paw.kafkakeygenerator.repository.KafkaKeysAuditRepository
+import no.nav.paw.kafkakeygenerator.repository.KafkaKeysRepository
+import no.nav.paw.kafkakeygenerator.service.KafkaConsumerService
+import no.nav.paw.kafkakeygenerator.service.KafkaKeysService
+import no.nav.paw.kafkakeygenerator.service.PdlService
+import no.nav.paw.kafkakeygenerator.utils.createDataSource
+import no.nav.paw.kafkakeygenerator.utils.createPdlClient
+import no.nav.paw.pdl.PdlClient
+import org.apache.kafka.common.serialization.LongDeserializer
+import org.jetbrains.exposed.sql.Database
+import javax.sql.DataSource
+
+fun main() {
+    val databaseConfig = loadNaisOrLocalConfiguration<DatabaseConfig>(DATABASE_CONFIG)
+    val authenticationConfig = loadNaisOrLocalConfiguration<AuthenticationConfig>(AUTHENTICATION_CONFIG)
+    val azureAdM2MConfig = loadNaisOrLocalConfiguration<AzureAdM2MConfig>(AZURE_M2M_CONFIG)
+    val pdlClientConfig = loadNaisOrLocalConfiguration<PdlClientConfig>(PDL_CLIENT_CONFIG)
+    val dataSource = createDataSource(databaseConfig)
+    val pdlClient = createPdlClient(pdlClientConfig, azureAdM2MConfig)
+    startApplication(authenticationConfig, dataSource, pdlClient)
+}
+
+fun startApplication(
+    authenticationConfig: AuthenticationConfig,
+    dataSource: DataSource,
+    pdlClient: PdlClient
+) {
+    val database = Database.connect(dataSource)
+    val healthIndicatorRepository = HealthIndicatorRepository()
+    val prometheusMeterRegistry = PrometheusMeterRegistry(PrometheusConfig.DEFAULT)
+    val identitetRepository = IdentitetRepository(database)
+    val kafkaKeysRepository = KafkaKeysRepository(database)
+    val kafkaKeysAuditRepository = KafkaKeysAuditRepository(database)
+    val kafkaConsumerService = KafkaConsumerService(
+        database,
+        healthIndicatorRepository,
+        prometheusMeterRegistry,
+        identitetRepository,
+        kafkaKeysRepository,
+        kafkaKeysAuditRepository
+    )
+    val pdlService = PdlService(pdlClient)
+    val kafkaKeysService = KafkaKeysService(
+        prometheusMeterRegistry,
+        kafkaKeysRepository,
+        pdlService
+    )
+    val mergeDetector = MergeDetector(
+        pdlService,
+        kafkaKeysRepository
+    )
+    val kafkaConfig = loadNaisOrLocalConfiguration<KafkaConfig>(KAFKA_CONFIG)
+    val kafkaTopologyConfig = loadNaisOrLocalConfiguration<KafkaTopologyConfig>(KAFKA_TOPOLOGY_CONFIG)
+    val kafkaFactory = KafkaFactory(kafkaConfig)
+
+    val hendelseKafkaConsumer = kafkaFactory.createConsumer(
+        groupId = kafkaTopologyConfig.consumerGroupId,
+        clientId = "${kafkaTopologyConfig.consumerGroupId}-consumer",
+        keyDeserializer = LongDeserializer::class,
+        valueDeserializer = HendelseDeserializer::class
+    )
+
+    embeddedServer(
+        factory = Netty,
+        port = 8080,
+        configure = {
+            connectionGroupSize = 8
+            workerGroupSize = 8
+            callGroupSize = 16
+        }
+    ) {
+        configSerialization()
+        configureLogging()
+        configureErrorHandling()
+        configureAuthentication(authenticationConfig)
+        configureMetrics(
+            meterRegistry = prometheusMeterRegistry,
+            extraMeterBinders = listOf(KafkaClientMetrics(hendelseKafkaConsumer))
+        )
+        configureDatabase(dataSource)
+        configureKafka(
+            consumeFunction = kafkaConsumerService::handleRecords,
+            errorFunction = kafkaConsumerService::handleException,
+            kafkaConsumer = hendelseKafkaConsumer,
+            kafkaTopics = listOf(kafkaTopologyConfig.hendelseloggTopic)
+        )
+        configureRouting(
+            authenticationConfig,
+            prometheusMeterRegistry,
+            healthIndicatorRepository,
+            kafkaKeysService,
+            mergeDetector
+        )
+    }.start(wait = true)
+}
diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/ktor/helse_endepunkt.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/internal/Metrics.kt
similarity index 77%
rename from apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/ktor/helse_endepunkt.kt
rename to apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/internal/Metrics.kt
index 87709244..247a867e 100644
--- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/ktor/helse_endepunkt.kt
+++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/internal/Metrics.kt
@@ -1,33 +1,24 @@
-package no.nav.paw.kafkakeygenerator.ktor
+package no.nav.paw.kafkakeygenerator.api.internal

 import io.ktor.server.application.call
 import io.ktor.server.response.respond
-import io.ktor.server.response.respondText
 import io.ktor.server.routing.Routing
 import io.ktor.server.routing.get
 import io.micrometer.prometheusmetrics.PrometheusMeterRegistry
-import kotlinx.coroutines.runBlocking
-import no.nav.paw.kafkakeygenerator.Either
-import no.nav.paw.kafkakeygenerator.Failure
+import no.nav.paw.kafkakeygenerator.vo.Either
+import no.nav.paw.kafkakeygenerator.vo.Failure
 import no.nav.paw.kafkakeygenerator.merge.MergeDetector
 import org.slf4j.LoggerFactory
 import java.util.concurrent.CompletableFuture
-import java.util.concurrent.CompletableFuture.supplyAsync
 import java.util.concurrent.atomic.AtomicReference

 private val task = AtomicReference<CompletableFuture<Either<Failure, Long>>?>(null)

-fun Routing.konfigurereHelse(
+fun Routing.konfigurereMetrics(
     prometheusMeterRegistry: PrometheusMeterRegistry,
     mergeDetector: MergeDetector
 ) {
     val mergeLogger = LoggerFactory.getLogger("MergeDetector")
-    get("/internal/isAlive") {
-        call.respondText("ALIVE")
-    }
-    get("/internal/isReady") {
-        call.respondText("READY")
-    }
get("/internal/metrics") { call.respond(prometheusMeterRegistry.scrape()) } diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/RecordKeyApi.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/RecordKeyApi.kt index 60555dc3..0f813a2d 100644 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/RecordKeyApi.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/RecordKeyApi.kt @@ -7,9 +7,9 @@ import io.ktor.server.response.* import io.ktor.server.routing.* import io.ktor.util.pipeline.* import io.opentelemetry.instrumentation.annotations.WithSpan -import no.nav.paw.kafkakeygenerator.* import no.nav.paw.kafkakeygenerator.api.recordkey.functions.recordKey -import no.nav.paw.kafkakeygenerator.config.Autentiseringskonfigurasjon +import no.nav.paw.kafkakeygenerator.config.AuthenticationConfig +import no.nav.paw.kafkakeygenerator.service.KafkaKeysService import no.nav.paw.kafkakeygenerator.vo.CallId import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer import org.slf4j.Logger @@ -17,26 +17,26 @@ import org.slf4j.LoggerFactory import java.util.* fun Routing.configureRecordKeyApi( - autentiseringKonfigurasjon: Autentiseringskonfigurasjon, - applikasjon: Applikasjon + authenticationConfig: AuthenticationConfig, + kafkaKeysService: KafkaKeysService ) { val logger = LoggerFactory.getLogger("record-key-api") - authenticate(autentiseringKonfigurasjon.kafkaKeyApiAuthProvider) { + authenticate(authenticationConfig.kafkaKeyApiAuthProvider) { post("/api/v1/record-key") { - handleRequest(applikasjon, logger) + handleRequest(kafkaKeysService, logger) } } } @WithSpan private suspend fun PipelineContext.handleRequest( - applikasjon: Applikasjon, + kafkaKeysService: KafkaKeysService, logger: Logger ) { val callId = call.request.headers["traceparent"] ?.let(::CallId) ?: CallId(UUID.randomUUID().toString()) val identitetsnummer = Identitetsnummer(call.receive().ident) - val (status, response) = applikasjon::hent.recordKey(logger, callId, identitetsnummer) + val (status, response) = kafkaKeysService::hent.recordKey(logger, callId, identitetsnummer) call.respond(status, response) } \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/functions/GetRecordKey.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/functions/GetRecordKey.kt index 23a1a169..9ce5225e 100644 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/functions/GetRecordKey.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/functions/GetRecordKey.kt @@ -1,9 +1,9 @@ package no.nav.paw.kafkakeygenerator.api.recordkey.functions import io.ktor.http.* -import no.nav.paw.kafkakeygenerator.Either -import no.nav.paw.kafkakeygenerator.Failure -import no.nav.paw.kafkakeygenerator.FailureCode +import no.nav.paw.kafkakeygenerator.vo.Either +import no.nav.paw.kafkakeygenerator.vo.Failure +import no.nav.paw.kafkakeygenerator.vo.FailureCode import no.nav.paw.kafkakeygenerator.api.recordkey.RecordKeyResponse import no.nav.paw.kafkakeygenerator.api.recordkey.recordKeyLookupResponseV1 import no.nav.paw.kafkakeygenerator.api.v2.publicTopicKeyFunction diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/functions/MapFailure.kt 
b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/functions/MapFailure.kt index b8ad7acf..221b7ef0 100644 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/functions/MapFailure.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/functions/MapFailure.kt @@ -1,8 +1,8 @@ package no.nav.paw.kafkakeygenerator.api.recordkey.functions import io.ktor.http.* -import no.nav.paw.kafkakeygenerator.Failure -import no.nav.paw.kafkakeygenerator.FailureCode +import no.nav.paw.kafkakeygenerator.vo.Failure +import no.nav.paw.kafkakeygenerator.vo.FailureCode import no.nav.paw.kafkakeygenerator.api.recordkey.FailureResponseV1 import no.nav.paw.kafkakeygenerator.api.recordkey.Feilkode diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/v2/ApiEndepunktV2.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/v2/ApiEndepunktV2.kt index 23854556..28c99d79 100644 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/v2/ApiEndepunktV2.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/v2/ApiEndepunktV2.kt @@ -10,11 +10,11 @@ import io.ktor.server.response.* import io.ktor.server.routing.* import io.ktor.util.pipeline.* import io.opentelemetry.instrumentation.annotations.WithSpan -import no.nav.paw.kafkakeygenerator.Applikasjon -import no.nav.paw.kafkakeygenerator.FailureCode -import no.nav.paw.kafkakeygenerator.Left -import no.nav.paw.kafkakeygenerator.Right -import no.nav.paw.kafkakeygenerator.config.Autentiseringskonfigurasjon +import no.nav.paw.kafkakeygenerator.service.KafkaKeysService +import no.nav.paw.kafkakeygenerator.vo.FailureCode +import no.nav.paw.kafkakeygenerator.vo.Left +import no.nav.paw.kafkakeygenerator.vo.Right +import no.nav.paw.kafkakeygenerator.config.AuthenticationConfig import no.nav.paw.kafkakeygenerator.vo.CallId import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer import org.slf4j.Logger @@ -22,30 +22,30 @@ import org.slf4j.LoggerFactory import java.util.* fun Routing.konfigurerApiV2( - autentiseringKonfigurasjon: Autentiseringskonfigurasjon, - applikasjon: Applikasjon + authenticationConfig: AuthenticationConfig, + kafkaKeysService: KafkaKeysService ) { val logger = LoggerFactory.getLogger("api") - authenticate(autentiseringKonfigurasjon.kafkaKeyApiAuthProvider) { + authenticate(authenticationConfig.kafkaKeyApiAuthProvider) { post("/api/v2/hentEllerOpprett") { - hentEllerOpprett(applikasjon, logger) + hentEllerOpprett(kafkaKeysService, logger) } post("/api/v2/info") { - hentInfo(applikasjon, logger) + hentInfo(kafkaKeysService, logger) } post("/api/v2/lokalInfo") { - hentLokalInfo(applikasjon, logger) + hentLokalInfo(kafkaKeysService, logger) } } } @WithSpan suspend fun PipelineContext.hentLokalInfo( - applikasjon: Applikasjon, + kafkaKeysService: KafkaKeysService, logger: Logger ) { val request = call.receive() - when (val resultat = applikasjon.hentLokaleAlias(request.antallPartisjoner, request.identer)) { + when (val resultat = kafkaKeysService.hentLokaleAlias(request.antallPartisjoner, request.identer)) { is Right -> call.respond( OK, AliasResponse( alias = resultat.right @@ -63,14 +63,14 @@ suspend fun PipelineContext.hentLokalInfo( @WithSpan suspend fun PipelineContext.hentInfo( - applikasjon: Applikasjon, + kafkaKeysService: KafkaKeysService, logger: Logger ) { val callId = call.request.headers["traceparent"] ?.let { CallId(it) } ?: 
CallId(UUID.randomUUID().toString()) val request = call.receive() - when (val resultat = applikasjon.validerLagretData(callId, Identitetsnummer(request.ident))) { + when (val resultat = kafkaKeysService.validerLagretData(callId, Identitetsnummer(request.ident))) { is Left -> call.respond( status = InternalServerError, message = resultat.left.code.name @@ -81,14 +81,14 @@ suspend fun PipelineContext.hentInfo( @WithSpan private suspend fun PipelineContext.hentEllerOpprett( - applikasjon: Applikasjon, + kafkaKeysService: KafkaKeysService, logger: Logger ) { val callId = call.request.headers["traceparent"] ?.let { CallId(it) } ?: CallId(UUID.randomUUID().toString()) val request = call.receive() - when (val resultat = applikasjon.hentEllerOpprett(callId, Identitetsnummer(request.ident))) { + when (val resultat = kafkaKeysService.hentEllerOpprett(callId, Identitetsnummer(request.ident))) { is Right -> { call.respond( OK, responseV2( diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/v2/HentLokaleAlias.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/v2/HentLokaleAlias.kt index 48676e29..db6a0d43 100644 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/v2/HentLokaleAlias.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/api/v2/HentLokaleAlias.kt @@ -1,9 +1,16 @@ package no.nav.paw.kafkakeygenerator.api.v2 import no.nav.paw.kafkakeygenerator.* +import no.nav.paw.kafkakeygenerator.service.KafkaKeysService +import no.nav.paw.kafkakeygenerator.vo.Either +import no.nav.paw.kafkakeygenerator.vo.Failure +import no.nav.paw.kafkakeygenerator.vo.FailureCode import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer +import no.nav.paw.kafkakeygenerator.vo.flatten +import no.nav.paw.kafkakeygenerator.vo.recover +import no.nav.paw.kafkakeygenerator.vo.right -fun Applikasjon.hentLokaleAlias(antallPartisjoner: Int, identiteter: List): Either> { +fun KafkaKeysService.hentLokaleAlias(antallPartisjoner: Int, identiteter: List): Either> { return identiteter.mapNotNull { identitet -> hentLokaleAlias(antallPartisjoner, Identitetsnummer(identitet)) .recover(FailureCode.DB_NOT_FOUND) { right(null) } diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/Autentiseringskonfigurasjon.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/AuthenticationConfig.kt similarity index 57% rename from apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/Autentiseringskonfigurasjon.kt rename to apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/AuthenticationConfig.kt index 1e879093..9e695029 100644 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/Autentiseringskonfigurasjon.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/AuthenticationConfig.kt @@ -1,12 +1,13 @@ package no.nav.paw.kafkakeygenerator.config +const val AUTHENTICATION_CONFIG = "authentication_config.toml" -data class Autentiseringskonfigurasjon( - val providers: List, +data class AuthenticationConfig( + val providers: List, val kafkaKeyApiAuthProvider: String ) -data class Autentisering( +data class AuthenticationProviderConfig( val name: String, val discoveryUrl: String, val acceptedAudience: List, diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/AzureTokenKlientKonfigurasjon.kt 
b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/AzureTokenKlientKonfigurasjon.kt deleted file mode 100644 index 39582185..00000000 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/AzureTokenKlientKonfigurasjon.kt +++ /dev/null @@ -1,7 +0,0 @@ -package no.nav.paw.kafkakeygenerator.config - -data class AzureTokenKlientKonfigurasjon( - val clientId: String, - val privateJwk: String, - val tokenEndpointUrl: String -) \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/DatabaseConfig.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/DatabaseConfig.kt new file mode 100644 index 00000000..0487bbda --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/DatabaseConfig.kt @@ -0,0 +1,15 @@ +package no.nav.paw.kafkakeygenerator.config + +const val DATABASE_CONFIG = "database_config.toml" + +data class DatabaseConfig( + val host: String, + val port: Int, + val database: String, + val username: String, + val password: String, + val driverClassName: String, + val autoCommit: Boolean +) { + val jdbcUrl = "jdbc:postgresql://$host:$port/$database?user=$username&password=$password" +} diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/DatabaseKonfigurasjon.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/DatabaseKonfigurasjon.kt deleted file mode 100644 index 236a3866..00000000 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/DatabaseKonfigurasjon.kt +++ /dev/null @@ -1,11 +0,0 @@ -package no.nav.paw.kafkakeygenerator.config - -data class DatabaseKonfigurasjon( - val host: String, - val port: Int, - val brukernavn: String, - val passord: String, - val databasenavn: String, -) { - val url get() = "jdbc:postgresql://$host:$port/$databasenavn?user=$brukernavn&password=$passord" -} \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/KafkaTopologyConfig.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/KafkaTopologyConfig.kt new file mode 100644 index 00000000..e33962bc --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/KafkaTopologyConfig.kt @@ -0,0 +1,8 @@ +package no.nav.paw.kafkakeygenerator.config + +const val KAFKA_TOPOLOGY_CONFIG = "kafka_topology_config.toml" + +data class KafkaTopologyConfig( + val consumerGroupId: String, + val hendelseloggTopic: String +) diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/PdlClientConfig.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/PdlClientConfig.kt new file mode 100644 index 00000000..edb03dd9 --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/PdlClientConfig.kt @@ -0,0 +1,9 @@ +package no.nav.paw.kafkakeygenerator.config + +const val PDL_CLIENT_CONFIG = "pdl_client_config.toml" + +data class PdlClientConfig( + val url: String, + val scope: String, + val tema: String +) \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/PdlKlientKonfigurasjon.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/PdlKlientKonfigurasjon.kt deleted file mode 100644 index 2006eb84..00000000 --- 
a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/PdlKlientKonfigurasjon.kt
+++ /dev/null
@@ -1,9 +0,0 @@
-package no.nav.paw.kafkakeygenerator.config
-
-data class PdlKlientKonfigurasjon(
-    val url: String,
-    val tema: String,
-    val pdlCluster: String,
-    val namespace: String,
-    val appName: String
-)
\ No newline at end of file
diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/utils.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/utils.kt
deleted file mode 100644
index 5fb9c2b3..00000000
--- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/config/utils.kt
+++ /dev/null
@@ -1,32 +0,0 @@
-package no.nav.paw.kafkakeygenerator.config
-
-import com.sksamuel.hoplite.ConfigLoaderBuilder
-import com.sksamuel.hoplite.ExperimentalHoplite
-import com.sksamuel.hoplite.addResourceSource
-import com.zaxxer.hikari.HikariConfig
-import com.zaxxer.hikari.HikariDataSource
-import java.lang.System.getenv
-
-@OptIn(ExperimentalHoplite::class)
-inline fun <reified A> lastKonfigurasjon(navn: String): A {
-    val fulltNavn = when (getenv("NAIS_CLUSTER_NAME")) {
-        "prod-gcp" -> "/prod/$navn"
-        "dev-gcp" -> "/dev/$navn"
-        else -> "/local/$navn"
-    }
-    return ConfigLoaderBuilder
-        .default()
-        .withExplicitSealedTypes()
-        .addResourceSource(fulltNavn)
-        .build()
-        .loadConfigOrThrow<A>()
-}
-
-fun DatabaseKonfigurasjon.dataSource() =
-    HikariDataSource(HikariConfig().apply {
-        jdbcUrl = url
-        driverClassName = "org.postgresql.Driver"
-        password = passord
-        username = brukernavn
-        isAutoCommit = false
-    })
\ No newline at end of file
diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/database/KafkaKeysAuditTable.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/database/KafkaKeysAuditTable.kt
new file mode 100644
index 00000000..6279426e
--- /dev/null
+++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/database/KafkaKeysAuditTable.kt
@@ -0,0 +1,13 @@
+package no.nav.paw.kafkakeygenerator.database
+
+import no.nav.paw.kafkakeygenerator.vo.IdentitetStatus
+import org.jetbrains.exposed.dao.id.LongIdTable
+import org.jetbrains.exposed.sql.javatime.timestamp
+
+object KafkaKeysAuditTable : LongIdTable("kafka_keys_audit") {
+    val identitetsnummer = varchar("identitetsnummer", 255).references(IdentitetTabell.identitetsnummer)
+    val tidligereKafkaKey = long("tidligere_kafka_key")
+    val status = enumerationByName<IdentitetStatus>("status", 50)
+    val detaljer = varchar("detaljer", 255)
+    val tidspunkt = timestamp("tidspunkt")
+}
diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/database/flyway.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/database/flyway.kt
deleted file mode 100644
index 102aaf9c..00000000
--- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/database/flyway.kt
+++ /dev/null
@@ -1,9 +0,0 @@
-package no.nav.paw.kafkakeygenerator.database
-
-fun flywayMigrate(dataSource: javax.sql.DataSource) {
-    org.flywaydb.core.Flyway.configure()
-        .dataSource(dataSource)
-        .baselineOnMigrate(true)
-        .load()
-        .migrate()
-}
\ No newline at end of file
diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/ktor/init_ktor.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/ktor/init_ktor.kt
deleted file mode 100644
index d8d18046..00000000
---
a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/ktor/init_ktor.kt +++ /dev/null @@ -1,30 +0,0 @@ -package no.nav.paw.kafkakeygenerator.ktor - -import io.ktor.server.engine.embeddedServer -import io.ktor.server.netty.Netty -import io.micrometer.prometheusmetrics.PrometheusMeterRegistry -import no.nav.paw.kafkakeygenerator.Applikasjon -import no.nav.paw.kafkakeygenerator.config.Autentiseringskonfigurasjon -import no.nav.paw.kafkakeygenerator.merge.MergeDetector - -fun initKtorServer( - autentiseringKonfigurasjon: Autentiseringskonfigurasjon, - prometheusMeterRegistry: PrometheusMeterRegistry, - applikasjon: Applikasjon, - mergeDetector: MergeDetector -) = embeddedServer( - factory = Netty, - port = 8080, - configure = { - connectionGroupSize = 8 - workerGroupSize = 8 - callGroupSize = 16 - } -) { - konfigurerServer( - autentiseringKonfigurasjon = autentiseringKonfigurasjon, - prometheusMeterRegistry = prometheusMeterRegistry, - applikasjon = applikasjon, - mergeDetector = mergeDetector - ) -} diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/ktor/ktor_oppsett.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/ktor/ktor_oppsett.kt deleted file mode 100644 index 6a2c93ef..00000000 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/ktor/ktor_oppsett.kt +++ /dev/null @@ -1,147 +0,0 @@ -package no.nav.paw.kafkakeygenerator.ktor - -import com.fasterxml.jackson.databind.DatabindException -import io.ktor.http.ContentType -import io.ktor.http.HttpStatusCode -import io.ktor.serialization.jackson.jackson -import io.ktor.server.application.Application -import io.ktor.server.application.install -import io.ktor.server.auth.authentication -import io.ktor.server.metrics.micrometer.MicrometerMetrics -import io.ktor.server.plugins.callloging.CallLogging -import io.ktor.server.plugins.contentnegotiation.ContentNegotiation -import io.ktor.server.plugins.statuspages.StatusPages -import io.ktor.server.plugins.swagger.swaggerUI -import io.ktor.server.request.path -import io.ktor.server.response.respondText -import io.ktor.server.routing.routing -import io.micrometer.core.instrument.binder.jvm.JvmGcMetrics -import io.micrometer.core.instrument.binder.jvm.JvmMemoryMetrics -import io.micrometer.core.instrument.binder.system.ProcessorMetrics -import io.micrometer.core.instrument.distribution.DistributionStatisticConfig -import io.micrometer.prometheusmetrics.PrometheusMeterRegistry -import no.nav.paw.kafkakeygenerator.Applikasjon -import no.nav.paw.kafkakeygenerator.api.recordkey.configureRecordKeyApi -import no.nav.paw.kafkakeygenerator.api.v2.konfigurerApiV2 -import no.nav.paw.kafkakeygenerator.config.Autentiseringskonfigurasjon -import no.nav.paw.kafkakeygenerator.masker -import no.nav.paw.kafkakeygenerator.merge.MergeDetector -import no.nav.security.token.support.v2.IssuerConfig -import no.nav.security.token.support.v2.RequiredClaims -import no.nav.security.token.support.v2.TokenSupportConfig -import no.nav.security.token.support.v2.tokenValidationSupport -import org.slf4j.LoggerFactory -import java.time.Duration - -fun Application.konfigurerServer( - autentiseringKonfigurasjon: Autentiseringskonfigurasjon, - prometheusMeterRegistry: PrometheusMeterRegistry, - applikasjon: Applikasjon, - mergeDetector: MergeDetector -) { - autentisering(autentiseringKonfigurasjon) - micrometerMetrics(prometheusMeterRegistry) - configureLogging() - serialisering() - statusPages() - routing { - konfigurereHelse( - 
prometheusMeterRegistry = prometheusMeterRegistry,
-            mergeDetector = mergeDetector
-        )
-        konfigurerApiV2(autentiseringKonfigurasjon, applikasjon)
-        configureRecordKeyApi(autentiseringKonfigurasjon, applikasjon)
-        swaggerUI(path = "docs", swaggerFile = "openapi/documentation.yaml")
-        swaggerUI(path = "docs/record-key", swaggerFile = "openapi/record-key-api-spec.yaml")
-    }
-}
-
-fun Application.micrometerMetrics(prometheusMeterRegistry: PrometheusMeterRegistry) {
-    install(MicrometerMetrics) {
-        registry = prometheusMeterRegistry
-        meterBinders = listOf(
-            JvmMemoryMetrics(),
-            JvmGcMetrics(),
-            ProcessorMetrics(),
-        )
-        distributionStatisticConfig =
-            DistributionStatisticConfig.builder()
-                .percentilesHistogram(true)
-                .maximumExpectedValue(Duration.ofMillis(750).toNanos().toDouble())
-                .minimumExpectedValue(Duration.ofMillis(20).toNanos().toDouble())
-                .serviceLevelObjectives(
-                    Duration.ofMillis(100).toNanos().toDouble(),
-                    Duration.ofMillis(200).toNanos().toDouble()
-                )
-                .build()
-    }
-}
-
-fun Application.autentisering(autentiseringskonfigurasjon: Autentiseringskonfigurasjon) {
-    authentication {
-        autentiseringskonfigurasjon.providers.forEach { provider ->
-            tokenValidationSupport(
-                name = provider.name,
-                requiredClaims = RequiredClaims(
-                    issuer = provider.name,
-                    claimMap = provider.requiredClaims.toTypedArray()
-                ),
-                config = TokenSupportConfig(
-                    IssuerConfig(
-                        name = provider.name,
-                        discoveryUrl = provider.discoveryUrl,
-                        acceptedAudience = provider.acceptedAudience
-                    ),
-                ),
-            )
-        }
-    }
-}
-
-fun Application.serialisering() {
-    install(ContentNegotiation) {
-        jackson()
-    }
-}
-
-private val feilLogger = LoggerFactory.getLogger("error_logger")
-fun Application.statusPages() {
-    install(StatusPages) {
-        exception<Throwable> { call, throwable ->
-            when (throwable) {
-                is DatabindException -> {
-                    feilLogger.info(
-                        "Ugyldig kall {}, feilet, grunnet: {}",
-                        masker(call.request.path()),
-                        masker(throwable.message)
-                    )
-                    call.respondText(
-                        "Bad request",
-                        ContentType.Text.Plain,
-                        HttpStatusCode.BadRequest
-                    )
-                }
-
-                else -> {
-                    feilLogger.error(
-                        "Kall {}, feilet, grunnet: {}",
-                        masker(call.request.path()),
-                        masker(throwable.message)
-                    )
-                    call.respondText(
-                        "En uventet feil oppstod",
-                        ContentType.Text.Plain,
-                        HttpStatusCode.InternalServerError
-                    )
-                }
-            }
-        }
-    }
-}
-
-fun Application.configureLogging() {
-    install(CallLogging) {
-        disableDefaultColors()
-        filter { !it.request.path().startsWith("/internal") && it.response.status() != HttpStatusCode.OK }
-    }
-}
\ No newline at end of file
diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/listener/NoopConsumerRebalanceListener.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/listener/NoopConsumerRebalanceListener.kt
new file mode 100644
index 00000000..ff1d16ac
--- /dev/null
+++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/listener/NoopConsumerRebalanceListener.kt
@@ -0,0 +1,9 @@
+package no.nav.paw.kafkakeygenerator.listener
+
+import org.apache.kafka.clients.consumer.ConsumerRebalanceListener
+import org.apache.kafka.common.TopicPartition
+
+class NoopConsumerRebalanceListener : ConsumerRebalanceListener {
+    override fun onPartitionsRevoked(partitions: MutableCollection<TopicPartition>?) {}
+    override fun onPartitionsAssigned(partitions: MutableCollection<TopicPartition>?) {}
+}
\ No newline at end of file
diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/merge/MergeDetector.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/merge/MergeDetector.kt
index b146acb1..7acc17b5 100644
--- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/merge/MergeDetector.kt
+++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/merge/MergeDetector.kt
@@ -2,19 +2,25 @@ package no.nav.paw.kafkakeygenerator.merge

 import no.nav.paw.kafkakeygenerator.*
 import no.nav.paw.kafkakeygenerator.mergedetector.vo.MergeDetected
-import no.nav.paw.kafkakeygenerator.pdl.PdlIdentitesTjeneste
+import no.nav.paw.kafkakeygenerator.service.PdlService
+import no.nav.paw.kafkakeygenerator.repository.KafkaKeysRepository
 import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId
+import no.nav.paw.kafkakeygenerator.vo.Either
+import no.nav.paw.kafkakeygenerator.vo.Failure
 import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer
+import no.nav.paw.kafkakeygenerator.vo.Left
+import no.nav.paw.kafkakeygenerator.vo.Right
+import no.nav.paw.kafkakeygenerator.vo.right
 import no.nav.paw.pdl.graphql.generated.hentidenter.IdentInformasjon
 import org.slf4j.LoggerFactory

 class MergeDetector(
-    private val pdlIdentitesTjeneste: PdlIdentitesTjeneste,
-    private val kafkaKeys: KafkaKeys
+    private val pdlService: PdlService,
+    private val kafkaKeysRepository: KafkaKeysRepository
 ) {
     private val logger = LoggerFactory.getLogger("MergeDetector")

     private val hentEllerNull: (Identitetsnummer) -> ArbeidssoekerId? = { id ->
-        kafkaKeys.hent(id)
+        kafkaKeysRepository.hent(id)
             .fold(
                 { null},
                 { it }
@@ -23,7 +29,7 @@
     suspend fun findMerges(batchSize: Int): Either<Failure, Long> {
         require(batchSize > 0) { "Batch size must be greater than 0" }
-        return kafkaKeys.hentSisteArbeidssoekerId()
+        return kafkaKeysRepository.hentSisteArbeidssoekerId()
             .map { it.value }
             .suspendingFlatMap { max ->
                 processRange(
@@ -51,10 +57,10 @@
         if (currentPos >= stopAt) {
             results
         } else {
-            val storedData = kafkaKeys.hent(currentPos, maxSize)
+            val storedData = kafkaKeysRepository.hent(currentPos, maxSize)
             val detected = storedData
                 .suspendingFlatMap {
-                    pdlIdentitesTjeneste.hentIdenter(it.keys.toList())
+                    pdlService.hentIdenter(it.keys.toList())
                 }
                 .map { pdl -> detectMerges(hentEllerNull, pdl) }
                 .map(Sequence<MergeDetected>::count)
diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/mergedetector/FailureBuilder.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/mergedetector/FailureBuilder.kt
index a1dcd5f6..23d88f00 100644
--- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/mergedetector/FailureBuilder.kt
+++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/mergedetector/FailureBuilder.kt
@@ -1,7 +1,7 @@
 package no.nav.paw.kafkakeygenerator.mergedetector

-import no.nav.paw.kafkakeygenerator.Failure
-import no.nav.paw.kafkakeygenerator.FailureCode
+import no.nav.paw.kafkakeygenerator.vo.Failure
+import no.nav.paw.kafkakeygenerator.vo.FailureCode

 fun failure(code: FailureCode) : Failure =
     Failure(
diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/mergedetector/HentLagretData.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/mergedetector/HentLagretData.kt
index 09519d79..01422425 100644
---
a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/mergedetector/HentLagretData.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/mergedetector/HentLagretData.kt @@ -1,19 +1,19 @@ package no.nav.paw.kafkakeygenerator.mergedetector import io.opentelemetry.instrumentation.annotations.WithSpan -import kotlinx.coroutines.yield -import no.nav.paw.kafkakeygenerator.* -import no.nav.paw.kafkakeygenerator.FailureCode.DB_NOT_FOUND -import no.nav.paw.kafkakeygenerator.FailureCode.PDL_NOT_FOUND +import no.nav.paw.kafkakeygenerator.vo.FailureCode.DB_NOT_FOUND +import no.nav.paw.kafkakeygenerator.vo.FailureCode.PDL_NOT_FOUND import no.nav.paw.kafkakeygenerator.vo.Info import no.nav.paw.kafkakeygenerator.vo.PdlId import no.nav.paw.kafkakeygenerator.mergedetector.vo.LagretData import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId +import no.nav.paw.kafkakeygenerator.vo.Either +import no.nav.paw.kafkakeygenerator.vo.Failure import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer -import kotlin.coroutines.Continuation -import kotlin.coroutines.CoroutineContext -import kotlin.coroutines.EmptyCoroutineContext -import kotlin.coroutines.createCoroutine +import no.nav.paw.kafkakeygenerator.vo.flatten +import no.nav.paw.kafkakeygenerator.vo.left +import no.nav.paw.kafkakeygenerator.vo.recover +import no.nav.paw.kafkakeygenerator.vo.right @WithSpan fun hentLagretData( diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/pdl/utils.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/pdl/utils.kt deleted file mode 100644 index 4d2e12e8..00000000 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/pdl/utils.kt +++ /dev/null @@ -1,38 +0,0 @@ -package no.nav.paw.kafkakeygenerator.pdl - -import io.ktor.client.* -import io.ktor.client.engine.okhttp.* -import io.ktor.client.plugins.contentnegotiation.* -import io.ktor.serialization.jackson.* -import no.nav.common.token_client.builder.AzureAdTokenClientBuilder -import no.nav.common.token_client.cache.CaffeineTokenCache -import no.nav.paw.kafkakeygenerator.config.AzureTokenKlientKonfigurasjon -import no.nav.paw.kafkakeygenerator.config.PdlKlientKonfigurasjon -import no.nav.paw.pdl.PdlClient - -fun opprettKtorKlient() = HttpClient(OkHttp) { - install(ContentNegotiation) { - jackson() - } -} - -fun opprettPdlKlient( - konfig: PdlKlientKonfigurasjon, - autentiseringskonfigurasjon: AzureTokenKlientKonfigurasjon -) : PdlClient { - val scope = "api://${konfig.pdlCluster}.${konfig.namespace}.${konfig.appName}/.default" - val azureTokenClient = aadMachineToMachineTokenClient(autentiseringskonfigurasjon) - return PdlClient( - url = konfig.url, - tema = konfig.tema, - httpClient = opprettKtorKlient() - ) { azureTokenClient.createMachineToMachineToken(scope) } -} - -private fun aadMachineToMachineTokenClient(konfig: AzureTokenKlientKonfigurasjon) = - AzureAdTokenClientBuilder.builder() - .withClientId(konfig.clientId) - .withPrivateJwk(konfig.privateJwk) - .withTokenEndpointUrl(konfig.tokenEndpointUrl) - .withCache(CaffeineTokenCache()) - .buildMachineToMachineTokenClient() diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Authentication.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Authentication.kt new file mode 100644 index 00000000..a31920cd --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Authentication.kt @@ -0,0 +1,30 @@ +package 
no.nav.paw.kafkakeygenerator.plugin
+
+import io.ktor.server.application.Application
+import io.ktor.server.auth.authentication
+import no.nav.paw.kafkakeygenerator.config.AuthenticationConfig
+import no.nav.security.token.support.v2.IssuerConfig
+import no.nav.security.token.support.v2.RequiredClaims
+import no.nav.security.token.support.v2.TokenSupportConfig
+import no.nav.security.token.support.v2.tokenValidationSupport
+
+fun Application.configureAuthentication(authenticationConfig: AuthenticationConfig) {
+    authentication {
+        authenticationConfig.providers.forEach { provider ->
+            tokenValidationSupport(
+                name = provider.name,
+                requiredClaims = RequiredClaims(
+                    issuer = provider.name,
+                    claimMap = provider.requiredClaims.toTypedArray()
+                ),
+                config = TokenSupportConfig(
+                    IssuerConfig(
+                        name = provider.name,
+                        discoveryUrl = provider.discoveryUrl,
+                        acceptedAudience = provider.acceptedAudience
+                    ),
+                ),
+            )
+        }
+    }
+}
diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Database.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Database.kt
new file mode 100644
index 00000000..3203b5ab
--- /dev/null
+++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Database.kt
@@ -0,0 +1,12 @@
+package no.nav.paw.kafkakeygenerator.plugin
+
+import io.ktor.server.application.Application
+import io.ktor.server.application.install
+import no.nav.paw.kafkakeygenerator.plugin.custom.FlywayPlugin
+import javax.sql.DataSource
+
+fun Application.configureDatabase(dataSource: DataSource) {
+    install(FlywayPlugin) {
+        this.dataSource = dataSource
+    }
+}
diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/ErrorHandling.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/ErrorHandling.kt
new file mode 100644
index 00000000..e829bb8f
--- /dev/null
+++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/ErrorHandling.kt
@@ -0,0 +1,48 @@
+package no.nav.paw.kafkakeygenerator.plugin
+
+import com.fasterxml.jackson.databind.DatabindException
+import io.ktor.http.ContentType
+import io.ktor.http.HttpStatusCode
+import io.ktor.server.application.Application
+import io.ktor.server.application.install
+import io.ktor.server.plugins.statuspages.StatusPages
+import io.ktor.server.request.path
+import io.ktor.server.response.respondText
+import no.nav.paw.kafkakeygenerator.utils.masker
+import org.slf4j.LoggerFactory
+
+private val feilLogger = LoggerFactory.getLogger("error_logger")
+
+fun Application.configureErrorHandling() {
+    install(StatusPages) {
+        exception<Throwable> { call, throwable ->
+            when (throwable) {
+                is DatabindException -> {
+                    feilLogger.info(
+                        "Ugyldig kall {}, feilet, grunnet: {}",
+                        masker(call.request.path()),
+                        masker(throwable.message)
+                    )
+                    call.respondText(
+                        "Bad request",
+                        ContentType.Text.Plain,
+                        HttpStatusCode.BadRequest
+                    )
+                }
+
+                else -> {
+                    feilLogger.error(
+                        "Kall {}, feilet, grunnet: {}",
+                        masker(call.request.path()),
+                        masker(throwable.message)
+                    )
+                    call.respondText(
+                        "En uventet feil oppstod",
+                        ContentType.Text.Plain,
+                        HttpStatusCode.InternalServerError
+                    )
+                }
+            }
+        }
+    }
+}
diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Kafka.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Kafka.kt
new file mode 100644
index 00000000..453d8a99
--- /dev/null
+++
b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Kafka.kt @@ -0,0 +1,24 @@ +package no.nav.paw.kafkakeygenerator.plugin + +import io.ktor.server.application.Application +import io.ktor.server.application.install +import no.nav.paw.kafkakeygenerator.plugin.custom.kafkaConsumerPlugin +import org.apache.kafka.clients.consumer.ConsumerRecords +import org.apache.kafka.clients.consumer.KafkaConsumer + +fun Application.configureKafka( + consumeFunction: ((ConsumerRecords) -> Unit), + successFunction: ((ConsumerRecords) -> Unit)? = null, + errorFunction: ((throwable: Throwable) -> Unit), + kafkaConsumer: KafkaConsumer, + kafkaTopics: List +) { + + install(kafkaConsumerPlugin()) { + this.consumeFunction = consumeFunction + this.successFunction = successFunction + this.errorFunction = errorFunction + this.kafkaConsumer = kafkaConsumer + this.kafkaTopics = kafkaTopics + } +} diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Logging.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Logging.kt new file mode 100644 index 00000000..e915b3dd --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Logging.kt @@ -0,0 +1,14 @@ +package no.nav.paw.kafkakeygenerator.plugin + +import io.ktor.http.HttpStatusCode +import io.ktor.server.application.Application +import io.ktor.server.application.install +import io.ktor.server.plugins.callloging.CallLogging +import io.ktor.server.request.path + +fun Application.configureLogging() { + install(CallLogging) { + disableDefaultColors() + filter { !it.request.path().startsWith("/internal") && it.response.status() != HttpStatusCode.OK } + } +} diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Metrics.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Metrics.kt new file mode 100644 index 00000000..95537d6d --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Metrics.kt @@ -0,0 +1,36 @@ +package no.nav.paw.kafkakeygenerator.plugin + +import io.ktor.server.application.Application +import io.ktor.server.application.install +import io.ktor.server.metrics.micrometer.MicrometerMetrics +import io.micrometer.core.instrument.MeterRegistry +import io.micrometer.core.instrument.binder.MeterBinder +import io.micrometer.core.instrument.binder.jvm.JvmGcMetrics +import io.micrometer.core.instrument.binder.jvm.JvmMemoryMetrics +import io.micrometer.core.instrument.binder.system.ProcessorMetrics +import io.micrometer.core.instrument.distribution.DistributionStatisticConfig +import java.time.Duration + +fun Application.configureMetrics( + meterRegistry: MeterRegistry, + extraMeterBinders: List +) { + install(MicrometerMetrics) { + this.registry = meterRegistry + this.meterBinders = listOf( + JvmGcMetrics(), + JvmMemoryMetrics(), + ProcessorMetrics() + ) + extraMeterBinders + this.distributionStatisticConfig = + DistributionStatisticConfig.builder() + .percentilesHistogram(true) + .maximumExpectedValue(Duration.ofMillis(750).toNanos().toDouble()) + .minimumExpectedValue(Duration.ofMillis(20).toNanos().toDouble()) + .serviceLevelObjectives( + Duration.ofMillis(100).toNanos().toDouble(), + Duration.ofMillis(200).toNanos().toDouble() + ) + .build() + } +} diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Routing.kt 
b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Routing.kt new file mode 100644 index 00000000..f00ced6e --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Routing.kt @@ -0,0 +1,34 @@ +package no.nav.paw.kafkakeygenerator.plugin + +import io.ktor.server.application.Application +import io.ktor.server.plugins.swagger.swaggerUI +import io.ktor.server.routing.routing +import io.micrometer.prometheusmetrics.PrometheusMeterRegistry +import no.nav.paw.health.repository.HealthIndicatorRepository +import no.nav.paw.health.route.healthRoutes +import no.nav.paw.kafkakeygenerator.service.KafkaKeysService +import no.nav.paw.kafkakeygenerator.api.recordkey.configureRecordKeyApi +import no.nav.paw.kafkakeygenerator.api.v2.konfigurerApiV2 +import no.nav.paw.kafkakeygenerator.config.AuthenticationConfig +import no.nav.paw.kafkakeygenerator.api.internal.konfigurereMetrics +import no.nav.paw.kafkakeygenerator.merge.MergeDetector + +fun Application.configureRouting( + authenticationConfig: AuthenticationConfig, + prometheusMeterRegistry: PrometheusMeterRegistry, + healthIndicatorRepository: HealthIndicatorRepository, + kafkaKeysService: KafkaKeysService, + mergeDetector: MergeDetector +) { + routing { + healthRoutes(healthIndicatorRepository) + konfigurereMetrics( + prometheusMeterRegistry = prometheusMeterRegistry, + mergeDetector = mergeDetector + ) + konfigurerApiV2(authenticationConfig, kafkaKeysService) + configureRecordKeyApi(authenticationConfig, kafkaKeysService) + swaggerUI(path = "docs", swaggerFile = "openapi/documentation.yaml") + swaggerUI(path = "docs/record-key", swaggerFile = "openapi/record-key-api-spec.yaml") + } +} diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Serialization.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Serialization.kt new file mode 100644 index 00000000..e243f05a --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/Serialization.kt @@ -0,0 +1,12 @@ +package no.nav.paw.kafkakeygenerator.plugin + +import io.ktor.serialization.jackson.jackson +import io.ktor.server.application.Application +import io.ktor.server.application.install +import io.ktor.server.plugins.contentnegotiation.ContentNegotiation + +fun Application.configSerialization() { + install(ContentNegotiation) { + jackson() + } +} diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/custom/FlywayPlugin.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/custom/FlywayPlugin.kt new file mode 100644 index 00000000..f11d8e99 --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/custom/FlywayPlugin.kt @@ -0,0 +1,45 @@ +package no.nav.paw.kafkakeygenerator.plugin.custom + +import io.ktor.events.EventDefinition +import io.ktor.server.application.Application +import io.ktor.server.application.ApplicationPlugin +import io.ktor.server.application.ApplicationStarted +import io.ktor.server.application.createApplicationPlugin +import io.ktor.server.application.hooks.MonitoringEvent +import io.ktor.server.application.log +import io.ktor.util.KtorDsl +import org.flywaydb.core.Flyway +import javax.sql.DataSource + +val FlywayMigrationCompleted: EventDefinition = EventDefinition() + +@KtorDsl +class FlywayPluginConfig { + var dataSource: DataSource? 
= null + var baselineOnMigrate: Boolean = true + + companion object { + const val PLUGIN_NAME = "FlywayPlugin" + } +} + +val FlywayPlugin: ApplicationPlugin = + createApplicationPlugin(FlywayPluginConfig.PLUGIN_NAME, ::FlywayPluginConfig) { + application.log.info("Oppretter {}", FlywayPluginConfig.PLUGIN_NAME) + val dataSource = requireNotNull(pluginConfig.dataSource) { "DataSource er null" } + val baselineOnMigrate = pluginConfig.baselineOnMigrate + + on(MonitoringEvent(ApplicationStarted)) { application -> + application.log.info("Running database migration") + dataSource.flywayMigrate(baselineOnMigrate) + application.environment.monitor.raise(FlywayMigrationCompleted, application) + } + } + +fun DataSource.flywayMigrate(baselineOnMigrate: Boolean = true) { + Flyway.configure() + .dataSource(this) + .baselineOnMigrate(baselineOnMigrate) + .load() + .migrate() +} diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/custom/KafkaConsumerPlugin.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/custom/KafkaConsumerPlugin.kt new file mode 100644 index 00000000..34f90e8a --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/plugin/custom/KafkaConsumerPlugin.kt @@ -0,0 +1,102 @@ +package no.nav.paw.kafkakeygenerator.plugin.custom + +import io.ktor.events.EventDefinition +import io.ktor.server.application.Application +import io.ktor.server.application.ApplicationPlugin +import io.ktor.server.application.ApplicationStarted +import io.ktor.server.application.ApplicationStopping +import io.ktor.server.application.createApplicationPlugin +import io.ktor.server.application.hooks.MonitoringEvent +import io.ktor.server.application.log +import io.ktor.util.KtorDsl +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.Job +import kotlinx.coroutines.launch +import no.nav.paw.kafkakeygenerator.listener.NoopConsumerRebalanceListener +import no.nav.paw.kafkakeygenerator.utils.buildApplicationLogger +import org.apache.kafka.clients.consumer.ConsumerRebalanceListener +import org.apache.kafka.clients.consumer.ConsumerRecords +import org.apache.kafka.clients.consumer.KafkaConsumer +import java.time.Duration +import java.util.concurrent.atomic.AtomicBoolean + +private val logger = buildApplicationLogger +val KafkaConsumerReady: EventDefinition = EventDefinition() + +@KtorDsl +class KafkaConsumerPluginConfig { + var consumeFunction: ((ConsumerRecords) -> Unit)? = null + var successFunction: ((ConsumerRecords) -> Unit)? = null + var errorFunction: ((throwable: Throwable) -> Unit)? = null + var kafkaConsumer: KafkaConsumer? = null + var kafkaTopics: Collection? = null + var pollTimeout: Duration? = null + var closeTimeout: Duration? = null + var rebalanceListener: ConsumerRebalanceListener? = null + val shutdownFlag = AtomicBoolean(false) + + companion object { + const val PLUGIN_NAME = "KafkaConsumerPlugin" + } +} + +private fun KafkaConsumer.defaultSuccessFunction(records: ConsumerRecords) { + if (!records.isEmpty) { + logger.debug("Kafka Consumer success. 
{} records processed", records.count()) + this.commitSync() + } +} + +private fun defaultErrorFunction(throwable: Throwable) { + logger.error("Kafka Consumer failed", throwable) + throw throwable +} + +fun kafkaConsumerPlugin(): ApplicationPlugin> = + createApplicationPlugin(KafkaConsumerPluginConfig.PLUGIN_NAME, ::KafkaConsumerPluginConfig) { + application.log.info("Oppretter {}", KafkaConsumerPluginConfig.PLUGIN_NAME) + val kafkaTopics = requireNotNull(pluginConfig.kafkaTopics) { "KafkaTopics er null" } + val kafkaConsumer = requireNotNull(pluginConfig.kafkaConsumer) { "KafkaConsumer er null" } + val consumeFunction = requireNotNull(pluginConfig.consumeFunction) { "ConsumeFunction er null" } + val successFunction = pluginConfig.successFunction ?: kafkaConsumer::defaultSuccessFunction + val errorFunction = pluginConfig.errorFunction ?: ::defaultErrorFunction + val pollTimeout = pluginConfig.pollTimeout ?: Duration.ofMillis(100) + val closeTimeout = pluginConfig.closeTimeout ?: Duration.ofSeconds(1) + val rebalanceListener = pluginConfig.rebalanceListener ?: NoopConsumerRebalanceListener() + val shutdownFlag = pluginConfig.shutdownFlag + var consumeJob: Job? = null + + on(MonitoringEvent(ApplicationStarted)) { application -> + logger.info("Kafka Consumer klargjøres") + kafkaConsumer.subscribe(kafkaTopics, rebalanceListener) + application.environment.monitor.raise(KafkaConsumerReady, application) + } + + on(MonitoringEvent(ApplicationStopping)) { _ -> + logger.info("Kafka Consumer stopper") + kafkaConsumer.unsubscribe() + kafkaConsumer.close(closeTimeout) + shutdownFlag.set(true) + consumeJob?.cancel() + } + + on(MonitoringEvent(KafkaConsumerReady)) { application -> + consumeJob = application.launch(Dispatchers.IO) { + logger.info("Kafka Consumer starter") + while (!shutdownFlag.get()) { + try { + val records = kafkaConsumer.poll(pollTimeout) + consumeFunction(records) + successFunction(records) + } catch (throwable: Throwable) { + kafkaConsumer.unsubscribe() + kafkaConsumer.close(closeTimeout) + shutdownFlag.set(true) + errorFunction(throwable) + } + } + logger.info("Kafka Consumer avsluttet") + consumeJob?.cancel() + } + } + } diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/repository/IdentitetRepository.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/repository/IdentitetRepository.kt new file mode 100644 index 00000000..dfb24ce0 --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/repository/IdentitetRepository.kt @@ -0,0 +1,44 @@ +package no.nav.paw.kafkakeygenerator.repository + +import no.nav.paw.kafkakeygenerator.database.IdentitetTabell +import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId +import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer +import org.jetbrains.exposed.sql.Database +import org.jetbrains.exposed.sql.insert +import org.jetbrains.exposed.sql.selectAll +import org.jetbrains.exposed.sql.transactions.transaction +import org.jetbrains.exposed.sql.update + +class IdentitetRepository( + private val database: Database +) { + fun find(identitetsnummer: Identitetsnummer): Pair? 
= transaction(database) { + IdentitetTabell.selectAll() + .where { IdentitetTabell.identitetsnummer eq identitetsnummer.value } + .singleOrNull() + ?.let { + Identitetsnummer(it[IdentitetTabell.identitetsnummer]) to ArbeidssoekerId(it[IdentitetTabell.kafkaKey]) + } + } + + fun insert( + ident: Identitetsnummer, + arbeidssoekerId: ArbeidssoekerId + ): Int = transaction(database) { + IdentitetTabell.insert { + it[identitetsnummer] = ident.value + it[kafkaKey] = arbeidssoekerId.value + }.insertedCount + } + + fun update( + identitetsnummer: Identitetsnummer, + tilArbeidssoekerId: ArbeidssoekerId + ): Int = transaction(database) { + IdentitetTabell.update(where = { + (IdentitetTabell.identitetsnummer eq identitetsnummer.value) + }) { + it[kafkaKey] = tilArbeidssoekerId.value + } + } +} \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/repository/KafkaKeysAuditRepository.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/repository/KafkaKeysAuditRepository.kt new file mode 100644 index 00000000..c0d51aa5 --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/repository/KafkaKeysAuditRepository.kt @@ -0,0 +1,55 @@ +package no.nav.paw.kafkakeygenerator.repository + +import no.nav.paw.kafkakeygenerator.database.KafkaKeysAuditTable +import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId +import no.nav.paw.kafkakeygenerator.vo.Audit +import no.nav.paw.kafkakeygenerator.vo.IdentitetStatus +import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer +import org.jetbrains.exposed.sql.Database +import org.jetbrains.exposed.sql.SqlExpressionBuilder.eq +import org.jetbrains.exposed.sql.insert +import org.jetbrains.exposed.sql.selectAll +import org.jetbrains.exposed.sql.transactions.transaction + +class KafkaKeysAuditRepository( + private val database: Database +) { + + fun findByIdentitetsnummer(identitetsnummer: Identitetsnummer): List = transaction(database) { + KafkaKeysAuditTable.selectAll() + .where(KafkaKeysAuditTable.identitetsnummer eq identitetsnummer.value) + .map { + Audit( + identitetsnummer = Identitetsnummer(it[KafkaKeysAuditTable.identitetsnummer]), + tidligereArbeidssoekerId = ArbeidssoekerId(it[KafkaKeysAuditTable.tidligereKafkaKey]), + identitetStatus = it[KafkaKeysAuditTable.status], + detaljer = it[KafkaKeysAuditTable.detaljer], + tidspunkt = it[KafkaKeysAuditTable.tidspunkt] + ) + } + } + + fun findByStatus(status: IdentitetStatus): List = transaction(database) { + KafkaKeysAuditTable.selectAll() + .where(KafkaKeysAuditTable.status eq status) + .map { + Audit( + identitetsnummer = Identitetsnummer(it[KafkaKeysAuditTable.identitetsnummer]), + tidligereArbeidssoekerId = ArbeidssoekerId(it[KafkaKeysAuditTable.tidligereKafkaKey]), + identitetStatus = it[KafkaKeysAuditTable.status], + detaljer = it[KafkaKeysAuditTable.detaljer], + tidspunkt = it[KafkaKeysAuditTable.tidspunkt] + ) + } + } + + fun insert(audit: Audit): Int = transaction(database) { + KafkaKeysAuditTable.insert { + it[identitetsnummer] = audit.identitetsnummer.value + it[tidligereKafkaKey] = audit.tidligereArbeidssoekerId.value + it[status] = audit.identitetStatus + it[detaljer] = audit.detaljer + it[tidspunkt] = audit.tidspunkt + }.insertedCount + } +} \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/KafkaKeys.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/repository/KafkaKeysRepository.kt similarity index 79% rename from 
apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/KafkaKeys.kt rename to apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/repository/KafkaKeysRepository.kt index 12d896dc..0fd2ba8d 100644 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/KafkaKeys.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/repository/KafkaKeysRepository.kt @@ -1,13 +1,33 @@ -package no.nav.paw.kafkakeygenerator +package no.nav.paw.kafkakeygenerator.repository import no.nav.paw.kafkakeygenerator.database.IdentitetTabell import no.nav.paw.kafkakeygenerator.database.KafkaKeysTabell import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId +import no.nav.paw.kafkakeygenerator.vo.Either +import no.nav.paw.kafkakeygenerator.vo.Failure +import no.nav.paw.kafkakeygenerator.vo.FailureCode import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer -import org.jetbrains.exposed.sql.* +import no.nav.paw.kafkakeygenerator.vo.attempt +import no.nav.paw.kafkakeygenerator.vo.flatMap +import no.nav.paw.kafkakeygenerator.vo.left +import no.nav.paw.kafkakeygenerator.vo.mapToFailure +import no.nav.paw.kafkakeygenerator.vo.right +import org.jetbrains.exposed.sql.Database +import org.jetbrains.exposed.sql.SortOrder +import org.jetbrains.exposed.sql.and +import org.jetbrains.exposed.sql.insert +import org.jetbrains.exposed.sql.insertIgnore +import org.jetbrains.exposed.sql.selectAll import org.jetbrains.exposed.sql.transactions.transaction -class KafkaKeys(private val database: Database) { +class KafkaKeysRepository(private val database: Database) { + + fun find(arbeidssoekerId: ArbeidssoekerId): ArbeidssoekerId? = + transaction(database) { + KafkaKeysTabell.selectAll() + .where { KafkaKeysTabell.id eq arbeidssoekerId.value } + .singleOrNull()?.let { ArbeidssoekerId(it[KafkaKeysTabell.id]) } + } fun hentSisteArbeidssoekerId(): Either = attempt { @@ -99,12 +119,12 @@ class KafkaKeys(private val database: Database) { fun opprett(identitet: Identitetsnummer): Either = attempt { transaction(database) { - val nøkkel = KafkaKeysTabell.insert { }[KafkaKeysTabell.id] + val key = KafkaKeysTabell.insert { }[KafkaKeysTabell.id] val opprettet = IdentitetTabell.insertIgnore { it[identitetsnummer] = identitet.value - it[kafkaKey] = nøkkel + it[kafkaKey] = key }.insertedCount == 1 - if (opprettet) nøkkel else null + if (opprettet) key else null } }.mapToFailure { exception -> Failure("database", FailureCode.INTERNAL_TECHINCAL_ERROR, exception) diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaConsumerService.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaConsumerService.kt new file mode 100644 index 00000000..dbed8064 --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaConsumerService.kt @@ -0,0 +1,206 @@ +package no.nav.paw.kafkakeygenerator.service + +import io.micrometer.core.instrument.MeterRegistry +import io.opentelemetry.api.trace.Span +import io.opentelemetry.api.trace.StatusCode +import io.opentelemetry.instrumentation.annotations.WithSpan +import no.nav.paw.arbeidssokerregisteret.intern.v1.Hendelse +import no.nav.paw.arbeidssokerregisteret.intern.v1.IdentitetsnummerSammenslaatt +import no.nav.paw.health.model.HealthStatus +import no.nav.paw.health.model.LivenessHealthIndicator +import no.nav.paw.health.model.ReadinessHealthIndicator +import no.nav.paw.health.repository.HealthIndicatorRepository +import 
no.nav.paw.kafkakeygenerator.repository.IdentitetRepository +import no.nav.paw.kafkakeygenerator.repository.KafkaKeysAuditRepository +import no.nav.paw.kafkakeygenerator.repository.KafkaKeysRepository +import no.nav.paw.kafkakeygenerator.utils.buildErrorLogger +import no.nav.paw.kafkakeygenerator.utils.buildLogger +import no.nav.paw.kafkakeygenerator.utils.countKafkaFailed +import no.nav.paw.kafkakeygenerator.utils.countKafkaIgnored +import no.nav.paw.kafkakeygenerator.utils.countKafkaInserted +import no.nav.paw.kafkakeygenerator.utils.countKafkaProcessed +import no.nav.paw.kafkakeygenerator.utils.countKafkaReceived +import no.nav.paw.kafkakeygenerator.utils.countKafkaUpdated +import no.nav.paw.kafkakeygenerator.utils.countKafkaVerified +import no.nav.paw.kafkakeygenerator.utils.kafkaConflictGauge +import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId +import no.nav.paw.kafkakeygenerator.vo.Audit +import no.nav.paw.kafkakeygenerator.vo.IdentitetStatus +import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer +import org.apache.kafka.clients.consumer.ConsumerRecords +import org.jetbrains.exposed.sql.Database +import org.jetbrains.exposed.sql.transactions.transaction + +class KafkaConsumerService( + private val database: Database, + private val healthIndicatorRepository: HealthIndicatorRepository, + private val meterRegistry: MeterRegistry, + private val identitetRepository: IdentitetRepository, + private val kafkaKeysRepository: KafkaKeysRepository, + private val kafkaKeysAuditRepository: KafkaKeysAuditRepository, +) { + private val logger = buildLogger + private val errorLogger = buildErrorLogger + private val livenessIndicator = healthIndicatorRepository + .addLivenessIndicator(LivenessHealthIndicator(HealthStatus.HEALTHY)) + private val readinessIndicator = healthIndicatorRepository + .addReadinessIndicator(ReadinessHealthIndicator(HealthStatus.HEALTHY)) + + @WithSpan + fun handleRecords( + records: ConsumerRecords + ) { + records + .onEach { record -> + logger.debug( + "Mottok melding på topic: {}, partition: {}, offset {}", + record.topic(), + record.partition(), + record.offset() + ) + } + .map { it.value() } + .onEach { event -> + meterRegistry.countKafkaReceived() + if (event is IdentitetsnummerSammenslaatt) { + logger.debug("Prosesserer hendelse av type {}", event.hendelseType) + meterRegistry.countKafkaProcessed() + } else { + logger.debug("Ignorerer hendelse av type {}", event.hendelseType) + meterRegistry.countKafkaIgnored() + } + } + .filterIsInstance() + .forEach { event -> + logger.info("Mottok hendelse om sammenslåing av Identitetsnummer") + val identitetsnummer = event.flyttedeIdentitetsnumre + .map { Identitetsnummer(it) } + Identitetsnummer(event.identitetsnummer) + val fraArbeidssoekerId = ArbeidssoekerId(event.id) + val tilArbeidssoekerId = ArbeidssoekerId(event.flyttetTilArbeidssoekerId) + updateIdentiteter(HashSet(identitetsnummer), fraArbeidssoekerId, tilArbeidssoekerId) + } + } + + private fun updateIdentiteter( + identitetsnummerSet: HashSet, + fraArbeidssoekerId: ArbeidssoekerId, + tilArbeidssoekerId: ArbeidssoekerId + ) { + kafkaKeysRepository.find(fraArbeidssoekerId).let { arbeidssoekerId -> + if (arbeidssoekerId == null) { + meterRegistry.countKafkaFailed() + throw IllegalStateException("ArbeidssøkerId ikke funnet") + } + } + + kafkaKeysRepository.find(tilArbeidssoekerId).let { arbeidssoekerId -> + if (arbeidssoekerId == null) { + meterRegistry.countKafkaFailed() + throw IllegalStateException("ArbeidssøkerId ikke funnet") + } + } + + transaction(database) { + 
identitetsnummerSet.forEach { identitetsnummer -> + val kafkaKey = identitetRepository.find(identitetsnummer) + if (kafkaKey != null) { + updateIdentitet(identitetsnummer, fraArbeidssoekerId, tilArbeidssoekerId, kafkaKey.second) + } else { + insertIdentitet(identitetsnummer, tilArbeidssoekerId) + } + } + } + } + + @WithSpan + private fun updateIdentitet( + identitetsnummer: Identitetsnummer, + fraArbeidssoekerId: ArbeidssoekerId, + tilArbeidssoekerId: ArbeidssoekerId, + eksisterendeArbeidssoekerId: ArbeidssoekerId + ) { + if (eksisterendeArbeidssoekerId == tilArbeidssoekerId) { + logger.info("Identitetsnummer er allerede linket til korrekt ArbeidsøkerId") + meterRegistry.countKafkaVerified() + val audit = Audit( + identitetsnummer = identitetsnummer, + tidligereArbeidssoekerId = fraArbeidssoekerId, + identitetStatus = IdentitetStatus.VERIFISERT, + detaljer = "Ingen endringer" + ) + kafkaKeysAuditRepository.insert(audit) + } else if (eksisterendeArbeidssoekerId == fraArbeidssoekerId) { + logger.info("Identitetsnummer oppdateres med annen ArbeidsøkerId") + val count = identitetRepository.update(identitetsnummer, tilArbeidssoekerId) + if (count != 0) { + meterRegistry.countKafkaUpdated() + val audit = Audit( + identitetsnummer = identitetsnummer, + tidligereArbeidssoekerId = eksisterendeArbeidssoekerId, + identitetStatus = IdentitetStatus.OPPDATERT, + detaljer = "Bytte av arbeidsøkerId fra ${eksisterendeArbeidssoekerId.value} til ${tilArbeidssoekerId.value}" + ) + kafkaKeysAuditRepository.insert(audit) + } else { + logger.warn("Oppdatering førte ikke til noen endringer i databasen") + meterRegistry.countKafkaFailed() + val audit = Audit( + identitetsnummer = identitetsnummer, + tidligereArbeidssoekerId = eksisterendeArbeidssoekerId, + identitetStatus = IdentitetStatus.IKKE_OPPDATERT, + detaljer = "Kunne ikke bytte arbeidsøkerId fra ${eksisterendeArbeidssoekerId.value} til ${tilArbeidssoekerId.value}" + ) + kafkaKeysAuditRepository.insert(audit) + } + } else { + logger.error("Eksisterende ArbeidssøkerId stemmer ikke med hendelse") + meterRegistry.countKafkaFailed() + val audit = Audit( + identitetsnummer = identitetsnummer, + tidligereArbeidssoekerId = fraArbeidssoekerId, + identitetStatus = IdentitetStatus.KONFLIKT, + detaljer = "Eksisterende arbeidsøkerId ${eksisterendeArbeidssoekerId.value} stemmer ikke med arbeidsøkerId fra hendelse ${fraArbeidssoekerId.value}" + ) + kafkaKeysAuditRepository.insert(audit) + val conflicts = kafkaKeysAuditRepository.findByStatus(IdentitetStatus.KONFLIKT) + meterRegistry.kafkaConflictGauge(conflicts.size) + } + } + + @WithSpan + private fun insertIdentitet( + identitetsnummer: Identitetsnummer, + tilArbeidssoekerId: ArbeidssoekerId + ) { + logger.info("Identitetsnummer opprettes med eksisterende ArbeidsøkerId") + val count = identitetRepository.insert(identitetsnummer, tilArbeidssoekerId) + if (count != 0) { + meterRegistry.countKafkaInserted() + val audit = Audit( + identitetsnummer = identitetsnummer, + tidligereArbeidssoekerId = tilArbeidssoekerId, + identitetStatus = IdentitetStatus.OPPRETTET, + detaljer = "Opprettet ident for arbeidsøkerId ${tilArbeidssoekerId.value}" + ) + kafkaKeysAuditRepository.insert(audit) + } else { + logger.warn("Opprettelse førte ikke til noen endringer i databasen") + meterRegistry.countKafkaFailed() + val audit = Audit( + identitetsnummer = identitetsnummer, + tidligereArbeidssoekerId = tilArbeidssoekerId, + identitetStatus = IdentitetStatus.IKKE_OPPRETTET, + detaljer = "Kunne ikke opprette ident for arbeidsøkerId 
${tilArbeidssoekerId.value}" + ) + kafkaKeysAuditRepository.insert(audit) + } + } + + @WithSpan + fun handleException(throwable: Throwable) { + errorLogger.error("Kafka Consumer avslutter etter feil", throwable) + Span.current().setStatus(StatusCode.ERROR) + // livenessIndicator.setUnhealthy() TODO: Disabler for å unngå å ta ned appen + // readinessIndicator.setUnhealthy() + } +} \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Applikasjon.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaKeysService.kt similarity index 62% rename from apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Applikasjon.kt rename to apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaKeysService.kt index 9a740906..1397ef71 100644 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Applikasjon.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaKeysService.kt @@ -1,21 +1,45 @@ -package no.nav.paw.kafkakeygenerator +package no.nav.paw.kafkakeygenerator.service +import io.micrometer.core.instrument.MeterRegistry import io.opentelemetry.instrumentation.annotations.WithSpan -import no.nav.paw.kafkakeygenerator.FailureCode.CONFLICT -import no.nav.paw.kafkakeygenerator.FailureCode.DB_NOT_FOUND -import no.nav.paw.kafkakeygenerator.api.v2.* +import no.nav.paw.kafkakeygenerator.api.v2.Alias +import no.nav.paw.kafkakeygenerator.api.v2.InfoResponse +import no.nav.paw.kafkakeygenerator.api.v2.LokaleAlias +import no.nav.paw.kafkakeygenerator.api.v2.publicTopicKeyFunction import no.nav.paw.kafkakeygenerator.mergedetector.findMerge import no.nav.paw.kafkakeygenerator.mergedetector.hentLagretData import no.nav.paw.kafkakeygenerator.mergedetector.vo.MergeDetected -import no.nav.paw.kafkakeygenerator.pdl.PdlIdentitesTjeneste -import no.nav.paw.kafkakeygenerator.vo.* +import no.nav.paw.kafkakeygenerator.repository.KafkaKeysRepository +import no.nav.paw.kafkakeygenerator.utils.buildLogger +import no.nav.paw.kafkakeygenerator.utils.countRestApiFailed +import no.nav.paw.kafkakeygenerator.utils.countRestApiFetched +import no.nav.paw.kafkakeygenerator.utils.countRestApiInserted +import no.nav.paw.kafkakeygenerator.utils.countRestApiReceived +import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId +import no.nav.paw.kafkakeygenerator.vo.CallId +import no.nav.paw.kafkakeygenerator.vo.Either +import no.nav.paw.kafkakeygenerator.vo.Failure +import no.nav.paw.kafkakeygenerator.vo.FailureCode.CONFLICT +import no.nav.paw.kafkakeygenerator.vo.FailureCode.DB_NOT_FOUND +import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer +import no.nav.paw.kafkakeygenerator.vo.Info +import no.nav.paw.kafkakeygenerator.vo.LokalIdData +import no.nav.paw.kafkakeygenerator.vo.PdlData +import no.nav.paw.kafkakeygenerator.vo.PdlId +import no.nav.paw.kafkakeygenerator.vo.flatMap +import no.nav.paw.kafkakeygenerator.vo.left +import no.nav.paw.kafkakeygenerator.vo.recover +import no.nav.paw.kafkakeygenerator.vo.right +import no.nav.paw.kafkakeygenerator.vo.suspendingRecover import org.apache.kafka.clients.producer.internals.BuiltInPartitioner.partitionForKey import org.apache.kafka.common.serialization.Serdes -class Applikasjon( - private val kafkaKeys: KafkaKeys, - private val identitetsTjeneste: PdlIdentitesTjeneste +class KafkaKeysService( + private val meterRegistry: MeterRegistry, + private val kafkaKeysRepository: KafkaKeysRepository, + private val 
pdlService: PdlService ) { + private val logger = buildLogger private val keySerializer = Serdes.Long().serializer() @WithSpan @@ -23,7 +47,7 @@ class Applikasjon( antallPartisjoner: Int, identitet: Identitetsnummer ): Either { - return kafkaKeys.hent(identitet) + return kafkaKeysRepository.hent(identitet) .map { arbeidssoekerId -> val recordKey = publicTopicKeyFunction(arbeidssoekerId) Alias( @@ -33,7 +57,7 @@ class Applikasjon( partition = partitionForKey(keySerializer.serialize("", recordKey.value), antallPartisjoner) ) }.flatMap { alias -> - kafkaKeys.hent(ArbeidssoekerId(alias.arbeidsoekerId)) + kafkaKeysRepository.hent(ArbeidssoekerId(alias.arbeidsoekerId)) .map { identiteter -> identiteter.map { identitetsnummer -> Alias( @@ -57,7 +81,7 @@ class Applikasjon( return hentInfo(callId, identitet) .flatMap { info -> hentLagretData( - hentArbeidssoekerId = kafkaKeys::hent, + hentArbeidssoekerId = kafkaKeysRepository::hent, info = info ).map { info to it } } @@ -72,12 +96,12 @@ class Applikasjon( @WithSpan suspend fun hentInfo(callId: CallId, identitet: Identitetsnummer): Either { - val pdlIdInfo = identitetsTjeneste.hentIdentInformasjon( + val pdlIdInfo = pdlService.hentIdentInformasjon( callId = callId, identitet = identitet, histrorikk = true ) - return kafkaKeys.hent(identitet) + return kafkaKeysRepository.hent(identitet) .map { arbeidssoekerId -> LokalIdData( arbeidsoekerId = arbeidssoekerId.value, @@ -107,7 +131,9 @@ class Applikasjon( @WithSpan suspend fun hent(callId: CallId, identitet: Identitetsnummer): Either { - return kafkaKeys.hent(identitet) + logger.debug("Henter identer fra database") + meterRegistry.countRestApiFetched() + return kafkaKeysRepository.hent(identitet) .suspendingRecover(DB_NOT_FOUND) { sjekkMotAliaser(callId, identitet) } @@ -115,27 +141,32 @@ class Applikasjon( @WithSpan suspend fun hentEllerOpprett(callId: CallId, identitet: Identitetsnummer): Either { + meterRegistry.countRestApiReceived() return hent(callId, identitet) .suspendingRecover(DB_NOT_FOUND) { - kafkaKeys.opprett(identitet) + logger.debug("Oppretter identer i database") + meterRegistry.countRestApiInserted() + kafkaKeysRepository.opprett(identitet) }.recover(CONFLICT) { - kafkaKeys.hent(identitet) + meterRegistry.countRestApiFailed() + kafkaKeysRepository.hent(identitet) } } @WithSpan private suspend fun sjekkMotAliaser(callId: CallId, identitet: Identitetsnummer): Either { - return identitetsTjeneste.hentIdentiter( + logger.debug("Sjekker identer mot PDL") + return pdlService.hentIdentiter( callId = callId, identitet = identitet, histrorikk = true ) - .flatMap(kafkaKeys::hent) + .flatMap(kafkaKeysRepository::hent) .flatMap { ids -> ids.values .firstOrNull()?.let(::right) ?: left(Failure("database", DB_NOT_FOUND)) } - .onRight { key -> kafkaKeys.lagre(identitet, key) } + .onRight { key -> kafkaKeysRepository.lagre(identitet, key) } } } diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/pdl/PdlIdentitesTjeneste.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/service/PdlService.kt similarity index 82% rename from apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/pdl/PdlIdentitesTjeneste.kt rename to apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/service/PdlService.kt index 6b707485..4cf95355 100644 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/pdl/PdlIdentitesTjeneste.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/service/PdlService.kt 
@@ -1,8 +1,15 @@ -package no.nav.paw.kafkakeygenerator.pdl +package no.nav.paw.kafkakeygenerator.service -import no.nav.paw.kafkakeygenerator.* -import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer import no.nav.paw.kafkakeygenerator.vo.CallId +import no.nav.paw.kafkakeygenerator.vo.Either +import no.nav.paw.kafkakeygenerator.vo.Failure +import no.nav.paw.kafkakeygenerator.vo.FailureCode +import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer +import no.nav.paw.kafkakeygenerator.vo.flatMap +import no.nav.paw.kafkakeygenerator.vo.left +import no.nav.paw.kafkakeygenerator.vo.mapToFailure +import no.nav.paw.kafkakeygenerator.vo.right +import no.nav.paw.kafkakeygenerator.vo.suspendeableAttempt import no.nav.paw.pdl.PdlClient import no.nav.paw.pdl.PdlException import no.nav.paw.pdl.graphql.generated.enums.IdentGruppe @@ -10,10 +17,9 @@ import no.nav.paw.pdl.graphql.generated.hentidenter.IdentInformasjon import no.nav.paw.pdl.hentIdenter import no.nav.paw.pdl.hentIdenterBolk -private const val consumerId = "paw-arbeidssoekerregisteret" -private const val behandlingsnummer = "B452" - -class PdlIdentitesTjeneste(private val pdlKlient: PdlClient) { +class PdlService(private val pdlKlient: PdlClient) { + private val consumerId = "paw-arbeidssoekerregisteret" + private val behandlingsnummer = "B452" suspend fun hentIdenter( identiteter: List, diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/DataSourceFactory.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/DataSourceFactory.kt new file mode 100644 index 00000000..c13a5cc0 --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/DataSourceFactory.kt @@ -0,0 +1,13 @@ +package no.nav.paw.kafkakeygenerator.utils + +import com.zaxxer.hikari.HikariConfig +import com.zaxxer.hikari.HikariDataSource +import no.nav.paw.kafkakeygenerator.config.DatabaseConfig +import javax.sql.DataSource + +fun createDataSource(config: DatabaseConfig): DataSource = + HikariDataSource(HikariConfig().apply { + jdbcUrl = config.jdbcUrl + driverClassName = config.driverClassName + isAutoCommit = config.autoCommit + }) \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/IdMaskering.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/IdMaskering.kt similarity index 83% rename from apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/IdMaskering.kt rename to apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/IdMaskering.kt index bcf89179..38a2ee68 100644 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/IdMaskering.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/IdMaskering.kt @@ -1,4 +1,4 @@ -package no.nav.paw.kafkakeygenerator +package no.nav.paw.kafkakeygenerator.utils import java.util.regex.Pattern diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/Logging.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/Logging.kt new file mode 100644 index 00000000..96758565 --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/Logging.kt @@ -0,0 +1,9 @@ +package no.nav.paw.kafkakeygenerator.utils + +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +inline val T.buildLogger: Logger get() = LoggerFactory.getLogger(T::class.java) + +inline val buildApplicationLogger: Logger 
get() = LoggerFactory.getLogger("no.nav.paw.logger.application") +inline val buildErrorLogger: Logger get() = LoggerFactory.getLogger("no.nav.paw.logger.error") diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/Metrics.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/Metrics.kt new file mode 100644 index 00000000..9f06cadc --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/Metrics.kt @@ -0,0 +1,89 @@ +package no.nav.paw.kafkakeygenerator.utils + +import io.micrometer.core.instrument.MeterRegistry +import io.micrometer.core.instrument.Tag +import io.micrometer.core.instrument.Tags + +private const val METRIC_PREFIX = "paw_kafka_key_generator" + +fun MeterRegistry.genericCounter( + source: String, + target: String, + action: String +) { + counter( + "${METRIC_PREFIX}_antall_operasjoner", + Tags.of( + Tag.of("source", source), + Tag.of("target", target), + Tag.of("action", action) + ) + ).increment() +} + +fun MeterRegistry.countRestApiReceived() { + genericCounter("rest_api", "database", "received") +} + +fun MeterRegistry.countRestApiFetched() { + genericCounter("rest_api", "database", "fetched") +} + +fun MeterRegistry.countRestApiInserted() { + genericCounter("rest_api", "database", "inserted") +} + +fun MeterRegistry.countRestApiFailed() { + genericCounter("rest_api", "database", "failed") +} + +fun MeterRegistry.countKafkaReceived() { + genericCounter("kafka", "database", "received") +} + +fun MeterRegistry.countKafkaProcessed() { + genericCounter("kafka", "database", "processed") +} + +fun MeterRegistry.countKafkaIgnored() { + genericCounter("kafka", "database", "ignored") +} + +fun MeterRegistry.countKafkaInserted() { + genericCounter("kafka", "database", "inserted") +} + +fun MeterRegistry.countKafkaUpdated() { + genericCounter("kafka", "database", "updated") +} + +fun MeterRegistry.countKafkaVerified() { + genericCounter("kafka", "database", "verified") +} + +fun MeterRegistry.countKafkaFailed() { + genericCounter("kafka", "database", "failed") +} + +fun MeterRegistry.genericGauge( + number: T, + source: String, + target: String, + action: String +) { + gauge( + "${METRIC_PREFIX}_antall_hendelser", + Tags.of( + Tag.of("source", source), + Tag.of("target", target), + Tag.of("action", action) + ), + number + ) +} + +fun MeterRegistry.kafkaConflictGauge( + number: T +) { + genericGauge(number, "kafka", "database", "conflict") +} diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/PdlClientFactory.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/PdlClientFactory.kt new file mode 100644 index 00000000..24527ce2 --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/utils/PdlClientFactory.kt @@ -0,0 +1,19 @@ +package no.nav.paw.kafkakeygenerator.utils + +import no.nav.paw.client.config.AzureAdM2MConfig +import no.nav.paw.client.factory.createAzureAdM2MTokenClient +import no.nav.paw.client.factory.createHttpClient +import no.nav.paw.kafkakeygenerator.config.PdlClientConfig +import no.nav.paw.pdl.PdlClient + +fun createPdlClient( + pdlClientConfig: PdlClientConfig, + azureAdM2MConfig: AzureAdM2MConfig +): PdlClient { + val azureTokenClient = createAzureAdM2MTokenClient(azureProviderConfig = azureAdM2MConfig) + return PdlClient( + url = pdlClientConfig.url, + tema = pdlClientConfig.tema, + httpClient = createHttpClient() + ) { 
azureTokenClient.createMachineToMachineToken(pdlClientConfig.scope) } +} diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/vo/Audit.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/vo/Audit.kt new file mode 100644 index 00000000..b870c735 --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/vo/Audit.kt @@ -0,0 +1,11 @@ +package no.nav.paw.kafkakeygenerator.vo + +import java.time.Instant + +data class Audit( + val identitetsnummer: Identitetsnummer, + val tidligereArbeidssoekerId: ArbeidssoekerId, + val identitetStatus: IdentitetStatus, + val detaljer: String, + val tidspunkt: Instant = Instant.now() +) diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Either.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/vo/Either.kt similarity index 98% rename from apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Either.kt rename to apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/vo/Either.kt index 3b0d85bf..40aaf753 100644 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Either.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/vo/Either.kt @@ -1,4 +1,4 @@ -package no.nav.paw.kafkakeygenerator +package no.nav.paw.kafkakeygenerator.vo sealed interface Either { fun map(f: (R) -> R2): Either diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/vo/IdentitetStatus.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/vo/IdentitetStatus.kt new file mode 100644 index 00000000..768d32e4 --- /dev/null +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/vo/IdentitetStatus.kt @@ -0,0 +1,10 @@ +package no.nav.paw.kafkakeygenerator.vo + +enum class IdentitetStatus { + OPPRETTET, + OPPDATERT, + VERIFISERT, + KONFLIKT, + IKKE_OPPRETTET, + IKKE_OPPDATERT +} \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Result.kt b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/vo/Result.kt similarity index 97% rename from apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Result.kt rename to apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/vo/Result.kt index 64d905de..55bacc4c 100644 --- a/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/Result.kt +++ b/apps/kafka-key-generator/src/main/kotlin/no/nav/paw/kafkakeygenerator/vo/Result.kt @@ -1,4 +1,4 @@ -package no.nav.paw.kafkakeygenerator +package no.nav.paw.kafkakeygenerator.vo data class Failure( val system: String, diff --git a/apps/kafka-key-generator/src/main/resources/db/migration/V4__endre_kafka_keys_audit.sql b/apps/kafka-key-generator/src/main/resources/db/migration/V4__endre_kafka_keys_audit.sql new file mode 100644 index 00000000..eedc7370 --- /dev/null +++ b/apps/kafka-key-generator/src/main/resources/db/migration/V4__endre_kafka_keys_audit.sql @@ -0,0 +1,2 @@ +ALTER TABLE kafka_keys_audit +ADD tidligere_kafka_key BIGINT NOT NULL DEFAULT -1; \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/dev/azure_token_klient.toml b/apps/kafka-key-generator/src/main/resources/dev/azure_token_klient.toml deleted file mode 100644 index 3fff1b3e..00000000 --- a/apps/kafka-key-generator/src/main/resources/dev/azure_token_klient.toml +++ /dev/null @@ -1,3 +0,0 @@ -clientId = 
"${AZURE_APP_CLIENT_ID}" -privateJwk = "${AZURE_APP_JWK}" -tokenEndpointUrl = "${AZURE_OPENID_CONFIG_TOKEN_ENDPOINT}" \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/dev/pdl_klient.toml b/apps/kafka-key-generator/src/main/resources/dev/pdl_klient.toml deleted file mode 100644 index 66056c06..00000000 --- a/apps/kafka-key-generator/src/main/resources/dev/pdl_klient.toml +++ /dev/null @@ -1,5 +0,0 @@ -url = "https://pdl-api.dev-fss-pub.nais.io/graphql" -tema = "OPP" -pdlCluster = "dev-fss" -namespace = "pdl" -appName = "pdl-api" diff --git a/apps/kafka-key-generator/src/main/resources/dev/postgres.toml b/apps/kafka-key-generator/src/main/resources/dev/postgres.toml deleted file mode 100644 index 6e7d12f6..00000000 --- a/apps/kafka-key-generator/src/main/resources/dev/postgres.toml +++ /dev/null @@ -1,5 +0,0 @@ -host = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_HOST}" -port = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_PORT}" -brukernavn = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_USERNAME}" -passord = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_PASSWORD}" -databasenavn = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_DATABASE}" \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/local/ktor_server_autentisering.toml b/apps/kafka-key-generator/src/main/resources/local/authentication_config.toml similarity index 63% rename from apps/kafka-key-generator/src/main/resources/local/ktor_server_autentisering.toml rename to apps/kafka-key-generator/src/main/resources/local/authentication_config.toml index 1a6a4e87..06438070 100644 --- a/apps/kafka-key-generator/src/main/resources/local/ktor_server_autentisering.toml +++ b/apps/kafka-key-generator/src/main/resources/local/authentication_config.toml @@ -2,6 +2,6 @@ kafkaKeyApiAuthProvider = "azure" [[providers]] name = "azure" -discoveryUrl = "http://localhost:8081/default/.well-known/openid-configuration" +discoveryUrl = "http://localhost:8081/azure/.well-known/openid-configuration" acceptedAudience = ["paw-kafka-key-generator"] requiredClaims = ["NAVident"] diff --git a/apps/kafka-key-generator/src/main/resources/local/azure_m2m_config.toml b/apps/kafka-key-generator/src/main/resources/local/azure_m2m_config.toml new file mode 100644 index 00000000..1667971c --- /dev/null +++ b/apps/kafka-key-generator/src/main/resources/local/azure_m2m_config.toml @@ -0,0 +1,2 @@ +clientId = "paw-kafka-key-generator" +tokenEndpointUrl = "http://localhost:8081/azure/token" \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/local/azure_token_klient.toml b/apps/kafka-key-generator/src/main/resources/local/azure_token_klient.toml deleted file mode 100644 index 16627e93..00000000 --- a/apps/kafka-key-generator/src/main/resources/local/azure_token_klient.toml +++ /dev/null @@ -1,3 +0,0 @@ -clientId = "paw-kafka-keys" -privateJwk = "jwk" -tokenEndpointUrl = "http://localhost:8081/auth/realms/paw/protocol/openid-connect/token" \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/local/database_config.toml b/apps/kafka-key-generator/src/main/resources/local/database_config.toml new file mode 100644 index 00000000..2b727841 --- /dev/null +++ b/apps/kafka-key-generator/src/main/resources/local/database_config.toml @@ -0,0 +1,7 @@ +host = "localhost" +port = 5432 +database = "pawkafkakeys" +username = "admin" +password = "admin" +driverClassName = "org.postgresql.Driver" +autoCommit = false \ No 
newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/local/kafka_configuration.toml b/apps/kafka-key-generator/src/main/resources/local/kafka_configuration.toml new file mode 100644 index 00000000..3bf5dd12 --- /dev/null +++ b/apps/kafka-key-generator/src/main/resources/local/kafka_configuration.toml @@ -0,0 +1 @@ +brokers="localhost:9092" diff --git a/apps/kafka-key-generator/src/main/resources/local/kafka_topology_config.toml b/apps/kafka-key-generator/src/main/resources/local/kafka_topology_config.toml new file mode 100644 index 00000000..cbc475d5 --- /dev/null +++ b/apps/kafka-key-generator/src/main/resources/local/kafka_topology_config.toml @@ -0,0 +1,2 @@ +consumerGroupId = "paw-kafka-key-generator-v1" +hendelseloggTopic = "paw.arbeidssoker-hendelseslogg-v1" \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/local/pdl_client_config.toml b/apps/kafka-key-generator/src/main/resources/local/pdl_client_config.toml new file mode 100644 index 00000000..dc90617b --- /dev/null +++ b/apps/kafka-key-generator/src/main/resources/local/pdl_client_config.toml @@ -0,0 +1,3 @@ +url = "http://localhost:8090/pdl" +scope = "api://test.pdl.pdl-api/.default" +tema = "OPP" \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/local/pdl_klient.toml b/apps/kafka-key-generator/src/main/resources/local/pdl_klient.toml deleted file mode 100644 index f6dd3714..00000000 --- a/apps/kafka-key-generator/src/main/resources/local/pdl_klient.toml +++ /dev/null @@ -1,5 +0,0 @@ -url = "https://pdl-api.prod-fss-pub.nais.io" -tema = "OPP" -pdlCluster = "prod-fss" -namespace = "pdl" -appName = "pdl-api" \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/local/postgres.toml b/apps/kafka-key-generator/src/main/resources/local/postgres.toml deleted file mode 100644 index fddc77fe..00000000 --- a/apps/kafka-key-generator/src/main/resources/local/postgres.toml +++ /dev/null @@ -1,5 +0,0 @@ -host = "localhost" -port = 5432 -brukernavn = "admin" -passord = "admin" -databasenavn = "pawkafkakeys" \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/logback.xml b/apps/kafka-key-generator/src/main/resources/logback.xml index b9456ec7..fae3c147 100644 --- a/apps/kafka-key-generator/src/main/resources/logback.xml +++ b/apps/kafka-key-generator/src/main/resources/logback.xml @@ -14,17 +14,15 @@ + + + + - - - - - - diff --git a/apps/kafka-key-generator/src/main/resources/dev/ktor_server_autentisering.toml b/apps/kafka-key-generator/src/main/resources/nais/authentication_config.toml similarity index 100% rename from apps/kafka-key-generator/src/main/resources/dev/ktor_server_autentisering.toml rename to apps/kafka-key-generator/src/main/resources/nais/authentication_config.toml diff --git a/apps/kafka-key-generator/src/main/resources/nais/database_config.toml b/apps/kafka-key-generator/src/main/resources/nais/database_config.toml new file mode 100644 index 00000000..1d0aeefa --- /dev/null +++ b/apps/kafka-key-generator/src/main/resources/nais/database_config.toml @@ -0,0 +1,7 @@ +host = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_HOST}" +port = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_PORT}" +database = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_DATABASE}" +username = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_USERNAME}" +password = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_PASSWORD}" +driverClassName = "org.postgresql.Driver" 
+autoCommit = false \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/nais/kafka_topology_config.toml b/apps/kafka-key-generator/src/main/resources/nais/kafka_topology_config.toml new file mode 100644 index 00000000..68bdcfc2 --- /dev/null +++ b/apps/kafka-key-generator/src/main/resources/nais/kafka_topology_config.toml @@ -0,0 +1,2 @@ +consumerGroupId = "${KAFKA_CONSUMER_GROUP_ID}" +hendelseloggTopic = "${KAFKA_TOPIC_PAW_ARBEIDSSOKER_HENDELSESLOGG}" \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/nais/pdl_client_config.toml b/apps/kafka-key-generator/src/main/resources/nais/pdl_client_config.toml new file mode 100644 index 00000000..4f4caf13 --- /dev/null +++ b/apps/kafka-key-generator/src/main/resources/nais/pdl_client_config.toml @@ -0,0 +1,3 @@ +url = "${PDL_URL}" +scope = "${PDL_SCOPE}" +tema = "${PDL_TEMA}" \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/prod/azure_token_klient.toml b/apps/kafka-key-generator/src/main/resources/prod/azure_token_klient.toml deleted file mode 100644 index 3fff1b3e..00000000 --- a/apps/kafka-key-generator/src/main/resources/prod/azure_token_klient.toml +++ /dev/null @@ -1,3 +0,0 @@ -clientId = "${AZURE_APP_CLIENT_ID}" -privateJwk = "${AZURE_APP_JWK}" -tokenEndpointUrl = "${AZURE_OPENID_CONFIG_TOKEN_ENDPOINT}" \ No newline at end of file diff --git a/apps/kafka-key-generator/src/main/resources/prod/ktor_server_autentisering.toml b/apps/kafka-key-generator/src/main/resources/prod/ktor_server_autentisering.toml deleted file mode 100644 index f2823d30..00000000 --- a/apps/kafka-key-generator/src/main/resources/prod/ktor_server_autentisering.toml +++ /dev/null @@ -1,7 +0,0 @@ -kafkaKeyApiAuthProvider = "azure" - -[[providers]] -name = "azure" -discoveryUrl = "${AZURE_APP_WELL_KNOWN_URL}" -acceptedAudience = ["${AZURE_APP_CLIENT_ID}"] -requiredClaims = ["NAVident"] diff --git a/apps/kafka-key-generator/src/main/resources/prod/pdl_klient.toml b/apps/kafka-key-generator/src/main/resources/prod/pdl_klient.toml deleted file mode 100644 index 8be64927..00000000 --- a/apps/kafka-key-generator/src/main/resources/prod/pdl_klient.toml +++ /dev/null @@ -1,5 +0,0 @@ -url = "https://pdl-api.prod-fss-pub.nais.io/graphql" -tema = "OPP" -pdlCluster = "prod-fss" -namespace = "pdl" -appName = "pdl-api" diff --git a/apps/kafka-key-generator/src/main/resources/prod/postgres.toml b/apps/kafka-key-generator/src/main/resources/prod/postgres.toml deleted file mode 100644 index 6e7d12f6..00000000 --- a/apps/kafka-key-generator/src/main/resources/prod/postgres.toml +++ /dev/null @@ -1,5 +0,0 @@ -host = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_HOST}" -port = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_PORT}" -brukernavn = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_USERNAME}" -passord = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_PASSWORD}" -databasenavn = "${NAIS_DATABASE_PAW_KAFKA_KEY_GENERATOR_PAWKAFKAKEYS_DATABASE}" \ No newline at end of file diff --git a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/run_test_app.kt b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/TestApplication.kt similarity index 66% rename from apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/run_test_app.kt rename to apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/TestApplication.kt index ff50e244..0f03c7c1 100644 --- 
a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/run_test_app.kt +++ b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/TestApplication.kt @@ -2,8 +2,10 @@ package no.nav.paw.kafkakeygenerator import io.ktor.client.* import io.ktor.client.engine.mock.* -import no.nav.paw.kafkakeygenerator.config.Autentisering -import no.nav.paw.kafkakeygenerator.config.Autentiseringskonfigurasjon +import no.nav.paw.kafkakeygenerator.config.AuthenticationProviderConfig +import no.nav.paw.kafkakeygenerator.config.AuthenticationConfig +import no.nav.paw.kafkakeygenerator.test.genererResponse +import no.nav.paw.kafkakeygenerator.test.initTestDatabase import no.nav.paw.pdl.PdlClient fun main() { @@ -15,8 +17,8 @@ fun main() { genererResponse(it) }) ) { "fake token" } - startApplikasjon(Autentiseringskonfigurasjon( - providers = listOf(Autentisering( + startApplication(AuthenticationConfig( + providers = listOf(AuthenticationProviderConfig( name = "mock", discoveryUrl = "http://localhost:8081/.well-known/openid-configuration", acceptedAudience = listOf("mock"), diff --git a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/functions/GetRecordKeyKtTest.kt b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/functions/GetRecordKeyKtTest.kt index cc0298ce..2b86f741 100644 --- a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/functions/GetRecordKeyKtTest.kt +++ b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/api/recordkey/functions/GetRecordKeyKtTest.kt @@ -6,7 +6,6 @@ import io.kotest.matchers.shouldBe import io.kotest.matchers.types.shouldBeInstanceOf import io.ktor.http.HttpStatusCode.Companion.NotFound import io.ktor.http.HttpStatusCode.Companion.OK -import no.nav.paw.kafkakeygenerator.* import no.nav.paw.kafkakeygenerator.api.recordkey.FailureResponseV1 import no.nav.paw.kafkakeygenerator.api.recordkey.Feilkode import no.nav.paw.kafkakeygenerator.api.recordkey.RecordKeyLookupResponseV1 @@ -14,7 +13,12 @@ import no.nav.paw.kafkakeygenerator.api.recordkey.recordKeyLookupResponseV1 import no.nav.paw.kafkakeygenerator.api.v2.publicTopicKeyFunction import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId import no.nav.paw.kafkakeygenerator.vo.CallId +import no.nav.paw.kafkakeygenerator.vo.Either +import no.nav.paw.kafkakeygenerator.vo.Failure +import no.nav.paw.kafkakeygenerator.vo.FailureCode import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer +import no.nav.paw.kafkakeygenerator.vo.left +import no.nav.paw.kafkakeygenerator.vo.right import org.slf4j.LoggerFactory class GetRecordKeyTest : FreeSpec({ diff --git a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/PublicKeyFunctionTest.kt b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/api/v2/KeyCalculationsTest.kt similarity index 83% rename from apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/PublicKeyFunctionTest.kt rename to apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/api/v2/KeyCalculationsTest.kt index 6c5cfe29..7ba2f9a4 100644 --- a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/PublicKeyFunctionTest.kt +++ b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/api/v2/KeyCalculationsTest.kt @@ -1,18 +1,15 @@ -package no.nav.paw.kafkakeygenerator +package no.nav.paw.kafkakeygenerator.api.v2 import io.kotest.core.spec.style.FreeSpec import 
io.kotest.matchers.shouldBe -import no.nav.paw.kafkakeygenerator.api.v2.PUBLIC_KEY_MODULO_VALUE -import no.nav.paw.kafkakeygenerator.api.v2.publicTopicKeyFunction import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId -import no.nav.paw.kafkakeygenerator.vo.RecordKey /** * Enkel test som vil feile ved endring av publicTopicKeyFunction eller nooen av * verdiene den bruker. * Slike endringer krever replay av eventlog til nye topics. */ -class PublicKeyFunctionTest : FreeSpec({ +class KeyCalculationsTest : FreeSpec({ "publicTopicKeyFunction" - { "nøkkelen må aldri endres da dette krever replay av eventlog til nye topics" { val expectedModuloValue = 7_500 diff --git a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaConsumerServiceTest.kt b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaConsumerServiceTest.kt new file mode 100644 index 00000000..86e1a2ad --- /dev/null +++ b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaConsumerServiceTest.kt @@ -0,0 +1,206 @@ +package no.nav.paw.kafkakeygenerator.service + +import io.kotest.assertions.throwables.shouldThrow +import io.kotest.core.spec.style.FreeSpec +import io.kotest.matchers.collections.shouldHaveSize +import io.kotest.matchers.shouldBe +import io.micrometer.core.instrument.logging.LoggingMeterRegistry +import no.nav.paw.arbeidssokerregisteret.intern.v1.Hendelse +import no.nav.paw.health.repository.HealthIndicatorRepository +import no.nav.paw.kafkakeygenerator.plugin.custom.flywayMigrate +import no.nav.paw.kafkakeygenerator.repository.IdentitetRepository +import no.nav.paw.kafkakeygenerator.repository.KafkaKeysAuditRepository +import no.nav.paw.kafkakeygenerator.repository.KafkaKeysRepository +import no.nav.paw.kafkakeygenerator.test.TestData +import no.nav.paw.kafkakeygenerator.test.asConsumerRecords +import no.nav.paw.kafkakeygenerator.test.initTestDatabase +import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId +import no.nav.paw.kafkakeygenerator.vo.Failure +import no.nav.paw.kafkakeygenerator.vo.FailureCode +import no.nav.paw.kafkakeygenerator.vo.IdentitetStatus +import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer +import org.jetbrains.exposed.sql.Database +import javax.sql.DataSource + +class KafkaConsumerServiceTest : FreeSpec({ + + lateinit var dataSource: DataSource + lateinit var kafkaKeysRepository: KafkaKeysRepository + lateinit var kafkaKeysAuditRepository: KafkaKeysAuditRepository + lateinit var kafkaConsumerService: KafkaConsumerService + + beforeSpec { + dataSource = initTestDatabase() + dataSource.flywayMigrate() + val database = Database.connect(dataSource) + val healthIndicatorRepository = HealthIndicatorRepository() + kafkaKeysRepository = KafkaKeysRepository(database) + kafkaKeysAuditRepository = KafkaKeysAuditRepository(database) + kafkaConsumerService = KafkaConsumerService( + database = database, + meterRegistry = LoggingMeterRegistry(), + healthIndicatorRepository = healthIndicatorRepository, + identitetRepository = IdentitetRepository(database), + kafkaKeysRepository = kafkaKeysRepository, + kafkaKeysAuditRepository = kafkaKeysAuditRepository + ) + } + + afterSpec { + dataSource.connection.close() + } + + "Skal ignorere hendelse av irrelevant type" { + val identitetsnummer1 = Identitetsnummer("01017012345") + val identitetsnummer2 = Identitetsnummer("02017012345") + val arbeidssoekerId1 = ArbeidssoekerId(1) + val arbeidssoekerId2 = ArbeidssoekerId(2) + val hendelser: List = listOf( + 
TestData.getPeriodeStartet(identitetsnummer1, arbeidssoekerId1), + TestData.getPeriodeAvsluttet(identitetsnummer1, arbeidssoekerId1), + TestData.getPeriodeStartAvvist(identitetsnummer1, arbeidssoekerId1), + TestData.getPeriodeAvsluttetAvvist(identitetsnummer1, arbeidssoekerId1), + TestData.getArbeidssoekerIdFlettetInn( + listOf(identitetsnummer1, identitetsnummer2), + arbeidssoekerId1, + arbeidssoekerId2 + ) + ) + + kafkaConsumerService.handleRecords(hendelser.asConsumerRecords()) + + val keyResult = kafkaKeysRepository.hent(identitetsnummer1) + val auditResult = kafkaKeysAuditRepository.findByIdentitetsnummer(identitetsnummer1) + + keyResult.onLeft { it shouldBe Failure("database", FailureCode.DB_NOT_FOUND) } + keyResult.onRight { it shouldBe null } + auditResult shouldHaveSize 0 + } + + "Skal ignorere hendelse for ukjent identitetsnummer" { + val identitetsnummer = Identitetsnummer("03017012345") + val fraArbeidssoekerId = ArbeidssoekerId(3) + val tilArbeidssoekerId = ArbeidssoekerId(4) + + val hendelser: List<Hendelse> = listOf( + TestData.getIdentitetsnummerSammenslaatt(listOf(identitetsnummer), fraArbeidssoekerId, tilArbeidssoekerId) + ) + + shouldThrow { + kafkaConsumerService.handleRecords(hendelser.asConsumerRecords()) + } + + val keyResult = kafkaKeysRepository.hent(identitetsnummer) + val auditResult = kafkaKeysAuditRepository.findByIdentitetsnummer(identitetsnummer) + + keyResult.onLeft { it shouldBe Failure("database", FailureCode.DB_NOT_FOUND) } + keyResult.onRight { it shouldBe null } + auditResult shouldHaveSize 0 + } + + "Skal håndtere at det er konflikt mellom arbeidssøkerId i hendelse og database" { + val identitetsnummer1 = Identitetsnummer("04017012345") + val identitetsnummer2 = Identitetsnummer("05017012345") + val identitetsnummer3 = Identitetsnummer("06017012345") + + val opprettResult1 = kafkaKeysRepository.opprett(identitetsnummer1) + opprettResult1.onLeft { it shouldBe null } + opprettResult1.onRight { fraArbeidssoekerId -> + val opprettResult2 = kafkaKeysRepository.opprett(identitetsnummer2) + opprettResult2.onLeft { it shouldBe null } + opprettResult2.onRight { tilArbeidssoekerId -> + val opprettResult3 = kafkaKeysRepository.opprett(identitetsnummer3) + opprettResult3.onLeft { it shouldBe null } + opprettResult3.onRight { eksisterendeArbeidssoekerId -> + val hendelser: List<Hendelse> = listOf( + TestData.getIdentitetsnummerSammenslaatt( + listOf(identitetsnummer2, identitetsnummer3), + fraArbeidssoekerId, + tilArbeidssoekerId + ) + ) + + kafkaConsumerService.handleRecords(hendelser.asConsumerRecords()) + + val keyResult1 = kafkaKeysRepository.hent(identitetsnummer1) + val keyResult2 = kafkaKeysRepository.hent(identitetsnummer2) + val keyResult3 = kafkaKeysRepository.hent(identitetsnummer3) + val auditResult1 = kafkaKeysAuditRepository.findByIdentitetsnummer(identitetsnummer1) + val auditResult2 = kafkaKeysAuditRepository.findByIdentitetsnummer(identitetsnummer2) + val auditResult3 = kafkaKeysAuditRepository.findByIdentitetsnummer(identitetsnummer3) + + keyResult1.onLeft { it shouldBe null } + keyResult2.onLeft { it shouldBe null } + keyResult3.onLeft { it shouldBe null } + keyResult1.onRight { it shouldBe fraArbeidssoekerId } + keyResult2.onRight { it shouldBe tilArbeidssoekerId } + keyResult3.onRight { it shouldBe eksisterendeArbeidssoekerId } + auditResult1 shouldHaveSize 0 + auditResult2 shouldHaveSize 1 + auditResult3 shouldHaveSize 1 + val audit2 = auditResult2.first() + val audit3 = auditResult3.first() + audit2.identitetsnummer shouldBe identitetsnummer2 +
audit2.identitetStatus shouldBe IdentitetStatus.VERIFISERT + audit2.tidligereArbeidssoekerId shouldBe fraArbeidssoekerId + audit3.identitetsnummer shouldBe identitetsnummer3 + audit3.identitetStatus shouldBe IdentitetStatus.KONFLIKT + audit3.tidligereArbeidssoekerId shouldBe fraArbeidssoekerId + } + } + } + } + + "Skal oppdatere arbeidssøkerId for identitetsnummer" { + val identitetsnummer1 = Identitetsnummer("07017012345") + val identitetsnummer2 = Identitetsnummer("08017012345") + val identitetsnummer3 = Identitetsnummer("09017012345") + + val opprettResult1 = kafkaKeysRepository.opprett(identitetsnummer1) + opprettResult1.onLeft { it shouldBe null } + opprettResult1.onRight { tilArbeidssoekerId -> + val opprettResult2 = kafkaKeysRepository.opprett(identitetsnummer2) + opprettResult2.onLeft { it shouldBe null } + opprettResult2.onRight { fraArbeidssoekerId -> + val hendelser: List<Hendelse> = listOf( + TestData.getIdentitetsnummerSammenslaatt( + listOf(identitetsnummer1, identitetsnummer2, identitetsnummer3), + fraArbeidssoekerId, + tilArbeidssoekerId + ) + ) + + kafkaConsumerService.handleRecords(hendelser.asConsumerRecords()) + + val keyResult1 = kafkaKeysRepository.hent(identitetsnummer1) + val keyResult2 = kafkaKeysRepository.hent(identitetsnummer2) + val keyResult3 = kafkaKeysRepository.hent(identitetsnummer3) + val auditResult1 = kafkaKeysAuditRepository.findByIdentitetsnummer(identitetsnummer1) + val auditResult2 = kafkaKeysAuditRepository.findByIdentitetsnummer(identitetsnummer2) + val auditResult3 = kafkaKeysAuditRepository.findByIdentitetsnummer(identitetsnummer3) + + keyResult1.onLeft { it shouldBe null } + keyResult2.onLeft { it shouldBe null } + keyResult3.onLeft { it shouldBe null } + keyResult1.onRight { it shouldBe tilArbeidssoekerId } + keyResult2.onRight { it shouldBe tilArbeidssoekerId } + keyResult3.onRight { it shouldBe tilArbeidssoekerId } + auditResult1 shouldHaveSize 1 + auditResult2 shouldHaveSize 1 + auditResult3 shouldHaveSize 1 + val audit1 = auditResult1.first() + val audit2 = auditResult2.first() + val audit3 = auditResult3.first() + audit1.identitetsnummer shouldBe identitetsnummer1 + audit1.identitetStatus shouldBe IdentitetStatus.VERIFISERT + audit1.tidligereArbeidssoekerId shouldBe fraArbeidssoekerId + audit2.identitetsnummer shouldBe identitetsnummer2 + audit2.identitetStatus shouldBe IdentitetStatus.OPPDATERT + audit2.tidligereArbeidssoekerId shouldBe fraArbeidssoekerId + audit3.identitetsnummer shouldBe identitetsnummer3 + audit3.identitetStatus shouldBe IdentitetStatus.OPPRETTET + audit3.tidligereArbeidssoekerId shouldBe tilArbeidssoekerId + } + } + } +}) \ No newline at end of file diff --git a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/ApplikasjonsTest.kt b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaKeysServiceTest.kt similarity index 53% rename from apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/ApplikasjonsTest.kt rename to apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaKeysServiceTest.kt index 1be9f422..68ec471d 100644 --- a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/ApplikasjonsTest.kt +++ b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/service/KafkaKeysServiceTest.kt @@ -1,25 +1,42 @@ -package no.nav.paw.kafkakeygenerator +package no.nav.paw.kafkakeygenerator.service import io.kotest.core.spec.style.StringSpec import io.kotest.matchers.collections.shouldContainExactlyInAnyOrder 
import io.kotest.matchers.shouldBe import io.kotest.matchers.shouldNotBe import io.kotest.matchers.types.shouldBeInstanceOf -import io.ktor.client.* -import io.ktor.client.engine.mock.* +import io.ktor.client.HttpClient +import io.ktor.client.engine.mock.MockEngine +import io.micrometer.core.instrument.logging.LoggingMeterRegistry import kotlinx.coroutines.runBlocking import no.nav.paw.kafkakeygenerator.api.v2.hentLokaleAlias -import no.nav.paw.kafkakeygenerator.pdl.PdlIdentitesTjeneste +import no.nav.paw.kafkakeygenerator.plugin.custom.flywayMigrate +import no.nav.paw.kafkakeygenerator.repository.KafkaKeysRepository +import no.nav.paw.kafkakeygenerator.test.genererResponse +import no.nav.paw.kafkakeygenerator.test.initTestDatabase +import no.nav.paw.kafkakeygenerator.test.person1_aktor_id +import no.nav.paw.kafkakeygenerator.test.person1_annen_ident +import no.nav.paw.kafkakeygenerator.test.person1_dnummer +import no.nav.paw.kafkakeygenerator.test.person1_fødselsnummer +import no.nav.paw.kafkakeygenerator.test.person2_aktor_id +import no.nav.paw.kafkakeygenerator.test.person2_fødselsnummer +import no.nav.paw.kafkakeygenerator.test.person3_fødselsnummer import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId import no.nav.paw.kafkakeygenerator.vo.CallId +import no.nav.paw.kafkakeygenerator.vo.Either +import no.nav.paw.kafkakeygenerator.vo.Failure +import no.nav.paw.kafkakeygenerator.vo.FailureCode import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer +import no.nav.paw.kafkakeygenerator.vo.Left +import no.nav.paw.kafkakeygenerator.vo.Right import no.nav.paw.pdl.PdlClient import org.jetbrains.exposed.sql.Database import org.junit.jupiter.api.fail import java.util.* -class ApplikasjonsTest : StringSpec({ +class KafkaKeysServiceTest : StringSpec({ val dataSource = initTestDatabase() + dataSource.flywayMigrate() val pdlKlient = PdlClient( url = "http://mock", tema = "tema", @@ -27,12 +44,14 @@ class ApplikasjonsTest : StringSpec({ genererResponse(it) }) ) { "fake token" } - val app = Applikasjon( - kafkaKeys = KafkaKeys(Database.connect(dataSource)), - identitetsTjeneste = PdlIdentitesTjeneste(pdlKlient) + val kafkaKeysService = KafkaKeysService( + meterRegistry = LoggingMeterRegistry(), + kafkaKeysRepository = KafkaKeysRepository(Database.connect(dataSource)), + pdlService = PdlService(pdlKlient) ) + fun hentEllerOpprett(identitetsnummer: String): Either<Failure, ArbeidssoekerId> = runBlocking { - app.hentEllerOpprett( + kafkaKeysService.hentEllerOpprett( callId = CallId(UUID.randomUUID().toString()), identitet = Identitetsnummer(identitetsnummer) ) @@ -49,23 +68,22 @@ class ApplikasjonsTest : StringSpec({ person1KafkaNøkler.filterIsInstance<Right<ArbeidssoekerId>>() .map { it.right } .distinct().size shouldBe 1 - app.hentLokaleAlias(2, listOf(person1_dnummer)) - .onLeft { fail { "Uventet feil: $it" } } - .onRight { res -> res.flatMap { it.koblinger }.map { it.identitetsnummer }.shouldContainExactlyInAnyOrder( - person1_dnummer, person1_fødselsnummer, person1_aktor_id, person1_annen_ident) } - - val lokaleAlias = app.hentLokaleAlias(2, Identitetsnummer(person1_dnummer)) - hentEllerOpprett(person3_fødselsnummer).shouldBeInstanceOf<Right<ArbeidssoekerId>>() - lokaleAlias - .onLeft { fail { "Uventet feil: $it" } } - .onRight { alias -> - alias.identitetsnummer shouldBe person1_dnummer - alias.koblinger.size shouldBe 4 - alias.koblinger.any { it.identitetsnummer == person1_fødselsnummer } shouldBe true - alias.koblinger.any { it.identitetsnummer == person1_dnummer } shouldBe true - alias.koblinger.any { it.identitetsnummer == person1_aktor_id } shouldBe true - 
alias.koblinger.any { it.identitetsnummer == person1_annen_ident } shouldBe true + kafkaKeysService.hentLokaleAlias(2, listOf(person1_dnummer)).onLeft { fail { "Uventet feil: $it" } } + .onRight { res -> + res.flatMap { it.koblinger }.map { it.identitetsnummer }.shouldContainExactlyInAnyOrder( + person1_dnummer, person1_fødselsnummer, person1_aktor_id, person1_annen_ident + ) } + val lokaleAlias = kafkaKeysService.hentLokaleAlias(2, Identitetsnummer(person1_dnummer)) + hentEllerOpprett(person3_fødselsnummer).shouldBeInstanceOf<Right<ArbeidssoekerId>>() + lokaleAlias.onLeft { fail { "Uventet feil: $it" } }.onRight { alias -> + alias.identitetsnummer shouldBe person1_dnummer + alias.koblinger.size shouldBe 4 + alias.koblinger.any { it.identitetsnummer == person1_fødselsnummer } shouldBe true + alias.koblinger.any { it.identitetsnummer == person1_dnummer } shouldBe true + alias.koblinger.any { it.identitetsnummer == person1_aktor_id } shouldBe true + alias.koblinger.any { it.identitetsnummer == person1_annen_ident } shouldBe true + } } "alle identer for person2 skal gi samme nøkkel" { val person2KafkaNøkler = listOf( @@ -94,4 +112,3 @@ class ApplikasjonsTest : StringSpec({ } } }) - diff --git a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/test/TestData.kt b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/test/TestData.kt new file mode 100644 index 00000000..8a2080c9 --- /dev/null +++ b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/test/TestData.kt @@ -0,0 +1,234 @@ +package no.nav.paw.kafkakeygenerator.test + +import io.ktor.client.engine.mock.MockRequestHandleScope +import io.ktor.client.engine.mock.respond +import io.ktor.client.request.HttpRequestData +import io.ktor.client.request.HttpResponseData +import io.ktor.http.ContentType +import io.ktor.http.HttpHeaders +import io.ktor.http.HttpStatusCode +import io.ktor.http.content.TextContent +import io.ktor.http.headersOf +import no.nav.paw.arbeidssokerregisteret.intern.v1.ArbeidssoekerIdFlettetInn +import no.nav.paw.arbeidssokerregisteret.intern.v1.Avsluttet +import no.nav.paw.arbeidssokerregisteret.intern.v1.Avvist +import no.nav.paw.arbeidssokerregisteret.intern.v1.AvvistStoppAvPeriode +import no.nav.paw.arbeidssokerregisteret.intern.v1.Hendelse +import no.nav.paw.arbeidssokerregisteret.intern.v1.IdentitetsnummerSammenslaatt +import no.nav.paw.arbeidssokerregisteret.intern.v1.Kilde +import no.nav.paw.arbeidssokerregisteret.intern.v1.Startet +import no.nav.paw.arbeidssokerregisteret.intern.v1.vo.Bruker +import no.nav.paw.arbeidssokerregisteret.intern.v1.vo.BrukerType +import no.nav.paw.arbeidssokerregisteret.intern.v1.vo.Metadata +import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId +import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.apache.kafka.clients.consumer.ConsumerRecords +import org.apache.kafka.common.TopicPartition +import java.time.Instant +import java.util.* +import kotlin.random.Random.Default.nextLong + +const val person1_fødselsnummer = "01017012346" +const val person1_aktor_id = "2649500819544" +const val person1_dnummer = "09127821913" +const val person1_annen_ident = "12129127821913" +const val person2_fødselsnummer = "01017012345" +const val person2_aktor_id = "1649500819544" +const val person3_fødselsnummer = "01017012344" + +fun hentSvar(ident: String) = + when (ident) { + person1_fødselsnummer -> person1MockSvar + person1_aktor_id -> person1MockSvar + person1_dnummer -> person1MockSvar +
person1_annen_ident -> person1MockSvar + person2_fødselsnummer -> person2MockSvar + person2_aktor_id -> person2MockSvar + person3_fødselsnummer -> person3MockSvar + else -> ingenTreffMockSvar + } + +const val ingenTreffMockSvar = """ +{ + "data": { + "hentIdenter": { + "identer": [] + } + } +} +""" +const val person1MockSvar = """ +{ + "data": { + "hentIdenter": { + "identer": [ + { + "ident": "$person1_fødselsnummer", + "gruppe": "FOLKEREGISTERIDENT", + "historisk": false + }, + { + "ident": "$person1_aktor_id", + "gruppe": "AKTORID", + "historisk": false + }, + { + "ident": "$person1_dnummer", + "gruppe": "FOLKEREGISTERIDENT", + "historisk": true + }, + { + "ident": "$person1_annen_ident", + "gruppe": "ANNEN_IDENT", + "historisk": true + } + ] + } + } +} +""" + +const val person3MockSvar = """ + { + "data": { + "hentIdenter": { + "identer": [ + { + "ident": "$person3_fødselsnummer", + "gruppe": "FOLKEREGISTERIDENT", + "historisk": false + } + ] + } + } +} +""" + +const val person2MockSvar = """ + { + "data": { + "hentIdenter": { + "identer": [ + { + "ident": "$person2_fødselsnummer", + "gruppe": "FOLKEREGISTERIDENT", + "historisk": false + }, + { + "ident": "$person2_aktor_id", + "gruppe": "AKTORID", + "historisk": false + } + ] + } + } +} +""" + +fun MockRequestHandleScope.genererResponse(it: HttpRequestData): HttpResponseData { + val text = (it.body as TextContent).text + val start = text.indexOf("ident") + val end = text.indexOf("}", start) + val ident = text + .substring(start, end) + .split(",") + .first() + .replace("\"", "") + .replace("ident:", "") + .trim() + return respond( + content = hentSvar(ident), + status = HttpStatusCode.OK, + headers = headersOf(HttpHeaders.ContentType, ContentType.Application.Json.toString()) + ) +} + +fun List<Hendelse>.asConsumerRecords(): ConsumerRecords<Long, Hendelse> = + this.map { TestData.getConsumerRecord(nextLong(), it) } + .let { TestData.getConsumerRecords(it) } + +object TestData { + + fun getMetadata(): Metadata = + Metadata( + tidspunkt = Instant.now(), + utfoertAv = Bruker(type = BrukerType.SYSTEM, id = "paw"), + kilde = "paw", + aarsak = "test" + ) + + fun getPeriodeStartet( + identitetsnummer: Identitetsnummer, + arbeidssoekerId: ArbeidssoekerId + ): Startet = Startet( + hendelseId = UUID.randomUUID(), + id = arbeidssoekerId.value, + identitetsnummer = identitetsnummer.value, + metadata = getMetadata() + ) + + fun getPeriodeAvsluttet( + identitetsnummer: Identitetsnummer, + arbeidssoekerId: ArbeidssoekerId + ): Avsluttet = Avsluttet( + hendelseId = UUID.randomUUID(), + id = arbeidssoekerId.value, + identitetsnummer = identitetsnummer.value, + metadata = getMetadata() + ) + + fun getPeriodeStartAvvist( + identitetsnummer: Identitetsnummer, + arbeidssoekerId: ArbeidssoekerId + ): Avvist = Avvist( + hendelseId = UUID.randomUUID(), + id = arbeidssoekerId.value, + identitetsnummer = identitetsnummer.value, + metadata = getMetadata() + ) + + fun getPeriodeAvsluttetAvvist( + identitetsnummer: Identitetsnummer, + arbeidssoekerId: ArbeidssoekerId + ): AvvistStoppAvPeriode = AvvistStoppAvPeriode( + hendelseId = UUID.randomUUID(), + id = arbeidssoekerId.value, + identitetsnummer = identitetsnummer.value, + metadata = getMetadata() + ) + + fun getIdentitetsnummerSammenslaatt( + identitetsnummerList: List<Identitetsnummer>, + fraArbeidssoekerId: ArbeidssoekerId, + tilArbeidssoekerId: ArbeidssoekerId + ): IdentitetsnummerSammenslaatt = IdentitetsnummerSammenslaatt( + hendelseId = UUID.randomUUID(), + id = fraArbeidssoekerId.value, + identitetsnummer = identitetsnummerList.first().value, +
metadata = getMetadata(), + flyttedeIdentitetsnumre = HashSet(identitetsnummerList.map { it.value }), + flyttetTilArbeidssoekerId = tilArbeidssoekerId.value + ) + + fun getArbeidssoekerIdFlettetInn( + identitetsnummerList: List<Identitetsnummer>, + tilArbeidssoekerId: ArbeidssoekerId, + fraArbeidssoekerId: ArbeidssoekerId + ): ArbeidssoekerIdFlettetInn = ArbeidssoekerIdFlettetInn( + hendelseId = UUID.randomUUID(), + id = tilArbeidssoekerId.value, + identitetsnummer = identitetsnummerList.first().value, + metadata = getMetadata(), + kilde = Kilde( + identitetsnummer = HashSet(identitetsnummerList.map { it.value }), + arbeidssoekerId = fraArbeidssoekerId.value + ) + ) + + fun <K, V> getConsumerRecord(key: K, value: V): ConsumerRecord<K, V> = + ConsumerRecord("topic", 1, 1, key, value) + + fun <K, V> getConsumerRecords(records: List<ConsumerRecord<K, V>>): ConsumerRecords<K, V> = + ConsumerRecords(mapOf(TopicPartition("topic", 1) to records)) +} \ No newline at end of file diff --git a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/testdatabase.kt b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/test/TestDatabase.kt similarity index 51% rename from apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/testdatabase.kt rename to apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/test/TestDatabase.kt index 241fa8b8..30a040f5 100644 --- a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/testdatabase.kt +++ b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/test/TestDatabase.kt @@ -1,26 +1,26 @@ -package no.nav.paw.kafkakeygenerator +package no.nav.paw.kafkakeygenerator.test -import no.nav.paw.kafkakeygenerator.config.DatabaseKonfigurasjon -import no.nav.paw.kafkakeygenerator.config.dataSource -import no.nav.paw.kafkakeygenerator.database.flywayMigrate +import no.nav.paw.kafkakeygenerator.config.DatabaseConfig +import no.nav.paw.kafkakeygenerator.utils.createDataSource import org.testcontainers.containers.PostgreSQLContainer import org.testcontainers.containers.wait.strategy.Wait import javax.sql.DataSource fun initTestDatabase(): DataSource { - val postgres = postgreSQLContainer() - val dataSource = DatabaseKonfigurasjon( - host = postgres.host, - port = postgres.firstMappedPort, - brukernavn = postgres.username, - passord = postgres.password, - databasenavn = postgres.databaseName - ).dataSource() - flywayMigrate(dataSource) - return dataSource + val config = postgreSQLContainer().let { + DatabaseConfig( + host = it.host, + port = it.firstMappedPort, + database = it.databaseName, + username = it.username, + password = it.password, + driverClassName = "org.postgresql.Driver", + autoCommit = false + ) + } + return createDataSource(config) } - fun postgreSQLContainer(): PostgreSQLContainer<out PostgreSQLContainer<*>> { val postgres = PostgreSQLContainer( "postgres:14" @@ -33,4 +33,4 @@ fun postgreSQLContainer(): PostgreSQLContainer<out PostgreSQLContainer<*>> { postgres.start() postgres.waitingFor(Wait.forHealthcheck()) return postgres -} \ No newline at end of file +} diff --git a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/test/TestKafkaProducer.kt b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/test/TestKafkaProducer.kt new file mode 100644 index 00000000..22a1126e --- /dev/null +++ b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/test/TestKafkaProducer.kt @@ -0,0 +1,50 @@ +package no.nav.paw.kafkakeygenerator.test + +import no.nav.paw.arbeidssokerregisteret.intern.v1.Hendelse +import
no.nav.paw.arbeidssokerregisteret.intern.v1.HendelseSerializer +import no.nav.paw.config.hoplite.loadNaisOrLocalConfiguration +import no.nav.paw.config.kafka.KAFKA_CONFIG +import no.nav.paw.config.kafka.KafkaConfig +import no.nav.paw.config.kafka.KafkaFactory +import no.nav.paw.kafkakeygenerator.config.KAFKA_TOPOLOGY_CONFIG +import no.nav.paw.kafkakeygenerator.config.KafkaTopologyConfig +import no.nav.paw.kafkakeygenerator.utils.buildApplicationLogger +import no.nav.paw.kafkakeygenerator.vo.ArbeidssoekerId +import no.nav.paw.kafkakeygenerator.vo.Identitetsnummer +import org.apache.kafka.clients.producer.ProducerRecord +import org.apache.kafka.common.serialization.LongSerializer + +private val logger = buildApplicationLogger + +fun main() { + val kafkaConfig = loadNaisOrLocalConfiguration<KafkaConfig>(KAFKA_CONFIG) + val kafkaTopologyConfig = loadNaisOrLocalConfiguration<KafkaTopologyConfig>(KAFKA_TOPOLOGY_CONFIG) + + val kafkaFactory = KafkaFactory(kafkaConfig) + val kafkaProducer = kafkaFactory.createProducer( + clientId = "${kafkaTopologyConfig.consumerGroupId}-producer", + keySerializer = LongSerializer::class, + valueSerializer = HendelseSerializer::class + ) + + val fraArbeidssoekerId = ArbeidssoekerId(2) + val tilArbeidssoekerId = ArbeidssoekerId(1) + val identitetsnummer1 = Identitetsnummer("02017012345") + val identitetsnummer2 = Identitetsnummer("06017012345") + + val key = 1L + val value = TestData.getIdentitetsnummerSammenslaatt( + identitetsnummerList = listOf(identitetsnummer1, identitetsnummer2), + fraArbeidssoekerId = fraArbeidssoekerId, + tilArbeidssoekerId = tilArbeidssoekerId + ) + + try { + logger.info("Sender hendelse {}", value) + kafkaProducer.send(ProducerRecord(kafkaTopologyConfig.hendelseloggTopic, key, value)).get() + } catch (e: Exception) { + logger.error("Send hendelse feilet", e) + } finally { + kafkaProducer.close() + } +} diff --git a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/testdata.kt b/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/testdata.kt deleted file mode 100644 index f489e0ff..00000000 --- a/apps/kafka-key-generator/src/test/kotlin/no/nav/paw/kafkakeygenerator/testdata.kt +++ /dev/null @@ -1,121 +0,0 @@ -package no.nav.paw.kafkakeygenerator - -import io.ktor.client.engine.mock.* -import io.ktor.client.request.* -import io.ktor.http.* -import io.ktor.http.content.* - -const val person1_fødselsnummer = "01017012346" -const val person1_aktor_id = "2649500819544" -const val person1_dnummer = "09127821913" -const val person1_annen_ident = "12129127821913" -const val person2_fødselsnummer = "01017012345" -const val person2_aktor_id = "1649500819544" -const val person3_fødselsnummer = "01017012344" - -fun hentSvar(ident: String) = - when(ident) { - person1_fødselsnummer -> person1MockSvar - person1_aktor_id -> person1MockSvar - person1_dnummer -> person1MockSvar - person1_annen_ident -> person1MockSvar - person2_fødselsnummer -> person2MockSvar - person2_aktor_id -> person2MockSvar - person3_fødselsnummer -> person3MockSvar - else -> ingenTreffMockSvar - } - -const val ingenTreffMockSvar = """ -{ - "data": { - "hentIdenter": { - "identer": [] - } - } -} -""" -const val person1MockSvar = """ -{ - "data": { - "hentIdenter": { - "identer": [ - { - "ident": "$person1_fødselsnummer", - "gruppe": "FOLKEREGISTERIDENT", - "historisk": false - }, - { - "ident": "$person1_aktor_id", - "gruppe": "AKTORID", - "historisk": false - }, - { - "ident": "$person1_dnummer", - "gruppe": "FOLKEREGISTERIDENT", - "historisk": true - }, - { - "ident":
"$person1_annen_ident", - "gruppe": "ANNEN_IDENT", - "historisk": true - } - ] - } - } -} -""" - -const val person3MockSvar = """ - { - "data": { - "hentIdenter": { - "identer": [ - { - "ident": "$person3_fødselsnummer", - "gruppe": "FOLKEREGISTERIDENT", - "historisk": false - } - ] - } - } -} -""" - -const val person2MockSvar = """ - { - "data": { - "hentIdenter": { - "identer": [ - { - "ident": "$person2_fødselsnummer", - "gruppe": "FOLKEREGISTERIDENT", - "historisk": false - }, - { - "ident": "$person2_aktor_id", - "gruppe": "AKTORID", - "historisk": false - } - ] - } - } -} -""" - -fun MockRequestHandleScope.genererResponse(it: HttpRequestData): HttpResponseData { - val text = (it.body as TextContent).text - val start = text.indexOf("ident") - val end = text.indexOf("}", start) - val ident = text - .substring(start, end) - .split(",") - .first() - .replace("\"", "") - .replace("ident:", "") - .trim() - return respond( - content = hentSvar(ident), - status = HttpStatusCode.OK, - headers = headersOf(HttpHeaders.ContentType, ContentType.Application.Json.toString()) - ) -} diff --git a/apps/kafka-key-maintenance/src/main/kotlin/no/nav/paw/kafkakeymaintenance/pdlprocessor/DbReaderTask.kt b/apps/kafka-key-maintenance/src/main/kotlin/no/nav/paw/kafkakeymaintenance/pdlprocessor/DbReaderTask.kt index 34276aba..8d70e69b 100644 --- a/apps/kafka-key-maintenance/src/main/kotlin/no/nav/paw/kafkakeymaintenance/pdlprocessor/DbReaderTask.kt +++ b/apps/kafka-key-maintenance/src/main/kotlin/no/nav/paw/kafkakeymaintenance/pdlprocessor/DbReaderTask.kt @@ -1,7 +1,11 @@ package no.nav.paw.kafkakeymaintenance.pdlprocessor import io.opentelemetry.api.GlobalOpenTelemetry -import io.opentelemetry.api.trace.* +import io.opentelemetry.api.trace.Span +import io.opentelemetry.api.trace.SpanContext +import io.opentelemetry.api.trace.SpanKind +import io.opentelemetry.api.trace.TraceFlags +import io.opentelemetry.api.trace.TraceState import io.opentelemetry.context.Context import io.opentelemetry.instrumentation.annotations.WithSpan import no.nav.paw.arbeidssokerregisteret.intern.v1.Hendelse @@ -13,8 +17,8 @@ import no.nav.paw.kafkakeygenerator.client.LokaleAlias import no.nav.paw.kafkakeymaintenance.ApplicationContext import no.nav.paw.kafkakeymaintenance.ErrorOccurred import no.nav.paw.kafkakeymaintenance.ShutdownSignal -import no.nav.paw.kafkakeymaintenance.kafka.TransactionContext import no.nav.paw.kafkakeymaintenance.kafka.Topic +import no.nav.paw.kafkakeymaintenance.kafka.TransactionContext import no.nav.paw.kafkakeymaintenance.kafka.txContext import no.nav.paw.kafkakeymaintenance.pdlprocessor.functions.HendelseRecord import no.nav.paw.kafkakeymaintenance.pdlprocessor.lagring.Data @@ -94,7 +98,8 @@ class DbReaderTask( } .count() if (batch.isEmpty()) { - applicationContext.logger.info("Ingen meldinger klare for prosessering, venter ${dbReaderContext.aktorConfig.interval}") + val sleepUntil = Instant.now().plus(dbReaderContext.aktorConfig.interval) + applicationContext.logger.info("Ingen meldinger klare for prosessering, venter til $sleepUntil (+ ${dbReaderContext.aktorConfig.interval})") Thread.sleep(dbReaderContext.aktorConfig.interval.toMillis()) } else { applicationContext.logger.info("Genererte {} hendelser fra {} meldinger", count, batch.size) diff --git a/apps/kafka-key-maintenance/src/main/kotlin/no/nav/paw/kafkakeymaintenance/pdlprocessor/functions/HarAvvik.kt b/apps/kafka-key-maintenance/src/main/kotlin/no/nav/paw/kafkakeymaintenance/pdlprocessor/functions/HarAvvik.kt index 67336b0f..9a57aaf2 100644 --- 
a/apps/kafka-key-maintenance/src/main/kotlin/no/nav/paw/kafkakeymaintenance/pdlprocessor/functions/HarAvvik.kt +++ b/apps/kafka-key-maintenance/src/main/kotlin/no/nav/paw/kafkakeymaintenance/pdlprocessor/functions/HarAvvik.kt @@ -11,5 +11,5 @@ fun harAvvik(data: Data): Boolean = .map { it.arbeidsoekerId } .distinct().size > 1) .also { harAvvik -> - avviksDataLogger.debug("Har avvik: {}}, data: {}}", harAvvik, data.debugString()) + avviksDataLogger.debug("Har avvik: {}, data: {}", harAvvik, data.debugString()) } \ No newline at end of file diff --git a/docker/kafka/docker-compose.yaml b/docker/kafka/docker-compose.yaml index f7f6aabe..3f63b982 100644 --- a/docker/kafka/docker-compose.yaml +++ b/docker/kafka/docker-compose.yaml @@ -54,6 +54,7 @@ services: kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic paw.opplysninger-om-arbeidssoeker-v1 --replication-factor 1 --partitions 1 kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic paw.arbeidssoker-profilering-v1 --replication-factor 1 --partitions 1 kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic paw.arbeidssoker-bekreftelse-v1 --replication-factor 1 --partitions 1 + kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic paw.arbeidssoker-hendelseslogg-v1 --replication-factor 1 --partitions 1 kafka-topics --bootstrap-server kafka:29092 --create --if-not-exists --topic paw.arbeidssoker-bekreftelse-hendelseslogg-v1 --replication-factor 1 --partitions 1 echo -e 'Successfully created the following topics:' diff --git a/docker/mocks/config/mock-oauth2-server/config.json b/docker/mocks/config/mock-oauth2-server/config.json index 7f31c37a..c9e6a210 100644 --- a/docker/mocks/config/mock-oauth2-server/config.json +++ b/docker/mocks/config/mock-oauth2-server/config.json @@ -1,61 +1,85 @@ { - "interactiveLogin": true, - "httpServer": "NettyWrapper", - "tokenCallbacks": [ - { - "issuerId": "idporten", - "tokenExpiry": 3600, - "requestMappings": [ + "interactiveLogin": true, + "httpServer": "NettyWrapper", + "tokenCallbacks": [ { - "requestParam": "client_id", - "match": "paw-arbeidssoekerregisteret-api-bekreftelse", - "claims": { - "sub": "admin@paw-arbeidssoekerregisteret-api-bekreftelse", - "aud": [ - "paw-arbeidssoekerregisteret-api-bekreftelse" - ], - "pid": "17830348441", - "acr": "idporten-loa-high" - } - } - ] - }, - { - "issuerId": "tokenx", - "tokenExpiry": 3600, - "requestMappings": [ + "issuerId": "idporten", + "tokenExpiry": 3600, + "requestMappings": [ + { + "requestParam": "client_id", + "match": "paw-arbeidssoekerregisteret-api-bekreftelse", + "claims": { + "sub": "admin@paw-arbeidssoekerregisteret-api-bekreftelse", + "aud": [ + "paw-arbeidssoekerregisteret-api-bekreftelse" + ], + "pid": "17830348441", + "acr": "idporten-loa-high" + } + } + ] + }, { - "requestParam": "client_id", - "match": "paw-arbeidssoekerregisteret-api-bekreftelse", - "claims": { - "sub": "admin@paw-arbeidssoekerregisteret-api-bekreftelse", - "aud": [ - "paw-arbeidssoekerregisteret-api-bekreftelse" - ], - "pid": "17830348441", - "acr": "idporten-loa-high" - } - } - ] - }, - { - "issuerId": "azure", - "tokenExpiry": 3600, - "requestMappings": [ + "issuerId": "tokenx", + "tokenExpiry": 3600, + "requestMappings": [ + { + "requestParam": "client_id", + "match": "paw-kafka-key-generator", + "claims": { + "sub": "admin@paw-kafka-key-generator", + "aud": [ + "paw-kafka-key-generator" + ], + "pid": "01017012345", + "acr": "idporten-loa-high" + } + }, + { + "requestParam": 
"client_id", + "match": "paw-arbeidssoekerregisteret-api-bekreftelse", + "claims": { + "sub": "admin@paw-arbeidssoekerregisteret-api-bekreftelse", + "aud": [ + "paw-arbeidssoekerregisteret-api-bekreftelse" + ], + "pid": "01017012345", + "acr": "idporten-loa-high" + } + } + ] + }, { - "requestParam": "client_id", - "match": "paw-arbeidssoekerregisteret-api-bekreftelse", - "claims": { - "sub": "admin@paw-arbeidssoekerregisteret-api-bekreftelse", - "aud": [ - "paw-arbeidssoekerregisteret-api-bekreftelse" - ], - "oid": "84a9421a-610d-4258-ac2c-bae144216f14", - "name": "Nav Navesen", - "NAVident": "12345" - } + "issuerId": "azure", + "tokenExpiry": 3600, + "requestMappings": [ + { + "requestParam": "client_id", + "match": "paw-kafka-key-generator", + "claims": { + "sub": "admin@paw-kafka-key-generator", + "aud": [ + "paw-kafka-key-generator" + ], + "pid": "01017012345", + "acr": "idporten-loa-high" + } + }, + { + "requestParam": "client_id", + "match": "paw-arbeidssoekerregisteret-api-bekreftelse", + "claims": { + "sub": "admin@paw-arbeidssoekerregisteret-api-bekreftelse", + "aud": [ + "paw-arbeidssoekerregisteret-api-bekreftelse" + ], + "oid": "84a9421a-610d-4258-ac2c-bae144216f14", + "name": "Nav Navesen", + "NAVident": "NAV12345" + } + } + ] } - ] - } - ] + ] } \ No newline at end of file diff --git a/docker/mocks/config/wiremock/README.md b/docker/mocks/config/wiremock/README.md new file mode 100644 index 00000000..3cad169d --- /dev/null +++ b/docker/mocks/config/wiremock/README.md @@ -0,0 +1,12 @@ +# Wiremock + +## hentIdenter +```json +{ + "variables": { + "ident": "01017012345", + "historisk": true + }, + "query": "query($ident: ID!, $historisk: Boolean) {\n hentIdenter(ident: $ident, historikk: $historisk) {\n identer {\n ident\n gruppe\n historisk\n }\n }\n}" +} +``` diff --git a/docker/mocks/config/wiremock/pdl-hent-identer-01017012345.json b/docker/mocks/config/wiremock/pdl-hent-identer-01017012345.json new file mode 100644 index 00000000..b1eb8224 --- /dev/null +++ b/docker/mocks/config/wiremock/pdl-hent-identer-01017012345.json @@ -0,0 +1,30 @@ +{ + "request": { + "method": "POST", + "urlPathPattern": "/pdl", + "bodyPatterns": [ + { + "matchesJsonPath": "$.variables[?(@.ident == '01017012345')]" + } + ] + }, + "response": { + "status": 200, + "jsonBody": { + "data": { + "hentIdenter": { + "identer": [ + { + "ident": "01017012345", + "gruppe": "FOLKEREGISTERIDENT", + "historisk": false + } + ] + } + } + }, + "headers": { + "Content-Type": "application/json" + } + } +} diff --git a/docker/mocks/config/wiremock/pdl-hent-identer-02017012345.json b/docker/mocks/config/wiremock/pdl-hent-identer-02017012345.json new file mode 100644 index 00000000..f1cd7524 --- /dev/null +++ b/docker/mocks/config/wiremock/pdl-hent-identer-02017012345.json @@ -0,0 +1,30 @@ +{ + "request": { + "method": "POST", + "urlPathPattern": "/pdl", + "bodyPatterns": [ + { + "matchesJsonPath": "$.variables[?(@.ident == '02017012345')]" + } + ] + }, + "response": { + "status": 200, + "jsonBody": { + "data": { + "hentIdenter": { + "identer": [ + { + "ident": "02017012345", + "gruppe": "FOLKEREGISTERIDENT", + "historisk": false + } + ] + } + } + }, + "headers": { + "Content-Type": "application/json" + } + } +} diff --git a/docker/mocks/config/wiremock/pdl-hent-identer-03017012345.json b/docker/mocks/config/wiremock/pdl-hent-identer-03017012345.json new file mode 100644 index 00000000..8ba306d3 --- /dev/null +++ b/docker/mocks/config/wiremock/pdl-hent-identer-03017012345.json @@ -0,0 +1,30 @@ +{ + "request": { + "method": 
"POST", + "urlPathPattern": "/pdl", + "bodyPatterns": [ + { + "matchesJsonPath": "$.variables[?(@.ident == '03017012345')]" + } + ] + }, + "response": { + "status": 200, + "jsonBody": { + "data": { + "hentIdenter": { + "identer": [ + { + "ident": "03017012345", + "gruppe": "FOLKEREGISTERIDENT", + "historisk": false + } + ] + } + } + }, + "headers": { + "Content-Type": "application/json" + } + } +} diff --git a/docker/mocks/config/wiremock/pdl-hent-identer-04017012345.json b/docker/mocks/config/wiremock/pdl-hent-identer-04017012345.json new file mode 100644 index 00000000..b626d926 --- /dev/null +++ b/docker/mocks/config/wiremock/pdl-hent-identer-04017012345.json @@ -0,0 +1,35 @@ +{ + "request": { + "method": "POST", + "urlPathPattern": "/pdl", + "bodyPatterns": [ + { + "matchesJsonPath": "$.variables[?(@.ident == '04017012345')]" + } + ] + }, + "response": { + "status": 200, + "jsonBody": { + "data": { + "hentIdenter": { + "identer": [ + { + "ident": "04017012345", + "gruppe": "FOLKEREGISTERIDENT", + "historisk": false + }, + { + "ident": "05017012345", + "gruppe": "FOLKEREGISTERIDENT", + "historisk": true + } + ] + } + } + }, + "headers": { + "Content-Type": "application/json" + } + } +} diff --git a/docker/mocks/config/wiremock/pdl-hent-identer-05017012345.json b/docker/mocks/config/wiremock/pdl-hent-identer-05017012345.json new file mode 100644 index 00000000..0cc29a25 --- /dev/null +++ b/docker/mocks/config/wiremock/pdl-hent-identer-05017012345.json @@ -0,0 +1,35 @@ +{ + "request": { + "method": "POST", + "urlPathPattern": "/pdl", + "bodyPatterns": [ + { + "matchesJsonPath": "$.variables[?(@.ident == '05017012345')]" + } + ] + }, + "response": { + "status": 200, + "jsonBody": { + "data": { + "hentIdenter": { + "identer": [ + { + "ident": "04017012345", + "gruppe": "FOLKEREGISTERIDENT", + "historisk": false + }, + { + "ident": "05017012345", + "gruppe": "FOLKEREGISTERIDENT", + "historisk": true + } + ] + } + } + }, + "headers": { + "Content-Type": "application/json" + } + } +} diff --git a/docker/mocks/config/wiremock/pdl.json b/docker/mocks/config/wiremock/pdl-hent-person.json.bak similarity index 96% rename from docker/mocks/config/wiremock/pdl.json rename to docker/mocks/config/wiremock/pdl-hent-person.json.bak index 01327b99..3d37c314 100644 --- a/docker/mocks/config/wiremock/pdl.json +++ b/docker/mocks/config/wiremock/pdl-hent-person.json.bak @@ -35,7 +35,7 @@ "kommunenummer": "0301" }, "matrikkeladresse": null, - "ukjentBosted" : null, + "ukjentBosted": null, "utenlandskAdresse": null } ], diff --git a/docker/mocks/docker-compose.yaml b/docker/mocks/docker-compose.yaml index f4512ffa..b42c0d83 100644 --- a/docker/mocks/docker-compose.yaml +++ b/docker/mocks/docker-compose.yaml @@ -5,6 +5,8 @@ services: container_name: wiremock ports: - "8090:8080" + environment: + WIREMOCK_OPTIONS: "--verbose" volumes: - ./config/wiremock:/home/wiremock/mappings/ networks: diff --git a/docker/postgres/config/initdb/postgres-init.sql b/docker/postgres/config/initdb/postgres-init.sql index 96561f0d..92327649 100644 --- a/docker/postgres/config/initdb/postgres-init.sql +++ b/docker/postgres/config/initdb/postgres-init.sql @@ -1,3 +1,5 @@ +create user pawkafkakeys with password '5up3r_53cr3t_p455w0rd'; create user bekreftelse_api with password '5up3r_53cr3t_p455w0rd'; +create database pawkafkakeys with owner pawkafkakeys; create database bekreftelser with owner bekreftelse_api; \ No newline at end of file