Skip to content

Commit

Permalink
Flyttet bekreftelse-api til intern
Browse files Browse the repository at this point in the history
  • Loading branch information
nilsmsa committed Sep 10, 2024
1 parent ada76ee commit 2a714fc
Show file tree
Hide file tree
Showing 40 changed files with 1,201 additions and 1 deletion.
46 changes: 46 additions & 0 deletions apps/bekreftelse-api/nais/nais-dev.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
apiVersion: nais.io/v1alpha1
kind: Application
metadata:
  # NOTE(review): app lives under apps/bekreftelse-api but deploys as
  # "paw-rapportering-api" — confirm whether the name should be renamed too.
  name: paw-rapportering-api
  namespace: paw
  labels:
    team: paw
spec:
  image: {{ image }}
  port: 8080
  env:
    - name: KAFKA_KEY_SCOPE
      value: "api://dev-gcp.paw.paw-kafka-key-generator/.default"
  resources:
    limits:
      memory: 1024Mi
    requests:
      cpu: 200m
      memory: 256Mi
  tokenx:
    enabled: true
  azure:
    application:
      enabled: true
      allowAllUsers: true
      claims:
        extra:
          - NAVident
  replicas:
    min: 1
    max: 1
  liveness:
    path: /internal/isAlive
    initialDelay: 10
  readiness:
    path: /internal/isReady
    initialDelay: 10
  prometheus:
    enabled: true
    path: /internal/metrics
  observability:
    autoInstrumentation:
      enabled: true
      runtime: java
  kafka:
    pool: nav-dev
46 changes: 46 additions & 0 deletions apps/bekreftelse-api/nais/nais-prod.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
apiVersion: nais.io/v1alpha1
kind: Application
metadata:
  # NOTE(review): app lives under apps/bekreftelse-api but deploys as
  # "paw-rapportering-api" — confirm whether the name should be renamed too.
  name: paw-rapportering-api
  namespace: paw
  labels:
    team: paw
spec:
  image: {{ image }}
  port: 8080
  env:
    - name: KAFKA_KEY_SCOPE
      value: "api://prod-gcp.paw.paw-kafka-key-generator/.default"
  resources:
    limits:
      memory: 1024Mi
    requests:
      cpu: 200m
      memory: 256Mi
  tokenx:
    enabled: true
  azure:
    application:
      enabled: true
      allowAllUsers: true
      claims:
        extra:
          - NAVident
  replicas:
    min: 1
    max: 1
  liveness:
    path: /internal/isAlive
    initialDelay: 10
  readiness:
    path: /internal/isReady
    initialDelay: 10
  prometheus:
    enabled: true
    path: /internal/metrics
  observability:
    autoInstrumentation:
      enabled: true
      runtime: java
  kafka:
    pool: nav-prod
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
package no.nav.paw.rapportering.api

import io.ktor.server.application.Application
import io.ktor.server.engine.addShutdownHook
import io.ktor.server.engine.embeddedServer
import io.ktor.server.netty.Netty
import io.ktor.server.routing.routing
import no.nav.paw.config.hoplite.loadNaisOrLocalConfiguration
import no.nav.paw.config.kafka.KAFKA_CONFIG_WITH_SCHEME_REG
import no.nav.paw.config.kafka.KAFKA_STREAMS_CONFIG_WITH_SCHEME_REG
import no.nav.paw.config.kafka.KafkaConfig
import no.nav.paw.kafkakeygenerator.auth.AzureM2MConfig
import no.nav.paw.kafkakeygenerator.client.KafkaKeyConfig
import no.nav.paw.rapportering.api.config.APPLICATION_CONFIG_FILE_NAME
import no.nav.paw.rapportering.api.config.ApplicationConfig
import no.nav.paw.rapportering.api.plugins.configureAuthentication
import no.nav.paw.rapportering.api.plugins.configureHTTP
import no.nav.paw.rapportering.api.plugins.configureLogging
import no.nav.paw.rapportering.api.plugins.configureMetrics
import no.nav.paw.rapportering.api.plugins.configureOtel
import no.nav.paw.rapportering.api.plugins.configureSerialization
import no.nav.paw.rapportering.api.routes.healthRoutes
import no.nav.paw.rapportering.api.routes.rapporteringRoutes
import no.nav.paw.rapportering.api.routes.swaggerRoutes
import org.slf4j.LoggerFactory

/**
 * Application entry point: loads configuration, builds the dependency graph,
 * and runs an embedded Netty server until shutdown.
 */
fun main() {
    val logger = LoggerFactory.getLogger("rapportering-api")
    logger.info("Starter: ${ApplicationInfo.id}")

    // Load every config file up front so a broken configuration fails fast at boot.
    val applicationConfig = loadNaisOrLocalConfiguration<ApplicationConfig>(APPLICATION_CONFIG_FILE_NAME)
    val kafkaConfig = loadNaisOrLocalConfiguration<KafkaConfig>(KAFKA_CONFIG_WITH_SCHEME_REG)
    val kafkaStreamsConfig = loadNaisOrLocalConfiguration<KafkaConfig>(KAFKA_STREAMS_CONFIG_WITH_SCHEME_REG)
    val azureM2MConfig = loadNaisOrLocalConfiguration<AzureM2MConfig>("azure_m2m_key_config.toml")
    val kafkaKeyConfig = loadNaisOrLocalConfiguration<KafkaKeyConfig>("kafka_key_generator_client_config.toml")

    val dependencies = createDependencies(
        applicationConfig = applicationConfig,
        kafkaConfig = kafkaConfig,
        kafkaStreamsConfig = kafkaStreamsConfig,
        azureM2MConfig = azureM2MConfig,
        kafkaKeyConfig = kafkaKeyConfig,
    )

    val server = embeddedServer(Netty, port = 8080) {
        module(applicationConfig, dependencies)
    }
    // Short grace period on JVM shutdown, then block the main thread on the server.
    server.addShutdownHook { server.stop(300, 300) }
    server.start(wait = true)
}

/**
 * Ktor application module: installs plugins, then registers all routes.
 *
 * Plugins (metrics, HTTP features, authentication, logging, serialization,
 * OTel) are installed before routing so they apply to every route handler.
 */
fun Application.module(
    applicationConfig: ApplicationConfig,
    dependencies: Dependencies
) {
    configureMetrics(dependencies.prometheusMeterRegistry)
    configureHTTP()
    configureAuthentication(applicationConfig.authProviders)
    configureLogging()
    configureSerialization()
    configureOtel()

    routing {
        // Health endpoints; the registry is passed in, presumably for a metrics
        // scrape endpoint as well — see healthRoutes for details.
        healthRoutes(dependencies.prometheusMeterRegistry, dependencies.health)
        swaggerRoutes()
        // Domain routes; all collaborators are injected explicitly.
        rapporteringRoutes(
            kafkaKeyClient = dependencies.kafkaKeyClient,
            rapporteringStateStore = dependencies.rapporteringStateStore,
            rapporteringStateStoreName = applicationConfig.rapporteringStateStoreName,
            kafkaStreams = dependencies.kafkaStreams,
            httpClient = dependencies.httpClient,
            rapporteringProducer = dependencies.rapporteringProducer,
            autorisasjonService = dependencies.autorisasjonService
        )
    }
}

Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
package no.nav.paw.rapportering.api

/**
 * Identity of the running application image, resolved once at class load.
 */
object ApplicationInfo {
    private const val FALLBACK = "UNSPECIFIED"

    // IMAGE_WITH_VERSION is presumably injected by the deploy pipeline — confirm.
    val id: String = System.getenv("IMAGE_WITH_VERSION") ?: FALLBACK
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
package no.nav.paw.rapportering.api

import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde
import io.ktor.client.HttpClient
import io.ktor.client.plugins.contentnegotiation.ContentNegotiation
import io.ktor.serialization.jackson.jackson
import io.micrometer.prometheusmetrics.PrometheusMeterRegistry
import io.micrometer.prometheusmetrics.PrometheusConfig
import no.nav.paw.config.kafka.KafkaConfig
import no.nav.paw.config.kafka.streams.KafkaStreamsFactory
import no.nav.paw.kafkakeygenerator.auth.AzureM2MConfig
import no.nav.paw.kafkakeygenerator.auth.azureAdM2MTokenClient
import no.nav.paw.kafkakeygenerator.client.KafkaKeyConfig
import no.nav.paw.kafkakeygenerator.client.KafkaKeysClient
import no.nav.paw.kafkakeygenerator.client.kafkaKeysKlient
import no.nav.paw.rapportering.api.config.ApplicationConfig
import no.nav.paw.rapportering.api.kafka.RapporteringProducer
import no.nav.paw.rapportering.api.kafka.RapporteringTilgjengeligState
import no.nav.paw.rapportering.api.kafka.RapporteringTilgjengeligStateSerde
import no.nav.paw.rapportering.api.kafka.appTopology
import no.nav.paw.rapportering.api.services.AutorisasjonService
import no.nav.poao_tilgang.client.PoaoTilgangCachedClient
import no.nav.poao_tilgang.client.PoaoTilgangHttpClient
import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.KafkaStreams
import org.apache.kafka.streams.StoreQueryParameters
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler
import org.apache.kafka.streams.state.QueryableStoreTypes
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore
import org.apache.kafka.streams.state.Stores
import org.slf4j.LoggerFactory

/**
 * Builds the full runtime dependency graph: kafka-key client, HTTP client,
 * Kafka Streams topology (started here as a side effect), a read-only handle
 * to the state store, health probe, producer and authorization service.
 *
 * NOTE(review): the state store is fetched immediately after kafkaStreams.start();
 * Kafka Streams stores are generally not queryable until the instance reaches
 * RUNNING, so this can throw during startup rebalancing — confirm.
 */
fun createDependencies(
    applicationConfig: ApplicationConfig,
    kafkaConfig: KafkaConfig,
    kafkaStreamsConfig: KafkaConfig,
    azureM2MConfig: AzureM2MConfig,
    kafkaKeyConfig: KafkaKeyConfig
): Dependencies {
    val logger = LoggerFactory.getLogger("rapportering-api")

    // M2M token client is shared by the kafka-key client and the PoaoTilgang client below.
    val azureM2MTokenClient = azureAdM2MTokenClient(applicationConfig.naisEnv, azureM2MConfig)
    val kafkaKeyClient = kafkaKeysKlient(kafkaKeyConfig) {
        azureM2MTokenClient.createMachineToMachineToken(kafkaKeyConfig.scope)
    }

    val prometheusMeterRegistry = PrometheusMeterRegistry(PrometheusConfig.DEFAULT)

    // Generic Ktor HTTP client with Jackson content negotiation.
    val httpClient = HttpClient {
        install(ContentNegotiation) {
            jackson()
        }
    }

    val streamsConfig = KafkaStreamsFactory(applicationConfig.applicationIdSuffix, kafkaStreamsConfig)
        .withDefaultKeySerde(Serdes.LongSerde::class)
        .withDefaultValueSerde(SpecificAvroSerde::class)

    // Persistent key-value store (Long key) holding RapporteringTilgjengeligState,
    // queried by the API routes via the read-only handle returned below.
    val streamsBuilder = StreamsBuilder()
        .addStateStore(
            Stores.keyValueStoreBuilder(
                Stores.persistentKeyValueStore(applicationConfig.rapporteringStateStoreName),
                Serdes.Long(),
                RapporteringTilgjengeligStateSerde(),
            )
        )

    val topology = streamsBuilder.appTopology(
        prometheusRegistry = prometheusMeterRegistry,
        rapporteringHendelseLoggTopic = applicationConfig.rapporteringHendelseLoggTopic,
        rapporteringStateStoreName = applicationConfig.rapporteringStateStoreName,
    )

    // NOTE(review): "application.server" is set to the hostname only; Kafka Streams
    // interactive queries normally expect host:port here — confirm this is intended.
    val kafkaStreams = KafkaStreams(
        topology,
        streamsConfig.properties.apply {
            put("application.server", applicationConfig.hostname)
        }
    )

    // Any uncaught stream-thread exception is logged and shuts down the whole application.
    kafkaStreams.setUncaughtExceptionHandler { throwable ->
        logger.error("Uventet feil: ${throwable.message}", throwable)
        StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_APPLICATION
    }

    kafkaStreams.start()

    val rapporteringStateStore: ReadOnlyKeyValueStore<Long, RapporteringTilgjengeligState> = kafkaStreams.store(
        StoreQueryParameters.fromNameAndType(
            applicationConfig.rapporteringStateStoreName,
            QueryableStoreTypes.keyValueStore()
        )
    )

    // Health probe reports directly off the KafkaStreams lifecycle state.
    val health = Health(kafkaStreams)

    val rapporteringProducer = RapporteringProducer(kafkaConfig, applicationConfig)

    // Caching PoaoTilgang client, authenticated with Azure M2M tokens.
    val poaoTilgangClient = PoaoTilgangCachedClient(
        PoaoTilgangHttpClient(
            applicationConfig.poaoClientConfig.url,
            { azureM2MTokenClient.createMachineToMachineToken(applicationConfig.poaoClientConfig.scope) }
        )
    )

    val autorisasjonService = AutorisasjonService(poaoTilgangClient)

    return Dependencies(
        kafkaKeyClient,
        httpClient,
        kafkaStreams,
        prometheusMeterRegistry,
        rapporteringStateStore,
        health,
        rapporteringProducer,
        autorisasjonService
    )
}

/**
 * Immutable holder for everything [createDependencies] wires up,
 * passed into the Ktor module and route builders.
 */
data class Dependencies(
    val kafkaKeyClient: KafkaKeysClient,
    val httpClient: HttpClient,
    val kafkaStreams: KafkaStreams,
    val prometheusMeterRegistry: PrometheusMeterRegistry,
    // Read-only interactive-query handle into the streams state store.
    val rapporteringStateStore: ReadOnlyKeyValueStore<Long, RapporteringTilgjengeligState>,
    val health: Health,
    val rapporteringProducer: RapporteringProducer,
    val autorisasjonService: AutorisasjonService
)
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
package no.nav.paw.rapportering.api

import io.ktor.http.HttpStatusCode
import org.apache.kafka.streams.KafkaStreams

/**
 * Health probe backed by the Kafka Streams lifecycle state.
 *
 * The null branches exist because [KafkaStreams.state] is a platform type
 * from the Java API.
 */
class Health(private val kafkaStreams: KafkaStreams) {

    /** Liveness: OK unless streams is shutting down, stopped, or has failed. */
    fun alive(): Status {
        val tilstand = kafkaStreams.state()
        val kode = when (tilstand) {
            KafkaStreams.State.CREATED,
            KafkaStreams.State.REBALANCING,
            KafkaStreams.State.RUNNING -> HttpStatusCode.OK

            KafkaStreams.State.PENDING_SHUTDOWN,
            KafkaStreams.State.NOT_RUNNING -> HttpStatusCode.ServiceUnavailable

            KafkaStreams.State.PENDING_ERROR,
            KafkaStreams.State.ERROR,
            null -> HttpStatusCode.InternalServerError
        }
        return status(kode, tilstand)
    }

    /** Readiness: OK only when streams is RUNNING; startup states are 503. */
    fun ready(): Status {
        val tilstand = kafkaStreams.state()
        val kode = when (tilstand) {
            KafkaStreams.State.RUNNING -> HttpStatusCode.OK

            KafkaStreams.State.CREATED,
            KafkaStreams.State.REBALANCING,
            KafkaStreams.State.PENDING_SHUTDOWN,
            KafkaStreams.State.NOT_RUNNING -> HttpStatusCode.ServiceUnavailable

            KafkaStreams.State.PENDING_ERROR,
            KafkaStreams.State.ERROR,
            null -> HttpStatusCode.InternalServerError
        }
        return status(kode, tilstand)
    }

    private fun status(kode: HttpStatusCode, kafkaStreamsTilstand: KafkaStreams.State?): Status =
        Status(kode, "KafkaStreams tilstand: '${kafkaStreamsTilstand?.name}'")
}

/** Result of a health probe: the HTTP status to return plus a human-readable message. */
data class Status(
    val code: HttpStatusCode,
    val message: String
)
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
package no.nav.paw.rapportering.api.config

import no.nav.paw.config.env.NaisEnv
import no.nav.paw.config.env.currentNaisEnv
import java.net.InetAddress

/** Name of the TOML file the application configuration is loaded from. */
const val APPLICATION_CONFIG_FILE_NAME = "application_config.toml"

/**
 * Top-level application configuration, loaded from [APPLICATION_CONFIG_FILE_NAME].
 */
data class ApplicationConfig(
    // Suffix for the Kafka Streams application id — presumably the consumer-group
    // discriminator; confirm against KafkaStreamsFactory.
    val applicationIdSuffix: String,
    val producerId: String,
    val rapporteringTopic: String,
    val rapporteringHendelseLoggTopic: String,
    val rapporteringStateStoreName: String,
    val authProviders: AuthProviders,
    // Defaults resolved from the runtime environment rather than the TOML file.
    val naisEnv: NaisEnv = currentNaisEnv,
    val hostname: String = InetAddress.getLocalHost().hostName,
    val poaoClientConfig: ServiceClientConfig
)

/** Base URL and OAuth2 scope for an outbound service client (e.g. PoaoTilgang). */
data class ServiceClientConfig(
    val url: String,
    val scope: String
)

/**
 * A single OIDC authentication provider used to validate incoming tokens.
 */
data class AuthProvider(
    val name: String,
    val discoveryUrl: String,
    val tokenEndpointUrl: String,
    val clientId: String,
    val claims: Claims
)

/** The set of providers configured for the application. */
typealias AuthProviders = List<AuthProvider>

/**
 * Claim requirements for an [AuthProvider].
 *
 * NOTE(review): combineWithOr presumably switches between "all claims required"
 * (false, the default) and "any claim suffices" (true) — confirm in the
 * authentication plugin.
 */
data class Claims(
    val map: List<String>,
    val combineWithOr: Boolean = false
)
Loading

0 comments on commit 2a714fc

Please sign in to comment.