Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions apps/mn-original-populasjon/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ LABEL maintainer="Team Dolly"

ENV JAVA_OPTS="-Dspring.profiles.active=prod"

COPY init.sh /init-scripts/init.sh
ADD /build/libs/app.jar /app/app.jar

EXPOSE 8080
4 changes: 4 additions & 0 deletions apps/mn-original-populasjon/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,11 @@ dependencies {
implementation 'org.hibernate.validator:hibernate-validator:6.1.5.Final'
implementation 'org.hibernate.validator:hibernate-validator-annotation-processor:6.1.5.Final'
implementation 'javax.validation:validation-api:2.0.1.Final'
implementation 'org.apache.kafka:kafka-clients:2.5.1'
implementation 'io.confluent:kafka-avro-serializer:5.5.0'
implementation 'org.springframework.kafka:spring-kafka:2.5.6.RELEASE'
implementation project(':testnorge-oauth2')
implementation project(':testnorge-avro-schema')
testImplementation 'org.springframework.boot:spring-boot-starter-test:2.3.4.RELEASE'
testImplementation 'org.springframework.cloud:spring-cloud-contract-wiremock:2.2.4.RELEASE'
testImplementation project(':testnorge-testing')
Expand Down
4 changes: 2 additions & 2 deletions apps/mn-original-populasjon/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,8 @@ spec:
vault:
enabled: true
paths:
- kvPath: /kv/preprod/fss/mn-original-populasjon/default
mountPath: /var/run/secrets/nais.io/vault
- kvPath: /serviceuser/data/dev/srvtestnorge
mountPath: /secret/serviceuser
resources:
requests:
cpu: 200m
Expand Down
4 changes: 4 additions & 0 deletions apps/mn-original-populasjon/init.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/bin/bash

# Exposes the Kafka service-user credentials (mounted from Vault at
# /secret/serviceuser, see config.yml) as environment variables consumed by
# KafkaConfig via @Value("${SERVICEUSER_USERNAME}") / @Value("${SERVICEUSER_PASSWORD}").
# NOTE(review): exports only take effect if this script is *sourced*, not executed —
# presumably the NAIS base image sources everything under /init-scripts/ before
# launching the app (the Dockerfile copies it there); TODO confirm.
export SERVICEUSER_USERNAME=$(cat /secret/serviceuser/username)
export SERVICEUSER_PASSWORD=$(cat /secret/serviceuser/password)
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
package no.nav.registre.testnorge.originalpopulasjon.config;

import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.io.File;
import java.net.InetSocketAddress;
import java.util.HashMap;
import java.util.Map;

import no.nav.registre.testnorge.libs.avro.person.Person;

@EnableKafka
@Configuration
public class KafkaConfig {

    // NOTE(review): @Value fields are conventionally private; kept public here to
    // avoid breaking any external access not visible in this file.
    @Value("${kafka.bootstrapservers}")
    public String bootstrapAddress;

    @Value("${kafka.groupid}")
    public String groupId;

    @Value("${kafka.schemaregistryservers}")
    public String schemaregistryServers;

    @Value("${SERVICEUSER_USERNAME}")
    public String username;

    @Value("${SERVICEUSER_PASSWORD}")
    public String password;

    /**
     * Producer factory for publishing Avro {@link Person} records with String keys.
     * Authenticates with SASL/PLAIN using the service-user credentials, and upgrades
     * to SASL_SSL when a NAV truststore is provided via the environment.
     *
     * @return a configured {@link DefaultKafkaProducerFactory}
     */
    @Bean
    public ProducerFactory<String, Person> producerFactory() {
        // NOTE(review): new InetSocketAddress(0) resolves the wildcard address, so
        // getHostString() yields "0.0.0.0" — a meaningless client-id suffix. Consider
        // InetAddress.getLocalHost().getHostName() instead; left as-is to preserve
        // the existing client.id value.
        InetSocketAddress inetSocketAddress = new InetSocketAddress(0);
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaregistryServers);
        props.put(ProducerConfig.CLIENT_ID_CONFIG, groupId + inetSocketAddress.getHostString());
        props.put(ProducerConfig.ACKS_CONFIG, "1");
        // FIX: the factory is typed <String, Person>, so the key serializer must be
        // StringSerializer. The previous LongSerializer would throw
        // ClassCastException on every send(topic, String, Person).
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class);

        props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
        // FIX: JAAS option values must be double-quoted; the previous unquoted form
        // (username=X password=Y;) fails to parse and breaks on special characters
        // in the password.
        props.put(SaslConfigs.SASL_JAAS_CONFIG,
                "org.apache.kafka.common.security.plain.PlainLoginModule required username=\""
                        + username + "\" password=\"" + password + "\";");

        // When a truststore is mounted (on-prem NAV clusters), talk TLS to the brokers.
        String navTruststorePath = System.getenv("NAV_TRUSTSTORE_PATH");
        if (navTruststorePath != null) {
            props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
            props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, new File(navTruststorePath).getAbsolutePath());
            props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, System.getenv("NAV_TRUSTSTORE_PASSWORD"));
        }
        return new DefaultKafkaProducerFactory<>(props);
    }

    /**
     * Template used by the application to publish {@link Person} records.
     *
     * @return a {@link KafkaTemplate} backed by {@link #producerFactory()}
     */
    @Bean
    public KafkaTemplate<String, Person> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
package no.nav.registre.testnorge.originalpopulasjon.consumer;

import static org.reflections.Reflections.log;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

import java.util.ArrayList;

import no.nav.registre.testnorge.libs.avro.person.Adresse;
import no.nav.registre.testnorge.libs.avro.person.Person;
import no.nav.registre.testnorge.libs.dto.person.v1.AdresseDTO;
import no.nav.registre.testnorge.libs.dto.person.v1.PersonDTO;

@Component
public class HendelseConsumer {

public HendelseConsumer(@Value("${kafka.topic}") String topic, KafkaTemplate<String, Person> kafkaTemplate) {
this.topic = topic;
this.kafkaTemplate = kafkaTemplate;
}

private final String topic;
private final KafkaTemplate<String, Person> kafkaTemplate;

public void registrertOpprettelseAvPerson(PersonDTO personDTO) {
AdresseDTO adresseDTO = personDTO.getAdresse();
var adresse = adresseDTO == null
? null
: Adresse.newBuilder()
.setGatenavn(adresseDTO.getGatenavn())
.setPostnummer(adresseDTO.getPostnummer())
.setPoststed(adresseDTO.getPoststed())
.setKommunenummer(adresseDTO.getKommunenummer())
.build();

log.info("Sender person {} til kafkakø", personDTO.getIdent());
kafkaTemplate.send(
topic,
Person.newBuilder()
.setIdent(personDTO.getIdent())
.setFornavn(personDTO.getFornavn())
.setMellomnavn(personDTO.getMellomnavn() == null ? "" : personDTO.getMellomnavn())
.setEtternavn(personDTO.getEtternavn())
.setFoedselsdato(personDTO.getFoedselsdato().toString())
.setAdresse(adresse)
.setTags(new ArrayList<>(personDTO.getTags()))
.build()
);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import java.util.stream.Collectors;

import no.nav.registre.testnorge.libs.dto.person.v1.PersonDTO;
import no.nav.registre.testnorge.originalpopulasjon.consumer.HendelseConsumer;
import no.nav.registre.testnorge.originalpopulasjon.domain.Person;
import no.nav.registre.testnorge.originalpopulasjon.service.PopulasjonService;

Expand All @@ -19,10 +20,14 @@
public class OriginalPopulasjonController {

private final PopulasjonService populasjonService;
private final HendelseConsumer hendelseConsumer;

@GetMapping
public List<PersonDTO> createPopulasjon(@RequestParam Integer antall) {
var populasjon = populasjonService.createPopulasjon(antall);
return populasjon.stream().map(Person::toDTO).collect(Collectors.toList());
List<PersonDTO> personliste = populasjon.stream().map(Person::toDTO).collect(Collectors.toList());

personliste.forEach(hendelseConsumer::registrertOpprettelseAvPerson);
return personliste;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
@RequiredArgsConstructor
public class PopulasjonService {

private static final Set<String> TAGS = Set.of("Mini-Norge");
private static final Set<String> TAGS = Set.of("MINI_NORGE");
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Foreløpig fiks før enum er på plass. Hvis vi lager en enum av tags, har vi både en enum og en database som holder oversikt. Smør på flesk?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Kanskje vi får se


private final StatistikkService statistikkService;
private final SyntPersonConsumer syntPersonConsumer;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
SERVICEUSER_USERNAME: dummy
SERVICEUSER_PASSWORD: dummy
Original file line number Diff line number Diff line change
Expand Up @@ -23,4 +23,10 @@ consumer:
synt-person-api:
url: https://testnorge-synt-person-api.dev.adeo.no
client_secret: ${SYNT_PERSON_API_CLIENT_SECRET}
client_id: 9e22aba7-8c92-4f5e-8c0e-073e00c82bcf
client_id: 9e22aba7-8c92-4f5e-8c0e-073e00c82bcf

kafka:
bootstrapservers: b27apvl00045.preprod.local:8443,b27apvl00046.preprod.local:8443,b27apvl00047.preprod.local:8443
schemaregistryservers: http://kafka-schema-registry.tpa.svc.nais.local:8081
groupid: testnorge-hendelse-api-v1 #HVA GJØR VI MED DENNE?
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hva skal group-id være her?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Den id-en skal du ikke trenge med mindre du mottar noe fra køen. Tror jeg

topic: testnorge-opprett-person-v1
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,13 @@ CLIENT_ID=dummy
CLIENT_SECRET=dummy

SYNT_PERSON_API_CLIENT_ID=dummy
SYNT_PERSON_API_CLIENT_SECRET=dummy
SYNT_PERSON_API_CLIENT_SECRET=dummy

SERVICEUSER_USERNAME=dummy
SERVICEUSER_PASSWORD=dummy

kafka.bootstrapservers=dummy
kafka.schemaregistryservers=dummy
kafka.groupid=dummy
kafka.topic=testnorge-opprett-person-v1