
feat: add endpoint for DNPM-Datamodel V2 using content negotiation (#104)

This simply adds a REST endpoint without a proper implementation. The goal is to accept DNPM V2 JSON data.
Paul-Christian Volkmer 2025-04-06 13:36:30 +02:00 committed by GitHub
parent 48b1e62e22
commit 7d97365aea
7 changed files with 2446 additions and 13 deletions
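
The content negotiation mentioned above works on the request's Content-Type: both handler methods in MtbFileRestController are mapped to the same path and Spring picks one via the consumes attribute, so existing bwHC clients sending application/json are unaffected while DNPM V2 clients send application/vnd.dnpm.v2.mtb+json. A minimal client-side sketch using the plain JDK HttpClient; the local base URL and the payload are placeholder assumptions, the /mtb path is taken from the controller tests below:

import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse

fun main() {
    // Placeholder payload; a real request would carry a full DNPM V2 Mtb JSON document.
    val json = """{ "patient": { "id": "DUMMY_12345678" } }"""

    val request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8080/mtb")) // assumed local instance
        .header("Content-Type", "application/vnd.dnpm.v2.mtb+json")
        .POST(HttpRequest.BodyPublishers.ofString(json))
        .build()

    val response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.discarding())

    // With this commit the DNPM V2 handler is a stub, so 501 (Not Implemented) is expected;
    // the same path with Content-Type application/json still reaches the existing bwHC handler.
    println(response.statusCode())
}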

View File

@@ -17,6 +17,7 @@ version = "0.11.0-SNAPSHOT"
var versions = mapOf(
"bwhc-dto-java" to "0.4.0",
"mtb-dto" to "0.1.0-SNAPSHOT",
"hapi-fhir" to "7.6.0",
"mockito-kotlin" to "5.4.0",
"archunit" to "1.3.0",
@@ -48,9 +49,18 @@ configurations {
compileOnly {
extendsFrom(configurations.annotationProcessor.get())
}
all {
resolutionStrategy {
cacheChangingModulesFor(5, "minutes")
}
}
}
repositories {
maven {
url = uri("https://git.dnpm.dev/api/packages/public-snapshots/maven")
}
maven {
url = uri("https://git.dnpm.dev/api/packages/public/maven")
}
@@ -72,6 +82,7 @@ dependencies {
implementation("commons-codec:commons-codec")
implementation("io.projectreactor.kotlin:reactor-kotlin-extensions")
implementation("de.ukw.ccc:bwhc-dto-java:${versions["bwhc-dto-java"]}")
implementation("dev.pcvolkmer.mv64e:mtb-dto:${versions["mtb-dto"]}") { isChanging = true }
implementation("ca.uhn.hapi.fhir:hapi-fhir-base:${versions["hapi-fhir"]}")
implementation("ca.uhn.hapi.fhir:hapi-fhir-structures-r4:${versions["hapi-fhir"]}")
implementation("org.apache.httpcomponents.client5:httpclient5")

View File

@@ -0,0 +1,35 @@
/*
* This file is part of ETL-Processor
*
* Copyright (c) 2025 Comprehensive Cancer Center Mainfranken, Datenintegrationszentrum Philipps-Universität Marburg and Contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package dev.dnpm.etl.processor
import org.springframework.http.MediaType
/**
* Custom MediaTypes
*
* @since 0.11.0
*/
object CustomMediaType {
val APPLICATION_VND_DNPM_V2_MTB_JSON = MediaType("application", "vnd.dnpm.v2.mtb+json")
const val APPLICATION_VND_DNPM_V2_MTB_JSON_VALUE = "application/vnd.dnpm.v2.mtb+json"
val APPLICATION_VND_DNPM_V2_RD_JSON = MediaType("application", "vnd.dnpm.v2.rd+json")
const val APPLICATION_VND_DNPM_V2_RD_JSON_VALUE = "application/vnd.dnpm.v2.rd+json"
}
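
Each media type is declared twice on purpose: annotation attributes such as consumes need compile-time String constants, hence the *_VALUE constants, while the MediaType instances are convenient for programmatic checks, mirroring Spring's own APPLICATION_JSON / APPLICATION_JSON_VALUE pair. A small usage sketch with a hypothetical incoming header value:

import dev.dnpm.etl.processor.CustomMediaType
import org.springframework.http.MediaType

fun main() {
    // Hypothetical Content-Type header value, including a charset parameter.
    val requested = MediaType.parseMediaType("application/vnd.dnpm.v2.mtb+json;charset=UTF-8")

    // The MediaType constant supports programmatic checks; isCompatibleWith ignores parameters.
    println(CustomMediaType.APPLICATION_VND_DNPM_V2_MTB_JSON.isCompatibleWith(requested)) // true

    // The *_VALUE constant is what annotation attributes need, e.g.
    // @PostMapping(consumes = [CustomMediaType.APPLICATION_VND_DNPM_V2_MTB_JSON_VALUE]).
    println(CustomMediaType.APPLICATION_VND_DNPM_V2_MTB_JSON_VALUE)
}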

View File

@@ -1,7 +1,7 @@
/*
* This file is part of ETL-Processor
*
* Copyright (c) 2024 Comprehensive Cancer Center Mainfranken, Datenintegrationszentrum Philipps-Universität Marburg and Contributors
* Copyright (c) 2025 Comprehensive Cancer Center Mainfranken, Datenintegrationszentrum Philipps-Universität Marburg and Contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
@@ -22,11 +22,13 @@ package dev.dnpm.etl.processor.input
import com.fasterxml.jackson.databind.ObjectMapper
import de.ukw.ccc.bwhc.dto.Consent
import de.ukw.ccc.bwhc.dto.MtbFile
import dev.dnpm.etl.processor.CustomMediaType
import dev.dnpm.etl.processor.PatientId
import dev.dnpm.etl.processor.RequestId
import dev.dnpm.etl.processor.services.RequestProcessor
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.slf4j.LoggerFactory
import org.springframework.http.MediaType
import org.springframework.kafka.listener.MessageListener
class KafkaInputListener(
@@ -35,10 +37,29 @@ class KafkaInputListener(
) : MessageListener<String, String> {
private val logger = LoggerFactory.getLogger(KafkaInputListener::class.java)
override fun onMessage(data: ConsumerRecord<String, String>) {
val mtbFile = objectMapper.readValue(data.value(), MtbFile::class.java)
override fun onMessage(record: ConsumerRecord<String, String>) {
when (guessMimeType(record)) {
MediaType.APPLICATION_JSON_VALUE -> handleBwhcMessage(record)
CustomMediaType.APPLICATION_VND_DNPM_V2_MTB_JSON_VALUE -> handleDnpmV2Message(record)
else -> {
/* ignore other messages */
}
}
}
private fun guessMimeType(record: ConsumerRecord<String, String>): String {
if (record.headers().headers("contentType").toList().isEmpty()) {
// Fallback if no contentType set (old behavior)
return MediaType.APPLICATION_JSON_VALUE
}
return record.headers().headers("contentType")?.firstOrNull()?.value().contentToString()
}
private fun handleBwhcMessage(record: ConsumerRecord<String, String>) {
val mtbFile = objectMapper.readValue(record.value(), MtbFile::class.java)
val patientId = PatientId(mtbFile.patient.id)
val firstRequestIdHeader = data.headers().headers("requestId")?.firstOrNull()
val firstRequestIdHeader = record.headers().headers("requestId")?.firstOrNull()
val requestId = if (null != firstRequestIdHeader) {
RequestId(String(firstRequestIdHeader.value()))
} else {
@@ -61,4 +82,10 @@ class KafkaInputListener(
}
}
}
}
private fun handleDnpmV2Message(record: ConsumerRecord<String, String>) {
// Do not handle DNPM-V2 for now
logger.warn("Ignoring MTB File in DNPM V2 format: Not implemented yet")
}
}
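
The listener now routes on an optional "contentType" record header and falls back to application/json when it is missing, so existing producers keep working. A producer that wants a record handled as DNPM V2 has to set that header; a minimal sketch with a placeholder topic name, not part of this commit:

import dev.dnpm.etl.processor.CustomMediaType
import org.apache.kafka.clients.producer.ProducerRecord

// Build a record whose "contentType" header makes guessMimeType() route it to
// handleDnpmV2Message(); with this commit the listener only logs a warning for it.
fun dnpmV2Record(requestId: String, json: String): ProducerRecord<String, String> {
    val record = ProducerRecord<String, String>("etl-processor_input", requestId, json) // placeholder topic
    record.headers()
        .add("requestId", requestId.toByteArray())
        .add("contentType", CustomMediaType.APPLICATION_VND_DNPM_V2_MTB_JSON_VALUE.toByteArray())
    return record
}

A KafkaProducer or Spring KafkaTemplate can then send the record as usual.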

View File

@@ -21,9 +21,13 @@ package dev.dnpm.etl.processor.input
import de.ukw.ccc.bwhc.dto.Consent
import de.ukw.ccc.bwhc.dto.MtbFile
import dev.dnpm.etl.processor.CustomMediaType
import dev.dnpm.etl.processor.PatientId
import dev.dnpm.etl.processor.services.RequestProcessor
import dev.pcvolkmer.mv64e.mtb.Mtb
import org.slf4j.LoggerFactory
import org.springframework.http.HttpStatus
import org.springframework.http.MediaType
import org.springframework.http.ResponseEntity
import org.springframework.web.bind.annotation.*
@@ -40,7 +44,7 @@ class MtbFileRestController(
return ResponseEntity.ok("Test")
}
@PostMapping
@PostMapping( consumes = [ MediaType.APPLICATION_JSON_VALUE ] )
fun mtbFile(@RequestBody mtbFile: MtbFile): ResponseEntity<Unit> {
if (mtbFile.consent.status == Consent.Status.ACTIVE) {
logger.debug("Accepted MTB File for processing")
@@ -53,6 +57,11 @@ class MtbFileRestController(
return ResponseEntity.accepted().build()
}
@PostMapping( consumes = [ CustomMediaType.APPLICATION_VND_DNPM_V2_MTB_JSON_VALUE] )
fun mtbFile(@RequestBody mtbFile: Mtb): ResponseEntity<Unit> {
return ResponseEntity.status(HttpStatus.NOT_IMPLEMENTED).build()
}
@DeleteMapping(path = ["{patientId}"])
fun deleteData(@PathVariable patientId: String): ResponseEntity<Unit> {
logger.debug("Accepted patient ID to process deletion")
@@ -60,4 +69,4 @@ class MtbFileRestController(
return ResponseEntity.accepted().build()
}
}
}

View File

@@ -1,7 +1,7 @@
/*
* This file is part of ETL-Processor
*
* Copyright (c) 2024 Comprehensive Cancer Center Mainfranken, Datenintegrationszentrum Philipps-Universität Marburg and Contributors
* Copyright (c) 2025 Comprehensive Cancer Center Mainfranken, Datenintegrationszentrum Philipps-Universität Marburg and Contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
@@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper
import de.ukw.ccc.bwhc.dto.Consent
import de.ukw.ccc.bwhc.dto.MtbFile
import de.ukw.ccc.bwhc.dto.Patient
import dev.dnpm.etl.processor.CustomMediaType
import dev.dnpm.etl.processor.services.RequestProcessor
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.header.internals.RecordHeader
@@ -63,7 +64,15 @@ class KafkaInputListenerTest {
.withConsent(Consent.builder().withStatus(Consent.Status.ACTIVE).build())
.build()
kafkaInputListener.onMessage(ConsumerRecord("testtopic", 0, 0, "", this.objectMapper.writeValueAsString(mtbFile)))
kafkaInputListener.onMessage(
ConsumerRecord(
"testtopic",
0,
0,
"",
this.objectMapper.writeValueAsString(mtbFile)
)
)
verify(requestProcessor, times(1)).processMtbFile(any())
}
@@ -75,7 +84,15 @@ class KafkaInputListenerTest {
.withConsent(Consent.builder().withStatus(Consent.Status.REJECTED).build())
.build()
kafkaInputListener.onMessage(ConsumerRecord("testtopic", 0, 0, "", this.objectMapper.writeValueAsString(mtbFile)))
kafkaInputListener.onMessage(
ConsumerRecord(
"testtopic",
0,
0,
"",
this.objectMapper.writeValueAsString(mtbFile)
)
)
verify(requestProcessor, times(1)).processDeletion(anyValueClass())
}
@@ -89,7 +106,19 @@ class KafkaInputListenerTest {
val headers = RecordHeaders(listOf(RecordHeader("requestId", UUID.randomUUID().toString().toByteArray())))
kafkaInputListener.onMessage(
ConsumerRecord("testtopic", 0, 0, -1L, TimestampType.NO_TIMESTAMP_TYPE, -1, -1, "", this.objectMapper.writeValueAsString(mtbFile), headers, Optional.empty())
ConsumerRecord(
"testtopic",
0,
0,
-1L,
TimestampType.NO_TIMESTAMP_TYPE,
-1,
-1,
"",
this.objectMapper.writeValueAsString(mtbFile),
headers,
Optional.empty()
)
)
verify(requestProcessor, times(1)).processMtbFile(any(), anyValueClass())
@@ -104,9 +133,52 @@ class KafkaInputListenerTest {
val headers = RecordHeaders(listOf(RecordHeader("requestId", UUID.randomUUID().toString().toByteArray())))
kafkaInputListener.onMessage(
ConsumerRecord("testtopic", 0, 0, -1L, TimestampType.NO_TIMESTAMP_TYPE, -1, -1, "", this.objectMapper.writeValueAsString(mtbFile), headers, Optional.empty())
ConsumerRecord(
"testtopic",
0,
0,
-1L,
TimestampType.NO_TIMESTAMP_TYPE,
-1,
-1,
"",
this.objectMapper.writeValueAsString(mtbFile),
headers,
Optional.empty()
)
)
verify(requestProcessor, times(1)).processDeletion(anyValueClass(), anyValueClass())
}
}
@Test
fun shouldNotProcessDnpmV2Request() {
val mtbFile = MtbFile.builder()
.withPatient(Patient.builder().withId("DUMMY_12345678").build())
.withConsent(Consent.builder().withStatus(Consent.Status.REJECTED).build())
.build()
val headers = RecordHeaders(
listOf(
RecordHeader("requestId", UUID.randomUUID().toString().toByteArray()),
RecordHeader("contentType", CustomMediaType.APPLICATION_VND_DNPM_V2_MTB_JSON_VALUE.toByteArray())
)
)
kafkaInputListener.onMessage(
ConsumerRecord(
"testtopic",
0,
0,
-1L,
TimestampType.NO_TIMESTAMP_TYPE,
-1,
-1,
"",
this.objectMapper.writeValueAsString(mtbFile),
headers,
Optional.empty()
)
)
verify(requestProcessor, times(0)).processDeletion(anyValueClass(), anyValueClass())
}
}

View File

@@ -21,6 +21,7 @@ package dev.dnpm.etl.processor.input
import com.fasterxml.jackson.databind.ObjectMapper
import de.ukw.ccc.bwhc.dto.*
import dev.dnpm.etl.processor.CustomMediaType
import dev.dnpm.etl.processor.services.RequestProcessor
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Nested
@@ -32,6 +33,7 @@ import org.mockito.Mockito.verify
import org.mockito.junit.jupiter.MockitoExtension
import org.mockito.kotlin.any
import org.mockito.kotlin.anyValueClass
import org.springframework.core.io.ClassPathResource
import org.springframework.http.MediaType
import org.springframework.test.web.servlet.MockMvc
import org.springframework.test.web.servlet.delete
@@ -155,6 +157,40 @@ class MtbFileRestControllerTest {
}
}
@Nested
inner class RequestsForDnpmDataModel21 {
private lateinit var mockMvc: MockMvc
private lateinit var requestProcessor: RequestProcessor
@BeforeEach
fun setup(
@Mock requestProcessor: RequestProcessor
) {
this.requestProcessor = requestProcessor
val controller = MtbFileRestController(requestProcessor)
this.mockMvc = MockMvcBuilders.standaloneSetup(controller).build()
}
@Test
fun shouldRespondPostRequest() {
val mtbFileContent = ClassPathResource("mv64e-mtb-fake-patient.json").inputStream.readAllBytes().toString(Charsets.UTF_8)
mockMvc.post("/mtb") {
content = mtbFileContent
contentType = CustomMediaType.APPLICATION_VND_DNPM_V2_MTB_JSON
}.andExpect {
status {
isNotImplemented()
}
}
verify(requestProcessor, times(0)).processMtbFile(any())
}
}
companion object {
fun bwhcMtbFileContent(consentStatus: Consent.Status) = MtbFile.builder()
.withPatient(

File diff suppressed because it is too large.