1
0
mirror of https://github.com/pcvolkmer/mv64e-kafka-to-rest-gateway synced 2025-09-13 17:22:52 +00:00

Initial commit

This commit is contained in:
2025-08-27 16:02:18 +02:00
commit a5e4c908b6
13 changed files with 1318 additions and 0 deletions

73
src/cli.rs Normal file
View File

@@ -0,0 +1,73 @@
use clap::Parser;
// Command-line / environment configuration for the Kafka-to-REST gateway.
// Every option can be supplied either as a `--flag` or via the environment
// variable named in `env = ...`. Plain `//` comments are used deliberately:
// `///` doc comments would be picked up by clap (`about`/long_help) and
// change the generated CLI help output.
#[derive(Parser)]
#[command(author, version, about)]
#[command(arg_required_else_help(true))]
pub struct Cli {
// Kafka broker list used by both the consumer and the producer.
#[arg(
long,
env = "KAFKA_BOOTSTRAP_SERVERS",
default_value = "kafka:9094",
help = "Kafka Bootstrap Server"
)]
pub bootstrap_servers: String,
// Topic the gateway consumes MTB records from.
#[arg(
long,
env = "KAFKA_TOPIC",
default_value = "etl-processor_output",
help = "Kafka Topic"
)]
pub topic: String,
// Topic the gateway publishes HTTP response payloads to.
#[arg(
long,
env = "KAFKA_RESPONSE_TOPIC",
default_value = "etl-processor_output_response",
help = "Kafka Response Topic"
)]
pub response_topic: String,
// Consumer group id; determines offset tracking for this gateway instance.
#[arg(
long,
env = "KAFKA_GROUP_ID",
default_value = "mv64e-kafka-to-rest-gateway",
help = "Kafka Group ID"
)]
pub group_id: String,
// Base URI of the DNPM:DIP REST API. Required (no default).
#[arg(
long,
env = "DNPM_DIP_URI",
help = "DNPM:DIP URI for API requests"
)]
pub dnpm_dip_uri: String,
// Optional HTTP basic-auth credentials for the DNPM:DIP API.
#[arg(
long,
env = "DNPM_DIP_USERNAME",
help = "DNPM:DIP Username"
)]
pub dnpm_dip_username: Option<String>,
#[arg(
long,
env = "DNPM_DIP_PASSWORD",
help = "DNPM:DIP Password"
)]
pub dnpm_dip_password: Option<String>,
// Optional TLS material for the Kafka connection. NOTE(review): these
// options are parsed here but not applied in the visible Kafka client
// configuration in main.rs — confirm they are wired up elsewhere.
#[arg(
long,
env = "KAFKA_SSL_CA_FILE",
help = "CA file for SSL connection to Kafka"
)]
pub ssl_ca_file: Option<String>,
#[arg(
long,
env = "KAFKA_SSL_CERT_FILE",
help = "Certificate file for SSL connection to Kafka"
)]
pub ssl_cert_file: Option<String>,
#[arg(
long,
env = "KAFKA_SSL_KEY_FILE",
help = "Key file for SSL connection to Kafka"
)]
pub ssl_key_file: Option<String>,
#[arg(long, env = "KAFKA_SSL_KEY_PASSWORD", help = "The SSL key password")]
pub ssl_key_password: Option<String>,
}

86
src/http_client.rs Normal file
View File

@@ -0,0 +1,86 @@
use mv64e_mtb_dto::Mtb;
use std::fmt::{Debug, Display, Formatter};
use std::time::Duration;
/// Minimal view of an HTTP response: the numeric status code plus the raw
/// response body as text. Used to relay the DNPM:DIP API result back onto
/// the Kafka response topic.
pub struct HttpResponse {
// HTTP status code, e.g. 200, 404.
pub status_code: u16,
// Raw response body text (may be empty if the body could not be read).
pub status_body: String,
}
/// Error wrapper around a human-readable message describing a failed HTTP
/// request (connection errors, timeouts, etc.).
#[derive(Debug, Clone)]
pub struct HttpClientError(String);
impl Display for HttpClientError {
    /// Writes the wrapped error message verbatim.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.0)
    }
}
/// HTTP client for the DNPM:DIP REST API, holding the normalized base URL,
/// optional basic-auth credentials, and a reusable reqwest client.
pub struct HttpClient {
// API base URL; constructor strips a trailing slash before storing it.
base_url: String,
// Optional basic-auth credentials; empty strings are sent when absent.
username: Option<String>,
password: Option<String>,
// Shared connection pool / client configured with a crate-derived user agent.
client: reqwest::Client,
}
impl HttpClient {
    /// Creates a new client for the DNPM:DIP API.
    ///
    /// A single trailing `/` on `base_url` is stripped so request paths can
    /// be appended with exactly one separator. Credentials are optional;
    /// when absent, empty basic-auth credentials are sent (unchanged from
    /// previous behavior).
    pub fn new(base_url: &str, username: Option<String>, password: Option<String>) -> Self {
        let user_agent_string = format!("{}/{}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION"));
        Self {
            // Fix: the previous `base_url[0..base_url.len() - 2]` sliced off
            // the last TWO characters when the URL ended with "/", corrupting
            // the base URL (e.g. "http://host/api/" -> "http://host/ap").
            base_url: base_url.strip_suffix('/').unwrap_or(base_url).to_string(),
            username,
            password,
            client: reqwest::Client::builder()
                .user_agent(user_agent_string)
                .build()
                .expect("Failed to build HTTP client"),
        }
    }

    /// POSTs an MTB record to `<base_url>/mtb/etl/patient-record`.
    ///
    /// Returns the response status and body text, or an `HttpClientError`
    /// if the request could not be sent (network error, 5s timeout, ...).
    pub async fn send_mtb_request(&self, mtb: &Mtb) -> Result<HttpResponse, HttpClientError> {
        let response = self
            .client
            .post(format!("{}/mtb/etl/patient-record", &self.base_url))
            .basic_auth(
                self.username.clone().unwrap_or_default(),
                Some(self.password.clone().unwrap_or_default()),
            )
            .timeout(Duration::from_secs(5))
            .json(&mtb)
            .send()
            .await
            .map_err(|err| HttpClientError(format!("Failed to send MTB request: {err}")))?;
        Ok(HttpResponse {
            status_code: response.status().as_u16(),
            // Best effort: an unreadable body is reported as empty rather
            // than failing the whole request.
            status_body: response.text().await.unwrap_or_default(),
        })
    }

    /// DELETEs the patient record at `<base_url>/mtb/etl/patient/<patient_id>`.
    ///
    /// Returns the response status and body text, or an `HttpClientError`
    /// if the request could not be sent.
    pub async fn send_delete_request(
        &self,
        patient_id: &str,
    ) -> Result<HttpResponse, HttpClientError> {
        let response = self
            .client
            .delete(format!("{}/mtb/etl/patient/{}", &self.base_url, patient_id))
            .basic_auth(
                self.username.clone().unwrap_or_default(),
                Some(self.password.clone().unwrap_or_default()),
            )
            .timeout(Duration::from_secs(5))
            .send()
            .await
            .map_err(|err| HttpClientError(format!("Failed to send delete request: {err}")))?;
        Ok(HttpResponse {
            status_code: response.status().as_u16(),
            status_body: response.text().await.unwrap_or_default(),
        })
    }
}

179
src/main.rs Normal file
View File

@@ -0,0 +1,179 @@
use crate::cli::Cli;
use crate::http_client::{HttpClient, HttpClientError, HttpResponse};
use clap::Parser;
use mv64e_mtb_dto::Mtb;
use rdkafka::config::RDKafkaLogLevel;
use rdkafka::consumer::{CommitMode, Consumer, StreamConsumer};
use rdkafka::message::Headers;
use rdkafka::producer::{FutureProducer, FutureRecord};
use rdkafka::{ClientConfig, Message};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::error::Error;
use std::string::ToString;
use std::sync::LazyLock;
use std::time::Duration;
use tracing::{error, info};
mod cli;
mod http_client;
/// JSON payload published to the Kafka response topic after a record has
/// been forwarded to the DNPM:DIP API.
#[derive(Serialize, Deserialize)]
struct ResponsePayload {
// Correlation id taken from the incoming message's "requestId" header
// (empty string when absent).
request_id: String,
// HTTP status code returned by the DNPM:DIP API.
status_code: u16,
// Response body parsed as JSON; `{}` when the body was not valid JSON.
status_body: Value,
}
// Global, lazily-parsed CLI/environment configuration. Parsing happens on
// first access; a test-only stand-in (see bottom of file) replaces this in
// `cfg(test)` builds.
#[cfg(not(test))]
static CONFIG: LazyLock<Cli> = LazyLock::new(Cli::parse);
/// Forwards one consumed record to the DNPM:DIP API.
///
/// Records without metadata, or whose metadata still contains model-project
/// consent provisions or research consents, are POSTed as MTB patient
/// records. Records whose metadata shows no remaining consent trigger a
/// DELETE of the patient instead.
async fn handle_record(payload: Mtb) -> Result<HttpResponse, HttpClientError> {
    let client = HttpClient::new(
        &CONFIG.dnpm_dip_uri,
        CONFIG.dnpm_dip_username.clone(),
        CONFIG.dnpm_dip_password.clone(),
    );
    match &payload.metadata {
        // No consent provisions and no research consents: remove the patient.
        Some(metadata)
            if metadata.model_project_consent.provisions.is_empty()
                && metadata.research_consents.is_none() =>
        {
            client.send_delete_request(&payload.patient.id).await
        }
        // Metadata absent, or some form of consent present: send the record.
        _ => client.send_mtb_request(&payload).await,
    }
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
#[cfg(debug_assertions)]
{
tracing_subscriber::fmt()
.with_max_level(tracing::Level::DEBUG)
.init();
}
#[cfg(not(debug_assertions))]
{
tracing_subscriber::fmt()
.with_max_level(tracing::Level::INFO)
.init();
}
let consumer: StreamConsumer = ClientConfig::new()
.set("bootstrap.servers", &CONFIG.bootstrap_servers)
.set("group.id", &CONFIG.group_id)
.set("enable.partition.eof", "false")
.set("auto.offset.reset", "earliest")
.set("enable.auto.commit", "false")
.set_log_level(RDKafkaLogLevel::Debug)
.create()
.expect("Kafka consumer created");
let topic: &str = &CONFIG.topic.clone();
consumer.subscribe(&[topic])?;
info!("Kafka topic '{}' subscribed", CONFIG.topic);
let producer: &FutureProducer = &ClientConfig::new()
.set("bootstrap.servers", &CONFIG.bootstrap_servers.to_string())
.set("message.timeout.ms", "5000")
.create()
.expect("Producer creation error");
loop {
match consumer.recv().await {
Ok(msg) => match msg.payload_view::<str>() {
Some(Ok(payload)) => match msg.key_view::<str>() {
Some(Ok(key)) => {
let payload = if let Ok(payload) = serde_json::from_str::<Mtb>(&payload) {
payload
} else {
error!("Error deserializing payload");
continue;
};
let request_id = match msg.headers() {
None => None,
Some(headers) => {
if let Some(x) = headers
.iter()
.filter(|header| header.key == "requestId")
.next()
.unwrap()
.value
{
Some(str::from_utf8(x).unwrap().to_string())
} else {
None
}
}
}
.unwrap_or_default();
match handle_record(payload).await {
Err(err) => error!("{}", err),
Ok(response) => {
let response_payload = ResponsePayload {
request_id,
status_code: response.status_code,
status_body: serde_json::from_str::<Value>(
&response.status_body,
)
.unwrap_or(json!({})),
};
let payload = serde_json::to_string(&response_payload)?;
let response_record = FutureRecord::to(&CONFIG.response_topic)
.key(key)
.payload(&payload);
let _ = if let Some(headers) = msg.headers() {
producer
.send(
response_record.headers(headers.detach()),
Duration::from_secs(1),
)
.await
} else {
producer.send(response_record, Duration::from_secs(1)).await
};
consumer
.commit_message(&msg, CommitMode::Async)
.expect("Cound not commit message: {}");
}
}
}
_ => error!("Error getting key"),
},
_ => error!("Error getting message"),
},
Err(err) => {
error!("Error receiving message: {}", err);
}
}
}
}
// Test Configuration
// Fixed stand-in for the lazily-parsed CLI config so unit tests never touch
// process arguments or environment variables. Mirrors every field of `Cli`
// with local/dummy endpoints and no credentials or TLS material.
#[cfg(test)]
static CONFIG: LazyLock<Cli> = LazyLock::new(|| Cli {
bootstrap_servers: "localhost:9094".to_string(),
topic: "test-topic".to_string(),
response_topic: "test-response-topic".to_string(),
group_id: "test-group-id".to_string(),
dnpm_dip_uri: "http://localhost:8000/api".to_string(),
dnpm_dip_username: None,
dnpm_dip_password: None,
ssl_ca_file: None,
ssl_cert_file: None,
ssl_key_file: None,
ssl_key_password: None,
});