diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 87f438c..0ef54d5 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -19,7 +19,7 @@ jobs: statuses: read with: image-prefix: "samply/" - components: '[ "routine-connector" ]' + components: '[ "transFAIR" ]' architectures: '[ "amd64" ]' test-via-script: true push-to: ${{ (github.ref_protected == true || github.event_name == 'workflow_dispatch') && 'dockerhub' || 'none' }} diff --git a/.sqlx/query-11248c2b310423843642a4e09bd29a49b982a591b7c6f91936ce6c671babd98b.json b/.sqlx/query-11248c2b310423843642a4e09bd29a49b982a591b7c6f91936ce6c671babd98b.json new file mode 100644 index 0000000..66b0d57 --- /dev/null +++ b/.sqlx/query-11248c2b310423843642a4e09bd29a49b982a591b7c6f91936ce6c671babd98b.json @@ -0,0 +1,38 @@ +{ + "db_name": "SQLite", + "query": "SELECT id, exchange_id, project_id, status as \"status: _\" FROM data_requests WHERE exchange_id = $1 AND project_id = $2;", + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Text" + }, + { + "name": "exchange_id", + "ordinal": 1, + "type_info": "Text" + }, + { + "name": "project_id", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "status: _", + "ordinal": 3, + "type_info": "Text" + } + ], + "parameters": { + "Right": 2 + }, + "nullable": [ + false, + false, + true, + false + ] + }, + "hash": "11248c2b310423843642a4e09bd29a49b982a591b7c6f91936ce6c671babd98b" +} diff --git a/.sqlx/query-33a1cd1d3463e0e0e84866fd3eb49c83a8460b702c61a327fb0c1240c270039a.json b/.sqlx/query-33a1cd1d3463e0e0e84866fd3eb49c83a8460b702c61a327fb0c1240c270039a.json new file mode 100644 index 0000000..5ed604e --- /dev/null +++ b/.sqlx/query-33a1cd1d3463e0e0e84866fd3eb49c83a8460b702c61a327fb0c1240c270039a.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "INSERT INTO data_requests (id, exchange_id, project_id, status) VALUES ($1, $2, $3, $4)", + "describe": { + "columns": [], + "parameters": { + "Right": 4 + }, + "nullable": [] + }, + "hash": "33a1cd1d3463e0e0e84866fd3eb49c83a8460b702c61a327fb0c1240c270039a" +} diff --git a/.sqlx/query-888dcc41f8df82f4f948662c8fd1cfb89468090723da5f731884acad18b51887.json b/.sqlx/query-888dcc41f8df82f4f948662c8fd1cfb89468090723da5f731884acad18b51887.json deleted file mode 100644 index 841f260..0000000 --- a/.sqlx/query-888dcc41f8df82f4f948662c8fd1cfb89468090723da5f731884acad18b51887.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "INSERT INTO data_requests (id, status) VALUES ($1, $2)", - "describe": { - "columns": [], - "parameters": { - "Right": 2 - }, - "nullable": [] - }, - "hash": "888dcc41f8df82f4f948662c8fd1cfb89468090723da5f731884acad18b51887" -} diff --git a/.sqlx/query-9e4a0286cab5539f4e9055974c8b5a9b56c1c1543b637490c772540b7794b6f9.json b/.sqlx/query-9e4a0286cab5539f4e9055974c8b5a9b56c1c1543b637490c772540b7794b6f9.json deleted file mode 100644 index 19430b5..0000000 --- a/.sqlx/query-9e4a0286cab5539f4e9055974c8b5a9b56c1c1543b637490c772540b7794b6f9.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "db_name": "SQLite", - "query": "SELECT id, status as \"status: _\" FROM data_requests WHERE id = $1;", - "describe": { - "columns": [ - { - "name": "id", - "ordinal": 0, - "type_info": "Text" - }, - { - "name": "status: _", - "ordinal": 1, - "type_info": "Text" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - false, - false - ] - }, - "hash": "9e4a0286cab5539f4e9055974c8b5a9b56c1c1543b637490c772540b7794b6f9" -} diff --git 
a/.sqlx/query-9ec29c86fd2771da65fffe3a680e0aed738fd21331dc86dc3959ffeca60296d7.json b/.sqlx/query-9ec29c86fd2771da65fffe3a680e0aed738fd21331dc86dc3959ffeca60296d7.json new file mode 100644 index 0000000..927e568 --- /dev/null +++ b/.sqlx/query-9ec29c86fd2771da65fffe3a680e0aed738fd21331dc86dc3959ffeca60296d7.json @@ -0,0 +1,38 @@ +{ + "db_name": "SQLite", + "query": "SELECT id, exchange_id, project_id, status as \"status: _\" FROM data_requests WHERE id = $1;", + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Text" + }, + { + "name": "exchange_id", + "ordinal": 1, + "type_info": "Text" + }, + { + "name": "project_id", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "status: _", + "ordinal": 3, + "type_info": "Text" + } + ], + "parameters": { + "Right": 1 + }, + "nullable": [ + false, + false, + true, + false + ] + }, + "hash": "9ec29c86fd2771da65fffe3a680e0aed738fd21331dc86dc3959ffeca60296d7" +} diff --git a/.sqlx/query-e643e3de461a0122755ad08cab4250e3067d0fd008a4c9af2ac80d7e839123cf.json b/.sqlx/query-e643e3de461a0122755ad08cab4250e3067d0fd008a4c9af2ac80d7e839123cf.json new file mode 100644 index 0000000..151c1e9 --- /dev/null +++ b/.sqlx/query-e643e3de461a0122755ad08cab4250e3067d0fd008a4c9af2ac80d7e839123cf.json @@ -0,0 +1,38 @@ +{ + "db_name": "SQLite", + "query": "SELECT id, exchange_id, project_id, status as \"status: _\" FROM data_requests;", + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Text" + }, + { + "name": "exchange_id", + "ordinal": 1, + "type_info": "Text" + }, + { + "name": "project_id", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "status: _", + "ordinal": 3, + "type_info": "Text" + } + ], + "parameters": { + "Right": 0 + }, + "nullable": [ + false, + false, + true, + false + ] + }, + "hash": "e643e3de461a0122755ad08cab4250e3067d0fd008a4c9af2ac80d7e839123cf" +} diff --git a/.sqlx/query-fc1b6fe25afc1643b4fb689b7729c71476cd8a4b0bc8cb395ebe9bf2ebbd2bcb.json b/.sqlx/query-fc1b6fe25afc1643b4fb689b7729c71476cd8a4b0bc8cb395ebe9bf2ebbd2bcb.json deleted file mode 100644 index bb1da65..0000000 --- a/.sqlx/query-fc1b6fe25afc1643b4fb689b7729c71476cd8a4b0bc8cb395ebe9bf2ebbd2bcb.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "db_name": "SQLite", - "query": "SELECT id, status as \"status: _\" FROM data_requests;", - "describe": { - "columns": [ - { - "name": "id", - "ordinal": 0, - "type_info": "Text" - }, - { - "name": "status: _", - "ordinal": 1, - "type_info": "Text" - } - ], - "parameters": { - "Right": 0 - }, - "nullable": [ - false, - false - ] - }, - "hash": "fc1b6fe25afc1643b4fb689b7729c71476cd8a4b0bc8cb395ebe9bf2ebbd2bcb" -} diff --git a/dev/test b/dev/test index 4d22a40..47e688b 100755 --- a/dev/test +++ b/dev/test @@ -9,7 +9,7 @@ function start_bg() { # Test Config Setup export INSTITUTE_TTP_URL="http://localhost:8081"; - export INSTITUTE_TTP_API_KEY="routine-connector-password"; + export INSTITUTE_TTP_API_KEY="transFAIR-password"; export PROJECT_ID_SYSTEM="PROJECT_1_ID" export FHIR_REQUEST_URL="http://localhost:8085" export FHIR_INPUT_URL="http://localhost:8086" @@ -38,8 +38,8 @@ function start_bg() { fi done done - chmod +x artifacts/binaries-amd64/routine-connector - artifacts/binaries-amd64/routine-connector & + chmod +x artifacts/binaries-amd64/transFAIR + artifacts/binaries-amd64/transFAIR & sleep 10 } diff --git a/docker-compose.yml b/docker-compose.yml index 5837114..61862e6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -17,7 +17,7 @@ services: ML_DB_USER: mainzelliste 
       ML_DB_HOST: mainzelliste-db
       ML_DB_PASS: "${ML_DB_PASS:-my-secret-db-password}"
-      ML_ROUTINE_CONNECTOR_PASSPHRASE: "${ML_ROUTINE_CONNECTOR_PASSPHRASE:-routine-connector-password}"
+      ML_ROUTINE_CONNECTOR_PASSPHRASE: "${ML_ROUTINE_CONNECTOR_PASSPHRASE:-transFAIR-password}"
       ML_DIZ_PASSPHRASE: "${ML_DIZ_PASSPHRASE:-diz-password}"
       ML_LOG_LEVEL: "${ML_LOG_LEVEL:-info}"
     configs:
diff --git a/migrations/20240614130457_init.up.sql b/migrations/20240614130457_init.up.sql
index 3b28180..03b5dfe 100644
--- a/migrations/20240614130457_init.up.sql
+++ b/migrations/20240614130457_init.up.sql
@@ -10,6 +10,8 @@ VALUES
 ('Created', 1),
 ('Error', 4);
 CREATE TABLE IF NOT EXISTS data_requests (
-    id CHAR(36) PRIMARY KEY NOT NULL,
-    status CHAR(16) NOT NULL DEFAULT ('Created') REFERENCES request_status(Type)
+    id CHAR(36) PRIMARY KEY NOT NULL,
+    exchange_id CHAR(36) NOT NULL,
+    project_id CHAR(36) NULL,
+    status CHAR(16) NOT NULL DEFAULT ('Created') REFERENCES request_status(Type)
 )
diff --git a/src/config.rs b/src/config.rs
index fece089..6ecf0a8 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -20,22 +20,23 @@ pub struct Config {
     // Definition of the fhir server and credentials used for communicating data requests to the dic
     #[clap(long, env)]
     pub fhir_request_url: Url,
-    #[clap(long, env)]
-    pub fhir_request_credentials: Option<Auth>,
+    #[clap(long, env, default_value = "")]
+    pub fhir_request_credentials: Auth,
     // Definition of the fhir server and credentials used for reading data from the dic
     #[clap(long, env)]
     pub fhir_input_url: Url,
-    #[clap(long, env)]
-    pub fhir_input_credentials: Option<Auth>,
+    #[clap(long, env, default_value = "")]
+    pub fhir_input_credentials: Auth,
     // Definition of the fhir server and credentials used for adding data to the project data
     #[clap(long, env)]
     pub fhir_output_url: Url,
-    #[clap(long, env)]
-    pub fhir_output_credentials: Option<Auth>,
+    #[clap(long, env, default_value = "")]
+    pub fhir_output_credentials: Auth,
 }
 
 #[derive(Debug, Clone)]
 pub enum Auth {
+    None,
     Basic {
         user: String,
         pw: String,
@@ -46,6 +47,9 @@ impl FromStr for Auth {
     type Err = &'static str;
 
     fn from_str(s: &str) -> Result<Self, Self::Err> {
+        if s.is_empty() {
+            return Ok(Self::None);
+        }
         let (user, pw) = s.split_once(":").ok_or("Credentials should be in the form of '<user>:<pw>'")?;
         Ok(Self::Basic { user: user.to_owned(), pw: pw.to_owned() })
     }
diff --git a/src/data_access/data_requests.rs b/src/data_access/data_requests.rs
new file mode 100644
index 0000000..e275b72
--- /dev/null
+++ b/src/data_access/data_requests.rs
@@ -0,0 +1,84 @@
+use super::models::DataRequest;
+use sqlx::SqlitePool;
+use tracing::debug;
+
+pub async fn get_all(db_pool: &SqlitePool) -> Result<Vec<DataRequest>, sqlx::Error> {
+    let data_request = sqlx::query_as!(
+        DataRequest,
+        r#"SELECT id, exchange_id, project_id, status as "status: _" FROM data_requests;"#,
+    )
+    .fetch_all(db_pool)
+    .await;
+
+    data_request
+}
+
+pub async fn get_by_id(db_pool: &SqlitePool, id: &str) -> Result<Option<DataRequest>, sqlx::Error> {
+    let data_request = sqlx::query_as!(
+        DataRequest,
+        r#"SELECT id, exchange_id, project_id, status as "status: _" FROM data_requests WHERE id = $1;"#,
+        id
+    )
+    .fetch_optional(db_pool)
+    .await;
+
+    data_request
+}
+
+pub async fn get_by(
+    db_pool: &SqlitePool,
+    exchange_id: &str,
+    project_id: Option<&str>,
+) -> Result<Option<DataRequest>, sqlx::Error> {
+    let data_request = if project_id.is_some() {
+        sqlx::query_as!(
+            DataRequest,
+            r#"SELECT id, exchange_id, project_id, status as "status: _" FROM data_requests WHERE exchange_id = $1 AND project_id = $2;"#,
+            exchange_id, project_id
+        )
+        .fetch_optional(db_pool)
+        .await
+    } else {
+        sqlx::query_as!(
+            DataRequest,
+            r#"SELECT id, exchange_id, project_id, status as "status: _" FROM data_requests WHERE exchange_id = $1;"#,
+            exchange_id
+        )
+        .fetch_optional(db_pool)
+        .await
+    };
+
+    debug!("exchange_id: {exchange_id}, project id: {}, data request: {:?}", project_id.unwrap_or(""), data_request.is_err());
+    data_request
+}
+
+pub async fn exists(db_pool: &SqlitePool, exchange_id: &str, project_id: Option<&str>) -> bool {
+    let data_request = get_by(db_pool, exchange_id, project_id).await.unwrap_or(None);
+    data_request.is_some()
+}
+
+pub async fn insert(db_pool: &SqlitePool, data_request: &DataRequest) -> Result<i64, sqlx::Error> {
+    let query = if data_request.project_id.is_some() {
+        sqlx::query!(
+            "INSERT INTO data_requests (id, exchange_id, project_id, status) VALUES ($1, $2, $3, $4)",
+            data_request.id,
+            data_request.exchange_id,
+            data_request.project_id,
+            data_request.status
+        )
+    } else {
+        sqlx::query!(
+            "INSERT INTO data_requests (id, exchange_id, status) VALUES ($1, $2, $3)",
+            data_request.id,
+            data_request.exchange_id,
+            data_request.status
+        )
+    };
+
+    let insert_query_result = query
+        .execute(db_pool)
+        .await
+        .map(|qr| qr.last_insert_rowid());
+
+    insert_query_result
+}
diff --git a/src/data_access/mod.rs b/src/data_access/mod.rs
new file mode 100644
index 0000000..0bb6d27
--- /dev/null
+++ b/src/data_access/mod.rs
@@ -0,0 +1,2 @@
+pub mod data_requests;
+pub mod models;
diff --git a/src/data_access/models.rs b/src/data_access/models.rs
new file mode 100644
index 0000000..2eee8c1
--- /dev/null
+++ b/src/data_access/models.rs
@@ -0,0 +1,36 @@
+use fhir_sdk::r4b::resources::{Consent, Patient};
+use serde::{Deserialize, Serialize};
+
+#[derive(Clone, Default, Serialize, Deserialize, sqlx::Type)]
+pub enum RequestStatus {
+    Created = 1,
+    _DataLoaded = 2,
+    _UpdateAvailable = 3,
+    #[default]
+    Error = 4,
+}
+
+#[derive(Clone, Default, Serialize, Deserialize, sqlx::FromRow)]
+pub struct DataRequest {
+    pub id: String,
+    pub exchange_id: String,
+    pub project_id: Option<String>,
+    pub status: RequestStatus,
+}
+
+impl DataRequest {
+    pub fn new(id: String, exchange_id: String, project_id: Option<String>) -> Self {
+        Self {
+            id,
+            exchange_id,
+            project_id,
+            status: RequestStatus::Created,
+        }
+    }
+}
+
+#[derive(Serialize, Deserialize, Clone)]
+pub struct DataRequestPayload {
+    pub patient: Patient,
+    pub consent: Consent,
+}
diff --git a/src/fhir.rs b/src/fhir.rs
index add9657..82e06e5 100644
--- a/src/fhir.rs
+++ b/src/fhir.rs
@@ -7,32 +7,30 @@ use fhir_sdk::r4b::{
 use reqwest::{header, Client, StatusCode, Url};
 use tracing::{debug, error, warn};
 
-use crate::{config::Auth, requests::DataRequestPayload, CONFIG};
+use crate::{config::Auth, data_access::models::DataRequestPayload, CONFIG};
 
 #[derive(Clone, Debug)]
 pub struct FhirServer {
     url: Url,
-    auth: Option<Auth>,
+    auth: Auth,
     client: Client,
 }
 
 trait ClientBuilderExt {
-    fn add_auth(self, auth: &Option<Auth>) -> Self;
+    fn add_auth(self, auth: &Auth) -> Self;
 }
 
 impl ClientBuilderExt for reqwest::RequestBuilder {
-    fn add_auth(self, auth: &Option<Auth>) -> Self {
-        let Some(auth) = auth else {
-            return self
-        };
+    fn add_auth(self, auth: &Auth) -> Self {
         match auth {
             Auth::Basic { user, pw } => self.basic_auth(user, Some(pw)),
+            Auth::None => self,
         }
     }
 }
 
 impl FhirServer {
-    pub fn new(url: Url, auth: Option<Auth>) -> Self {
+    pub fn new(url: Url, auth: Auth) -> Self {
         Self { url, auth, client: Client::new() }
     }
diff --git a/src/main.rs b/src/main.rs
index 706fa98..56acd82 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -16,6 +16,7 @@ mod config;
 mod fhir;
 mod requests;
 mod ttp;
+mod data_access;
 
 static CONFIG: Lazy<Config> = Lazy::new(Config::parse);
 static SERVER_ADDRESS: &str = "0.0.0.0:8080";
@@ -132,27 +133,55 @@ async fn fetch_data(input_fhir_server: &FhirServer, output_fhir_server: &FhirServer
 mod tests {
     use pretty_assertions::assert_eq;
     use reqwest::StatusCode;
-
-    use crate::requests::DataRequest;
-    async fn post_data_request() -> DataRequest {
-        let bytes = include_bytes!("../docs/examples/data_request.json");
-        let json = &serde_json::from_slice::<serde_json::Value>(bytes).unwrap();
+    use crate::data_access::models::DataRequest;
+
+    const BASE_API_URL: &str = "http://localhost:8080/requests";
+    async fn get_all_data_requests() -> Vec<DataRequest> {
         let response = reqwest::Client::new()
-            .post(format!("http://localhost:8080/requests"))
-            .json(json)
+            .get(BASE_API_URL)
             .send()
             .await
-            .expect("POST endpoint (/requests) should give a valid response");
-        assert_eq!(response.status(), StatusCode::CREATED);
-        response.json().await.unwrap()
+            .expect("GET endpoint (/requests) should give a valid response");
+        assert_eq!(response.status(), StatusCode::OK);
+
+        let data_requests = response.json::<Vec<DataRequest>>().await.unwrap();
+        dbg!("number of rows in data_requests table: {}", data_requests.len());
+        data_requests
+    }
+
+    async fn post_data_request() -> DataRequest {
+        let data_requests = get_all_data_requests().await;
+        if data_requests.len() > 0 {
+            // NOTE: the tests always use a hard-coded patient from the
+            // ../docs/examples/data_request.json file, so this test is fine.
+            // Even if we find a single row in the data_requests table, it is
+            // for this one single patient only.
+            dbg!("a data request for this patient already exists, returning the saved one");
+            data_requests[0].clone()
+        } else {
+            dbg!("creating a new data request");
+            let bytes = include_bytes!("../docs/examples/data_request.json");
+            let json = &serde_json::from_slice::<serde_json::Value>(bytes).unwrap();
+
+            let response = reqwest::Client::new()
+                .post(BASE_API_URL)
+                .json(json)
+                .send()
+                .await
+                .expect("POST endpoint (/requests) should give a valid response");
+            assert_eq!(response.status(), StatusCode::CREATED);
+            response.json().await.unwrap()
+        }
     }
 
     #[tokio::test]
     async fn get_data_request() {
         let data_request = post_data_request().await;
-        let url = format!("http://localhost:8080/requests/{}", data_request.id);
+        dbg!("data request id: {}", &data_request.id);
+
+        let url = format!("{BASE_API_URL}/{}", data_request.id);
         let response = reqwest::Client::new()
             .get(url)
diff --git a/src/requests.rs b/src/requests.rs
index 256a45d..8475c8e 100644
--- a/src/requests.rs
+++ b/src/requests.rs
@@ -1,131 +1,174 @@
-use axum::{extract::{Path, State}, Json};
+use axum::{
+    extract::{Path, State},
+    Json,
+};
 
-use fhir_sdk::r4b::{resources::{Consent, Patient}, types::Reference};
+use fhir_sdk::r4b::{
+    resources::{Consent, Patient},
+    types::Reference,
+};
 use once_cell::sync::Lazy;
 use reqwest::StatusCode;
-use serde::{Serialize, Deserialize};
 use sqlx::{Pool, Sqlite};
-use tracing::{trace, debug, error};
+use tracing::{debug, error, trace};
 
-use crate::{fhir::{FhirServer, PatientExt}, CONFIG};
+use crate::{
+    data_access::{
+        data_requests::{exists, get_all, get_by_id, insert},
+        models::{DataRequest, DataRequestPayload},
+    }, fhir::{FhirServer, PatientExt}, CONFIG
+};
 
 static REQUEST_SERVER: Lazy<FhirServer> = Lazy::new(|| {
-    FhirServer::new(CONFIG.fhir_request_url.clone(), CONFIG.fhir_request_credentials.clone())
+    FhirServer::new(
+        CONFIG.fhir_request_url.clone(),
+        CONFIG.fhir_request_credentials.clone(),
+    )
 });
 
-#[derive(Serialize, Deserialize, sqlx::Type)]
-pub enum RequestStatus {
-    Created = 1,
-    _DataLoaded = 2,
-    _UpdateAvailable = 3,
-    Error = 4,
-}
-
-#[derive(Serialize, Deserialize, sqlx::FromRow)]
-pub struct DataRequest {
-    pub id: String,
-    pub status: RequestStatus,
-}
-
-#[derive(Serialize, Deserialize, Clone)]
-pub struct DataRequestPayload {
-    pub patient: Patient,
-    pub consent: Consent
-}
-
 // POST /requests; Creates a new Data Request
 pub async fn create_data_request(
     State(database_pool): State<Pool<Sqlite>>,
-    Json(payload): Json<DataRequestPayload>
+    Json(payload): Json<DataRequestPayload>,
 ) -> axum::response::Result<(StatusCode, Json<DataRequest>)> {
     let mut consent = payload.consent;
     let mut patient = payload.patient;
+    let mut project_id: Option<&str> = None;
+
     if let Some(ttp) = &CONFIG.ttp {
-        patient = patient
-            .add_id_request(CONFIG.exchange_id_system.clone())?
-            .add_id_request(ttp.project_id_system.clone())?;
+        project_id = match ttp.project_id_system.trim() {
+            "" => None,
+            x => Some(x),
+        };
+
+        if let Some(proj_id) = project_id {
+            patient = patient
+                .add_id_request(CONFIG.exchange_id_system.clone())?
+                .add_id_request(proj_id.into())?;
+        } else {
+            patient = patient
+                .add_id_request(CONFIG.exchange_id_system.clone())?;
+        }
+
         // pseudonymize the patient
         patient = ttp.request_project_pseudonym(&mut patient).await?;
         // now, the patient should have project1id data (which can be stored in the DB)
-        trace!("TTP Returned these patient with project pseudonym {:#?}", &patient);
+        trace!(
+            "TTP Returned these patient with project pseudonym {:#?}",
+            &patient
+        );
         consent = ttp.document_patient_consent(consent, &patient).await?;
         trace!("TTP returned this consent for Patient {:?}", consent);
     } else {
        // without a TTP, the presence of the linkable pseudonym must be checked (the identifier exists; possibly compare it against a value from the configuration?)
         if !patient.contains_exchange_identifier() {
-            return Err(
-                (StatusCode::BAD_REQUEST, format!("Couldn't identify a valid identifier with system {}!", &CONFIG.exchange_id_system)).into()
-            );
+            return Err((
+                StatusCode::BAD_REQUEST,
+                format!(
+                    "Couldn't identify a valid identifier with system {}!",
+                    &CONFIG.exchange_id_system
+                ),
+            )
+                .into());
         }
     }
-    patient = patient.pseudonymize()?;
-    consent = link_patient_consent(&consent, &patient)?;
-    // and in both cases the request is then filed with the data integration center
-    let data_request_id = REQUEST_SERVER.post_data_request(DataRequestPayload {
-        patient,
-        consent
-    }).await?;
-    let data_request = DataRequest {
-        id: dbg!(data_request_id),
-        status: RequestStatus::Created,
+
+    let Some(patient_id) = patient.id.clone() else {
+        let err_str = format!("Couldn't find a valid id for the patient!");
+        let err_tuple = (StatusCode::BAD_REQUEST, err_str);
+        return Err(err_tuple.into());
     };
+    debug!("patient id: {patient_id}");
+
+    // check that this patient_id (i.e. the exchange_id) and project_id combination does not already exist in the DB (and only then post the request)
+    if exists(&database_pool, &patient_id, project_id).await {
+        let err_str = format!("A request for patient {} in project {:?} has already been generated!", patient_id, project_id);
+        let err_tuple = (StatusCode::BAD_REQUEST, err_str);
+        return Err(err_tuple.into());
+    }
+
+    patient = patient.pseudonymize()?;
+    consent = link_patient_consent(&consent, &patient)?;
+    // and in both cases the request is then filed with the data integration center
+    let data_request_id = REQUEST_SERVER
+        .post_data_request(DataRequestPayload { patient, consent })
+        .await?;
+
+    let data_request = DataRequest::new(data_request_id, patient_id, project_id.map(str::to_string)); // storage for associated project id
-    let sqlite_query_result = sqlx::query!(
-        "INSERT INTO data_requests (id, status) VALUES ($1, $2)",
-        data_request.id, data_request.status
-    ).execute(&database_pool).await.map_err(|e| {
+    let last_insert_rowid = insert(&database_pool, &data_request)
+        .await
+        .map_err(|e| {
            error!("Unable to persist data request to database. {}", e);
-        (StatusCode::INTERNAL_SERVER_ERROR, "Unable to persist data request to database.")
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                "Unable to persist data request to database.",
+            )
         })?;
-    let last_insert_rowid = sqlite_query_result.last_insert_rowid();
     debug!("Inserted data request in row {}", last_insert_rowid);
-
     Ok((StatusCode::CREATED, Json(data_request)))
 }
 
 // GET /requests; Lists all running Data Requests
 pub async fn list_data_requests(
-    State(database_pool): State<Pool<Sqlite>>
+    State(database_pool): State<Pool<Sqlite>>,
 ) -> Result<Json<Vec<DataRequest>>, (StatusCode, &'static str)> {
-    let data_requests = sqlx::query_as!(
-        DataRequest,
-        r#"SELECT id, status as "status: _" FROM data_requests;"#,
-    ).fetch_all(&database_pool).await.map_err(|e| {
-        error!("Unable to fetch data requests from database: {}", e);
-        (StatusCode::INTERNAL_SERVER_ERROR, "Unable to fetch data requests from database!")
-    }).unwrap();
-    Ok(Json(data_requests))
+    get_all(&database_pool)
+        .await
+        .map_err(|e| {
+            error!("Unable to fetch data requests from database: {}", e);
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                "Unable to fetch data requests from database!",
+            )
+        })
+        .map(Json)
 }
 
 // GET /requests/; Gets the Request specified by id in Path
 pub async fn get_data_request(
     State(database_pool): State<Pool<Sqlite>>,
-    Path(request_id): Path<String>
-) -> Result<Json<DataRequest>, (StatusCode, &'static str)> {
+    Path(request_id): Path<String>,
+) -> Result<Json<DataRequest>, (StatusCode, String)> {
     debug!("Information on data request {} requested.", request_id);
-    let data_request = sqlx::query_as!(
-        DataRequest,
-        r#"SELECT id, status as "status: _" FROM data_requests WHERE id = $1;"#,
-        request_id
-    ).fetch_optional(&database_pool).await.map_err(|e| {
-        error!("Unable to fetch data request {} from database: {}", request_id, e);
-        (StatusCode::INTERNAL_SERVER_ERROR, format!("Unable to fetch data request with id {}", request_id))
-    }).unwrap();
-    match data_request {
-        Some(data_request) => Ok(Json(data_request)),
-        None => Err((StatusCode::NOT_FOUND, "Couldn't retrieve data request with id"))
-    }
+    let data_request = get_by_id(&database_pool, &request_id)
+        .await
+        .map_err(|e| {
+            error!(
+                "Unable to fetch data request {} from database: {}",
+                request_id, e
+            );
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                format!("Unable to fetch data request with id {}", request_id),
+            )
+        })?
+        .map(Json);
+
+    data_request.ok_or((
+        StatusCode::NOT_FOUND,
+        "Couldn't retrieve data request with id".to_string(),
+    ))
 }
 
-fn link_patient_consent(consent: &Consent, patient: &Patient) -> Result<Consent, (StatusCode, &'static str)> {
+fn link_patient_consent(
+    consent: &Consent,
+    patient: &Patient,
+) -> Result<Consent, (StatusCode, &'static str)> {
     let mut linked_consent = consent.clone();
-    let exchange_identifier= patient.get_exchange_identifier();
+    let exchange_identifier = patient.get_exchange_identifier();
     let Some(exchange_identifier) = exchange_identifier else {
-        return Err((StatusCode::INTERNAL_SERVER_ERROR, "Unable to generate exchange identifier"));
+        return Err((
+            StatusCode::INTERNAL_SERVER_ERROR,
+            "Unable to generate exchange identifier",
+        ));
     };
-    linked_consent.patient = Some(Reference::builder().identifier(exchange_identifier.clone()).build().expect("TODO: Handle this error"));
+    linked_consent.patient = Some(
+        Reference::builder()
+            .identifier(exchange_identifier.clone())
+            .build()
+            .expect("TODO: Handle this error"),
+    );
     Ok(linked_consent)
 }