Turn FHIR server back on for LAC to collect ELR data #322

Merged
merged 17 commits on Nov 9, 2023
18 changes: 16 additions & 2 deletions .github/workflows/end-to-end.yaml
@@ -52,7 +52,7 @@ jobs:
run: |
TIMESTAMP=$(date "+%Y-%m-%dT%H:%M:%S")
echo "timestamp=$TIMESTAMP" >> $GITHUB_OUTPUT
az storage blob upload --account-name phdi${TF_ENV}phi${SHORT_CID} --container-name source-data --name vxu/VXU-V04-01_success_single_$TIMESTAMP.hl7 --file sample-data/VXU-V04-01_success_single.hl7
az storage blob upload --account-name phdi${TF_ENV}phi${SHORT_CID} --container-name source-data --name elr/ELR_e2e_sample_1_$TIMESTAMP.hl7 --file sample-data/ELR_e2e_sample_1.hl7
- name: Check pipeline run
env:
TF_ENV: ${{ steps.set-environment.outputs.tf_env }}
@@ -62,7 +62,7 @@
az extension add --name datafactory --upgrade
START_DATE=$(date --date="1 day ago" "+%Y-%m-%dT%H:%M:%S")
END_DATE=$(date --date="1 day" "+%Y-%m-%dT%H:%M:%S")
SOURCE_FILE="source-data/vxu/VXU-V04-01_success_single_$TIMESTAMP.hl7"
SOURCE_FILE="source-data/elr/ELR_e2e_sample_1_$TIMESTAMP.hl7"
CHECK_COUNT=0

check_pipeline_run_count() {
@@ -88,3 +88,17 @@ jobs:
done

echo "Pipeline run succeeded!"

- name: Query FHIR server
env:
TF_ENV: ${{ steps.set-environment.outputs.tf_env }}
SHORT_CID: ${{ steps.set-environment.outputs.short_cid }}
TIMESTAMP: ${{ steps.upload-sample-data.outputs.timestamp }}
run: |
TOKEN=$(az account get-access-token --resource=https://${TF_ENV}${SHORT_CID}-fhir-server.fhir.azurehealthcareapis.com --query accessToken --output tsv)
RESPONSE=$(curl -X GET --header "Authorization: Bearer $TOKEN" "https://${TF_ENV}${SHORT_CID}-fhir-server.fhir.azurehealthcareapis.com/Patient?family=EVERYMAN&given=ADAM")
echo $RESPONSE | jq -e '.entry[0].resource.name[0].family == "EVERYMAN" and .entry[0].resource.name[0].given[0] == "ADAM"'
PATIENT_ID=$(echo $RESPONSE | jq -r '.entry[0].resource.id')
echo "FHIR server query succeeded! Deleting sample data..."
az storage blob delete --account-name phdi${TF_ENV}phi${SHORT_CID} --container-name source-data --name elr/ELR_e2e_sample_1_$TIMESTAMP.hl7
curl -X DELETE --header "Authorization: Bearer $TOKEN" "https://${TF_ENV}${SHORT_CID}-fhir-server.fhir.azurehealthcareapis.com/Patient?identifier=${PATIENT_ID}&hardDelete=true"
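
Reviewer note: the `jq -e` line is what gates this step. `jq -e` sets its exit status from the last value it emits (0 for anything other than false or null, 1 otherwise), and the default GitHub Actions run shell (`bash -e`) stops on the first failing command, so a missing or mismatched patient fails the step. A minimal illustration with hypothetical JSON:

```bash
# Assertion holds: jq prints "true" and exits 0.
echo '{"entry":[{"resource":{"name":[{"family":"EVERYMAN","given":["ADAM"]}]}}]}' \
  | jq -e '.entry[0].resource.name[0].family == "EVERYMAN"'

# Assertion fails: .entry[0] is null, the comparison evaluates to false,
# jq exits 1, and the -e shell option aborts the step.
echo '{"entry":[]}' \
  | jq -e '.entry[0].resource.name[0].family == "EVERYMAN"'
```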
1 change: 1 addition & 0 deletions terraform/implementation/main.tf
@@ -23,6 +23,7 @@ module "data_factory" {
ingestion_container_url = module.shared.ingestion_container_url
validation_container_url = module.shared.validation_container_url
message_parser_url = module.shared.message_parser_url
fhir_server_url = module.shared.fhir_server_url
phi_storage_account_endpoint_url = module.shared.phi_storage_account_endpoint_url
record_linkage_container_url = module.shared.record_linkage_container_url
pipeline_runner_id = module.shared.pipeline_runner_id
87 changes: 87 additions & 0 deletions terraform/modules/data_factory/ingestion-pipeline.json
@@ -518,6 +518,93 @@
}
]
}
},
{
"name": "case_type_elr_if_condition",
"type": "IfCondition",
"dependsOn": [
{
"activity": "record_linkage",
"dependencyConditions": [
"Succeeded"
]
}
],
"userProperties": [],
"typeProperties": {
"expression": {
"value": "@equals(pipeline().parameters.message_type, 'elr')",
"type": "Expression"
},
"ifTrueActivities": [
{
"name": "upload_fhir_bundle",
"description": "Compute a hashed identifier that can be used for record linkage.",
"type": "WebActivity",
"policy": {
"timeout": "0.12:00:00",
"retry": 3,
"retryIntervalInSeconds": 30,
"secureOutput": false,
"secureInput": false
},
"userProperties": [],
"typeProperties": {
"url": "${ingestion_container_url}/fhir/transport/http/upload_bundle_to_fhir_server",
"method": "POST",
"body": {
"value": "{\n \"fhir_url\": \"${fhir_server_url}\",\n \"bundle\": @{activity('record_linkage').output.updated_bundle},\n \"cred_manager\": \"azure\"\n}",
"type": "Expression"
},
"authentication": {
"resource": "api://phdi-${environment}-ingestion",
"credential": {
"referenceName": "pipeline-runner-credential",
"type": "CredentialReference"
},
"type": "UserAssignedManagedIdentity"
}
}
},
{
"name": "log_fhir_upload_failure",
"description": "Write FHIR upload failures to storage.",
"type": "WebActivity",
"dependsOn": [
{
"activity": "upload_fhir_bundle",
"dependencyConditions": [
"Failed"
]
}
],
"policy": {
"timeout": "0.12:00:00",
"retry": 3,
"retryIntervalInSeconds": 30,
"secureOutput": false,
"secureInput": false
},
"userProperties": [],
"typeProperties": {
"url": "${ingestion_container_url}/cloud/storage/write_blob_to_storage",
"method": "POST",
"body": {
"value": "{\n \"blob\": @{activity('upload_fhir_bundle').output.message},\n \"cloud_provider\": \"azure\",\n \"bucket_name\": \"${fhir_upload_failures_container_name}\",\n \"file_name\": \"@{concat(substring(pipeline().parameters.filename, 12, 4), pipeline().RunId, '_', substring(pipeline().parameters.filename, 16, sub(length(pipeline().parameters.filename),length(substring(pipeline().parameters.filename, 0, add(length('source_data/'), 4))))))}\"\n}",
"type": "Expression"
},
"authentication": {
"resource": "api://phdi-${environment}-ingestion",
"credential": {
"referenceName": "pipeline-runner-credential",
"type": "CredentialReference"
},
"type": "UserAssignedManagedIdentity"
}
}
}
]
}
}
],
"parameters": {
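
Reviewer note: the `file_name` expression in `log_fhir_upload_failure` is dense; it appears to rebuild the failure blob path as `<message type>/<pipeline run id>_<original file name>` from a `filename` parameter shaped like `source-data/<type>/<name>.hl7`. A rough bash equivalent of the same string slicing, with hypothetical values:

```bash
# Hypothetical inputs; the real values come from pipeline().parameters.filename
# and pipeline().RunId at runtime.
filename="source-data/elr/ELR_e2e_sample_1_2023-11-09T12:00:00.hl7"
run_id="00000000-aaaa-bbbb-cccc-000000000000"

type_prefix="${filename:12:4}"   # substring(filename, 12, 4)  -> "elr/"
original_name="${filename:16}"   # everything past "source-data/elr/"
failure_blob="${type_prefix}${run_id}_${original_name}"

echo "$failure_blob"             # elr/00000000-..._ELR_e2e_sample_1_2023-11-09T12:00:00.hl7
```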
1 change: 1 addition & 0 deletions terraform/modules/data_factory/main.tf
@@ -65,6 +65,7 @@ locals {
environment = terraform.workspace,
fhir_converter_url = var.fhir_converter_url,
ingestion_container_url = var.ingestion_container_url,
fhir_server_url = var.fhir_server_url,
message_parser_url = var.message_parser_url,
storage_account_url = var.phi_storage_account_endpoint_url,
validation_failures_container_name = var.validation_failures_container_name,
5 changes: 5 additions & 0 deletions terraform/modules/data_factory/variables.tf
@@ -33,6 +33,11 @@ variable "record_linkage_container_url" {
description = "URL of the record linkage container"
}

variable "fhir_server_url" {
type = string
description = "URL of the FHIR server"
}

variable "phi_storage_account_endpoint_url" {
type = string
description = "URL of the PHI storage account"
64 changes: 57 additions & 7 deletions terraform/modules/shared/main.tf
@@ -513,6 +513,56 @@ resource "azurerm_container_app_environment_storage" "custom_schema_storage" {
access_mode = "ReadWrite"
}

##### FHIR Server #####

resource "azurerm_healthcare_workspace" "fhir_server" {
name = "${terraform.workspace}${substr(var.client_id, 0, 8)}"
location = "westus2"
resource_group_name = var.resource_group_name
}

resource "azurerm_healthcare_fhir_service" "fhir_server" {
name = "fhir-server"
location = "westus2"
resource_group_name = var.resource_group_name
workspace_id = azurerm_healthcare_workspace.fhir_server.id
kind = "fhir-R4"

authentication {
authority = "https://login.microsoftonline.com/${data.azurerm_client_config.current.tenant_id}"
audience = "https://${terraform.workspace}${substr(var.client_id, 0, 8)}-fhir-server.fhir.azurehealthcareapis.com"
}

cors {
allowed_origins = ["https://${azurerm_container_app.container_app["ingestion"].latest_revision_fqdn}"]
allowed_headers = ["*"]
allowed_methods = ["GET", "DELETE", "PUT", "POST"]
max_age_in_seconds = 3600
credentials_allowed = true
}

lifecycle {
ignore_changes = [name, tags]
}

tags = {
environment = terraform.workspace
managed-by = "terraform"
}
}

resource "azurerm_role_assignment" "gh_sp_fhir_contributor" {
scope = azurerm_healthcare_fhir_service.fhir_server.id
role_definition_name = "FHIR Data Contributor"
principal_id = var.object_id
}

resource "azurerm_role_assignment" "pipeline_runner_fhir_contributor" {
scope = azurerm_healthcare_fhir_service.fhir_server.id
role_definition_name = "FHIR Data Contributor"
principal_id = azurerm_user_assigned_identity.pipeline_runner.principal_id
}

##### User Assigned Identity #####

resource "azurerm_user_assigned_identity" "pipeline_runner" {
@@ -562,13 +612,6 @@ resource "azurerm_synapse_workspace" "phdi" {
}
}

resource "azurerm_synapse_firewall_rule" "allow_azure_services" {
name = "AllowAllWindowsAzureIps"
synapse_workspace_id = azurerm_synapse_workspace.phdi.id
start_ip_address = "0.0.0.0"
end_ip_address = "0.0.0.0"
}

resource "azurerm_synapse_spark_pool" "phdi" {
name = "sparkpool"
synapse_workspace_id = azurerm_synapse_workspace.phdi.id
@@ -608,6 +651,13 @@ EOF
}
}

resource "azurerm_synapse_firewall_rule" "synapse_firewall_rule" {
name = "AllowAll"
synapse_workspace_id = azurerm_synapse_workspace.phdi.id
start_ip_address = "0.0.0.0"
end_ip_address = "255.255.255.255"
}

resource "azurerm_role_assignment" "synapse_blob_contributor" {
scope = azurerm_storage_account.phi.id
role_definition_name = "Storage Blob Data Contributor"
4 changes: 4 additions & 0 deletions terraform/modules/shared/outputs.tf
@@ -6,6 +6,10 @@ output "phi_storage_account_key" {
value = azurerm_storage_account.phi.primary_access_key
}

output "fhir_server_url" {
value = azurerm_healthcare_fhir_service.fhir_server.authentication[0].audience
}

output "pipeline_runner_id" {
value = azurerm_user_assigned_identity.pipeline_runner.id
}
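
Reviewer note: the new `fhir_server_url` output surfaces the FHIR service's authentication audience, which is also the base URL of the service and the AAD resource URI used for token requests (as the new workflow step above does). A minimal smoke-test sketch, assuming the az CLI is logged in as an identity holding one of the FHIR Data roles and that the naming convention in the Terraform above holds:

```bash
# Hypothetical environment values; the real ones come from the workflow outputs.
TF_ENV="dev"
SHORT_CID="12345678"
FHIR_URL="https://${TF_ENV}${SHORT_CID}-fhir-server.fhir.azurehealthcareapis.com"

# Request a token scoped to the FHIR service audience and read its capability statement.
TOKEN=$(az account get-access-token --resource="$FHIR_URL" --query accessToken --output tsv)
curl -s -H "Authorization: Bearer $TOKEN" "$FHIR_URL/metadata" | jq -r '.fhirVersion'
```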