Bump phdi to v1.1.5 (#359) #1095
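# Deploys the PHDI Azure infrastructure. The terraform job provisions resources for
# the selected environment; the azure-cli job then publishes the Azure Function,
# wires up Event Grid, configures container app scaling and authentication, and
# deploys Synapse notebooks, pipelines, and triggers; end-to-end tests run last
# against the dev environment.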
name: Deployment
on:
  workflow_dispatch:
    inputs:
      environment:
        description: "Environment to deploy to"
        type: environment
        required: true
  push:
    branches:
      - main
permissions:
  id-token: write
  contents: read
  packages: write
jobs:
  terraform:
    name: Run Terraform (${{ github.event.inputs.environment }})
    runs-on: ubuntu-latest
    environment: main
    defaults:
      run:
        shell: bash
        working-directory: ./terraform/implementation
    outputs:
      tf_env: ${{ steps.set-environment.outputs.tf_env }}
      short_cid: ${{ steps.set-environment.outputs.short_cid }}
      record_linkage_container_url: ${{ steps.terraform.outputs.record_linkage_container_url }}
    steps:
      - name: Check Out Changes
        uses: actions/checkout@v3
      - name: Setup Terraform
        uses: hashicorp/setup-terraform@v2
      - name: "Azure login"
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.CLIENT_ID }}
          tenant-id: ${{ secrets.TENANT_ID }}
          subscription-id: ${{ secrets.SUBSCRIPTION_ID }}
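      # Write the Terraform input variables (including secrets) to terraform.tfvars
      # and the remote state settings to backend.tfvars, then set the default Azure
      # CLI location and resource group for later commands.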
      - name: Load input variables
        env:
          SUBSCRIPTION_ID: ${{ secrets.SUBSCRIPTION_ID }}
          LOCATION: ${{ secrets.LOCATION }}
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
          SMARTY_AUTH_ID: ${{ secrets.SMARTY_AUTH_ID }}
          SMARTY_AUTH_TOKEN: ${{ secrets.SMARTY_AUTH_TOKEN }}
          SMARTY_LICENSE_TYPE: ${{ secrets.SMARTY_LICENSE_TYPE }}
          CLIENT_ID: ${{ secrets.CLIENT_ID }}
          OBJECT_ID: ${{ secrets.OBJECT_ID }}
          GHCR_USERNAME: ${{ github.actor }}
          GHCR_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          echo subscription_id=\""$SUBSCRIPTION_ID"\" >> terraform.tfvars
          echo location=\""$LOCATION"\" >> terraform.tfvars
          echo resource_group_name=\""$RESOURCE_GROUP_NAME"\" >> terraform.tfvars
          echo smarty_auth_id=\""$SMARTY_AUTH_ID"\" >> terraform.tfvars
          echo smarty_auth_token=\""$SMARTY_AUTH_TOKEN"\" >> terraform.tfvars
          echo smarty_license_type=\""$SMARTY_LICENSE_TYPE"\" >> terraform.tfvars
          echo client_id=\""$CLIENT_ID"\" >> terraform.tfvars
          echo object_id=\""$OBJECT_ID"\" >> terraform.tfvars
          echo ghcr_username=\""$GHCR_USERNAME"\" >> terraform.tfvars
          echo ghcr_token=\""$GHCR_TOKEN"\" >> terraform.tfvars
          echo use_oidc=true >> terraform.tfvars
          echo resource_group_name=\""$RESOURCE_GROUP_NAME"\" >> backend.tfvars
          echo storage_account_name=\"phditfstate"${CLIENT_ID:0:8}"\" >> backend.tfvars
          echo use_oidc=true >> backend.tfvars
          echo use_msi=true >> backend.tfvars
          az config set defaults.location=$LOCATION defaults.group=$RESOURCE_GROUP_NAME
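      # Resolve the target environment: the workflow_dispatch input when present,
      # otherwise "dev" (pushes to main). Also expose the first 8 characters of the
      # client ID, which is used as a suffix in resource names.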
      - name: Set environment
        id: set-environment
        env:
          CLIENT_ID: ${{ secrets.CLIENT_ID }}
        run: |-
          echo "tf_env=$(
            if [[ "${{ github.event.inputs.environment }}" != "" ]]; then
              echo ${{ github.event.inputs.environment }}
            else
              echo dev
            fi
          )" >> $GITHUB_OUTPUT
          echo "short_cid=${CLIENT_ID:0:8}" >> $GITHUB_OUTPUT
      - name: terraform
        id: terraform
        env:
          ARM_CLIENT_ID: ${{ secrets.CLIENT_ID }}
          ARM_TENANT_ID: ${{ secrets.TENANT_ID }}
          ARM_SUBSCRIPTION_ID: ${{ secrets.SUBSCRIPTION_ID }}
          TF_ENV: ${{ steps.set-environment.outputs.tf_env }}
        run: |
          terraform init -backend-config=backend.tfvars
          terraform workspace select $TF_ENV || terraform workspace new $TF_ENV
          terraform apply -auto-approve -lock-timeout=30m \
            -replace="module.shared.docker_tag.tag_for_azure[\"fhir-converter\"]" \
            -replace="module.shared.docker_tag.tag_for_azure[\"ingestion\"]" \
            -replace="module.shared.docker_tag.tag_for_azure[\"message-parser\"]" \
            -replace="module.shared.docker_tag.tag_for_azure[\"validation\"]" \
            -replace="module.shared.docker_tag.tag_for_azure[\"record-linkage\"]"
  azure-cli:
    name: Run Azure CLI steps
    needs: terraform
    runs-on: ubuntu-latest
    environment: main
    defaults:
      run:
        shell: bash
        working-directory: ./terraform/implementation
    steps:
      - name: Check Out Changes
        uses: actions/checkout@v3
      - name: Install jq
        run: sudo apt-get update && sudo apt-get install -y jq
      - name: "Azure login"
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.CLIENT_ID }}
          tenant-id: ${{ secrets.TENANT_ID }}
          subscription-id: ${{ secrets.SUBSCRIPTION_ID }}
      - name: Setup Python Environment
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      - name: Install python dependencies
        working-directory: ./serverless-functions
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt --target=".python_packages/lib/site-packages"
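      # Capture the function app's publish profile as a multi-line environment
      # variable so the Azure Functions action below can deploy the
      # serverless-functions package with it.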
      - name: Get publish profile
        env:
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
          SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
          LOCATION: ${{ secrets.LOCATION }}
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
        run: |
          az config set defaults.location=$LOCATION defaults.group=$RESOURCE_GROUP_NAME
          echo 'PUBLISH_PROFILE<<EOF' >> $GITHUB_ENV
          az functionapp deployment list-publishing-profiles --name $TF_ENV-read-source-data-$SHORT_CID --xml >> $GITHUB_ENV
          echo 'EOF' >> $GITHUB_ENV
- name: "Run Azure Functions Action" | |
uses: Azure/functions-action@v1 | |
env: | |
TF_ENV: ${{ needs.terraform.outputs.tf_env }} | |
SHORT_CID: ${{ needs.terraform.outputs.short_cid }} | |
with: | |
app-name: "$TF_ENV-read-source-data-$SHORT_CID" | |
package: serverless-functions | |
publish-profile: ${{ env.PUBLISH_PROFILE }} | |
- name: "Azure login" | |
uses: azure/login@v1 | |
with: | |
client-id: ${{ secrets.CLIENT_ID }} | |
tenant-id: ${{ secrets.TENANT_ID }} | |
subscription-id: ${{ secrets.SUBSCRIPTION_ID }} | |
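      # Route BlobCreated events on the source-data container to the sourcedataqueue
      # storage queue. Two subscriptions are created because blobs can arrive via
      # either the FlushWithClose (Data Lake) or PutBlob API.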
      - name: Add Event Grid subscription
        env:
          SUBSCRIPTION_ID: ${{ secrets.SUBSCRIPTION_ID }}
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
          SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
        run: |
          az eventgrid event-subscription create \
            --name phdi${TF_ENV}phi${SHORT_CID}-FlushWithClose-sourcedata-queue \
            --source-resource-id "/subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP_NAME/providers/Microsoft.Storage/storageAccounts/phdi${TF_ENV}phi${SHORT_CID}" \
            --included-event-types Microsoft.Storage.BlobCreated \
            --subject-begins-with /blobServices/default/containers/source-data/blobs/ \
            --advanced-filter data.api StringContains FlushWithClose \
            --endpoint "/subscriptions/${SUBSCRIPTION_ID}/resourceGroups/${RESOURCE_GROUP_NAME}/providers/Microsoft.Storage/storageAccounts/phdi${TF_ENV}phi${SHORT_CID}/queueservices/default/queues/sourcedataqueue" \
            --endpoint-type storagequeue
          az eventgrid event-subscription create \
            --name phdi${TF_ENV}phi${SHORT_CID}-PutBlob-sourcedata-queue \
            --source-resource-id "/subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP_NAME/providers/Microsoft.Storage/storageAccounts/phdi${TF_ENV}phi${SHORT_CID}" \
            --included-event-types Microsoft.Storage.BlobCreated \
            --subject-begins-with /blobServices/default/containers/source-data/blobs/ \
            --advanced-filter data.api StringContains PutBlob \
            --endpoint "/subscriptions/${SUBSCRIPTION_ID}/resourceGroups/${RESOURCE_GROUP_NAME}/providers/Microsoft.Storage/storageAccounts/phdi${TF_ENV}phi${SHORT_CID}/queueservices/default/queues/sourcedataqueue" \
            --endpoint-type storagequeue
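      # Scale each service's container app on concurrent HTTP requests
      # (target of 10 concurrent requests per replica).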
      - name: Add scaling rules to container apps
        env:
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
        run: |
          for SERVICE in fhir-converter ingestion validation message-parser record-linkage; do
            az containerapp update --name phdi-$TF_ENV-$SERVICE \
              --resource-group $RESOURCE_GROUP_NAME \
              --scale-rule-name concurrent-http-requests \
              --scale-rule-http-concurrency 10
          done
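      # Put each container app behind Azure AD authentication: reuse or create an app
      # registration per service, expose a user_impersonation scope, pre-authorize the
      # Azure CLI client, then enable built-in auth with a fresh client secret and
      # return 401 to unauthenticated requests.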
      - name: Add authentication to container apps
        env:
          TENANT_ID: ${{ secrets.TENANT_ID }}
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
        run: |
          az extension add --name containerapp
          DEFAULT_DOMAIN=$(az containerapp env show --name $TF_ENV --query properties.defaultDomain -o tsv)
          for SERVICE in fhir-converter ingestion validation message-parser record-linkage; do
            if [[ $(az containerapp auth show --name phdi-$TF_ENV-$SERVICE) == "{}" ]]; then
              if [[ $(az ad app list | grep -c phdi-$TF_ENV-$SERVICE) -gt 0 ]]; then
                APP_ID=$(az ad app list --display-name phdi-$TF_ENV-$SERVICE --query [].appId -o tsv)
              else
                APP_ID=$(az ad app create --display-name phdi-$TF_ENV-$SERVICE --identifier-uris api://phdi-$TF_ENV-$SERVICE --sign-in-audience AzureADMyOrg --enable-access-token-issuance false --enable-id-token-issuance true --web-home-page-url https://phdi-$TF_ENV-$SERVICE.$DEFAULT_DOMAIN --web-redirect-uris https://phdi-$TF_ENV-$SERVICE.$DEFAULT_DOMAIN/.auth/login/aad/callback --required-resource-access '[{"resourceAccess": [{"id": "e1fe6dd8-ba31-4d61-89e7-88639da4683d", "type": "Scope"} ], "resourceAppId": "00000003-0000-0000-c000-000000000000"}]' --query appId -o tsv)
                az ad sp create --id $APP_ID
                SCOPE_ID=$(uuidgen)
                az ad app update --id $APP_ID --set api='{"oauth2PermissionScopes":[{"adminConsentDescription":"Allow the application to access phdi-$TF_ENV-$SERVICE on behalf of the signed-in user.","adminConsentDisplayName":"Access phdi-$TF_ENV-$SERVICE","id":"'"$SCOPE_ID"'","isEnabled":true,"type":"User","userConsentDescription":"Allow the application to access phdi-$TF_ENV-$SERVICE on your behalf.","userConsentDisplayName":"Access phdi-$TF_ENV-$SERVICE","value":"user_impersonation"}],"preAuthorizedApplications":[]}'
                az ad app update --id $APP_ID --set api='{"oauth2PermissionScopes":[{"adminConsentDescription":"Allow the application to access phdi-$TF_ENV-$SERVICE on behalf of the signed-in user.","adminConsentDisplayName":"Access phdi-$TF_ENV-$SERVICE","id":"'"$SCOPE_ID"'","isEnabled":true,"type":"User","userConsentDescription":"Allow the application to access phdi-$TF_ENV-$SERVICE on your behalf.","userConsentDisplayName":"Access phdi-$TF_ENV-$SERVICE","value":"user_impersonation"}],"preAuthorizedApplications":[{"appId":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","delegatedPermissionIds":["'"$SCOPE_ID"'"]}]}'
              fi
              AUTH_SECRET=$(az ad app credential reset --id $APP_ID --display-name "Generated by App Service" --only-show-errors --query password -o tsv)
              az containerapp secret set --name phdi-$TF_ENV-$SERVICE --secrets auth-secret=$AUTH_SECRET
              az containerapp auth microsoft update --name phdi-$TF_ENV-$SERVICE --client-id $APP_ID --client-secret-name auth-secret --issuer "https://sts.windows.net/${TENANT_ID}/v2.0" --allowed-audiences "api://phdi-$TF_ENV-$SERVICE"
              az containerapp auth update --name phdi-$TF_ENV-$SERVICE --enabled --action Return401
            fi
          done
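      # Recreate the Synapse analytics notebooks, pipeline, and weekly trigger.
      # Environment-specific values (storage account, key vault, MPI host, linked
      # services) are substituted into the templates with envsubst before upload.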
      - name: Deploy Synapse Notebooks, Pipeline and Trigger
        env:
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
          SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
          TENANT_ID: ${{ secrets.TENANT_ID }}
          RECORD_LINKAGE_CONTAINER_URL: ${{ needs.terraform.outputs.record_linkage_container_url }}
        run: |
          export STORAGE_ACCOUNT=phdi${TF_ENV}phi${SHORT_CID}
          export KEY_VAULT=${TF_ENV}vault${SHORT_CID}
          export MPI_DB_HOST=phdi${TF_ENV}mpi${SHORT_CID}.postgres.database.azure.com
          export KEY_VAULT_LINKED_SERVICE=${TF_ENV}${SHORT_CID}-keyvault-linked-service
          export BLOB_STORAGE_LINKED_SERVICE=phdi${TF_ENV}${SHORT_CID}-blob-storage-linked-service
          export SCOPE=${{ secrets.RESOURCE_GROUP_NAME }}-${TF_ENV}
          if az synapse trigger show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline Weekly Trigger"; then
            az synapse trigger delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline Weekly Trigger" --yes
          fi
          if az synapse pipeline show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline"; then
            az synapse pipeline delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline" --yes
          fi
          for FILE in ../../scripts/Synapse/*
          do
            if [[ $FILE == *.ipynb ]]; then
              FILENAME="$(basename $FILE .ipynb)"
              if az synapse notebook show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name $FILENAME; then
                az synapse notebook delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name $FILENAME --yes
              fi
              cat "../../scripts/Synapse/${FILENAME}.ipynb" | envsubst > "../../scripts/Synapse/${FILENAME}Substituted.ipynb"
              az synapse notebook create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name $FILENAME --file "@../../scripts/Synapse/${FILENAME}Substituted.ipynb" --spark-pool-name sparkpool
            fi
          done
          az synapse pipeline create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline" --file @../../scripts/Synapse/config/SynapseAnalyticsPipelineConfig.json
          cat ../../scripts/Synapse/config/SynapseAnalyticsPipelineWeeklyTriggerConfig.json | envsubst > ../../scripts/Synapse/config/SynapseAnalyticsPipelineWeeklyTriggerConfigSubstituted.json
          az synapse trigger create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline Weekly Trigger" --file @../../scripts/Synapse/config/SynapseAnalyticsPipelineWeeklyTriggerConfigSubstituted.json
          az synapse trigger start --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline Weekly Trigger"
- name: "Azure login" | |
uses: azure/login@v1 | |
with: | |
client-id: ${{ secrets.CLIENT_ID }} | |
tenant-id: ${{ secrets.TENANT_ID }} | |
subscription-id: ${{ secrets.SUBSCRIPTION_ID }} | |
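      # Recreate the ECR Datastore Refresh pipeline and its daily trigger from the
      # checked-in config files.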
      - name: Deploy Daily ECR Refresh Pipeline and Trigger
        env:
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
          SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
        run: |
          if az synapse trigger show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh Daily Trigger"; then
            az synapse trigger delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh Daily Trigger" --yes
          fi
          if az synapse pipeline show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh"; then
            az synapse pipeline delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh" --yes
          fi
          az synapse pipeline create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh" --file @../../scripts/Synapse/config/ECRDatastoreRefreshConfig.json
          az synapse trigger create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh Daily Trigger" --file @../../scripts/Synapse/config/ECRDatastoreRefreshDailyTriggerConfig.json
          az synapse trigger start --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh Daily Trigger"
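      # Recreate the Update MPI pipeline and trigger; envsubst fills the step's
      # environment variables into the trigger config before it is uploaded.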
      - name: Deploy Update MPI Pipeline and Trigger
        env:
          SUBSCRIPTION_ID: ${{ secrets.SUBSCRIPTION_ID }}
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
          SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
        run: |
          if az synapse trigger show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI Trigger"; then
            az synapse trigger delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI Trigger" --yes
          fi
          if az synapse pipeline show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI"; then
            az synapse pipeline delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI" --yes
          fi
          az synapse pipeline create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI" --file @../../scripts/Synapse/config/UpdateMPIConfig.json
          cat ../../scripts/Synapse/config/UpdateMPITriggerConfig.json | envsubst > ../../scripts/Synapse/config/UpdateMPITriggerConfigSubstituted.json
          az synapse trigger create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI Trigger" --file @../../scripts/Synapse/config/UpdateMPITriggerConfigSubstituted.json
          az synapse trigger start --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI Trigger"
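      # Same pattern for the Update MII pipeline and trigger.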
      - name: Deploy Update MII Pipeline and Trigger
        env:
          SUBSCRIPTION_ID: ${{ secrets.SUBSCRIPTION_ID }}
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
          SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
        run: |
          if az synapse trigger show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII Trigger"; then
            az synapse trigger delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII Trigger" --yes
          fi
          if az synapse pipeline show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII"; then
            az synapse pipeline delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII" --yes
          fi
          az synapse pipeline create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII" --file @../../scripts/Synapse/config/UpdateMIIConfig.json
          cat ../../scripts/Synapse/config/UpdateMIITriggerConfig.json | envsubst > ../../scripts/Synapse/config/UpdateMIITriggerConfigSubstituted.json
          az synapse trigger create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII Trigger" --file @../../scripts/Synapse/config/UpdateMIITriggerConfigSubstituted.json
          az synapse trigger start --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII Trigger"
- name: "Azure login" | |
uses: azure/login@v1 | |
with: | |
client-id: ${{ secrets.CLIENT_ID }} | |
tenant-id: ${{ secrets.TENANT_ID }} | |
subscription-id: ${{ secrets.SUBSCRIPTION_ID }} | |
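      # Register the ECR datastore parsing schema with the message-parser service:
      # fetch an access token for its AAD app, wrap the schema JSON under a
      # parsing_schema key, and PUT it to the container app's /schemas/ecr.json endpoint.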
      - name: Add custom schema to message parser
        env:
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
        run: |
          # Get a token for the message parser
          TOKEN=$(az account get-access-token --resource api://phdi-${TF_ENV}-message-parser --query accessToken -o tsv)
          # Wrap ecr_datastore_config.json into a variable
          ECR_DATASTORE_CONFIG=$(jq -n --argjson value "$(cat ../../scripts/Synapse/config/ecr_datastore_config.json)" '{"parsing_schema": $value}')
          # Get the message parser container app URL
          MESSAGE_PARSER_URL=$(az containerapp show -g $RESOURCE_GROUP_NAME -n phdi-${TF_ENV}-message-parser --query properties.configuration.ingress.fqdn -o tsv)
          # PUT ecr_datastore_config.json to the message parser
          curl -X PUT -H "Content-Type: application/json" -H "Authorization: Bearer $TOKEN" -d "$ECR_DATASTORE_CONFIG" "https://$MESSAGE_PARSER_URL/schemas/ecr.json"
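  # Run the reusable end-to-end test workflow against the newly deployed resources
  # (dev environment only).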
  end-to-end:
    name: End-to-end tests
    needs:
      - terraform
      - azure-cli
    if: ${{ needs.terraform.outputs.tf_env == 'dev' }}
    uses: ./.github/workflows/end-to-end.yaml
    with:
      environment: ${{ needs.terraform.outputs.tf_env }}
    secrets: inherit