# deployment.yaml
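# Deployment workflow: provisions Azure infrastructure with Terraform, then finishes
# configuration with the Azure CLI (Function App code, Event Grid subscriptions,
# container app scaling and auth, Synapse pipelines), and runs end-to-end tests for
# dev deployments.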
name: Deployment

on:
  workflow_dispatch:
    inputs:
      environment:
        description: "Environment to deploy to"
        type: environment
        required: true
  push:
    branches:
      - main

permissions:
  id-token: write
  contents: read
  packages: write
jobs:
  terraform:
    name: Run Terraform (${{ github.event.inputs.environment }})
    runs-on: ubuntu-latest
    environment: main
    defaults:
      run:
        shell: bash
        working-directory: ./terraform/implementation
    outputs:
      tf_env: ${{ steps.set-environment.outputs.tf_env }}
      short_cid: ${{ steps.set-environment.outputs.short_cid }}
      record_linkage_container_url: ${{ steps.terraform.outputs.record_linkage_container_url }}
    steps:
      - name: Check Out Changes
        uses: actions/checkout@v3
      - name: Setup Terraform
        uses: hashicorp/setup-terraform@v2
      - name: "Azure login"
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.CLIENT_ID }}
          tenant-id: ${{ secrets.TENANT_ID }}
          subscription-id: ${{ secrets.SUBSCRIPTION_ID }}
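      # Write Terraform input variables and backend settings from repository secrets.
      # The state storage account name is derived from the first 8 characters of the client ID.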
      - name: Load input variables
        env:
          SUBSCRIPTION_ID: ${{ secrets.SUBSCRIPTION_ID }}
          LOCATION: ${{ secrets.LOCATION }}
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
          SMARTY_AUTH_ID: ${{ secrets.SMARTY_AUTH_ID }}
          SMARTY_AUTH_TOKEN: ${{ secrets.SMARTY_AUTH_TOKEN }}
          SMARTY_LICENSE_TYPE: ${{ secrets.SMARTY_LICENSE_TYPE }}
          CLIENT_ID: ${{ secrets.CLIENT_ID }}
          OBJECT_ID: ${{ secrets.OBJECT_ID }}
          GHCR_USERNAME: ${{ github.actor }}
          GHCR_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          echo subscription_id=\""$SUBSCRIPTION_ID"\" >> terraform.tfvars
          echo location=\""$LOCATION"\" >> terraform.tfvars
          echo resource_group_name=\""$RESOURCE_GROUP_NAME"\" >> terraform.tfvars
          echo smarty_auth_id=\""$SMARTY_AUTH_ID"\" >> terraform.tfvars
          echo smarty_auth_token=\""$SMARTY_AUTH_TOKEN"\" >> terraform.tfvars
          echo smarty_license_type=\""$SMARTY_LICENSE_TYPE"\" >> terraform.tfvars
          echo client_id=\""$CLIENT_ID"\" >> terraform.tfvars
          echo object_id=\""$OBJECT_ID"\" >> terraform.tfvars
          echo ghcr_username=\""$GHCR_USERNAME"\" >> terraform.tfvars
          echo ghcr_token=\""$GHCR_TOKEN"\" >> terraform.tfvars
          echo use_oidc=true >> terraform.tfvars
          echo resource_group_name=\""$RESOURCE_GROUP_NAME"\" >> backend.tfvars
          echo storage_account_name=\"phditfstate"${CLIENT_ID:0:8}"\" >> backend.tfvars
          echo use_oidc=true >> backend.tfvars
          echo use_msi=true >> backend.tfvars
          az config set defaults.location=$LOCATION defaults.group=$RESOURCE_GROUP_NAME
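      # Resolve the target environment: use the manually selected environment for
      # workflow_dispatch runs, otherwise default to dev (pushes to main).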
      - name: Set environment
        id: set-environment
        env:
          CLIENT_ID: ${{ secrets.CLIENT_ID }}
        run: |-
          echo "tf_env=$(
            if [[ "${{ github.event.inputs.environment }}" != "" ]]; then
              echo ${{ github.event.inputs.environment }}
            else
              echo dev
            fi
          )" >> $GITHUB_OUTPUT
          echo "short_cid=${CLIENT_ID:0:8}" >> $GITHUB_OUTPUT
      - name: terraform
        # The job's record_linkage_container_url output references steps.terraform,
        # which requires an explicit step id.
        id: terraform
        env:
          ARM_CLIENT_ID: ${{ secrets.CLIENT_ID }}
          ARM_TENANT_ID: ${{ secrets.TENANT_ID }}
          ARM_SUBSCRIPTION_ID: ${{ secrets.SUBSCRIPTION_ID }}
          TF_ENV: ${{ steps.set-environment.outputs.tf_env }}
        run: |
          terraform init -backend-config=backend.tfvars
          terraform workspace select $TF_ENV || terraform workspace new $TF_ENV
          terraform apply -auto-approve -lock-timeout=30m \
            -replace="module.shared.docker_tag.tag_for_azure[\"fhir-converter\"]" \
            -replace="module.shared.docker_tag.tag_for_azure[\"ingestion\"]" \
            -replace="module.shared.docker_tag.tag_for_azure[\"message-parser\"]" \
            -replace="module.shared.docker_tag.tag_for_azure[\"validation\"]" \
            -replace="module.shared.docker_tag.tag_for_azure[\"record-linkage\"]"
  azure-cli:
    name: Run Azure CLI steps
    needs: terraform
    runs-on: ubuntu-latest
    environment: main
    defaults:
      run:
        shell: bash
        working-directory: ./terraform/implementation
    steps:
      - name: Check Out Changes
        uses: actions/checkout@v3
      - name: Install jq
        run: sudo apt-get update && sudo apt-get install -y jq
      - name: "Azure login"
        uses: azure/login@v1
        with:
          client-id: ${{ secrets.CLIENT_ID }}
          tenant-id: ${{ secrets.TENANT_ID }}
          subscription-id: ${{ secrets.SUBSCRIPTION_ID }}
      - name: Setup Python Environment
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      - name: Install python dependencies
        working-directory: ./serverless-functions
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt --target=".python_packages/lib/site-packages"
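      # Fetch the Function App's publish profile into $GITHUB_ENV so the next step can
      # deploy the serverless-functions package without a separate credential.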
      - name: Get publish profile
        env:
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
          SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
          LOCATION: ${{ secrets.LOCATION }}
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
        run: |
          az config set defaults.location=$LOCATION defaults.group=$RESOURCE_GROUP_NAME
          echo 'PUBLISH_PROFILE<<EOF' >> $GITHUB_ENV
          az functionapp deployment list-publishing-profiles --name $TF_ENV-read-source-data-$SHORT_CID --xml >> $GITHUB_ENV
          echo 'EOF' >> $GITHUB_ENV
- name: "Run Azure Functions Action"
uses: Azure/functions-action@v1
env:
TF_ENV: ${{ needs.terraform.outputs.tf_env }}
SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
with:
app-name: "$TF_ENV-read-source-data-$SHORT_CID"
package: serverless-functions
publish-profile: ${{ env.PUBLISH_PROFILE }}
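      # Log in again before the remaining CLI steps; the repeated logins below appear to
      # guard against the OIDC session expiring during long-running steps.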
- name: "Azure login"
uses: azure/login@v1
with:
client-id: ${{ secrets.CLIENT_ID }}
tenant-id: ${{ secrets.TENANT_ID }}
subscription-id: ${{ secrets.SUBSCRIPTION_ID }}
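      # Subscribe the source-data storage queue to BlobCreated events (which presumably
      # feeds the read-source-data Function App). Two subscriptions cover blobs written
      # via the Data Lake API (FlushWithClose) and the Blob API (PutBlob).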
      - name: Add Event Grid subscription
        env:
          SUBSCRIPTION_ID: ${{ secrets.SUBSCRIPTION_ID }}
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
          SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
        run: |
          az eventgrid event-subscription create \
            --name phdi${TF_ENV}phi${SHORT_CID}-FlushWithClose-sourcedata-queue \
            --source-resource-id "/subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP_NAME/providers/Microsoft.Storage/storageaccounts/phdi${TF_ENV}phi${SHORT_CID}" \
            --included-event-types Microsoft.Storage.BlobCreated \
            --subject-begins-with /blobServices/default/containers/source-data/blobs/ \
            --advanced-filter data.api StringContains FlushWithClose \
            --endpoint "/subscriptions/${SUBSCRIPTION_ID}/resourceGroups/${RESOURCE_GROUP_NAME}/providers/Microsoft.Storage/storageAccounts/phdi${TF_ENV}phi${SHORT_CID}/queueservices/default/queues/sourcedataqueue" \
            --endpoint-type storagequeue
          az eventgrid event-subscription create \
            --name phdi${TF_ENV}phi${SHORT_CID}-PutBlob-sourcedata-queue \
            --source-resource-id "/subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP_NAME/providers/Microsoft.Storage/storageaccounts/phdi${TF_ENV}phi${SHORT_CID}" \
            --included-event-types Microsoft.Storage.BlobCreated \
            --subject-begins-with /blobServices/default/containers/source-data/blobs/ \
            --advanced-filter data.api StringContains PutBlob \
            --endpoint "/subscriptions/${SUBSCRIPTION_ID}/resourceGroups/${RESOURCE_GROUP_NAME}/providers/Microsoft.Storage/storageAccounts/phdi${TF_ENV}phi${SHORT_CID}/queueservices/default/queues/sourcedataqueue" \
            --endpoint-type storagequeue
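      # Add an HTTP scale rule to each service so Container Apps adds replicas once a
      # replica is handling 10 concurrent requests.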
      - name: Add scaling rules to container apps
        env:
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
        run: |
          for SERVICE in fhir-converter ingestion validation message-parser record-linkage; do
            az containerapp update --name phdi-$TF_ENV-$SERVICE \
              --resource-group $RESOURCE_GROUP_NAME \
              --scale-rule-name concurrent-http-requests \
              --scale-rule-http-concurrency 10
          done
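      # Put each container app behind Microsoft Entra ID (Azure AD) authentication,
      # creating an app registration, service principal, and client secret on first run;
      # unauthenticated requests receive a 401.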
      - name: Add authentication to container apps
        env:
          TENANT_ID: ${{ secrets.TENANT_ID }}
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
        run: |
          az extension add --name containerapp
          DEFAULT_DOMAIN=$(az containerapp env show --name $TF_ENV --query properties.defaultDomain -o tsv)
          for SERVICE in fhir-converter ingestion validation message-parser record-linkage; do
            if [[ $(az containerapp auth show --name phdi-$TF_ENV-$SERVICE) == "{}" ]]; then
              if [[ $(az ad app list | grep -c phdi-$TF_ENV-$SERVICE) -gt 0 ]]; then
                APP_ID=$(az ad app list --display-name phdi-$TF_ENV-$SERVICE --query [].appId -o tsv)
              else
                APP_ID=$(az ad app create \
                  --display-name phdi-$TF_ENV-$SERVICE \
                  --identifier-uris api://phdi-$TF_ENV-$SERVICE \
                  --sign-in-audience AzureADMyOrg \
                  --enable-access-token-issuance false \
                  --enable-id-token-issuance true \
                  --web-home-page-url https://phdi-$TF_ENV-$SERVICE.$DEFAULT_DOMAIN \
                  --web-redirect-uris https://phdi-$TF_ENV-$SERVICE.$DEFAULT_DOMAIN/.auth/login/aad/callback \
                  --required-resource-access '[{"resourceAccess": [{"id": "e1fe6dd8-ba31-4d61-89e7-88639da4683d", "type": "Scope"} ], "resourceAppId": "00000003-0000-0000-c000-000000000000"}]' \
                  --query appId -o tsv)
                az ad sp create --id $APP_ID
                SCOPE_ID=$(uuidgen)
                az ad app update --id $APP_ID --set api='{"oauth2PermissionScopes":[{"adminConsentDescription":"Allow the application to access phdi-$TF_ENV-$SERVICE on behalf of the signed-in user.","adminConsentDisplayName":"Access phdi-$TF_ENV-$SERVICE","id":"'"$SCOPE_ID"'","isEnabled":true,"type":"User","userConsentDescription":"Allow the application to access phdi-$TF_ENV-$SERVICE on your behalf.","userConsentDisplayName":"Access phdi-$TF_ENV-$SERVICE","value":"user_impersonation"}],"preAuthorizedApplications":[]}'
                az ad app update --id $APP_ID --set api='{"oauth2PermissionScopes":[{"adminConsentDescription":"Allow the application to access phdi-$TF_ENV-$SERVICE on behalf of the signed-in user.","adminConsentDisplayName":"Access phdi-$TF_ENV-$SERVICE","id":"'"$SCOPE_ID"'","isEnabled":true,"type":"User","userConsentDescription":"Allow the application to access phdi-$TF_ENV-$SERVICE on your behalf.","userConsentDisplayName":"Access phdi-$TF_ENV-$SERVICE","value":"user_impersonation"}],"preAuthorizedApplications":[{"appId":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","delegatedPermissionIds":["'"$SCOPE_ID"'"]}]}'
              fi
              AUTH_SECRET=$(az ad app credential reset --id $APP_ID --display-name "Generated by App Service" --only-show-errors --query password -o tsv)
              az containerapp secret set --name phdi-$TF_ENV-$SERVICE --secrets auth-secret=$AUTH_SECRET
              az containerapp auth microsoft update --name phdi-$TF_ENV-$SERVICE --client-id $APP_ID --client-secret-name auth-secret --issuer "https://sts.windows.net/${TENANT_ID}/v2.0" --allowed-audiences "api://phdi-$TF_ENV-$SERVICE"
              az containerapp auth update --name phdi-$TF_ENV-$SERVICE --enabled --action Return401
            fi
          done
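      # Recreate the Synapse notebooks, analytics pipeline, and weekly trigger. envsubst
      # injects the exported environment-specific values into the notebook and trigger
      # definitions before they are uploaded.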
      - name: Deploy Synapse Notebooks, Pipeline and Trigger
        env:
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
          SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
          TENANT_ID: ${{ secrets.TENANT_ID }}
          RECORD_LINKAGE_CONTAINER_URL: ${{ needs.terraform.outputs.record_linkage_container_url }}
        run: |
          export STORAGE_ACCOUNT=phdi${TF_ENV}phi${SHORT_CID}
          export KEY_VAULT=${TF_ENV}vault${SHORT_CID}
          export MPI_DB_HOST=phdi${TF_ENV}mpi${SHORT_CID}.postgres.database.azure.com
          export KEY_VAULT_LINKED_SERVICE=${TF_ENV}${SHORT_CID}-keyvault-linked-service
          export BLOB_STORAGE_LINKED_SERVICE=phdi${TF_ENV}${SHORT_CID}-blob-storage-linked-service
          export SCOPE=${{ secrets.RESOURCE_GROUP_NAME }}-${TF_ENV}
          if az synapse trigger show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline Weekly Trigger"; then
            az synapse trigger delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline Weekly Trigger" --yes
          fi
          if az synapse pipeline show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline"; then
            az synapse pipeline delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline" --yes
          fi
          for FILE in ../../scripts/Synapse/*
          do
            if [[ $FILE == *.ipynb ]]; then
              FILENAME="$(basename $FILE .ipynb)"
              if az synapse notebook show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name $FILENAME; then
                az synapse notebook delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name $FILENAME --yes
              fi
              cat "../../scripts/Synapse/${FILENAME}.ipynb" | envsubst > "../../scripts/Synapse/${FILENAME}Substituted.ipynb"
              az synapse notebook create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name $FILENAME --file "@../../scripts/Synapse/${FILENAME}Substituted.ipynb" --spark-pool-name sparkpool
            fi
          done
          az synapse pipeline create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline" --file @../../scripts/Synapse/config/SynapseAnalyticsPipelineConfig.json
          cat ../../scripts/Synapse/config/SynapseAnalyticsPipelineWeeklyTriggerConfig.json | envsubst > ../../scripts/Synapse/config/SynapseAnalyticsPipelineWeeklyTriggerConfigSubstituted.json
          az synapse trigger create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline Weekly Trigger" --file @../../scripts/Synapse/config/SynapseAnalyticsPipelineWeeklyTriggerConfigSubstituted.json
          az synapse trigger start --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Synapse Analytics Pipeline Weekly Trigger"
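      # Refresh the Azure session, then recreate the daily ECR datastore refresh pipeline
      # and trigger using the same delete-if-exists-then-create pattern.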
- name: "Azure login"
uses: azure/login@v1
with:
client-id: ${{ secrets.CLIENT_ID }}
tenant-id: ${{ secrets.TENANT_ID }}
subscription-id: ${{ secrets.SUBSCRIPTION_ID }}
- name: Deploy Daily ECR Refresh Pipeline and Trigger
env:
TF_ENV: ${{ needs.terraform.outputs.tf_env }}
SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
run: |
if az synapse trigger show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh Daily Trigger"; then
az synapse trigger delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh Daily Trigger" --yes
fi
if az synapse pipeline show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh"; then
az synapse pipeline delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh" --yes
fi
az synapse pipeline create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh" --file @../../scripts/Synapse/config/ECRDatastoreRefreshConfig.json
az synapse trigger create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh Daily Trigger" --file @../../scripts/Synapse/config/ECRDatastoreRefreshDailyTriggerConfig.json
az synapse trigger start --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "ECR Datastore Refresh Daily Trigger"
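      # Recreate the pipeline and trigger that update the MPI (Master Patient Index).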
      - name: Deploy Update MPI Pipeline and Trigger
        env:
          SUBSCRIPTION_ID: ${{ secrets.SUBSCRIPTION_ID }}
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
          SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
        run: |
          if az synapse trigger show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI Trigger"; then
            az synapse trigger delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI Trigger" --yes
          fi
          if az synapse pipeline show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI"; then
            az synapse pipeline delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI" --yes
          fi
          az synapse pipeline create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI" --file @../../scripts/Synapse/config/UpdateMPIConfig.json
          cat ../../scripts/Synapse/config/UpdateMPITriggerConfig.json | envsubst > ../../scripts/Synapse/config/UpdateMPITriggerConfigSubstituted.json
          az synapse trigger create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI Trigger" --file @../../scripts/Synapse/config/UpdateMPITriggerConfigSubstituted.json
          az synapse trigger start --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MPI Trigger"
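      # Recreate the "Update MII" pipeline and trigger (same delete-then-create pattern).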
      - name: Deploy Update MII Pipeline and Trigger
        env:
          SUBSCRIPTION_ID: ${{ secrets.SUBSCRIPTION_ID }}
          RESOURCE_GROUP_NAME: ${{ secrets.RESOURCE_GROUP_NAME }}
          TF_ENV: ${{ needs.terraform.outputs.tf_env }}
          SHORT_CID: ${{ needs.terraform.outputs.short_cid }}
        run: |
          if az synapse trigger show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII Trigger"; then
            az synapse trigger delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII Trigger" --yes
          fi
          if az synapse pipeline show --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII"; then
            az synapse pipeline delete --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII" --yes
          fi
          az synapse pipeline create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII" --file @../../scripts/Synapse/config/UpdateMIIConfig.json
          cat ../../scripts/Synapse/config/UpdateMIITriggerConfig.json | envsubst > ../../scripts/Synapse/config/UpdateMIITriggerConfigSubstituted.json
          az synapse trigger create --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII Trigger" --file @../../scripts/Synapse/config/UpdateMIITriggerConfigSubstituted.json
          az synapse trigger start --workspace-name phdi${TF_ENV}synapse${SHORT_CID} --name "Update MII Trigger"
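      # Upload the ECR datastore parsing schema to the message-parser service,
      # authenticating with a token for the app registration created in the auth step.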
- name: "Azure login"
uses: azure/login@v1
with:
client-id: ${{ secrets.CLIENT_ID }}
tenant-id: ${{ secrets.TENANT_ID }}
subscription-id: ${{ secrets.SUBSCRIPTION_ID }}
- name: Add custom schema to message parser
env:
TF_ENV: ${{ needs.terraform.outputs.tf_env }}
run: |
# Get token for message parser
TOKEN=$(az account get-access-token --resource api://phdi-${TF_ENV}-message-parser --query accessToken -o tsv)
# Wrap ecr_datastore_config.json into variable
ECR_DATASTORE_CONFIG=$(jq -n --argjson value "$(cat ../../scripts/Synapse/config/ecr_datastore_config.json)" '{"parsing_schema": $value}')
# Get message parser container app URL
MESSAGE_PARSER_URL=$(az containerapp show -g phdi -n phdi-${TF_ENV}-message-parser --query properties.configuration.ingress.fqdn -o tsv)
# PUT ecr_datastore_config.json to message parser
curl -X PUT -H "Content-Type: application/json" -H "Authorization: Bearer $TOKEN" -d "$ECR_DATASTORE_CONFIG" $MESSAGE_PARSER_URL/schemas/ecr.json
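
  # Reusable end-to-end test workflow, run only when deploying to dev.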
  end-to-end:
    name: End-to-end tests
    needs:
      - terraform
      - azure-cli
    if: ${{ needs.terraform.outputs.tf_env == 'dev' }}
    uses: ./.github/workflows/end-to-end.yaml
    with:
      environment: ${{ needs.terraform.outputs.tf_env }}
    secrets: inherit