-
Notifications
You must be signed in to change notification settings - Fork 81
Expand file tree
/
Copy pathdata_pipeline_ci_cd.yml
More file actions
94 lines (87 loc) · 3.3 KB
/
data_pipeline_ci_cd.yml
File metadata and controls
94 lines (87 loc) · 3.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
# Azure DevOps pipeline: CI builds two artifacts (Databricks notebooks and the
# ADF ARM templates published to the technical "adf_publish" branch); CD deploys
# them to a Databricks workspace and a test Data Factory.
name: CICD

# Validate pull requests targeting main or the ADF publish branch.
pr:
  branches:
    include:
      - main
      - adf_publish

# CI trigger: only commits to main that touch scripts/.
trigger:
  branches:
    include:
      - main
  paths:
    include:
      - scripts/

# Variable groups supply e.g. DATA_FACTORY_*_NAME, AZURE_RM_CONNECTION,
# RESOURCE_GROUP, LOCATION, STORAGE_* and the databricks-token secret.
variables:
  - group: datapipeline-vg
  - group: keys-vg

pool:
  vmImage: ubuntu-latest

stages:
  - stage: 'CI'
    displayName: 'CI'
    jobs:
      - job: "CI_Job"
        displayName: "CI Job"
        # The CI stage produces two artifacts (notebooks and ADF pipelines).
        # The pipelines Azure Resource Manager templates are stored in a
        # technical branch "adf_publish".
        steps:
          # Check out this repo and publish its notebooks as an artifact.
          - checkout: self
          - script: dir $(Build.SourcesDirectory)/$(Build.Repository.Name)
          - publish: $(Build.SourcesDirectory)/$(Build.Repository.Name)/azure-data-pipeline/notebooks
            artifact: notebooks
          # Check out the adf_publish branch of the same-project repo and
          # publish the generated ARM templates.
          - checkout: git://${{variables['System.TeamProject']}}@adf_publish
          - script: dir $(Build.SourcesDirectory)/$(Build.Repository.Name)
          - publish: $(Build.SourcesDirectory)/$(Build.Repository.Name)/$(DATA_FACTORY_DEV_NAME)
            artifact: adf-pipelines

  - stage: 'CD'
    displayName: 'CD'
    jobs:
      - deployment: "Deploy_to_Databricks"
        displayName: 'Deploy to Databricks'
        timeoutInMinutes: 0
        environment: qa
        strategy:
          runOnce:
            deploy:
              steps:
                - task: UsePythonVersion@0
                  inputs:
                    versionSpec: '3.x'
                    addToPath: true
                    architecture: 'x64'
                  displayName: 'Use Python3'
                # Install Databricks CLI
                - script: |
                    curl -fsSL https://raw.githubusercontent.com/databricks/setup-cli/main/install.sh | sh
                    echo "databricks version" $(databricks --version)
                  displayName: 'Install Databricks CLI'
                # Write a DEFAULT profile; $(databricks-token) comes from the
                # keys-vg variable group (secret), $DATABRICKS_URL from the env.
                - script: |
                    mkdir -p ~/.databricks
                    echo "[DEFAULT]" > ~/.databrickscfg
                    echo "host = $DATABRICKS_URL" >> ~/.databrickscfg
                    echo "token = $(databricks-token)" >> ~/.databrickscfg
                    databricks auth profiles
                  displayName: "Configure Databricks CLI"
                # Import the notebooks artifact into the workspace /Shared folder.
                - script: |
                    echo "Uploading notebooks from $(Pipeline.Workspace)/notebooks to /Shared..."
                    databricks workspace import-dir "$(Pipeline.Workspace)/notebooks" /Shared
                  displayName: "Upload Notebooks to /Shared in the Databricks cluster"

      - deployment: "Deploy_to_ADF"
        displayName: 'Deploy to ADF'
        timeoutInMinutes: 0
        environment: qa
        strategy:
          runOnce:
            deploy:
              steps:
                # Deploy the exported ARM templates to the test Data Factory,
                # overriding the factory name and storage parameters.
                - task: AzureResourceGroupDeployment@2
                  displayName: 'Deploy ADF resources'
                  inputs:
                    azureSubscription: $(AZURE_RM_CONNECTION)
                    resourceGroupName: $(RESOURCE_GROUP)
                    location: $(LOCATION)
                    csmFile: '$(Pipeline.Workspace)/adf-pipelines/ARMTemplateForFactory.json'
                    csmParametersFile: '$(Pipeline.Workspace)/adf-pipelines/ARMTemplateParametersForFactory.json'
                    # Folded scalar: one logical command line of override flags.
                    overrideParameters: >-
                      -factoryName "$(DATA_FACTORY_TEST_NAME)"
                      -DataPipeline_properties_variables_storage_account_name_defaultValue "$(STORAGE_ACCOUNT_NAME)"
                      -DataPipeline_properties_variables_storage_container_name_defaultValue "$(STORAGE_CONTAINER_NAME)"