Build and Release #316

Workflow file for this run

name: Build and Release
on:
create:
pull_request:
branches: [ main, release/* ]
paths:
- '**.cs'
- '**.csproj'
- 'tests/integration/**'
workflow_dispatch:
inputs:
tags:
description: 'Flag as workflow dispatch'
required: true
type: boolean
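# Triggers: pull requests into main or release/* that touch C# sources, project files,
# or the integration tests; the "create" event (fires for both branch and tag creation;
# the build job filters to tags below); and manual dispatch, e.g. (illustrative command):
#   gh workflow run "Build and Release" -f tags=true
# Shared settings: the .NET SDK version plus paths to the function app source and unit tests.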
env:
DOTNET_VERSION: '6.x.x' # The .NET SDK version to use
REPO: "function-app/adb-to-purview/src/"
TEST: "function-app/adb-to-purview/tests/unit-tests"
jobs:
meta:
name: Meta Information
runs-on: ubuntu-latest
steps:
- name: Log workflow event metadata
run: |
echo "Github Event Name: ${{ github.event_name }}"
echo "Github Ref: ${{ github.ref }}"
echo "Github Ref Type: ${{ github.ref_type }}"
echo "Github Tags: ${{ inputs.tags }}"
build:
if: |
github.event_name == 'pull_request' ||
(github.event_name == 'create' && github.ref_type == 'tag') ||
(github.event_name == 'workflow_dispatch' && inputs.tags)
name: build-${{matrix.os}}
runs-on: ${{ matrix.os }}
strategy:
matrix:
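# windows-latest is used here; the packaging step below relies on PowerShell's Compress-Archive.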
os: [windows-latest]
steps:
- uses: actions/checkout@v2
- name: Setup .NET Core
uses: actions/setup-dotnet@v1
with:
dotnet-version: ${{ env.DOTNET_VERSION }}
- name: Install dependencies
run: dotnet restore ${{ env.REPO }}
- name: Build
run: dotnet build ${{ env.REPO }} --configuration Release --no-restore
- name: Test
run: dotnet test ${{ env.TEST }}
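# Publish uses the default (Debug) configuration, so the output lands under
# bin/Debug/net6.0/publish, which is the path zipped in the packaging step below.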
- name: Publish
run: dotnet publish ${{ env.REPO }}
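# Sanity check that the OlToPurviewMappings embedded in the ARM template still match
# the standalone JSON file.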
- name: Confirm Mappings Match
run: python tests/deployment/test_arm_mapping_matches_json.py
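# Package the published function app into FunctionZip.zip for the deploy and release jobs.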
- name: Zip Published Function App
run: |
mkdir ~/artifact
Compress-Archive -Path ${{ github.workspace }}/function-app/adb-to-purview/src/bin/Debug/net6.0/publish/* -DestinationPath ~/artifact/FunctionZip.zip
- name: Upload Function Zip Build Artifact
uses: actions/upload-artifact@v3
with:
name: FunctionZip
path: ~/artifact/FunctionZip.zip
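# Minify OlToPurviewMappings.json to a single line and publish it as its own artifact.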
- name: Create One Line OlToPurviewMappings
run: |
mkdir ~/artifact-mappings
python ./deployment/util/mappings-remove-spaces.py ./deployment/infra/OlToPurviewMappings.json > ~/artifact-mappings/one-line-mappings.json
ls ~/artifact-mappings
- name: Upload One Line OlToPurviewMappings Build Artifact
uses: actions/upload-artifact@v3
with:
name: Mappings
path: ~/artifact-mappings/one-line-mappings.json
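# Deploys the freshly built function app to the Integration environment and runs the
# Databricks/Synapse-backed integration test suite against it.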
runIntegrationTests:
name: Integration Tests
needs: [build]
runs-on: ubuntu-latest
environment:
name: Integration
steps:
- uses: actions/checkout@v3
# Deploy the Function to integration environment
- name: Download Artifact
uses: actions/download-artifact@v3
with:
name: FunctionZip
path: ./artifacts
- name: Deploy Azure Function to Integration Env
uses: Azure/functions-action@v1.4.6
with:
app-name: ${{ secrets.INT_FUNC_NAME }}
package: ./artifacts/FunctionZip.zip
publish-profile: ${{ secrets.INT_PUBLISH_PROFILE }}
- name: Azure Login
uses: azure/login@v1
with:
creds: ${{ secrets.INT_AZ_CLI_CREDENTIALS }}
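# compare-app-settings.py checks the deployed function's app settings against the
# expected set (and updates them) before the tests run.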
- name: Compare and Update App Settings on Deployed Function
uses: azure/CLI@v1
with:
azcliversion: 2.34.1
inlineScript: |
chmod +x $GITHUB_WORKSPACE/tests/deployment/compare-app-settings.py
python3 $GITHUB_WORKSPACE/tests/deployment/compare-app-settings.py ${{ secrets.INT_FUNC_NAME }} ${{ secrets.INT_RG_NAME }}
# TODO: Deploy / Update Spark jobs
# Start up Synapse Pool and Execute Tests
- name: Start Integration Synapse SQL Pool
run: source tests/integration/manage-sql-pool.sh start ${{ secrets.INT_SUBSCRIPTION_ID }} ${{ secrets.INT_SYNAPSE_SQLPOOL_RG_NAME }} ${{ secrets.INT_SYNAPSE_WKSP_NAME }} ${{ secrets.INT_SYNAPSE_SQLPOOL_NAME }}
env:
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
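# pyapacheatlas and azure-identity are used by the integration scripts to talk to Purview;
# databricks-cli is used to drive the Databricks workspace.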
- name: Install Python Requirements and Databricks CLI
run: pip install pyapacheatlas==0.12.0 azure-identity databricks-cli
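# Write a local Databricks CLI config (host + token) and point DATABRICKS_CONFIG_FILE at it.
# The export below only lasts for this step; later steps set DATABRICKS_CONFIG_FILE explicitly.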
- name: Configure Databricks CLI
run: |
echo "[DEFAULT]
host = https://${{ secrets.INT_DATABRICKS_WKSP_ID }}.azuredatabricks.net
token = ${{ secrets.INT_DATABRICKS_ACCESS_TOKEN }}" > ./config.ini
export DATABRICKS_CONFIG_FILE=./config.ini
- name: Confirm Databricks CLI is configured
run: databricks clusters spark-versions
env:
DATABRICKS_CONFIG_FILE: ./config.ini
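# Remove leftover assets from previous runs before executing the test suite.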
- name: Cleanup Integration Environment
run: python ./tests/integration/runner.py --cleanup --dontwait None None None
env:
PURVIEW_INTEGRATION_TARGET: ${{ secrets.INT_PURVIEW_NAME }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
DATABRICKS_CONFIG_FILE: ./config.ini
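# Run the full test suite (ALL) against the Databricks workspace, targeting the Purview
# account named in PURVIEW_INTEGRATION_TARGET.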
- name: Run Integration Tests
run: source ./tests/integration/run-test.sh ALL ${{ secrets.INT_DATABRICKS_WKSP_ID }}
env:
PURVIEW_INTEGRATION_TARGET: ${{ secrets.INT_PURVIEW_NAME }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
DATABRICKS_CONFIG_FILE: ./config.ini
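# Pause the SQL pool again once the tests have finished.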
- name: Stop Integration Synapse SQL Pool
run: source tests/integration/manage-sql-pool.sh stop ${{ secrets.INT_SUBSCRIPTION_ID }} ${{ secrets.INT_SYNAPSE_SQLPOOL_RG_NAME }} ${{ secrets.INT_SYNAPSE_WKSP_NAME }} ${{ secrets.INT_SYNAPSE_SQLPOOL_NAME }}
env:
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
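# Publishes a GitHub release with the FunctionZip artifact when the workflow was
# triggered by creating a tag.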
createRelease:
name: Create Release
if: ${{ github.event_name == 'create' && github.ref_type == 'tag' }}
needs: [runIntegrationTests]
runs-on: ubuntu-latest
environment:
name: ReleaseEnvironment
steps:
- uses: actions/checkout@v3
- name: Download Artifact
uses: actions/download-artifact@v3
with:
name: FunctionZip
path: ~/artifacts
- name: Create Release
uses: ncipollo/release-action@v1
if: github.ref_type == 'tag'
with:
artifacts: ~/artifacts/FunctionZip.zip
token: ${{ secrets.GITHUB_TOKEN }}