diff --git a/.azuredevops/pipelines/database.build.yml b/.azuredevops/pipelines/database.build.yml
index f3a5b6a5..2a094905 100644
--- a/.azuredevops/pipelines/database.build.yml
+++ b/.azuredevops/pipelines/database.build.yml
@@ -4,7 +4,7 @@ trigger:
 - none
 
 variables:
 - name: SqlProjects
-  value: metadata.control, metadata.ingest, metadata.transform
+  value: metadata.common, metadata.control, metadata.ingest, metadata.transform
 
 stages:
 - stage: BuildAndPublish
diff --git a/.azuredevops/pipelines/datafactory.build.yml b/.azuredevops/pipelines/datafactory.build.yml
deleted file mode 100644
index 95d225a5..00000000
--- a/.azuredevops/pipelines/datafactory.build.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-# This steps template automatically builds the ARM template and additional scripts for an Azure Data Factory (ADF) resource
-
-parameters:
-- name: DataFactoryName
-  type: string
-- name: ResourceGroupName
-  type: string
-- name: ADFArtifactName
-  type: string
-- name: WorkingDirectory # Used to grab the ADF template parameter files and any other files needed to build the ADF
-  type: string
-
-steps:
-- checkout: self
-  displayName: "Checkout Source Repo Code"
-
-- powershell: |
-    Write-Host "Attempting to validate and build the ADF ARM template from Data Factory: ${{ parameters.DataFactoryName }} in resource group: ${{ parameters.ResourceGroupName }}."
-  displayName: "Display The Build ADF Info"
-
-- task: NodeTool@0
-  displayName: "Install Node.js"
-  inputs:
-    versionSpec: "18.x"
-
-- task: Npm@1
-  displayName: "Install NPM Package"
-  inputs:
-    command: "install"
-    workingDir: "${{ parameters.WorkingDirectory }}"
-    verbose: true
-
-- task: Npm@1
-  displayName: "Validate And Generate ADF ARM Template And Scripts"
-  inputs:
-    command: "custom"
-    workingDir: ${{ parameters.WorkingDirectory }}
-    customCommand: 'run build export ${{ parameters.WorkingDirectory }} /subscriptions/$(DEVSubscriptionID)/resourceGroups/${{ parameters.ResourceGroupName }}/providers/Microsoft.DataFactory/factories/${{ parameters.DataFactoryName }} "ArmTemplate"'
-    # DEVSubscriptionID is a secret variable. In Azure DevOps, edit the pipeline, open Variables, and tick the checkbox to keep the variable secret.
-
-- task: PublishPipelineArtifact@1
-  displayName: "Publish ADF ARM Template And Scripts"
-  inputs:
-    targetPath: "${{ parameters.WorkingDirectory }}/ArmTemplate"
-    artifactName: "${{ parameters.ADFArtifactName }}"
-    publishLocation: "pipeline"
diff --git a/.azuredevops/pipelines/datafactory.cicd.yml b/.azuredevops/pipelines/datafactory.cicd.yml
deleted file mode 100644
index d7e1680a..00000000
--- a/.azuredevops/pipelines/datafactory.cicd.yml
+++ /dev/null
@@ -1,167 +0,0 @@
-name: DataFactory-CICD-Pipeline-$(Date:yyyyMMdd)$(Rev:.r)
-
-# ADF is the shortened version of Azure Data Factory
-
-trigger:
-- none # Set to none so the pipeline only runs manually; replace with a branch list (e.g. - main) to trigger on commits to that branch.
-
-pool:
-  vmImage: "windows-latest"
-
-stages:
-- stage: BuildDEVDataFactory
-  displayName: "Build DEV Data Factory"
-
-  variables:
-  - template: ../.././infrastructure/configuration/dev/dev-variables.yml
-
-  jobs:
-  - job: ValidateAndBuildDataFactoryTemplate
-    displayName: "Build DEV Data Factory"
-    steps:
-    - template: datafactory.build.yml
-      parameters:
-        DataFactoryName: "${{ variables.BuildDataFactoryName }}"
-        ResourceGroupName: "${{ variables.BuildDataFactoryResourceGroupName }}"
-        ADFArtifactName: "${{ variables.ADFArtifactName }}"
-        WorkingDirectory: "${{ variables.WorkingDirectory }}"
-
-# DEPLOY TO Test Factory
-- stage: DeployToTestFactory
-  dependsOn: BuildDEVDataFactory
-  condition: succeeded()
-  displayName: "Deploy To Test Factory"
-
-  variables:
-  - template: ../.././infrastructure/configuration/test/test-variables.yml
-
-  jobs:
-  - deployment: ApprovalCheckDeployToTestFactory
-    displayName: "Approval Check To Deploy To Test Factory"
-    environment: Test
-    strategy:
-      runOnce:
-        deploy:
-          steps:
-          - powershell: |
-              Write-Host "Deploy To Test Factory has been fully approved. Starting the deployment to Test."
-
-  - job: DeployDataFactory
-    displayName: "Deploy ADF ARM Template To Target ADF Factory"
-    dependsOn: ApprovalCheckDeployToTestFactory
-    condition: succeeded()
-    steps:
-    - template: datafactory.deploy.yml
-      parameters:
-        AzureResourceManagerConnection: "${{ variables.AzureResourceManagerConnection }}"
-        DataFactoryName: "${{ variables.DataFactoryNameFactory }}"
-        DataFactoryTemplateParametersFilePath: "${{ variables.DataFactoryTemplateParametersFilePathFactory }}"
-        ResourceGroupName: "${{ variables.ADFResourceGroupName }}"
-        ResourceGroupLocation: "${{ variables.ResourceGroupLocation }}"
-        ADFArtifactName: "${{ variables.ADFArtifactName }}"
-        Environment: "${{ variables.Environment }}"
-
-# DEPLOY TO Test Workers
-- stage: DeployToTestWorkers
-  dependsOn: BuildDEVDataFactory
-  condition: and(succeeded(), eq(variables['DeployWorkers'], 'true'))
-  displayName: "Deploy To Test Workers"
-
-  variables:
-  - template: ../.././infrastructure/configuration/test/test-variables.yml
-
-  jobs:
-  - deployment: ApprovalCheckDeployToTestWorkers
-    displayName: "Approval Check To Deploy To Test Workers"
-    environment: Test
-    strategy:
-      runOnce:
-        deploy:
-          steps:
-          - powershell: |
-              Write-Host "Deploy To Test Workers has been fully approved. Starting the deployment to Test."
-
-  - job: DeployDataWorkers
-    displayName: "Deploy ADF ARM Template To Target ADF Workers"
-    dependsOn: ApprovalCheckDeployToTestWorkers
-    condition: succeeded()
-    steps:
-    - template: datafactory.deploy.yml
-      parameters:
-        AzureResourceManagerConnection: "${{ variables.AzureResourceManagerConnection }}"
-        DataFactoryName: "${{ variables.DataFactoryNameWorkers }}"
-        DataFactoryTemplateParametersFilePath: "${{ variables.DataFactoryTemplateParametersFilePathWorkers }}"
-        ResourceGroupName: "${{ variables.ADFResourceGroupName }}"
-        ResourceGroupLocation: "${{ variables.ResourceGroupLocation }}"
-        ADFArtifactName: "${{ variables.ADFArtifactName }}"
-        Environment: "${{ variables.Environment }}"
-
-# DEPLOY TO Prod Factory
-- stage: DeployToProdFactory
-  dependsOn: BuildDEVDataFactory
-  condition: succeeded()
-  displayName: "Deploy To Prod Factory"
-
-  variables:
-  - template: ../.././infrastructure/configuration/prod/prod-variables.yml
-
-  jobs:
-  - deployment: ApprovalCheckDeployToProdFactory
-    displayName: "Approval Check To Deploy To Prod Factory"
-    environment: Prod
-    strategy:
-      runOnce:
-        deploy:
-          steps:
-          - powershell: |
-              Write-Host "Deploy To Prod Factory has been fully approved. Starting the deployment to Prod."
-
-  - job: DeployDataFactory
-    displayName: "Deploy ADF ARM Template To Target ADF Factory"
-    dependsOn: ApprovalCheckDeployToProdFactory
-    condition: succeeded()
-    steps:
-    - template: datafactory.deploy.yml
-      parameters:
-        AzureResourceManagerConnection: "${{ variables.AzureResourceManagerConnection }}"
-        DataFactoryName: "${{ variables.DataFactoryNameFactory }}"
-        DataFactoryTemplateParametersFilePath: "${{ variables.DataFactoryTemplateParametersFilePathFactory }}"
-        ResourceGroupName: "${{ variables.ADFResourceGroupName }}"
-        ResourceGroupLocation: "${{ variables.ResourceGroupLocation }}"
-        ADFArtifactName: "${{ variables.ADFArtifactName }}"
-        Environment: "${{ variables.Environment }}"
-
-# DEPLOY TO Prod Workers
-- stage: DeployToProdWorkers
-  dependsOn: BuildDEVDataFactory
-  condition: and(succeeded(), eq(variables['DeployWorkers'], 'true'))
-  displayName: "Deploy To Prod Workers"
-
-  variables:
-  - template: ../.././infrastructure/configuration/prod/prod-variables.yml
-
-  jobs:
-  - deployment: ApprovalCheckDeployToProdWorkers
-    displayName: "Approval Check To Deploy To Prod Workers"
-    environment: Prod
-    strategy:
-      runOnce:
-        deploy:
-          steps:
-          - powershell: |
-              Write-Host "Deploy To Prod Workers has been fully approved. Starting the deployment to Prod."
-
-  - job: DeployDataWorkers
-    displayName: "Deploy ADF ARM Template To Target ADF Workers"
-    dependsOn: ApprovalCheckDeployToProdWorkers
-    condition: succeeded()
-    steps:
-    - template: datafactory.deploy.yml
-      parameters:
-        AzureResourceManagerConnection: "${{ variables.AzureResourceManagerConnection }}"
-        DataFactoryName: "${{ variables.DataFactoryNameWorkers }}"
-        DataFactoryTemplateParametersFilePath: "${{ variables.DataFactoryTemplateParametersFilePathWorkers }}"
-        ResourceGroupName: "${{ variables.ADFResourceGroupName }}"
-        ResourceGroupLocation: "${{ variables.ResourceGroupLocation }}"
-        ADFArtifactName: "${{ variables.ADFArtifactName }}"
-        Environment: "${{ variables.Environment }}"
\ No newline at end of file
diff --git a/.azuredevops/pipelines/datafactory.deploy.yml b/.azuredevops/pipelines/datafactory.deploy.yml
deleted file mode 100644
index 99b84071..00000000
--- a/.azuredevops/pipelines/datafactory.deploy.yml
+++ /dev/null
@@ -1,95 +0,0 @@
-
-# This steps template takes an ADF published artifact and deploys the ARM template to the target ADF instance
-
-parameters:
-- name: ADFArtifactName
-  type: string
-- name: AzureResourceManagerConnection
-  type: string
-- name: DataFactoryName
-  type: string
-- name: ResourceGroupName
-  type: string
-- name: ResourceGroupLocation
-  type: string
-- name: DataFactoryTemplateParametersFilePath
-  type: string
-- name: Environment
-  type: string
-  values:
-  - TEST
-  - PROD
-
-steps:
-- checkout: self
-  displayName: "Checkout Source Repo Code"
-
-- powershell: |
-    Write-Host "Attempting to deploy the ADF ARM Template to Data Factory ${{ parameters.DataFactoryName }} in resource group: ${{ parameters.ResourceGroupName }} in the ${{ parameters.Environment }} environment."
-  displayName: "Display The Deployed To ADF Info"
-
-- task: DownloadPipelineArtifact@2
-  displayName: "Download The ADF Artifact"
-  inputs:
-    buildType: "current"
-    artifactName: "${{ parameters.ADFArtifactName }}"
-    targetpath: "$(Pipeline.Workspace)/ADFArtifact"
-
-- task: CmdLine@2
-  displayName: "List Contents of the Pipeline ADF Artifact Workspace"
-  inputs:
-    script: dir
-    workingDirectory: "$(Pipeline.Workspace)/ADFArtifact"
-
-- task: AzurePowerShell@5
-  displayName: "Stop Current ADF Triggers"
-  inputs:
-    azureSubscription: "${{ parameters.AzureResourceManagerConnection }}"
-    pwsh: true
-    azurePowerShellVersion: "LatestVersion"
-    ScriptType: "FilePath"
-    ScriptPath: "$(Pipeline.Workspace)/ADFArtifact/PrePostDeploymentScript.ps1"
-    scriptArguments:
-      -ArmTemplate "$(Pipeline.Workspace)/ADFArtifact/ARMTemplateForFactory.json"
-      -ArmTemplateParameters "${{ parameters.DataFactoryTemplateParametersFilePath }}"
-      -ResourceGroupName "${{ parameters.ResourceGroupName }}"
-      -DataFactoryName "${{ parameters.DataFactoryName }}"
-      -predeployment $true
-      -deleteDeployment $false
-
-- task: AzureResourceManagerTemplateDeployment@3
-  displayName: "Deploy ADF ARM Template"
-  inputs:
-    deploymentScope: "Resource Group"
-    azureResourceManagerConnection: "${{ parameters.AzureResourceManagerConnection }}"
-    ${{ if eq(parameters.Environment, 'TEST') }}:
-      subscriptionId: "$(TESTSubscriptionID)"
-    ${{ else }}:
-      subscriptionId: "$(PRODSubscriptionID)"
-    action: "Create Or Update Resource Group"
-    resourceGroupName: "${{ parameters.ResourceGroupName }}"
-    location: "${{ parameters.ResourceGroupLocation }}"
-    templateLocation: "Linked artifact"
-    csmFile: "$(Pipeline.Workspace)/ADFArtifact/ARMTemplateForFactory.json"
-    csmParametersFile: "${{ parameters.DataFactoryTemplateParametersFilePath }}"
}}" - deploymentOutputs: "armADFOutputs" - deploymentMode: "Incremental" - -- task: AzurePowerShell@5 - displayName: "Cleanup Resource And Start ADF Triggers" - inputs: - azureSubscription: "${{ parameters.azureResourceManagerConnection }}" - pwsh: true - azurePowerShellVersion: "LatestVersion" - ScriptType: "FilePath" - ScriptPath: "$(Pipeline.Workspace)/ADFArtifact/PrePostDeploymentScript.ps1" - ScriptArguments: - -ArmTemplate "$(Pipeline.Workspace)/ADFArtifact/ARMTemplateForFactory.json" - -ArmTemplateParameters "${{ parameters.DataFactoryTemplateParametersFilePath }}" - -ResourceGroupName "${{ parameters.ResourceGroupName }}" - -DataFactoryName "${{ parameters.DataFactoryName }}" - -predeployment $false - -deleteDeployment $true - - - diff --git a/src/azure.databricks/python/notebooks/utils/WriteToDelta.py b/src/azure.databricks/python/notebooks/utils/WriteToDelta.py index f733fcef..cbf6d9ad 100644 --- a/src/azure.databricks/python/notebooks/utils/WriteToDelta.py +++ b/src/azure.databricks/python/notebooks/utils/WriteToDelta.py @@ -67,7 +67,7 @@ def overwriteDelta(df: DataFrame, schemaName: str, tableName: str) -> None: tableName (str): Name of the target table for the dataset. """ - df.write.format("delta").mode("overwrite").insertInto(f"{schemaName}.{tableName}") + df.write.format("delta").mode("overwrite").option("mergeSchema", "true").saveAsTable(f"{schemaName}.{tableName}") return diff --git a/src/azure.datafactory/dataset/Ingest_DS_Salesforce_OAuth.json b/src/azure.datafactory/dataset/Ingest_DS_Salesforce_OAuth.json deleted file mode 100644 index 74b20549..00000000 --- a/src/azure.datafactory/dataset/Ingest_DS_Salesforce_OAuth.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "Ingest_DS_Salesforce_OAuth", - "properties": { - "linkedServiceName": { - "referenceName": "Ingest_LS_Salesforce_OAuth", - "type": "LinkedServiceReference", - "parameters": { - "LSClientId": { - "value": "@dataset().DSClientId", - "type": "Expression" - }, - "LSAPIVersion": "62.0" - } - }, - "parameters": { - "DSClientId": { - "type": "String" - }, - "DSAPIVersion": { - "type": "string", - "defaultValue": "62.0" - } - }, - "folder": { - "name": "Ingest" - }, - "annotations": [], - "type": "SalesforceV2Object", - "schema": [] - } -} \ No newline at end of file diff --git a/src/azure.datafactory/linkedService/Ingest_LS_Salesforce_OAuth.json b/src/azure.datafactory/linkedService/Ingest_LS_Salesforce_OAuth.json deleted file mode 100644 index ceb96ab0..00000000 --- a/src/azure.datafactory/linkedService/Ingest_LS_Salesforce_OAuth.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "Ingest_LS_Salesforce_OAuth", - "properties": { - "parameters": { - "LSClientId": { - "type": "string" - }, - "LSAPIVersion": { - "type": "string", - "defaultValue": "62.0" - } - }, - "annotations": [], - "type": "SalesforceV2", - "typeProperties": { - "environmentUrl": "https://syncron--preprod.sandbox.my.salesforce.com/", - "clientId": "@{linkedService().LSClientId}", - "clientSecret": { - "type": "AzureKeyVaultSecret", - "store": { - "referenceName": "Common_LS_cumuluskeys", - "type": "LinkedServiceReference" - }, - "secretName": "salesforcepreprodsandbox-clientsecret" - }, - "apiVersion": "@{linkedService().LSAPIVersion}", - "authenticationType": "OAuth2ClientCredential" - } - } -} \ No newline at end of file diff --git a/src/azure.datafactory/pipeline/Ingest_PL_Salesforce.json b/src/azure.datafactory/pipeline/Ingest_PL_Salesforce.json deleted file mode 100644 index e18183d9..00000000 --- 
diff --git a/src/azure.datafactory/dataset/Ingest_DS_Salesforce_OAuth.json b/src/azure.datafactory/dataset/Ingest_DS_Salesforce_OAuth.json
deleted file mode 100644
index 74b20549..00000000
--- a/src/azure.datafactory/dataset/Ingest_DS_Salesforce_OAuth.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
-    "name": "Ingest_DS_Salesforce_OAuth",
-    "properties": {
-        "linkedServiceName": {
-            "referenceName": "Ingest_LS_Salesforce_OAuth",
-            "type": "LinkedServiceReference",
-            "parameters": {
-                "LSClientId": {
-                    "value": "@dataset().DSClientId",
-                    "type": "Expression"
-                },
-                "LSAPIVersion": "62.0"
-            }
-        },
-        "parameters": {
-            "DSClientId": {
-                "type": "String"
-            },
-            "DSAPIVersion": {
-                "type": "string",
-                "defaultValue": "62.0"
-            }
-        },
-        "folder": {
-            "name": "Ingest"
-        },
-        "annotations": [],
-        "type": "SalesforceV2Object",
-        "schema": []
-    }
-}
\ No newline at end of file
diff --git a/src/azure.datafactory/linkedService/Ingest_LS_Salesforce_OAuth.json b/src/azure.datafactory/linkedService/Ingest_LS_Salesforce_OAuth.json
deleted file mode 100644
index ceb96ab0..00000000
--- a/src/azure.datafactory/linkedService/Ingest_LS_Salesforce_OAuth.json
+++ /dev/null
@@ -1,30 +0,0 @@
-{
-    "name": "Ingest_LS_Salesforce_OAuth",
-    "properties": {
-        "parameters": {
-            "LSClientId": {
-                "type": "string"
-            },
-            "LSAPIVersion": {
-                "type": "string",
-                "defaultValue": "62.0"
-            }
-        },
-        "annotations": [],
-        "type": "SalesforceV2",
-        "typeProperties": {
-            "environmentUrl": "https://syncron--preprod.sandbox.my.salesforce.com/",
-            "clientId": "@{linkedService().LSClientId}",
-            "clientSecret": {
-                "type": "AzureKeyVaultSecret",
-                "store": {
-                    "referenceName": "Common_LS_cumuluskeys",
-                    "type": "LinkedServiceReference"
-                },
-                "secretName": "salesforcepreprodsandbox-clientsecret"
-            },
-            "apiVersion": "@{linkedService().LSAPIVersion}",
-            "authenticationType": "OAuth2ClientCredential"
-        }
-    }
-}
\ No newline at end of file
diff --git a/src/azure.datafactory/pipeline/Ingest_PL_Salesforce.json b/src/azure.datafactory/pipeline/Ingest_PL_Salesforce.json
deleted file mode 100644
index e18183d9..00000000
--- a/src/azure.datafactory/pipeline/Ingest_PL_Salesforce.json
+++ /dev/null
@@ -1,389 +0,0 @@
-{
-    "name": "Ingest_PL_Salesforce",
-    "properties": {
-        "activities": [
-            {
-                "name": "Salesforce Type",
-                "type": "Switch",
-                "dependsOn": [
-                    {
-                        "activity": "Set Directory Path",
-                        "dependencyConditions": [
-                            "Succeeded"
-                        ]
-                    }
-                ],
-                "userProperties": [],
-                "typeProperties": {
-                    "on": {
-                        "value": "@replace(activity('Get Ingest Payload').output.firstRow.LinkedServiceName,'Ingest_LS_','')",
-                        "type": "Expression"
-                    },
-                    "cases": [
-                        {
-                            "value": "Salesforce_OAuth",
-                            "activities": [
-                                {
-                                    "name": "Salesforce OAuth Copy",
-                                    "type": "Copy",
-                                    "dependsOn": [
-                                        {
-                                            "activity": "Fetch Salesforce Client Id",
-                                            "dependencyConditions": [
-                                                "Succeeded"
-                                            ]
-                                        }
-                                    ],
-                                    "policy": {
-                                        "timeout": "0.12:00:00",
-                                        "retry": 0,
-                                        "retryIntervalInSeconds": 30,
-                                        "secureOutput": true,
-                                        "secureInput": false
-                                    },
-                                    "userProperties": [],
-                                    "typeProperties": {
-                                        "source": {
-                                            "type": "SalesforceV2Source",
-                                            "additionalColumns": [
-                                                {
-                                                    "name": "PipelineRunId",
-                                                    "value": {
-                                                        "value": "@pipeline().RunId",
-                                                        "type": "Expression"
-                                                    }
-                                                },
-                                                {
-                                                    "name": "PipelineExecutionDateTime",
-                                                    "value": {
-                                                        "value": "@utcnow()",
-                                                        "type": "Expression"
-                                                    }
-                                                }
-                                            ],
-                                            "query": {
-                                                "value": "@activity('Get Ingest Payload').output.firstRow.SourceQuery",
-                                                "type": "Expression"
-                                            },
-                                            "includeDeletedObjects": false
-                                        },
-                                        "sink": {
-                                            "type": "ParquetSink",
-                                            "storeSettings": {
-                                                "type": "AzureBlobFSWriteSettings"
-                                            },
-                                            "formatSettings": {
-                                                "type": "ParquetWriteSettings"
-                                            }
-                                        },
-                                        "enableStaging": false,
-                                        "translator": {
-                                            "type": "TabularTranslator",
-                                            "typeConversion": true,
-                                            "typeConversionSettings": {
-                                                "allowDataTruncation": true,
-                                                "treatBooleanAsNumber": false
-                                            }
-                                        }
-                                    },
-                                    "inputs": [
-                                        {
-                                            "referenceName": "Ingest_DS_Salesforce_OAuth",
-                                            "type": "DatasetReference",
-                                            "parameters": {
-                                                "DSClientId": {
-                                                    "value": "@activity('Fetch Salesforce Client Id').output.value",
-                                                    "type": "Expression"
-                                                },
-                                                "DSAPIVersion": "62.0"
-                                            }
-                                        }
-                                    ],
-                                    "outputs": [
-                                        {
-                                            "referenceName": "Ingest_DS_DataLake_Parquet",
-                                            "type": "DatasetReference",
-                                            "parameters": {
-                                                "DSStorageName": {
-                                                    "value": "@activity('Get Ingest Payload').output.firstRow.TargetStorageName",
-                                                    "type": "Expression"
-                                                },
-                                                "DSContainerName": {
-                                                    "value": "@activity('Get Ingest Payload').output.firstRow.TargetStorageContainer",
-                                                    "type": "Expression"
-                                                },
-                                                "DSDirectoryName": {
-                                                    "value": "@variables('DirectoryName')",
-                                                    "type": "Expression"
-                                                },
-                                                "DSFileName": {
-                                                    "value": "@activity('Get Ingest Payload').output.firstRow.DatasetDisplayName",
-                                                    "type": "Expression"
-                                                }
-                                            }
-                                        }
-                                    ]
-                                },
-                                {
-                                    "name": "Fetch Salesforce Client Id",
-                                    "type": "WebActivity",
-                                    "dependsOn": [],
-                                    "policy": {
-                                        "timeout": "0.12:00:00",
-                                        "retry": 0,
-                                        "retryIntervalInSeconds": 30,
-                                        "secureOutput": true,
-                                        "secureInput": false
-                                    },
-                                    "userProperties": [],
-                                    "typeProperties": {
-                                        "method": "GET",
-                                        "url": {
-                                            "value": "@concat(activity('Get Ingest Payload').output.firstRow.Username,'?api-version=7.0')",
-                                            "type": "Expression"
-                                        },
-                                        "authentication": {
-                                            "type": "MSI",
-                                            "resource": "https://vault.azure.net"
-                                        }
-                                    }
-                                }
-                            ]
-                        }
-                    ],
-                    "defaultActivities": [
-                        {
-                            "name": "Supported Linked Service Type",
-                            "type": "Fail",
-                            "dependsOn": [],
-                            "userProperties": [],
-                            "typeProperties": {
-                                "message": {
-                                    "value": "@concat('The Linked Service type and authentication combination is not currently supported.')",
-                                    "type": "Expression"
-                                },
-                                "errorCode": "16"
-                            }
-                        }
-                    ]
-                }
-            },
Payload", - "type": "Lookup", - "dependsOn": [], - "policy": { - "timeout": "0.12:00:00", - "retry": 0, - "retryIntervalInSeconds": 30, - "secureOutput": false, - "secureInput": false - }, - "userProperties": [], - "typeProperties": { - "source": { - "type": "AzureSqlSource", - "sqlReaderStoredProcedureName": "[ingest].[GetDatasetPayload]", - "storedProcedureParameters": { - "DatasetId": { - "type": "Int16", - "value": { - "value": "@pipeline().parameters.DatasetId", - "type": "Expression" - } - } - }, - "queryTimeout": "02:00:00", - "partitionOption": "None" - }, - "dataset": { - "referenceName": "GetSetMetadata", - "type": "DatasetReference" - }, - "firstRowOnly": true - } - }, - { - "name": "Set Run DateTime", - "type": "SetVariable", - "dependsOn": [], - "policy": { - "secureOutput": false, - "secureInput": false - }, - "userProperties": [], - "typeProperties": { - "variableName": "LocalRunDateTime", - "value": { - "value": "@if(equals(pipeline().parameters.RunDateTime,' '),string(utcnow()),pipeline().parameters.RunDateTime)", - "type": "Expression" - } - } - }, - { - "name": "Set Target Path", - "type": "SetVariable", - "dependsOn": [ - { - "activity": "Set Run DateTime", - "dependencyConditions": [ - "Succeeded" - ] - } - ], - "policy": { - "secureOutput": false, - "secureInput": false - }, - "userProperties": [], - "typeProperties": { - "variableName": "TargetPath", - "value": { - "value": "@formatDateTime(variables('LocalRunDateTime'), '\\ye\\ar=yyyy/\\mon\\t\\h=MM/\\d\\a\\y=dd/\\hour=HH')", - "type": "Expression" - } - } - }, - { - "name": "Set LoadType", - "description": "Set the Data Load type:\nIncremental Load = 1\nFull Load = 0", - "type": "SetVariable", - "dependsOn": [ - { - "activity": "Get Ingest Payload", - "dependencyConditions": [ - "Succeeded" - ] - } - ], - "policy": { - "secureOutput": false, - "secureInput": false - }, - "userProperties": [], - "typeProperties": { - "variableName": "LoadType", - "value": { - "value": "@activity('Get Ingest Payload').output.firstRow.LoadAction", - "type": "Expression" - } - } - }, - { - "name": "Set Directory Path", - "type": "SetVariable", - "dependsOn": [ - { - "activity": "Set Target Path", - "dependencyConditions": [ - "Succeeded" - ] - }, - { - "activity": "Set LoadType", - "dependencyConditions": [ - "Succeeded" - ] - } - ], - "policy": { - "secureOutput": false, - "secureInput": false - }, - "userProperties": [], - "typeProperties": { - "variableName": "DirectoryName", - "value": { - "value": "@concat(\n activity('Get Ingest Payload').output.firstRow.ConnectionDisplayName,\n '\\',\n activity('Get Ingest Payload').output.firstRow.DatasetDisplayName,\n '\\',\n 'version=',\n activity('Get Ingest Payload').output.firstRow.VersionNumber,\n '\\',\n variables('LoadType'),\n '\\',\n variables('TargetPath')\n )", - "type": "Expression" - } - } - }, - { - "name": "Update Metadata Load Status", - "type": "SqlServerStoredProcedure", - "dependsOn": [ - { - "activity": "Salesforce Type", - "dependencyConditions": [ - "Succeeded" - ] - } - ], - "policy": { - "timeout": "0.12:00:00", - "retry": 0, - "retryIntervalInSeconds": 30, - "secureOutput": false, - "secureInput": false - }, - "userProperties": [], - "typeProperties": { - "storedProcedureName": "[ingest].[SetIngestLoadStatus]", - "storedProcedureParameters": { - "DatasetId": { - "value": { - "value": "@pipeline().parameters.DatasetId", - "type": "Expression" - }, - "type": "Int32" - }, - "IngestStage": { - "value": "Raw", - "type": "String" - }, - "LoadType": { - "value": { - "value": 
"@activity('Get Ingest Payload').output.firstRow.LoadType", - "type": "Expression" - }, - "type": "String" - }, - "FileLoadDateTime": { - "value": { - "value": "@variables('LocalRunDateTime')", - "type": "Expression" - }, - "type": "DateTime" - } - } - }, - "linkedServiceName": { - "referenceName": "Common_LS_cumulusdatabase", - "type": "LinkedServiceReference" - } - } - ], - "parameters": { - "DatasetId": { - "type": "int" - }, - "RunDateTime": { - "type": "string", - "defaultValue": " " - } - }, - "variables": { - "LocalRunDateTime": { - "type": "String" - }, - "TargetPath": { - "type": "String" - }, - "DirectoryName": { - "type": "String" - }, - "LoadType": { - "type": "String" - } - }, - "folder": { - "name": "Cumulus.Ingest" - }, - "annotations": [ - "Cloud Formations", - "CF.Cumulus", - "Ingest" - ] - } -} \ No newline at end of file diff --git a/src/metadata.control/control/Views/PipelineSummary.sql b/src/metadata.control/control/Views/PipelineSummary.sql index b1d43d1f..ba2b6609 100644 --- a/src/metadata.control/control/Views/PipelineSummary.sql +++ b/src/metadata.control/control/Views/PipelineSummary.sql @@ -16,7 +16,12 @@ SELECT pp.ParameterValue, p.PipelineId, s.StageName, - pd.DependantPipelineId + pd.DependantPipelineId, + CASE + WHEN ids.DatasetDisplayname IS NOT NULL THEN p.Enabled & ids.Enabled + WHEN tds.DatasetName IS NOT NULL THEN p.Enabled & tds.Enabled + ELSE p.Enabled + END AS Enabled FROM control.pipelines AS p INNER JOIN control.pipelineparameters AS pp ON p.PipelineId = pp.PipelineId @@ -27,11 +32,11 @@ ON p.StageId = s.StageId LEFT JOIN ingest.Datasets AS ids ON pp.parametervalue = CAST(ids.datasetid AS VARCHAR(4)) AND p.pipelineName LIKE 'Ingest_PL_%' -LEFT JOIN transform.Datasets as tds +LEFT JOIN transform.Datasets AS tds ON pp.parametervalue = CAST(tds.datasetid AS VARCHAR(4)) AND p.pipelineName LIKE 'Transform_PL_%' ) SELECT cte.*, cte2.DatasetName AS DependsOnDataset, cte2.PipelineName AS DependsOnPipelineName, cte2.PipelineId AS DependsOnPipelineId FROM cte LEFT JOIN cte AS cte2 -ON cte.PipelineId = cte2.DependantPipelineId +ON cte.PipelineId = cte2.DependantPipelineId \ No newline at end of file